| text (string, lengths 4 to 1.02M) | meta (dict) |
|---|---|
"""
WSGI config for quantvc project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "quantvc.settings.product")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
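# a minimal usage sketch (an assumption, not part of this repo): any WSGI server
# can serve the callable above, e.g. with gunicorn installed:
#   gunicorn quantvc.wsgi:application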
|
{
"content_hash": "5b74b3b12f64f4a3a353f838d691be21",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 78,
"avg_line_length": 28.357142857142858,
"alnum_prop": 0.7758186397984886,
"repo_name": "quantvc/simple-django",
"id": "9ba288df1ad85e06d64f7eddd7cd28702206ee4b",
"size": "397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/quantvc/wsgi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1495"
},
{
"name": "JavaScript",
"bytes": "484"
},
{
"name": "Python",
"bytes": "10223"
},
{
"name": "Shell",
"bytes": "2538"
}
],
"symlink_target": ""
}
|
"""
WSGI config for sandbox project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sandbox.settings")
application = get_wsgi_application()
|
{
"content_hash": "3b122a5dd347a572876d38c4ad31b21b",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 24.5,
"alnum_prop": 0.7704081632653061,
"repo_name": "nshafer/django-hashid-field",
"id": "45f74d65bcdee343435c4d12880b12f52083273a",
"size": "392",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sandbox/sandbox/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2358"
},
{
"name": "Python",
"bytes": "115952"
}
],
"symlink_target": ""
}
|
from nova.openstack.common import log as logging
from webob import Request, Response
import json
import copy
from cimibase import Controller, Consts
from cimibase import make_response_data
from cimiutils import concat, match_up, remove_member
from cimiutils import map_image_state
LOG = logging.getLogger(__name__)
class MachineImageCtrler(Controller):
"""
Handles machine image request.
"""
def __init__(self, conf, app, req, tenant_id, *args):
super(MachineImageCtrler, self).__init__(conf, app, req, tenant_id,
*args)
self.os_path = '/%s/images' % (tenant_id)
self.image_id = args[0] if len(args) > 0 else ''
self.entity_uri = 'MachineImage'
self.metadata = Consts.MACHINEIMAGE_METADATA
# Use GET to handle all container read related operations.
def GET(self, req, *parts):
"""
Handle GET Container (List Objects) request
"""
env = self._fresh_env(req)
env['PATH_INFO'] = '/'.join([self.os_path, self.image_id])
new_req = Request(env)
res = new_req.get_response(self.app)
if res.status_int == 200:
image = json.loads(res.body).get('image')
if image:
body = {}
body['type'] = 'IMAGE'
body['id'] = '/'.join([self.tenant_id, self.entity_uri,
self.image_id])
match_up(body, image, 'name', 'name')
match_up(body, image, 'created', 'created')
match_up(body, image, 'updated', 'updated')
body['state'] = map_image_state(image['status'])
body['imageLocation'] = body['id']
if self.res_content_type == 'application/xml':
response_data = {self.entity_uri: body}
else:
body['resourceURI'] = '/'.join([self.uri_prefix,
self.entity_uri])
response_data = body
new_content = make_response_data(response_data,
self.res_content_type,
self.metadata,
self.uri_prefix)
resp = Response()
self._fixup_cimi_header(resp)
resp.headers['Content-Type'] = self.res_content_type
resp.status = 200
resp.body = new_content
return resp
else:
return res
return res
class MachineImageColCtrler(Controller):
"""
Handles machine image collection request.
"""
def __init__(self, conf, app, req, tenant_id, *args):
super(MachineImageColCtrler, self).__init__(conf, app, req, tenant_id,
*args)
self.os_path = '/%s/images/detail' % (tenant_id)
self.entity_uri = 'MachineImageCollection'
self.metadata = Consts.MACHINEIMAGE_COL_METADATA
# Use GET to handle all container read related operations.
def GET(self, req, *parts):
"""
Handle GET Container (List Objects) request
"""
env = copy.copy(req.environ)
env['SCRIPT_NAME'] = self.os_version
env['PATH_INFO'] = self.os_path
# we will always use json format to get Nova information
env['HTTP_ACCEPT'] = 'application/json'
# need to remove this header, otherwise it will always use the
# original request's Accept content type
if 'nova.best_content_type' in env:
env.pop('nova.best_content_type')
new_req = Request(env)
res = new_req.get_response(self.app)
if res.status_int == 200:
content = json.loads(res.body)
body = {}
body['id'] = '/'.join([self.tenant_id, self.entity_uri])
body['machineImages'] = []
images = content.get('images', [])
for image in images:
entry = {}
entry['resourceURI'] = '/'.join([self.uri_prefix,
'MachineImage'])
entry['id'] = '/'.join([self.tenant_id,
'MachineImage',
image['id']])
entry['type'] = 'IMAGE'
entry['name'] = image['name']
entry['created'] = image['created']
entry['updated'] = image['updated']
entry['state'] = map_image_state(image['status'])
entry['imageLocation'] = entry['id']
body['machineImages'].append(entry)
body['count'] = len(body['machineImages'])
if self.res_content_type == 'application/xml':
remove_member(body, 'resourceURI')
body['resourceURI'] = '/'.join([self.uri_prefix,
self.entity_uri])
response_data = {'Collection': body}
else:
body['resourceURI'] = '/'.join([self.uri_prefix,
self.entity_uri])
response_data = body
new_content = make_response_data(response_data,
self.res_content_type,
self.metadata,
self.uri_prefix)
resp = Response()
self._fixup_cimi_header(resp)
resp.headers['Content-Type'] = self.res_content_type
resp.status = 200
resp.body = new_content
return resp
else:
return res
|
{
"content_hash": "af5f134c29708fb936563b6d8cc633b5",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 78,
"avg_line_length": 38.718120805369125,
"alnum_prop": 0.4924596983879355,
"repo_name": "osaddon/cimi",
"id": "d63ab3aa5e9eb7fa5becea3059f5dd305fba6505",
"size": "6370",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cimi/cimiapp/machineimage.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "31"
},
{
"name": "Python",
"bytes": "141565"
},
{
"name": "Shell",
"bytes": "374"
}
],
"symlink_target": ""
}
|
from datetime import datetime, timedelta
import urllib
from django.urls import reverse
from django.test import TestCase
from apps.push.models import PushSubscription, PushSubscriptionManager
from apps.push.signals import pre_subscribe, verified, updated
class MockResponse(object):
def __init__(self, status, data=None):
self.status = status
self.data = data
def info(self):
return self
def read(self):
if self.data is None:
return ''
data, self.data = self.data, None
return data
class PSHBTestBase:
urls = 'apps.push.urls'
def setUp(self):
self._old_send_request = PushSubscriptionManager._send_request
PushSubscriptionManager._send_request = self._send_request
self.responses = []
self.requests = []
self.signals = []
for connecter in pre_subscribe, verified, updated:
def callback(signal=None, **kwargs):
self.signals.append((signal, kwargs))
connecter.connect(callback, dispatch_uid=connecter, weak=False)
def tearDown(self):
PushSubscriptionManager._send_request = self._old_send_request
del self._old_send_request
for signal in pre_subscribe, verified:
signal.disconnect(dispatch_uid=signal)
def _send_request(self, url, data):
self.requests.append((url, data))
return self.responses.pop()
class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase):
def test_sync_verify(self):
"""
If the hub returns a 204 response, the subscription is verified and
active.
"""
self.responses.append(MockResponse(204))
sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback', 2000)
self.assertEquals(len(self.signals), 2)
self.assertEquals(self.signals[0], (pre_subscribe, {'sender': sub,
'created': True}))
self.assertEquals(self.signals[1], (verified, {'sender': sub}))
self.assertEquals(sub.hub, 'hub')
self.assertEquals(sub.topic, 'topic')
self.assertEquals(sub.verified, True)
rough_expires = datetime.now() + timedelta(seconds=2000)
self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5,
'lease more than 5 seconds off')
self.assertEquals(len(self.requests), 1)
request = self.requests[0]
self.assertEquals(request[0], 'hub')
self.assertEquals(request[1]['mode'], 'subscribe')
self.assertEquals(request[1]['topic'], 'topic')
self.assertEquals(request[1]['callback'], 'callback')
self.assertEquals(request[1]['verify'], ('async', 'sync'))
self.assertEquals(request[1]['verify_token'], sub.verify_token)
self.assertEquals(request[1]['lease_seconds'], 2000)
def test_async_verify(self):
"""
If the hub returns a 202 response, we should not assume the
subscription is verified.
"""
self.responses.append(MockResponse(202))
sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback', 2000)
self.assertEquals(len(self.signals), 1)
self.assertEquals(self.signals[0], (pre_subscribe, {'sender': sub,
'created': True}))
self.assertEquals(sub.hub, 'hub')
self.assertEquals(sub.topic, 'topic')
self.assertEquals(sub.verified, False)
rough_expires = datetime.now() + timedelta(seconds=2000)
self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5,
'lease more than 5 seconds off')
self.assertEquals(len(self.requests), 1)
request = self.requests[0]
self.assertEquals(request[0], 'hub')
self.assertEquals(request[1]['mode'], 'subscribe')
self.assertEquals(request[1]['topic'], 'topic')
self.assertEquals(request[1]['callback'], 'callback')
self.assertEquals(request[1]['verify'], ('async', 'sync'))
self.assertEquals(request[1]['verify_token'], sub.verify_token)
self.assertEquals(request[1]['lease_seconds'], 2000)
def test_least_seconds_default(self):
"""
If the number of seconds to lease the subscription is not specified, it
should default to 2592000 (30 days).
"""
self.responses.append(MockResponse(202))
sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback')
rough_expires = datetime.now() + timedelta(seconds=2592000)
self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5,
'lease more than 5 seconds off')
self.assertEquals(len(self.requests), 1)
request = self.requests[0]
self.assertEquals(request[1]['lease_seconds'], 2592000)
def test_error_on_subscribe_raises_URLError(self):
"""
If a non-202/204 status is returned, raise a URLError.
"""
self.responses.append(MockResponse(500, 'error data'))
try:
PushSubscription.objects.subscribe('topic', 'hub', 'callback')
except urllib.error.URLError as e:
self.assertEquals(e.reason,
'error subscribing to topic on hub:\nerror data')
else:
self.fail('subscription did not raise URLError exception')
class Test_PSHBCallbackViewCase(PSHBTestBase, TestCase):
def test_verify(self):
"""
Getting the callback from the server should verify the subscription.
"""
sub = PushSubscription.objects.create(
topic='topic',
hub='hub',
verified=False)
verify_token = sub.generate_token('subscribe')
response = self.client.get(reverse('pubsubhubbub_callback',
args=(sub.pk,)),
{'hub.mode': 'subscribe',
'hub.topic': sub.topic,
'hub.challenge': 'challenge',
'hub.lease_seconds': 2000,
'hub.verify_token': verify_token})
self.assertEquals(response.status_code, 200)
self.assertEquals(response.content, 'challenge')
sub = PushSubscription.objects.get(pk=sub.pk)
self.assertEquals(sub.verified, True)
self.assertEquals(len(self.signals), 1)
self.assertEquals(self.signals[0], (verified, {'sender': sub}))
def test_404(self):
"""
Various things should return a 404:
* invalid primary key in the URL
* token doesn't start with 'subscribe'
* subscription doesn't exist
* token doesn't match the subscription
"""
sub = PushSubscription.objects.create(
topic='topic',
hub='hub',
verified=False)
verify_token = sub.generate_token('subscribe')
response = self.client.get(reverse('pubsubhubbub_callback',
args=(0,)),
{'hub.mode': 'subscribe',
'hub.topic': sub.topic,
'hub.challenge': 'challenge',
'hub.lease_seconds': 2000,
'hub.verify_token': verify_token[1:]})
self.assertEquals(response.status_code, 404)
self.assertEquals(len(self.signals), 0)
response = self.client.get(reverse('pubsubhubbub_callback',
args=(sub.pk,)),
{'hub.mode': 'subscribe',
'hub.topic': sub.topic,
'hub.challenge': 'challenge',
'hub.lease_seconds': 2000,
'hub.verify_token': verify_token[1:]})
self.assertEquals(response.status_code, 404)
self.assertEquals(len(self.signals), 0)
response = self.client.get(reverse('pubsubhubbub_callback',
args=(sub.pk,)),
{'hub.mode': 'subscribe',
'hub.topic': sub.topic + 'extra',
'hub.challenge': 'challenge',
'hub.lease_seconds': 2000,
'hub.verify_token': verify_token})
self.assertEquals(response.status_code, 404)
self.assertEquals(len(self.signals), 0)
response = self.client.get(reverse('pubsubhubbub_callback',
args=(sub.pk,)),
{'hub.mode': 'subscribe',
'hub.topic': sub.topic,
'hub.challenge': 'challenge',
'hub.lease_seconds': 2000,
'hub.verify_token': verify_token[:-5]})
self.assertEquals(response.status_code, 404)
self.assertEquals(len(self.signals), 0)
class Test_PSHBUpdateCase(PSHBTestBase, TestCase):
def test_update(self):
# this data comes from
# http://pubsubhubbub.googlecode.com/svn/trunk/pubsubhubbub-core-0.1.html#anchor3
update_data = """<?xml version="1.0"?>
<atom:feed>
<!-- Normally here would be source, title, etc ... -->
<link rel="hub" href="http://myhub.example.com/endpoint" />
<link rel="self" href="http://publisher.example.com/happycats.xml" />
<updated>2008-08-11T02:15:01Z</updated>
<!-- Example of a full entry. -->
<entry>
<title>Heathcliff</title>
<link href="http://publisher.example.com/happycat25.xml" />
<id>http://publisher.example.com/happycat25.xml</id>
<updated>2008-08-11T02:15:01Z</updated>
<content>
What a happy cat. Full content goes here.
</content>
</entry>
<!-- Example of an entity that isn't full/is truncated. This is implied
by the lack of a <content> element and a <summary> element instead. -->
<entry >
<title>Heathcliff</title>
<link href="http://publisher.example.com/happycat25.xml" />
<id>http://publisher.example.com/happycat25.xml</id>
<updated>2008-08-11T02:15:01Z</updated>
<summary>
What a happy cat!
</summary>
</entry>
<!-- Meta-data only; implied by the lack of <content> and
<summary> elements. -->
<entry>
<title>Garfield</title>
<link rel="alternate" href="http://publisher.example.com/happycat24.xml" />
<id>http://publisher.example.com/happycat25.xml</id>
<updated>2008-08-11T02:15:01Z</updated>
</entry>
<!-- Context entry that's meta-data only and not new. Implied because the
update time on this entry is before the //atom:feed/updated time. -->
<entry>
<title>Nermal</title>
<link rel="alternate" href="http://publisher.example.com/happycat23s.xml" />
<id>http://publisher.example.com/happycat25.xml</id>
<updated>2008-07-10T12:28:13Z</updated>
</entry>
</atom:feed>
"""
sub = PushSubscription.objects.create(
hub="http://myhub.example.com/endpoint",
topic="http://publisher.example.com/happycats.xml")
callback_data = []
updated.connect(
lambda sender=None, update=None, **kwargs: callback_data.append(
(sender, update)),
weak=False)
response = self.client.post(reverse('pubsubhubbub_callback',
args=(sub.pk,)),
update_data, 'application/atom+xml')
self.assertEquals(response.status_code, 200)
self.assertEquals(len(callback_data), 1)
sender, update = callback_data[0]
self.assertEquals(sender, sub)
self.assertEquals(len(update.entries), 4)
self.assertEquals(update.entries[0].id,
'http://publisher.example.com/happycat25.xml')
self.assertEquals(update.entries[1].id,
'http://publisher.example.com/happycat25.xml')
self.assertEquals(update.entries[2].id,
'http://publisher.example.com/happycat25.xml')
self.assertEquals(update.entries[3].id,
'http://publisher.example.com/happycat25.xml')
def test_update_with_changed_hub(self):
update_data = """<?xml version="1.0"?>
<atom:feed>
<!-- Normally here would be source, title, etc ... -->
<link rel="hub" href="http://myhub.example.com/endpoint" />
<link rel="self" href="http://publisher.example.com/happycats.xml" />
<updated>2008-08-11T02:15:01Z</updated>
<entry>
<title>Heathcliff</title>
<link href="http://publisher.example.com/happycat25.xml" />
<id>http://publisher.example.com/happycat25.xml</id>
<updated>2008-08-11T02:15:01Z</updated>
<content>
What a happy cat. Full content goes here.
</content>
</entry>
</atom:feed>
"""
sub = PushSubscription.objects.create(
hub="hub",
topic="http://publisher.example.com/happycats.xml",
lease_expires=datetime.now() + timedelta(days=1))
callback_data = []
updated.connect(
lambda sender=None, update=None, **kwargs: callback_data.append(
(sender, update)),
weak=False)
self.responses.append(MockResponse(204))
response = self.client.post(reverse('pubsubhubbub_callback',
args=(sub.pk,)),
update_data, 'application/atom+xml')
self.assertEquals(response.status_code, 200)
self.assertEquals(
PushSubscription.objects.filter(
hub='http://myhub.example.com/endpoint',
topic='http://publisher.example.com/happycats.xml',
verified=True).count(), 1)
self.assertEquals(len(self.requests), 1)
self.assertEquals(self.requests[0][0],
'http://myhub.example.com/endpoint')
self.assertEquals(self.requests[0][1]['callback'],
'http://test.nb.local.com/1/')
self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5)
def test_update_with_changed_self(self):
update_data = """<?xml version="1.0"?>
<atom:feed>
<!-- Normally here would be source, title, etc ... -->
<link rel="hub" href="http://myhub.example.com/endpoint" />
<link rel="self" href="http://publisher.example.com/happycats.xml" />
<updated>2008-08-11T02:15:01Z</updated>
<entry>
<title>Heathcliff</title>
<link href="http://publisher.example.com/happycat25.xml" />
<id>http://publisher.example.com/happycat25.xml</id>
<updated>2008-08-11T02:15:01Z</updated>
<content>
What a happy cat. Full content goes here.
</content>
</entry>
</atom:feed>
"""
sub = PushSubscription.objects.create(
hub="http://myhub.example.com/endpoint",
topic="topic",
lease_expires=datetime.now() + timedelta(days=1))
callback_data = []
updated.connect(
lambda sender=None, update=None, **kwargs: callback_data.append(
(sender, update)),
weak=False)
self.responses.append(MockResponse(204))
response = self.client.post(reverse('pubsubhubbub_callback', kwargs={'push_id': sub.pk}),
update_data, 'application/atom+xml')
self.assertEquals(response.status_code, 200)
self.assertEquals(
PushSubscription.objects.filter(
hub='http://myhub.example.com/endpoint',
topic='http://publisher.example.com/happycats.xml',
verified=True).count(), 1)
self.assertEquals(len(self.requests), 1)
self.assertEquals(self.requests[0][0],
'http://myhub.example.com/endpoint')
self.assertEquals(self.requests[0][1]['callback'],
'http://test.nb.local.com/1/')
self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5)
def test_update_with_changed_hub_and_self(self):
update_data = """<?xml version="1.0"?>
<atom:feed>
<!-- Normally here would be source, title, etc ... -->
<link rel="hub" href="http://myhub.example.com/endpoint" />
<link rel="self" href="http://publisher.example.com/happycats.xml" />
<updated>2008-08-11T02:15:01Z</updated>
<entry>
<title>Heathcliff</title>
<link href="http://publisher.example.com/happycat25.xml" />
<id>http://publisher.example.com/happycat25.xml</id>
<updated>2008-08-11T02:15:01Z</updated>
<content>
What a happy cat. Full content goes here.
</content>
</entry>
</atom:feed>
"""
sub = PushSubscription.objects.create(
hub="hub",
topic="topic",
lease_expires=datetime.now() + timedelta(days=1))
callback_data = []
updated.connect(
lambda sender=None, update=None, **kwargs: callback_data.append(
(sender, update)),
weak=False)
self.responses.append(MockResponse(204))
response = self.client.post(reverse('pubsubhubbub_callback',
args=(sub.pk,)),
update_data, 'application/atom+xml')
self.assertEquals(response.status_code, 200)
self.assertEquals(
PushSubscription.objects.filter(
hub='http://myhub.example.com/endpoint',
topic='http://publisher.example.com/happycats.xml',
verified=True).count(), 1)
self.assertEquals(len(self.requests), 1)
self.assertEquals(self.requests[0][0],
'http://myhub.example.com/endpoint')
self.assertEquals(self.requests[0][1]['callback'],
'http://test.nb.local.com/1/')
self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5)
|
{
"content_hash": "f1d4331bc5cd4c0a7bc1a3101211f249",
"timestamp": "",
"source": "github",
"line_count": 442,
"max_line_length": 97,
"avg_line_length": 41.1764705882353,
"alnum_prop": 0.5691758241758241,
"repo_name": "samuelclay/NewsBlur",
"id": "8aac0d88287a9579e90d9f6a079afb4e6ce8deeb",
"size": "19530",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/push/test_push.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "454"
},
{
"name": "CSS",
"bytes": "776813"
},
{
"name": "CoffeeScript",
"bytes": "13093"
},
{
"name": "Dockerfile",
"bytes": "3704"
},
{
"name": "HCL",
"bytes": "29303"
},
{
"name": "HTML",
"bytes": "1921563"
},
{
"name": "Java",
"bytes": "853216"
},
{
"name": "JavaScript",
"bytes": "1803770"
},
{
"name": "Jinja",
"bytes": "89121"
},
{
"name": "Kotlin",
"bytes": "298281"
},
{
"name": "Makefile",
"bytes": "8909"
},
{
"name": "Objective-C",
"bytes": "2565934"
},
{
"name": "Perl",
"bytes": "55606"
},
{
"name": "Python",
"bytes": "2067295"
},
{
"name": "R",
"bytes": "527"
},
{
"name": "Ruby",
"bytes": "2094"
},
{
"name": "SCSS",
"bytes": "47069"
},
{
"name": "Shell",
"bytes": "51526"
},
{
"name": "Swift",
"bytes": "136021"
}
],
"symlink_target": ""
}
|
formatter = "%r %r %r %r"
# put each variable/string into the string defined as 'formatter'
print formatter % (1,2,3,4)
print formatter % ("one", "two", "three", "four")
print formatter % (True, False, False, True)
# line 8 will insert the 'formatter' string (exactly as it looks above) into each space in the 'formatter' string
print formatter % (formatter, formatter, formatter, formatter)
# need to include commas after each string to make the code work (same as for the shorter strings and variables above)
print formatter % (
"I had this thing",
"That you could type up right",
"But it didn't sing",
"So I said goodnight"
)
# line 13 is printed with double quotes, while lines 11, 12, and 14 are printed with single quotes >> why is that?
# it seems to be because %r was used (which displays the raw repr of the data; Python prints the strings "in the most efficient way it can, [...] doesn't have to be pretty")
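# a quick illustration (added note, not part of the original exercise):
# repr() picks whichever quote style avoids escaping, so a string containing a
# single quote comes back wrapped in double quotes, while a plain string comes
# back in single quotes:
# print "%r" % "it's"    # -> "it's"
# print "%r" % "plain"   # -> 'plain'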
|
{
"content_hash": "fe61ae5c0c56375075716fa1af34ec70",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 153,
"avg_line_length": 53.470588235294116,
"alnum_prop": 0.7117711771177118,
"repo_name": "madeleinel/Cheat-Sheets",
"id": "7a5be85960671f2f071980a6b08cd5a0156ebd21",
"size": "909",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pythonExercises/ex8.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9011"
},
{
"name": "HTML",
"bytes": "21136"
},
{
"name": "JavaScript",
"bytes": "138902"
},
{
"name": "Python",
"bytes": "18065"
}
],
"symlink_target": ""
}
|
import collections
import os
import eventlet
from neutron_lib import constants
from neutron_lib import exceptions
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
from oslo_utils import importutils
from neutron._i18n import _, _LE, _LI, _LW
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.metadata import driver as metadata_driver
from neutron.agent import rpc as agent_rpc
from neutron.common import constants as n_const
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron import manager
LOG = logging.getLogger(__name__)
class DhcpAgent(manager.Manager):
"""DHCP agent service manager.
Note that the public methods of this class are exposed as the server side
of an rpc interface. The neutron server uses
neutron.api.rpc.agentnotifiers.dhcp_rpc_agent_api.DhcpAgentNotifyApi as the
client side to execute the methods here. For more information about
changing rpc interfaces, see doc/source/devref/rpc_api.rst.
"""
target = oslo_messaging.Target(version='1.0')
def __init__(self, host=None, conf=None):
super(DhcpAgent, self).__init__(host=host)
self.needs_resync_reasons = collections.defaultdict(list)
self.dhcp_ready_ports = set()
self.conf = conf or cfg.CONF
self.cache = NetworkCache()
self.dhcp_driver_cls = importutils.import_class(self.conf.dhcp_driver)
ctx = context.get_admin_context_without_session()
self.plugin_rpc = DhcpPluginApi(topics.PLUGIN, ctx, self.conf.host)
# create dhcp dir to store dhcp info
dhcp_dir = os.path.dirname("/%s/dhcp/" % self.conf.state_path)
utils.ensure_dir(dhcp_dir)
self.dhcp_version = self.dhcp_driver_cls.check_version()
self._populate_networks_cache()
# keep track of mappings between networks and routers for
# metadata processing
self._metadata_routers = {} # {network_id: router_id}
self._process_monitor = external_process.ProcessMonitor(
config=self.conf,
resource_type='dhcp')
def init_host(self):
self.sync_state()
def _populate_networks_cache(self):
"""Populate the networks cache when the DHCP-agent starts."""
try:
existing_networks = self.dhcp_driver_cls.existing_dhcp_networks(
self.conf
)
for net_id in existing_networks:
net = dhcp.NetModel({"id": net_id, "subnets": [], "ports": []})
self.cache.put(net)
except NotImplementedError:
# just go ahead with an empty networks cache
LOG.debug("The '%s' DHCP-driver does not support retrieving of a "
"list of existing networks",
self.conf.dhcp_driver)
def after_start(self):
self.run()
LOG.info(_LI("DHCP agent started"))
def run(self):
"""Activate the DHCP agent."""
self.sync_state()
self.periodic_resync()
self.start_ready_ports_loop()
def call_driver(self, action, network, **action_kwargs):
"""Invoke an action on a DHCP driver instance."""
LOG.debug('Calling driver for network: %(net)s action: %(action)s',
{'net': network.id, 'action': action})
try:
# the Driver expects something that is duck typed similar to
# the base models.
driver = self.dhcp_driver_cls(self.conf,
network,
self._process_monitor,
self.dhcp_version,
self.plugin_rpc)
getattr(driver, action)(**action_kwargs)
return True
except exceptions.Conflict:
# No need to resync here, the agent will receive the event related
# to a status update for the network
LOG.warning(_LW('Unable to %(action)s dhcp for %(net_id)s: there '
'is a conflict with its current state; please '
'check that the network and/or its subnet(s) '
'still exist.'),
{'net_id': network.id, 'action': action})
except Exception as e:
if getattr(e, 'exc_type', '') != 'IpAddressGenerationFailure':
# Don't resync if port could not be created because of an IP
# allocation failure. When the subnet is updated with a new
# allocation pool or a port is deleted to free up an IP, this
# will automatically be retried on the notification
self.schedule_resync(e, network.id)
if (isinstance(e, oslo_messaging.RemoteError)
and e.exc_type == 'NetworkNotFound'
or isinstance(e, exceptions.NetworkNotFound)):
LOG.debug("Network %s has been deleted.", network.id)
else:
LOG.exception(_LE('Unable to %(action)s dhcp for %(net_id)s.'),
{'net_id': network.id, 'action': action})
def schedule_resync(self, reason, network_id=None):
"""Schedule a resync for a given network and reason. If no network is
specified, resync all networks.
"""
self.needs_resync_reasons[network_id].append(reason)
@utils.synchronized('dhcp-agent')
def sync_state(self, networks=None):
"""Sync the local DHCP state with Neutron. If no networks are passed,
or 'None' is one of the networks, sync all of the networks.
"""
only_nets = set([] if (not networks or None in networks) else networks)
LOG.info(_LI('Synchronizing state'))
pool = eventlet.GreenPool(self.conf.num_sync_threads)
known_network_ids = set(self.cache.get_network_ids())
try:
active_networks = self.plugin_rpc.get_active_networks_info()
LOG.info(_LI('All active networks have been fetched through RPC.'))
active_network_ids = set(network.id for network in active_networks)
for deleted_id in known_network_ids - active_network_ids:
try:
self.disable_dhcp_helper(deleted_id)
except Exception as e:
self.schedule_resync(e, deleted_id)
LOG.exception(_LE('Unable to sync network state on '
'deleted network %s'), deleted_id)
for network in active_networks:
if (not only_nets or # specifically resync all
network.id not in known_network_ids or # missing net
network.id in only_nets): # specific network to sync
pool.spawn(self.safe_configure_dhcp_for_network, network)
pool.waitall()
# we notify all ports in case some were created while the agent
# was down
self.dhcp_ready_ports |= set(self.cache.get_port_ids())
LOG.info(_LI('Synchronizing state complete'))
except Exception as e:
if only_nets:
for network_id in only_nets:
self.schedule_resync(e, network_id)
else:
self.schedule_resync(e)
LOG.exception(_LE('Unable to sync network state.'))
def _dhcp_ready_ports_loop(self):
"""Notifies the server of any ports that had reservations setup."""
while True:
# this is just watching a set so we can do it really frequently
eventlet.sleep(0.1)
if self.dhcp_ready_ports:
ports_to_send = self.dhcp_ready_ports
self.dhcp_ready_ports = set()
try:
self.plugin_rpc.dhcp_ready_on_ports(ports_to_send)
continue
except oslo_messaging.MessagingTimeout:
LOG.error(_LE("Timeout notifying server of ports ready. "
"Retrying..."))
except Exception as e:
if (isinstance(e, oslo_messaging.RemoteError)
and e.exc_type == 'NoSuchMethod'):
LOG.info(_LI("Server does not support port ready "
"notifications. Waiting for 5 minutes "
"before retrying."))
eventlet.sleep(300)
continue
LOG.exception(_LE("Failure notifying DHCP server of "
"ready DHCP ports. Will retry on next "
"iteration."))
self.dhcp_ready_ports |= ports_to_send
def start_ready_ports_loop(self):
"""Spawn a thread to push changed ports to server."""
eventlet.spawn(self._dhcp_ready_ports_loop)
@utils.exception_logger()
def _periodic_resync_helper(self):
"""Resync the dhcp state at the configured interval."""
while True:
eventlet.sleep(self.conf.resync_interval)
if self.needs_resync_reasons:
# be careful to avoid a race with additions to list
# from other threads
reasons = self.needs_resync_reasons
self.needs_resync_reasons = collections.defaultdict(list)
for net, r in reasons.items():
if not net:
net = "*"
LOG.debug("resync (%(network)s): %(reason)s",
{"reason": r, "network": net})
self.sync_state(reasons.keys())
def periodic_resync(self):
"""Spawn a thread to periodically resync the dhcp state."""
eventlet.spawn(self._periodic_resync_helper)
def safe_get_network_info(self, network_id):
try:
network = self.plugin_rpc.get_network_info(network_id)
if not network:
LOG.debug('Network %s has been deleted.', network_id)
return network
except Exception as e:
self.schedule_resync(e, network_id)
LOG.exception(_LE('Network %s info call failed.'), network_id)
def enable_dhcp_helper(self, network_id):
"""Enable DHCP for a network that meets enabling criteria."""
network = self.safe_get_network_info(network_id)
if network:
self.configure_dhcp_for_network(network)
@utils.exception_logger()
def safe_configure_dhcp_for_network(self, network):
try:
network_id = network.get('id')
LOG.info(_LI('Starting network %s dhcp configuration'), network_id)
self.configure_dhcp_for_network(network)
LOG.info(_LI('Finished network %s dhcp configuration'), network_id)
except (exceptions.NetworkNotFound, RuntimeError):
LOG.warning(_LW('Network %s may have been deleted and '
'its resources may have already been disposed.'),
network.id)
def configure_dhcp_for_network(self, network):
if not network.admin_state_up:
return
enable_metadata = self.dhcp_driver_cls.should_enable_metadata(
self.conf, network)
dhcp_network_enabled = False
for subnet in network.subnets:
if subnet.enable_dhcp:
if self.call_driver('enable', network):
dhcp_network_enabled = True
self.cache.put(network)
break
if enable_metadata and dhcp_network_enabled:
for subnet in network.subnets:
if subnet.ip_version == 4 and subnet.enable_dhcp:
self.enable_isolated_metadata_proxy(network)
break
elif (not self.conf.force_metadata and
not self.conf.enable_isolated_metadata):
# In the case that the dhcp agent ran with metadata enabled,
# and dhcp agent now starts with metadata disabled, check and
# delete any metadata_proxy.
self.disable_isolated_metadata_proxy(network)
def disable_dhcp_helper(self, network_id):
"""Disable DHCP for a network known to the agent."""
network = self.cache.get_network_by_id(network_id)
if network:
if self.conf.enable_isolated_metadata:
# NOTE(jschwarz): In the case where a network is deleted, all
# the subnets and ports are deleted before this function is
# called, so checking if 'should_enable_metadata' is True
# for any subnet is false logic here.
self.disable_isolated_metadata_proxy(network)
if self.call_driver('disable', network):
self.cache.remove(network)
def refresh_dhcp_helper(self, network_id):
"""Refresh or disable DHCP for a network depending on the current state
of the network.
"""
old_network = self.cache.get_network_by_id(network_id)
if not old_network:
# DHCP is not currently running for this network.
return self.enable_dhcp_helper(network_id)
network = self.safe_get_network_info(network_id)
if not network:
return
if not any(s for s in network.subnets if s.enable_dhcp):
self.disable_dhcp_helper(network.id)
return
# NOTE(kevinbenton): we don't exclude dhcp disabled subnets because
# they still change the indexes used for tags
old_cidrs = [s.cidr for s in network.subnets]
new_cidrs = [s.cidr for s in old_network.subnets]
if old_cidrs == new_cidrs:
self.call_driver('reload_allocations', network)
self.cache.put(network)
elif self.call_driver('restart', network):
self.cache.put(network)
@utils.synchronized('dhcp-agent')
def network_create_end(self, context, payload):
"""Handle the network.create.end notification event."""
network_id = payload['network']['id']
self.enable_dhcp_helper(network_id)
@utils.synchronized('dhcp-agent')
def network_update_end(self, context, payload):
"""Handle the network.update.end notification event."""
network_id = payload['network']['id']
if payload['network']['admin_state_up']:
self.enable_dhcp_helper(network_id)
else:
self.disable_dhcp_helper(network_id)
@utils.synchronized('dhcp-agent')
def network_delete_end(self, context, payload):
"""Handle the network.delete.end notification event."""
self.disable_dhcp_helper(payload['network_id'])
@utils.synchronized('dhcp-agent')
def subnet_update_end(self, context, payload):
"""Handle the subnet.update.end notification event."""
network_id = payload['subnet']['network_id']
self.refresh_dhcp_helper(network_id)
# Use the update handler for the subnet create event.
subnet_create_end = subnet_update_end
@utils.synchronized('dhcp-agent')
def subnet_delete_end(self, context, payload):
"""Handle the subnet.delete.end notification event."""
subnet_id = payload['subnet_id']
network = self.cache.get_network_by_subnet_id(subnet_id)
if network:
self.refresh_dhcp_helper(network.id)
@utils.synchronized('dhcp-agent')
def port_update_end(self, context, payload):
"""Handle the port.update.end notification event."""
updated_port = dhcp.DictModel(payload['port'])
network = self.cache.get_network_by_id(updated_port.network_id)
if network:
LOG.info(_LI("Trigger reload_allocations for port %s"),
updated_port)
driver_action = 'reload_allocations'
if self._is_port_on_this_agent(updated_port):
orig = self.cache.get_port_by_id(updated_port['id'])
# assume IP change if not in cache
old_ips = {i['ip_address'] for i in orig['fixed_ips'] or []}
new_ips = {i['ip_address'] for i in updated_port['fixed_ips']}
if old_ips != new_ips:
driver_action = 'restart'
self.cache.put_port(updated_port)
self.call_driver(driver_action, network)
self.dhcp_ready_ports.add(updated_port.id)
def _is_port_on_this_agent(self, port):
thishost = utils.get_dhcp_agent_device_id(
port['network_id'], self.conf.host)
return port['device_id'] == thishost
# Use the update handler for the port create event.
port_create_end = port_update_end
@utils.synchronized('dhcp-agent')
def port_delete_end(self, context, payload):
"""Handle the port.delete.end notification event."""
port = self.cache.get_port_by_id(payload['port_id'])
if port:
network = self.cache.get_network_by_id(port.network_id)
self.cache.remove_port(port)
self.call_driver('reload_allocations', network)
def enable_isolated_metadata_proxy(self, network):
# The proxy might work for either a single network
# or all the networks connected via a router
# to the one passed as a parameter
kwargs = {'network_id': network.id}
# When the metadata network is enabled, the proxy might
# be started for the router attached to the network
if self.conf.enable_metadata_network:
router_ports = [port for port in network.ports
if (port.device_owner in
constants.ROUTER_INTERFACE_OWNERS)]
if router_ports:
# Multiple router ports should not be allowed
if len(router_ports) > 1:
LOG.warning(_LW("%(port_num)d router ports found on the "
"metadata access network. Only the port "
"%(port_id)s, for router %(router_id)s "
"will be considered"),
{'port_num': len(router_ports),
'port_id': router_ports[0].id,
'router_id': router_ports[0].device_id})
kwargs = {'router_id': router_ports[0].device_id}
self._metadata_routers[network.id] = router_ports[0].device_id
metadata_driver.MetadataDriver.spawn_monitored_metadata_proxy(
self._process_monitor, network.namespace, dhcp.METADATA_PORT,
self.conf, **kwargs)
def disable_isolated_metadata_proxy(self, network):
if (self.conf.enable_metadata_network and
network.id in self._metadata_routers):
uuid = self._metadata_routers[network.id]
is_router_id = True
else:
uuid = network.id
is_router_id = False
metadata_driver.MetadataDriver.destroy_monitored_metadata_proxy(
self._process_monitor, uuid, self.conf)
if is_router_id:
del self._metadata_routers[network.id]
class DhcpPluginApi(object):
"""Agent side of the dhcp rpc API.
This class implements the client side of an rpc interface. The server side
of this interface can be found in
neutron.api.rpc.handlers.dhcp_rpc.DhcpRpcCallback. For more information
about changing rpc interfaces, see doc/source/devref/rpc_api.rst.
API version history:
1.0 - Initial version.
1.1 - Added get_active_networks_info, create_dhcp_port,
and update_dhcp_port methods.
1.5 - Added dhcp_ready_on_ports
"""
def __init__(self, topic, context, host):
self.context = context
self.host = host
target = oslo_messaging.Target(
topic=topic,
namespace=n_const.RPC_NAMESPACE_DHCP_PLUGIN,
version='1.0')
self.client = n_rpc.get_client(target)
def get_active_networks_info(self):
"""Make a remote process call to retrieve all network info."""
cctxt = self.client.prepare(version='1.1')
networks = cctxt.call(self.context, 'get_active_networks_info',
host=self.host)
return [dhcp.NetModel(n) for n in networks]
def get_network_info(self, network_id):
"""Make a remote process call to retrieve network info."""
cctxt = self.client.prepare()
network = cctxt.call(self.context, 'get_network_info',
network_id=network_id, host=self.host)
if network:
return dhcp.NetModel(network)
def create_dhcp_port(self, port):
"""Make a remote process call to create the dhcp port."""
cctxt = self.client.prepare(version='1.1')
port = cctxt.call(self.context, 'create_dhcp_port',
port=port, host=self.host)
if port:
return dhcp.DictModel(port)
def update_dhcp_port(self, port_id, port):
"""Make a remote process call to update the dhcp port."""
cctxt = self.client.prepare(version='1.1')
port = cctxt.call(self.context, 'update_dhcp_port',
port_id=port_id, port=port, host=self.host)
if port:
return dhcp.DictModel(port)
def release_dhcp_port(self, network_id, device_id):
"""Make a remote process call to release the dhcp port."""
cctxt = self.client.prepare()
return cctxt.call(self.context, 'release_dhcp_port',
network_id=network_id, device_id=device_id,
host=self.host)
def dhcp_ready_on_ports(self, port_ids):
"""Notify the server that DHCP is configured for the port."""
cctxt = self.client.prepare(version='1.5')
return cctxt.call(self.context, 'dhcp_ready_on_ports',
port_ids=port_ids)
class NetworkCache(object):
"""Agent cache of the current network state."""
def __init__(self):
self.cache = {}
self.subnet_lookup = {}
self.port_lookup = {}
def get_port_ids(self):
return self.port_lookup.keys()
def get_network_ids(self):
return self.cache.keys()
def get_network_by_id(self, network_id):
return self.cache.get(network_id)
def get_network_by_subnet_id(self, subnet_id):
return self.cache.get(self.subnet_lookup.get(subnet_id))
def get_network_by_port_id(self, port_id):
return self.cache.get(self.port_lookup.get(port_id))
def put(self, network):
if network.id in self.cache:
self.remove(self.cache[network.id])
self.cache[network.id] = network
for subnet in network.subnets:
self.subnet_lookup[subnet.id] = network.id
for port in network.ports:
self.port_lookup[port.id] = network.id
def remove(self, network):
del self.cache[network.id]
for subnet in network.subnets:
del self.subnet_lookup[subnet.id]
for port in network.ports:
del self.port_lookup[port.id]
def put_port(self, port):
network = self.get_network_by_id(port.network_id)
for index in range(len(network.ports)):
if network.ports[index].id == port.id:
network.ports[index] = port
break
else:
network.ports.append(port)
self.port_lookup[port.id] = network.id
def remove_port(self, port):
network = self.get_network_by_port_id(port.id)
for index in range(len(network.ports)):
if network.ports[index] == port:
del network.ports[index]
del self.port_lookup[port.id]
break
def get_port_by_id(self, port_id):
network = self.get_network_by_port_id(port_id)
if network:
for port in network.ports:
if port.id == port_id:
return port
def get_state(self):
net_ids = self.get_network_ids()
num_nets = len(net_ids)
num_subnets = 0
num_ports = 0
for net_id in net_ids:
network = self.get_network_by_id(net_id)
num_subnets += len(network.subnets)
num_ports += len(network.ports)
return {'networks': num_nets,
'subnets': num_subnets,
'ports': num_ports}
class DhcpAgentWithStateReport(DhcpAgent):
def __init__(self, host=None, conf=None):
super(DhcpAgentWithStateReport, self).__init__(host=host, conf=conf)
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.REPORTS)
self.agent_state = {
'binary': 'neutron-dhcp-agent',
'host': host,
'availability_zone': self.conf.AGENT.availability_zone,
'topic': topics.DHCP_AGENT,
'configurations': {
'notifies_port_ready': True,
'dhcp_driver': self.conf.dhcp_driver,
'dhcp_lease_duration': self.conf.dhcp_lease_duration,
'log_agent_heartbeats': self.conf.AGENT.log_agent_heartbeats},
'start_flag': True,
'agent_type': constants.AGENT_TYPE_DHCP}
report_interval = self.conf.AGENT.report_interval
if report_interval:
self.heartbeat = loopingcall.FixedIntervalLoopingCall(
self._report_state)
self.heartbeat.start(interval=report_interval)
def _report_state(self):
try:
self.agent_state.get('configurations').update(
self.cache.get_state())
ctx = context.get_admin_context_without_session()
agent_status = self.state_rpc.report_state(
ctx, self.agent_state, True)
if agent_status == n_const.AGENT_REVIVED:
LOG.info(_LI("Agent has just been revived. "
"Scheduling full sync"))
self.schedule_resync("Agent has just been revived")
except AttributeError:
# This means the server does not support report_state
LOG.warning(_LW("Neutron server does not support state report. "
"State report for this agent will be disabled."))
self.heartbeat.stop()
self.run()
return
except Exception:
LOG.exception(_LE("Failed reporting state!"))
return
if self.agent_state.pop('start_flag', None):
self.run()
def agent_updated(self, context, payload):
"""Handle the agent_updated notification event."""
self.schedule_resync(_("Agent updated: %(payload)s") %
{"payload": payload})
LOG.info(_LI("agent_updated by server side %s!"), payload)
def after_start(self):
LOG.info(_LI("DHCP agent started"))
|
{
"content_hash": "c34f5bfcf765b9010c56ae843557fa58",
"timestamp": "",
"source": "github",
"line_count": 642,
"max_line_length": 79,
"avg_line_length": 42.42834890965732,
"alnum_prop": 0.5797569661147619,
"repo_name": "bigswitch/neutron",
"id": "e7c8a517d96eb544953c717b4ddc9be69c27d485",
"size": "27875",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "neutron/agent/dhcp/agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "8468247"
},
{
"name": "Shell",
"bytes": "14648"
}
],
"symlink_target": ""
}
|
vid_stopwords=["introduction",
"appendix",
"contents",
"topic",
"concepts",
"references",
"future work",
"what next",
"basics",
"questions",
"application",
"history",
"thank you",
"conclusion",
"acknowledgement",
"production team"]
|
{
"content_hash": "a83f40bb3d382142c5567431e6d1bab0",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 30,
"avg_line_length": 14.5,
"alnum_prop": 0.6939655172413793,
"repo_name": "amudalab/concept-graphs",
"id": "2e7dc0e85af66667d4b9ff42ac855a143255fb12",
"size": "232",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "keyphrase/keyphrase/stopwords_video.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1357"
},
{
"name": "Processing",
"bytes": "9122"
},
{
"name": "Python",
"bytes": "481015"
},
{
"name": "Shell",
"bytes": "18777"
}
],
"symlink_target": ""
}
|
"""Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python.keras._impl.keras.datasets.cifar10 import load_data
|
{
"content_hash": "a2e53eeb6e9258e19cdb4050ecefe706",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 74,
"avg_line_length": 37.5,
"alnum_prop": 0.7911111111111111,
"repo_name": "ryfeus/lambda-packs",
"id": "747559887c5c86b4f7f370ad2f70b35470b878a9",
"size": "225",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Keras_tensorflow_nightly/source2.7/tensorflow/tools/api/generator/api/keras/datasets/cifar10/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9768343"
},
{
"name": "C++",
"bytes": "76566960"
},
{
"name": "CMake",
"bytes": "191097"
},
{
"name": "CSS",
"bytes": "153538"
},
{
"name": "Cuda",
"bytes": "61768"
},
{
"name": "Cython",
"bytes": "3110222"
},
{
"name": "Fortran",
"bytes": "110284"
},
{
"name": "HTML",
"bytes": "248658"
},
{
"name": "JavaScript",
"bytes": "62920"
},
{
"name": "MATLAB",
"bytes": "17384"
},
{
"name": "Makefile",
"bytes": "152150"
},
{
"name": "Python",
"bytes": "549307737"
},
{
"name": "Roff",
"bytes": "26398"
},
{
"name": "SWIG",
"bytes": "142"
},
{
"name": "Shell",
"bytes": "7790"
},
{
"name": "Smarty",
"bytes": "4090"
},
{
"name": "TeX",
"bytes": "152062"
},
{
"name": "XSLT",
"bytes": "305540"
}
],
"symlink_target": ""
}
|
from . import colorspace
|
{
"content_hash": "dedd0d45558bc4fb112768ca0e43e78a",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 24,
"avg_line_length": 25,
"alnum_prop": 0.8,
"repo_name": "khdlr/augmax",
"id": "4925dff8725c3f756f341aaacc06560e442e9ad4",
"size": "25",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "augmax/functional/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "52993"
}
],
"symlink_target": ""
}
|
from nose.tools import raises
from nose.plugins.attrib import attr
from requests import HTTPError
from rightscale import RightScale
@raises(ValueError)
def test_empty_api_endpoint():
rs = RightScale(refresh_token='not empty', api_endpoint='')
rs.login()
@raises(ValueError)
def test_empty_refresh_token():
rs = RightScale(refresh_token='', api_endpoint='not empty')
rs.login()
@attr('real_conn')
@raises(HTTPError)
def test_bogus_refresh_token():
rs = RightScale(refresh_token='bogus')
rs.login()
|
{
"content_hash": "b41f50c543cfa3e39751df0a56991122",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 63,
"avg_line_length": 22,
"alnum_prop": 0.7178030303030303,
"repo_name": "brantai/python-rightscale",
"id": "50b3de0d46b750e33a374e6b1bceac75b349a4f9",
"size": "528",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_login.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46016"
}
],
"symlink_target": ""
}
|
import eventlet
from eventlet import semaphore
from unittest import mock
from mistral.api.controllers.v2 import execution
from mistral import context
from mistral.db.v2 import api as db_api
from mistral.services import workflows as wf_service
from mistral.tests.unit.api import base
from mistral.tests.unit import base as unit_base
from mistral.tests.unit.engine import base as engine_base
WF_TEXT = """---
version: '2.0'
wf:
tasks:
task1:
action: std.noop
"""
class TestParallelOperations(base.APITest, engine_base.EngineTestCase):
def setUp(self):
super(TestParallelOperations, self).setUp()
wf_service.create_workflows(WF_TEXT)
wf_ex = self.engine.start_workflow('wf')
self.await_workflow_success(wf_ex.id)
self.wf_ex_id = wf_ex.id
self.decorator_call_cnt = 0
self.threads = []
self.addCleanup(self.kill_threads)
def kill_threads(self):
for thread in self.threads:
thread.kill()
def test_parallel_api_list_and_delete_operations(self):
# One execution already exists. Let's create another one.
wf_ex = self.engine.start_workflow('wf')
self.await_workflow_success(wf_ex.id)
self.assertEqual(2, len(db_api.get_workflow_executions()))
delete_lock = semaphore.Semaphore(0)
list_lock = semaphore.Semaphore(0)
orig_func = execution._get_workflow_execution_resource
def delete_():
context.set_ctx(unit_base.get_context())
db_api.delete_workflow_execution(self.wf_ex_id)
# Unlocking the "list" operation.
list_lock.release()
def list_():
resp = self.app.get('/v2/executions/')
self.assertEqual(1, len(resp.json['executions']))
# This decorator is needed to halt the thread of the "list"
# operation and wait till the "delete" operation is over.
# That way we'll reproduce the situation when the "list"
# operation has already fetched the execution but it then
# gets deleted before further lazy-loading of the execution
# fields.
def decorate_resource_function_(arg):
self.decorator_call_cnt += 1
# It makes sense to use this trick only once since only
# one object gets deleted.
if self.decorator_call_cnt == 1:
# It's OK now to delete the execution so we release
# the corresponding lock.
delete_lock.release()
# Wait till the "delete" operation has finished.
list_lock.acquire()
return orig_func(arg)
with mock.patch.object(execution, '_get_workflow_execution_resource',
wraps=decorate_resource_function_):
self.threads.append(eventlet.spawn(list_))
# Make sure that the "list" operation came to the right point
# which is just about the call to the resource function.
delete_lock.acquire()
self.threads.append(eventlet.spawn(delete_))
for t in self.threads:
t.wait()
|
{
"content_hash": "6ae895d13ead3fc1b31cb60d104e802d",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 77,
"avg_line_length": 31.058823529411764,
"alnum_prop": 0.6237373737373737,
"repo_name": "openstack/mistral",
"id": "9e5da5601dea95b3b2992830078740d14a185a2d",
"size": "3758",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mistral/tests/unit/api/v2/test_parallel_operations.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2091"
},
{
"name": "Mako",
"bytes": "951"
},
{
"name": "Python",
"bytes": "2617595"
},
{
"name": "Shell",
"bytes": "26731"
}
],
"symlink_target": ""
}
|
from fabkit import filer, sudo, env, Service
from fablib.base import SimpleBase
class Gluster(SimpleBase):
def __init__(self):
self.data_key = 'gluster'
self.data = {
}
self.services = {
'CentOS Linux 7.*': [
'glusterd',
]
}
self.packages = {
'CentOS Linux 7.*': [
'centos-release-gluster36',
'glusterfs-server',
'glusterfs-fuse',
]
}
def init_after(self):
for cluster in self.data.get('clusters', {}).values():
if env.host in cluster['hosts']:
self.data.update(cluster)
def setup(self):
data = self.init()
Service('firewalld').stop().disable()
self.install_packages()
self.start_services().enable_services()
for volume in data['volume_map'].values():
filer.mkdir(volume['brick'])
def setup_peer(self):
"""
Requires a serial task.
"""
data = self.init()
for host in data['hosts']:
if host != env.host:
sudo('gluster peer probe {0}'.format(host))
def setup_volume(self):
"""
Requires a serial task.
"""
data = self.init()
if data['hosts'][0] != env.host:
return
for volume in data['volume_map'].values():
bricks = ''
replica_option = 'replica 2' if len(data['hosts']) > 1 else ''
for host in data['hosts']:
bricks += '{0}:{1} '.format(host, volume['brick'])
sudo('gluster volume info {0[name]} || gluster volume create '
'{0[name]} {1} {2} force'.format(
volume, replica_option, bricks))
sudo('gluster volume info {0[name]} | grep Started'
' || gluster volume start {0[name]}'.format(
volume))
def mount_local(self):
data = self.init()
for volume in data['volume_map'].values():
filer.Editor('/etc/fstab').a('localhost:/{0} /mnt/{0} glusterfs '
'defaults,_netdev 0 0'.format(volume['name']))
filer.mkdir('/mnt/{0}'.format(volume['name']))
sudo('mount -a')
|
{
"content_hash": "0f4192a44f24627387e229a99c7a7145",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 87,
"avg_line_length": 29.705128205128204,
"alnum_prop": 0.4812257229175658,
"repo_name": "fabrickit-fablib/gluster",
"id": "bfb616ffdcd1291cbbb7ba06a6f877a09da778b7",
"size": "2334",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gluster.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2853"
}
],
"symlink_target": ""
}
|
from PyQt4 import QtGui # Import the PyQt4 module we'll need
from PyQt4.QtCore import QElapsedTimer
from PyQt4.QtCore import QThread
from PyQt4.QtCore import SIGNAL
from PyQt4 import QtCore
import sys
import os
import time
import math
import random
#python -m pip install pyaudio
#sudo apt-get install python-pyaudio python3-pyaudio
import subprocess
#Importing my custom libraries
sys.path.insert(1, '../include')
sys.path.insert(1, "../include/pynaoqi-python2.7-2.1.3.3-linux64") #import this module for the nao.py module
import design
import pparser
import nao
import logbook
#Robot Parameters
SPEED = 0.2
## Class WorkerThread
#
# It is a QThread that send signals
# and receive signals from the GUI
#
class WorkerThread(QThread):
def __init__(self):
QThread.__init__(self)
#Signal variables
self.enable_components_gui_signal = SIGNAL("enable_components_gui_signal")
self.no_robot_signal = SIGNAL("no_robot_signal")
self.yes_robot_signal = SIGNAL("yes_robot_signal")
self.bad_xml_signal = SIGNAL("bad_xml_signal")
self.good_xml_signal = SIGNAL("good_xml_signal")
self.update_gui_signal = SIGNAL("update_gui_signal")
self.show_start_btn_signal = SIGNAL("show_start_btn_signal")
#Misc variables
self.timer = QElapsedTimer()
self.timer_robot = QElapsedTimer()
self.myParser = pparser.Parser()
self.STATE_MACHINE = 0
#Status variables
self._robot_connected = False
self._xml_uploaded = False
self._start_pressed = False
self._confirm_pressed = False
self._confirm_pressed_robot = False
self._participant_decision_taken = False
self._session_info_given = False
#Sub state of state 1
self.SUB_STATE = 0
#Logbook variables
self._log_first_line = ""
self._log_timer = 0
self._log_trial = 0
self._log_person_total = 0.0
self._log_leader_total = 0.0
self._log_player2_total = 0.0
self._log_player3_total = 0.0
self._log_person_investment = 0
self._log_leader_investment = 0
self._log_player2_investment = 0
self._log_player3_investment = 0
self._log_pmf = 0 #person multiplication factor
self._log_bmf = 0 #player b multiplication factor
self._log_gaze = False
self._log_pointing = False
## Main function of the Thread
#
# It runs the State Machine
#
def run(self):
#Init the State machine
#self.emit(self.enable_signal) #GUI enabled
self.emit(self.enable_components_gui_signal, True, False, False) #GUI components
self.timer.start()
self.timer_robot.start()
self.timer_robot_deadline = 0 # deadline for the robot
self.timer_robot_waiting = 2000 # robot waits 2 seconds, then looks at the participant
self.STATE_MACHINE = 0
while True:
time.sleep(0.050) #50 msec sleep to avoid blocking
#STATE-0 init
if self.STATE_MACHINE == 0:
if self._robot_connected==True and self._xml_uploaded==True and self._start_pressed==True and self._session_info_given==True:
self.logger = logbook.Logbook() #Logbook Init
self.logger.AddTextLine(self._log_first_line) #Add the first line to the logbook
self.emit(self.show_start_btn_signal, False)
self._start_pressed = False
self.STATE_MACHINE = 1 #switching to next state
else:
current_time = time.strftime("%H:%M:%S", time.gmtime())
status = "robot_coonnected = " + str(self._robot_connected) + "\n"
status += "xml_uploaded = " + str(self._xml_uploaded) + "\n"
status += "start_pressed = " + str(self._start_pressed) + "\n"
status += "session_info_given = " + str(self._session_info_given) + "\n"
print "[0] " + current_time + " Waiting... \n" + status
time.sleep(3)
#STATE-1 Hello world
if self.STATE_MACHINE == 1:
#Init all the variables
if self.SUB_STATE == 0:
self._confirm_pressed = False
self._confirm_pressed_robot = False
self._start_pressed = False
self._participant_decision_taken = False
print "[1] Hello world!"
sentence = self.myParser._word0_list[self._log_trial]
if self.myParser._gestures_list[self._log_trial] == "True":
self.myPuppet1.animated_say_something(str(sentence))
else:
self.myPuppet1.say_something(str(sentence))
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet1.look_to("HeadYaw", -60.0, SPEED)
time.sleep(random.randint(1,2))
print "[1] Reset the head pose!"
self.myPuppet1.look_to("HeadYaw", 0.0, SPEED)
self.myPuppet2.look_to("HeadYaw", 0.0, SPEED)
self.myPuppet3.look_to("HeadYaw", 0.0, SPEED)
self.myPuppet1.look_to("HeadPitch", 0.0, SPEED)
self.myPuppet2.look_to("HeadPitch", 0.0, SPEED)
self.myPuppet3.look_to("HeadPitch", 0.0, SPEED)
time.sleep(1)
print "[1] Enablig components"
self.emit(self.enable_components_gui_signal, False, False, True) #GUI components
self.STATE_MACHINE = 2
#STATE-2 Reset variables and Robot talks
if self.STATE_MACHINE == 2:
#RESET all the values at each new cycle
self._log_person_investment = 0
self._log_leader_investment = 0
self._log_player2_investment = 0
self._log_player3_investment = 0
#Reset the GUI and enable component
print "[2] Enabling buttons..."
self.emit(self.enable_components_gui_signal, False, False, True) #GUI components
self.emit(self.update_gui_signal, self._log_person_total, self._log_leader_total, self._log_player2_total, self._log_player3_total, "Please wait...")
                #The robot has chosen and now will look at the participant because the waiting time has elapsed
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet2.look_to("HeadYaw", 60.0, SPEED)
self.myPuppet3.look_to("HeadYaw", 60.0, SPEED)
time.sleep(random.randint(0,2))
#self.myPuppet1.look_to("HeadPitch", 5.0, SPEED)
print "[2] Switching to the next state"
self.STATE_MACHINE = 3 #next state
print "[3] Waiting for the subject and robot answer..." #going to state 3
#STATE-3 First interaction: the leader makes his choice
if self.STATE_MACHINE == 3:
self._log_leader_investment = int(self.myParser._pinv1_list[self._log_trial])
if self.myParser._pointing_list[self._log_trial] == "True":
print "[3] pointing == True"
self.myPuppet1.left_arm_pointing(True, SPEED)
time.sleep(1)
if self.myParser._pointing_list[self._log_trial] == "True":
print "[3] pointing == False"
self.myPuppet1.left_arm_pointing(False, SPEED)
sentence = str(self.myParser._word1_list[self._log_trial])
if self.myParser._gestures_list[self._log_trial] == "True":
self.myPuppet1.animated_say_something(str(sentence))
else:
self.myPuppet1.say_something(str(sentence))
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet2.look_to("HeadYaw", -60.0, SPEED) #TODO they have been inverted
self.myPuppet3.look_to("HeadYaw", +60.0, SPEED)
#time.sleep(random.randint(0,2))
#self.myPuppet1.look_to("HeadPitch", 5.0, SPEED)
self.myPuppet1.look_to("HeadYaw", -60.0, SPEED)
#Update with the leader score
local_string = "Pepper invested: " + str(self._log_leader_investment) + '\n'
self.emit(self.update_gui_signal, self._log_person_total, self._log_leader_total, self._log_player2_total, self._log_player3_total, local_string)
#time.sleep(random.randint(1,2))
self.STATE_MACHINE = 4 #next state
#STATE-4 The two NAOs invest
if self.STATE_MACHINE == 4:
self._log_player2_investment = int(self.myParser._pinv2_list[self._log_trial])
self._log_player3_investment = int(self.myParser._pinv3_list[self._log_trial])
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet2.look_to("HeadYaw", 0.0, SPEED)
self.myPuppet3.look_to("HeadYaw", +60.0, SPEED) #TODO have been inverted
#self.myPuppet2.look_to("HeadPitch", 5.0, SPEED)
#self.myPuppet3.look_to("HeadPitch", 5.0, SPEED)
#time.sleep(random.randint(2,3))
#Player 2 investment
sentence = str(self.myParser._word2_list[self._log_trial])
if self.myParser._gestures_list[self._log_trial] == "True":
self.myPuppet2.animated_say_something(str(sentence))
else:
self.myPuppet2.say_something(str(sentence))
local_string = "Pepper invested: " + str(self._log_leader_investment) + '\n'
local_string += "Tommy invested: " + str(self._log_player2_investment) + '\n'
self.emit(self.update_gui_signal, self._log_person_total, self._log_leader_total, self._log_player2_total, self._log_player3_total, local_string)
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet2.look_to("HeadYaw", -40.0, SPEED) #TODO sign inverted
self.myPuppet3.look_to("HeadYaw", 0.0, SPEED)
#time.sleep(random.randint(1,3))
#Player 3 investment
sentence = str(self.myParser._word3_list[self._log_trial])
if self.myParser._gestures_list[self._log_trial] == "True":
self.myPuppet3.animated_say_something(str(sentence))
else:
self.myPuppet3.say_something(str(sentence))
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet2.look_to("HeadYaw", 0.0, SPEED)
print "[4] First interaction"
print("[4] Leader: " + str(self._log_leader_investment))
print("[4] Player 2: " + str(self._log_player2_investment))
print("[4] Player 3: " + str(self._log_player3_investment))
local_string = "Pepper invested: " + str(self._log_leader_investment) + '\n'
local_string += "Tommy invested: " + str(self._log_player2_investment) + '\n'
local_string += "Jones invested: " + str(self._log_player3_investment) + '\n'
local_string += "Please select a value to invest..." + '\n'
self.emit(self.enable_components_gui_signal, False, True, True) #GUI components
self.emit(self.update_gui_signal, self._log_person_total, self._log_leader_total, self._log_player2_total, self._log_player3_total, local_string)
#time.sleep(random.randint(1,2))
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet1.look_to("HeadYaw", 0.0, SPEED)
#self.myPuppet2.look_to("HeadYaw", 0.0, SPEED)
#self.myPuppet3.look_to("HeadYaw", 0.0, SPEED)
#self.myPuppet1.look_to("HeadPitch", 5.0, SPEED)
#self.myPuppet2.look_to("HeadPitch", 0.0, SPEED)
#self.myPuppet3.look_to("HeadPitch", 0.0, SPEED)
self.STATE_MACHINE = 5 #next state
            #STATE-5 The participant chooses
if self.STATE_MACHINE == 5:
if self._confirm_pressed == True: #when subject gives the answer
self._confirm_pressed = False
print "[5] The participant pressed: " + str(self._log_person_investment)
#Updating the GUI
local_string = "You invested: " + str(self._log_person_investment) + '\n'
local_string += "Pepper invested: " + str(self._log_leader_investment) + '\n'
local_string += "Tommy invested: " + str(self._log_player2_investment) + '\n'
local_string += "Jones invested: " + str(self._log_player3_investment) + '\n'
local_string += "In total has been invested: " + str(self._log_person_investment + self._log_leader_investment + self._log_player2_investment + self._log_player3_investment) + '\n'
#total, player_investment, round_total, robot_investment, text_label=""
self.emit(self.enable_components_gui_signal, False, False, True) #GUI components
self.emit(self.update_gui_signal, self._log_person_total, self._log_leader_total, self._log_player2_total, self._log_player3_total, local_string)
time.sleep(1.0)
                    #The Leader says the investment
sentence = self.myParser._word4_list[self._log_trial]
sentence = str(sentence) #convert to string
if(sentence != "." and sentence != "" and sentence != "-"):
                        #Check if XXX is present and replace it with the participant's investment
has_substring = sentence.find("XXX")
if(has_substring != -1):
print "[5] Found the substring 'XXX' at location: " + str(has_substring)
sentence = sentence.replace("XXX", str(self._log_person_investment))
has_substring = sentence.find("YYY")
if(has_substring != -1):
print "[5] Found the substring 'YYY' at location: " + str(has_substring)
sentence = sentence.replace("YYY", str(self._log_person_investment + self._log_leader_investment + self._log_player2_investment + self._log_player3_investment))
if self.myParser._gestures_list[self._log_trial] == "True":
self.myPuppet1.animated_say_something(str(sentence))
else:
self.myPuppet1.say_something(str(sentence))
else:
print "[5] Saying Nothing because the sentence in the XML file is '" + str(sentence) + "'"
#time.sleep(random.randint(1,2))
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet1.look_to("HeadYaw", 60.0, SPEED)
time.sleep(random.randint(1,2))
self.myPuppet2.look_to("HeadYaw", 60.0, SPEED)
self.myPuppet3.look_to("HeadYaw", 60.0, SPEED)
if (self._log_person_investment + self._log_leader_investment + self._log_player2_investment + self._log_player3_investment) == 0:
                        self.STATE_MACHINE = 9 #nothing has been invested, jumping to logbook
else:
self.STATE_MACHINE = 6 #investment done, jumping to next state
#STATE-6 The Banker robot gives a reward
if self.STATE_MACHINE == 6:
#Update the TOTAL
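                # Per-player return = (3 * bmf * pooled investment
                #                      - |leader - mean of the other three|) / 3,
                # rounded to one decimal and floored at zero.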
banker_investment = (self._log_person_investment + self._log_leader_investment + self._log_player2_investment + self._log_player3_investment) * 3.0 * float(self.myParser._bmf_list[self._log_trial])
banker_investment -= abs( self._log_leader_investment - ((self._log_person_investment + self._log_player2_investment + self._log_player3_investment)/3.0) )
banker_investment /= 3.0
banker_investment = round(banker_investment,1) #round to first decimal place
abs_value = round(abs( self._log_leader_investment - ((self._log_person_investment + self._log_player2_investment + self._log_player3_investment)/3.0) ), 1)
if banker_investment < 0: banker_investment = 0.0
print "[6] The Banker invested: " + str(banker_investment)
local_string = "The banker received: " + str((self._log_person_investment + self._log_leader_investment + self._log_player2_investment + self._log_player3_investment))+ '\n'
local_string += "The investments differed of: " + str(abs_value) + " in average" + '\n'
local_string += "The banker returned: " + str(banker_investment) + " each" + '\n'
local_string += "Please press START to begin a new round..." + '\n'
#total, pinv, round_tot, rinv, rslider, text
self._log_person_total += (10.0 - self._log_person_investment) + banker_investment
self._log_person_total = round(self._log_person_total,1)
self._log_leader_total += (10.0 - self._log_leader_investment) + banker_investment
self._log_leader_total = round(self._log_leader_total,1)
self._log_player2_total += (10.0 - self._log_player2_investment) + banker_investment
self._log_player2_total = round(self._log_player2_total,1)
self._log_player3_total += (10.0 - self._log_player3_investment) + banker_investment
self._log_player3_total = round(self._log_player3_total,1)
self.emit(self.update_gui_signal, self._log_person_total, self._log_leader_total, self._log_player2_total, self._log_player3_total, local_string)
time.sleep(2)
if self.myParser._gaze_list[self._log_trial] == "True":
self.myPuppet1.look_to("HeadYaw", 0.0, SPEED)
self.myPuppet2.look_to("HeadYaw", 0.0, SPEED)
self.myPuppet3.look_to("HeadYaw", 0.0, SPEED)
time.sleep(random.randint(1,2))
#Check the absolute value and decide the sentence to say
if abs_value <= 2.0:
sentence = self.myParser._word6_list[self._log_trial]
else:
sentence = self.myParser._word5_list[self._log_trial]
sentence = str(sentence) #convert to string
if(sentence != "." and sentence != "" and sentence != "-"):
                    #Check if XXX is present and replace it with the banker's return
has_substring = sentence.find("XXX")
if(has_substring != -1):
print "[6] Found the substring 'XXX' at location: " + str(has_substring)
sentence = sentence.replace("XXX", str(banker_investment))
has_substring = sentence.find("YYY")
if(has_substring != -1):
print "[6] Found the substring 'YYY' at location: " + str(has_substring)
sentence = sentence.replace("YYY", str(abs_value))
if self.myParser._gestures_list[self._log_trial] == "True":
self.myPuppet1.animated_say_something(str(sentence))
else:
self.myPuppet1.say_something(str(sentence))
else:
print "[6] Saying Nothing because the sentence in the XML file is '" + str(sentence) + "'"
time.sleep(2)
#if self.myParser._gaze_list[self._log_trial] == "True":
# self.myPuppet1.look_to("HeadPitch", 5.0, SPEED)
print "[7] Switch to the next state"
self.STATE_MACHINE = 9 #next state
#STATE-9 Saving in the logbook
if self.STATE_MACHINE == 9:
print "[9] Saving the trial in the logbook"
self.logger.AddLine(self._log_trial+1, self._log_person_investment, self._log_leader_investment, self._log_player2_investment,
self._log_player3_investment, self._log_bmf, self._log_person_total, self._log_gaze, self._log_pointing, self._log_timer)
print ("[9] trial, person_investment, robot_investment, person_investment_second, log_robot_investment_second, player_b_investment, pmf, bmf, person_total, gaze, pointing, timer, timer_second")
print ("[9] " + str(self._log_trial+1) + "," + str(self._log_person_investment) + "," + str(self._log_leader_investment) +
"," + "," + str(0) + "," + str(self._log_bmf) + "," + str(self._log_person_total) + "," + str(self._log_gaze) +
"," + str(self._log_pointing) + "," + str(self._log_timer) )
if self._log_trial+1 != self.myParser._size:
                    self.STATE_MACHINE = 10 #switching to state 10
self.emit(self.enable_components_gui_signal, True, False, False) #Enable the Start Button
self._log_trial = self._log_trial + 1
elif self._log_trial+1 == self.myParser._size:
self.STATE_MACHINE = 11 #experiment finished
#STATE-10 Waiting for the subject pressing START
if self.STATE_MACHINE == 10:
if self._start_pressed == True:
self._start_pressed = False
print "[10] Start pressed..."
self.emit(self.enable_components_gui_signal, False, False, False)
self.STATE_MACHINE = 2 #cycling to state 2
time.sleep(1)
#STATE-11 Final state is called to shutdown the robot
if self.STATE_MACHINE == 11:
print "[11] The game is finished"
self._xml_uploaded = False #reset status variable
self._start_pressed = False
self._log_trial = 0
self.STATE_MACHINE = 0 #cycling to state 0
#total, player_investment, round_total, your_investment, robot_investment
local_string = "Your score is: " + str(self._log_person_total) + '\n'
local_string += "The game is finished. Thank you..."
self.myPuppet1.say_something("Thank you, It was nice to play with you.")
#total, player_investment, round_total, robot_investment, text_label=""
self.emit(self.update_gui_signal, 0, 0, 0, 0, local_string)
self.emit(self.enable_components_gui_signal, False, False, False) #GUI components disabled
time.sleep(5)
def start_experiment(self):
self._start_pressed = True
def confirm(self, person_investment):
self._confirm_pressed = True
self._log_person_investment = int(person_investment)
def confirm_robot(self, robot_investment):
self._confirm_pressed_robot = True
self._log_leader_investment = int(robot_investment)
def ip(self, ip_string, port_string, ip_string_2, port_string_2, ip_string_3, port_string_3):
print "IP: " + str(ip_string)
is_first_connected = False
is_second_connected = False
is_third_connected = False
try:
self.myPuppet1 = nao.Puppet(ip_string, port_string, True)
            self.emit(self.yes_robot_signal)
            is_first_connected = True
except Exception,e:
print "\nERROR: Impossible to find the FIRST robot!\n"
print "Error was: ", e
self.emit(self.no_robot_signal)
self._robot_connected=False
try:
self.myPuppet2 = nao.Puppet(ip_string_2, port_string_2, True)
            self.emit(self.yes_robot_signal)
            is_second_connected = True
except Exception,e:
print "\nERROR: Impossible to find the SECOND robot!\n"
print "Error was: ", e
self.emit(self.no_robot_signal)
self._robot_connected=False
try:
self.myPuppet3 = nao.Puppet(ip_string_3, port_string_3, True)
            self.emit(self.yes_robot_signal)
            is_third_connected = True
except Exception,e:
print "\nERROR: Impossible to find the THIRD robot!\n"
print "Error was: ", e
self.emit(self.no_robot_signal)
self._robot_connected=False
        # Connected only if all three robots responded
        self._robot_connected = is_first_connected and is_second_connected and is_third_connected
def xml(self, path):
print("Looking for external files... ")
if not os.path.isfile(str(path)):
print("\n# ERROR: I cannot find the XML file. The programm will be stopped!\n")
self._xml_uploaded = False
return
print("Initializing XML Parser... ")
try:
self.myParser.LoadFile(str(path))
self.myParser.parse_experiment_list()
            self._xml_uploaded = True
            self.emit(self.good_xml_signal)
except:
self.emit(self.bad_xml_signal)
print("\n # ERROR: Impossible to read the XML file! \n")
self._xml_uploaded = False
def wake(self, state):
if state == True:
self.myPuppet1.wake_up()
self.myPuppet2.wake_up()
self.myPuppet3.wake_up()
else:
self.myPuppet1.rest()
self.myPuppet2.rest()
self.myPuppet3.rest()
def face_tracking(self, state):
self.myPuppet1.enable_face_tracking(state)
self.myPuppet2.enable_face_tracking(state)
self.myPuppet3.enable_face_tracking(state)
def session_info_update(self, info1, info2, info3):
my_string = str(info1) + "," + str(info2) + "," + str(info3)
print("SESSION INFO: ", info1, info2, info3)
self._log_first_line = my_string
self._session_info_given = True
def stop(self):
self.stopped = 1
def __del__(self):
self.wait()
## Class ExampleApp
#
# It is a GUI class created in PyQt4
# that sends signals to and receives signals from the worker thread
#
class ExampleApp(QtGui.QMainWindow, design.Ui_MainWindow):
def __init__(self):
super(self.__class__, self).__init__()
self.setupUi(self)
self.btnBrowse.clicked.connect(self.browse_folder) # When the button is pressed execute browse_folder function
#self.btnStartExperiment.clicked.connect(lambda: self.start_experiment(1))
self.btnStartExperiment.clicked.connect(self.start_experiment)
self.btnConnectToNao.clicked.connect(self.connect_pressed)
self.btnWakeUp.clicked.connect(self.wake_up_pressed)
self.btnRest.clicked.connect(self.rest_pressed)
self.btnFaceTrackingEnable.clicked.connect(lambda: self.face_tracking_pressed(True))
self.btnFaceTrackingDisable.clicked.connect(lambda: self.face_tracking_pressed(False))
self.btnSessionInfoConfirm.clicked.connect(self.session_info_pressed)
#Buttons investment
self.pushButton_0.clicked.connect(lambda: self.confirm_pressed(0))
self.pushButton_1.clicked.connect(lambda: self.confirm_pressed(1))
self.pushButton_2.clicked.connect(lambda: self.confirm_pressed(2))
self.pushButton_3.clicked.connect(lambda: self.confirm_pressed(3))
self.pushButton_4.clicked.connect(lambda: self.confirm_pressed(4))
self.pushButton_5.clicked.connect(lambda: self.confirm_pressed(5))
self.pushButton_6.clicked.connect(lambda: self.confirm_pressed(6))
self.pushButton_7.clicked.connect(lambda: self.confirm_pressed(7))
self.pushButton_8.clicked.connect(lambda: self.confirm_pressed(8))
self.pushButton_9.clicked.connect(lambda: self.confirm_pressed(9))
self.pushButton_10.clicked.connect(lambda: self.confirm_pressed(10))
#Signal to be sent to Thread
self.start_signal = SIGNAL("start_signal")
self.confirm_signal = SIGNAL("confirm_signal")
self.confirm_signal_robot = SIGNAL("confirm_signal_robot")
self.xml_path_signal = SIGNAL("xml_path_signal")
self.ip_signal = SIGNAL("ip_signal")
self.wake_up_signal = SIGNAL("wake_up_signal")
self.face_tracking_signal = SIGNAL("face_tracking_signal")
self.session_info_signal = SIGNAL("session_info_signal")
self.showMaximized()
def start_experiment(self):
self.emit(self.start_signal)
#self.btnStartExperiment.hide() #hiding the start button
def confirm_pressed(self, person_investment):
self.emit(self.confirm_signal, person_investment)
print "CONFIRM: " + str(person_investment)
def confirm_pressed_robot(self, robot_investment):
self.emit(self.confirm_signal_robot, robot_investment)
print "CONFIRM ROBOT: " + str(robot_investment)
def connect_pressed(self):
ip_string = str(self.lineEditNaoIP.text())
port_string = str(self.lineEditNaoPort.text())
ip_string_2 = str(self.lineEditNaoIP_2.text())
port_string_2 = str(self.lineEditNaoPort_2.text())
ip_string_3 = str(self.lineEditNaoIP_3.text())
port_string_3 = str(self.lineEditNaoPort_3.text())
#print "IP: " + ip_string
self.emit(self.ip_signal, ip_string, port_string, ip_string_2, port_string_2, ip_string_3, port_string_3)
def face_tracking_pressed(self, state):
self.emit(self.face_tracking_signal, state)
def wake_up_pressed(self):
self.emit(self.wake_up_signal, True)
def rest_pressed(self):
self.emit(self.wake_up_signal, False)
def session_info_pressed(self):
info1 = str(self.textEditSubjectNumber.toPlainText())
info2 = str(self.textEditSessionNumber.toPlainText())
info3 = str(self.textEditOther.toPlainText())
self.emit(self.session_info_signal, info1, info2, info3)
def show_start_btn(self, is_visible):
if is_visible == True:
self.btnStartExperiment.show()
elif is_visible == False:
self.btnStartExperiment.hide()
#start_btn, confirm_btn, person_slider, show_slider
def enable_components_gui(self, start_btn, confirm_btn, confirm_btn_robot):
if start_btn == True:
self.btnStartExperiment.show()
elif start_btn == False:
self.btnStartExperiment.hide()
#Enabling the confirm buttons
self.pushButton_0.setEnabled(confirm_btn)
self.pushButton_1.setEnabled(confirm_btn)
self.pushButton_2.setEnabled(confirm_btn)
self.pushButton_3.setEnabled(confirm_btn)
self.pushButton_4.setEnabled(confirm_btn)
self.pushButton_5.setEnabled(confirm_btn)
self.pushButton_6.setEnabled(confirm_btn)
self.pushButton_7.setEnabled(confirm_btn)
self.pushButton_8.setEnabled(confirm_btn)
self.pushButton_9.setEnabled(confirm_btn)
self.pushButton_10.setEnabled(confirm_btn)
        if confirm_btn == True:
            #self.pushButton_0.setStyleSheet("background-color: green")
            self.pushButton_0.setStyleSheet("border-style: solid; border-color: green")
        elif confirm_btn == False:
            #self.pushButton_0.setStyleSheet("background-color: red")
            self.pushButton_0.setStyleSheet("border-style: solid; border-color: red")
#total, player_investment, round_total, robot_investment, text_label=""
def update_gui(self, person_total, player1_total, player2_total, player3_total, text_label=""):
#Update the total bar
self.lcdNumberTotal.display(person_total)
self.lcdNumberTotal1.display(player1_total)
self.lcdNumberTotal2.display(player2_total)
self.lcdNumberTotal3.display(player3_total)
#Update the textEdit label
self.textEdit.clear() #clear the textedit
self.textEdit.append(QtCore.QString(text_label))
def browse_folder(self):
selected_file = QtGui.QFileDialog.getOpenFileName(self, "Select a configuration file", "../etc/xml","XML files(*.xml)")
if selected_file: # if user didn't pick a directory don't continue
self.textEditXML.setText(selected_file) # self.listWidget.addItem(selected_file) # add file to the listWidget
self.emit(self.xml_path_signal, selected_file)
else:
msgBox = QtGui.QMessageBox()
msgBox.setIcon(QtGui.QMessageBox.Warning)
msgBox.setWindowTitle("No file selected")
msgBox.setText("ATTENTION: You did not select any XML file.");
msgBox.exec_();
def no_robot_error(self):
msgBox = QtGui.QMessageBox()
msgBox.setIcon(QtGui.QMessageBox.Critical)
msgBox.setWindowTitle("Ops... Connection Error")
msgBox.setText("ERROR: It was not possible to find the robot. \nFollow these tips and try to connect again. \n \n1- Check if the robot is running correctly. \n2- Check if the wifi router is running properly. \n3- Press the button on the robot chest to verify if the IP address is correct. \n4- Check if another software or GUI is connected to the robot. \n");
msgBox.exec_();
self.btnWakeUp.setEnabled(False)
self.btnRest.setEnabled(False)
self.btnFaceTrackingEnable.setEnabled(False)
self.btnFaceTrackingDisable.setEnabled(False)
def yes_robot_confirmation(self):
msgBox = QtGui.QMessageBox()
msgBox.setIcon(QtGui.QMessageBox.Information)
msgBox.setWindowTitle("Well Done!")
msgBox.setText("I found the robot, the connection was successfully established!")
        msgBox.exec_()
self.btnWakeUp.setEnabled(True)
self.btnRest.setEnabled(True)
self.btnFaceTrackingEnable.setEnabled(True)
self.btnFaceTrackingDisable.setEnabled(True)
def bad_xml_error(self):
msgBox = QtGui.QMessageBox()
msgBox.setIcon(QtGui.QMessageBox.Critical)
msgBox.setWindowTitle("Ops... malformed XML file")
msgBox.setText("ERROR: It was not possible to read the XML file. \nFollow these tips and try to select again. \n \n1- Verify if you can open correctly the file with a text editor (es. notepad). \n2- Once opened the file, check if for each open bracket <trial> there is a closed bracket </trial>. \n3- Check if the name of the audio files is correct.\n");
msgBox.exec_();
def good_xml_confirmation(self):
msgBox = QtGui.QMessageBox()
msgBox.setIcon(QtGui.QMessageBox.Information)
msgBox.setWindowTitle("Very Good!")
msgBox.setText("I opened the XML file correctly. Be carefull, this does not mean that what you write inside the file is correct...")
msgBox.exec_();
def main():
#New instance of QApplication
app = QtGui.QApplication(sys.argv)
form = ExampleApp()
#Creating the main thread
thread = WorkerThread()
#Connecting: form > thread
thread.connect(form, form.start_signal, thread.start_experiment)
thread.connect(form, form.xml_path_signal, thread.xml) #sending XML path
thread.connect(form, form.confirm_signal, thread.confirm)
thread.connect(form, form.confirm_signal_robot, thread.confirm_robot)
thread.connect(form, form.ip_signal, thread.ip)
thread.connect(form, form.wake_up_signal, thread.wake)
thread.connect(form, form.face_tracking_signal, thread.face_tracking)
thread.connect(form, form.session_info_signal, thread.session_info_update)
#Connecting: thread > form
form.connect(thread, thread.enable_components_gui_signal, form.enable_components_gui)
form.connect(thread, thread.no_robot_signal, form.no_robot_error)
form.connect(thread, thread.yes_robot_signal, form.yes_robot_confirmation)
form.connect(thread, thread.bad_xml_signal, form.bad_xml_error)
form.connect(thread, thread.good_xml_signal, form.good_xml_confirmation)
form.connect(thread, thread.update_gui_signal, form.update_gui)
form.connect(thread, thread.show_start_btn_signal, form.show_start_btn)
#Starting thread
thread.start()
#Show the form and execute the app
form.show()
app.exec_()
if __name__ == '__main__': # if we're running file directly and not importing it
main() # run the main function
|
{
"content_hash": "afd988d7cf9eda23382d432f816abb7d",
"timestamp": "",
"source": "github",
"line_count": 705,
"max_line_length": 371,
"avg_line_length": 50.727659574468085,
"alnum_prop": 0.6100998238402818,
"repo_name": "mpatacchiola/naogui",
"id": "db53747330efec2fb5192cbd399b1ee162a2dd9f",
"size": "36586",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zpgc_2017/src/main.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "17000"
},
{
"name": "CMake",
"bytes": "4992"
},
{
"name": "Python",
"bytes": "946151"
},
{
"name": "Shell",
"bytes": "954"
}
],
"symlink_target": ""
}
|
from __future__ import with_statement
import os
import stat
import sys
import tempfile
import unittest
from filesystem import FileSystem
class FileSystemTest(unittest.TestCase):
def setUp(self):
self._this_dir = os.path.dirname(os.path.abspath(__file__))
self._missing_file = os.path.join(self._this_dir, 'missing_file.py')
self._this_file = os.path.join(self._this_dir, 'filesystem_unittest.py')
def test_chdir(self):
fs = FileSystem()
cwd = fs.getcwd()
newdir = '/'
if sys.platform == 'win32':
newdir = 'c:\\'
fs.chdir(newdir)
self.assertEquals(fs.getcwd(), newdir)
fs.chdir(cwd)
def test_chdir__notexists(self):
fs = FileSystem()
newdir = '/dirdoesnotexist'
if sys.platform == 'win32':
newdir = 'c:\\dirdoesnotexist'
self.assertRaises(OSError, fs.chdir, newdir)
def test_exists__true(self):
fs = FileSystem()
self.assertTrue(fs.exists(self._this_file))
def test_exists__false(self):
fs = FileSystem()
self.assertFalse(fs.exists(self._missing_file))
def test_getcwd(self):
fs = FileSystem()
self.assertTrue(fs.exists(fs.getcwd()))
def test_isdir__true(self):
fs = FileSystem()
self.assertTrue(fs.isdir(self._this_dir))
def test_isdir__false(self):
fs = FileSystem()
self.assertFalse(fs.isdir(self._this_file))
def test_join(self):
fs = FileSystem()
self.assertEqual(fs.join('foo', 'bar'),
os.path.join('foo', 'bar'))
def test_listdir(self):
fs = FileSystem()
with fs.mkdtemp(prefix='filesystem_unittest_') as d:
self.assertEqual(fs.listdir(d), [])
new_file = os.path.join(d, 'foo')
fs.write_text_file(new_file, u'foo')
self.assertEqual(fs.listdir(d), ['foo'])
os.remove(new_file)
def test_maybe_make_directory__success(self):
fs = FileSystem()
with fs.mkdtemp(prefix='filesystem_unittest_') as base_path:
sub_path = os.path.join(base_path, "newdir")
self.assertFalse(os.path.exists(sub_path))
self.assertFalse(fs.isdir(sub_path))
fs.maybe_make_directory(sub_path)
self.assertTrue(os.path.exists(sub_path))
self.assertTrue(fs.isdir(sub_path))
# Make sure we can re-create it.
fs.maybe_make_directory(sub_path)
self.assertTrue(os.path.exists(sub_path))
self.assertTrue(fs.isdir(sub_path))
# Clean up.
os.rmdir(sub_path)
self.assertFalse(os.path.exists(base_path))
self.assertFalse(fs.isdir(base_path))
def test_maybe_make_directory__failure(self):
# FIXME: os.chmod() doesn't work on Windows to set directories
# as readonly, so we skip this test for now.
if sys.platform in ('win32', 'cygwin'):
return
fs = FileSystem()
with fs.mkdtemp(prefix='filesystem_unittest_') as d:
# Remove write permissions on the parent directory.
os.chmod(d, stat.S_IRUSR)
# Now try to create a sub directory - should fail.
sub_dir = fs.join(d, 'subdir')
self.assertRaises(OSError, fs.maybe_make_directory, sub_dir)
# Clean up in case the test failed and we did create the
# directory.
if os.path.exists(sub_dir):
os.rmdir(sub_dir)
def test_read_and_write_file(self):
fs = FileSystem()
text_path = None
binary_path = None
unicode_text_string = u'Ūnĭcōde̽'
hex_equivalent = '\xC5\xAA\x6E\xC4\xAD\x63\xC5\x8D\x64\x65\xCC\xBD'
try:
text_path = tempfile.mktemp(prefix='tree_unittest_')
binary_path = tempfile.mktemp(prefix='tree_unittest_')
fs.write_text_file(text_path, unicode_text_string)
contents = fs.read_binary_file(text_path)
self.assertEqual(contents, hex_equivalent)
fs.write_text_file(binary_path, hex_equivalent)
text_contents = fs.read_text_file(binary_path)
self.assertEqual(text_contents, unicode_text_string)
        finally:
if text_path:
os.remove(text_path)
if binary_path:
os.remove(binary_path)
def test_read_binary_file__missing(self):
fs = FileSystem()
self.assertRaises(IOError, fs.read_binary_file, self._missing_file)
def test_read_text_file__missing(self):
fs = FileSystem()
self.assertRaises(IOError, fs.read_text_file, self._missing_file)
def test_remove_file_with_retry(self):
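        # Simulate remove() raising WindowsError (or the fallback
        # FileSystem._WindowsError) twice before succeeding and verify that
        # FileSystem.remove retries until the call goes through.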
FileSystemTest._remove_failures = 2
def remove_with_exception(filename):
FileSystemTest._remove_failures -= 1
if FileSystemTest._remove_failures >= 0:
try:
raise WindowsError
except NameError:
raise FileSystem._WindowsError
fs = FileSystem()
self.assertTrue(fs.remove('filename', remove_with_exception))
self.assertEquals(-1, FileSystemTest._remove_failures)
def test_sep(self):
fs = FileSystem()
self.assertEquals(fs.sep, os.sep)
self.assertEquals(fs.join("foo", "bar"),
os.path.join("foo", "bar"))
def test_sep__is_readonly(self):
def assign_sep():
fs.sep = ' '
fs = FileSystem()
self.assertRaises(AttributeError, assign_sep)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "5cebdec6a982b667d1895b98875ffd96",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 80,
"avg_line_length": 32.895953757225435,
"alnum_prop": 0.579687225443683,
"repo_name": "mogoweb/webkit_for_android5.1",
"id": "8d4f0cb638bddd41a8edfec99e8fd7801f8bc035",
"size": "7438",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "webkit/Tools/Scripts/webkitpy/common/system/filesystem_unittest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AppleScript",
"bytes": "6772"
},
{
"name": "Assembly",
"bytes": "26025"
},
{
"name": "Awk",
"bytes": "2800"
},
{
"name": "Batchfile",
"bytes": "57337"
},
{
"name": "C",
"bytes": "7713030"
},
{
"name": "C++",
"bytes": "153178707"
},
{
"name": "CMake",
"bytes": "192330"
},
{
"name": "CSS",
"bytes": "483041"
},
{
"name": "Common Lisp",
"bytes": "9920"
},
{
"name": "DIGITAL Command Language",
"bytes": "5243"
},
{
"name": "DTrace",
"bytes": "1931"
},
{
"name": "Go",
"bytes": "3744"
},
{
"name": "HTML",
"bytes": "14998422"
},
{
"name": "Java",
"bytes": "1522083"
},
{
"name": "JavaScript",
"bytes": "18008829"
},
{
"name": "Lex",
"bytes": "42554"
},
{
"name": "Lua",
"bytes": "13768"
},
{
"name": "M4",
"bytes": "49839"
},
{
"name": "Makefile",
"bytes": "476166"
},
{
"name": "Module Management System",
"bytes": "9756"
},
{
"name": "Objective-C",
"bytes": "2798053"
},
{
"name": "Objective-C++",
"bytes": "7846322"
},
{
"name": "PHP",
"bytes": "66595"
},
{
"name": "Perl",
"bytes": "1130475"
},
{
"name": "Perl 6",
"bytes": "445215"
},
{
"name": "Python",
"bytes": "5503045"
},
{
"name": "QML",
"bytes": "3331"
},
{
"name": "QMake",
"bytes": "294800"
},
{
"name": "R",
"bytes": "290"
},
{
"name": "Roff",
"bytes": "273562"
},
{
"name": "Ruby",
"bytes": "81928"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "488223"
},
{
"name": "Yacc",
"bytes": "153801"
},
{
"name": "xBase",
"bytes": "328"
}
],
"symlink_target": ""
}
|
from django.forms import ValidationError
from cyder.cydns.domain.models import Domain
from cyder.cydns.views import CydnsCreateView
from cyder.cydns.views import CydnsDeleteView
from cyder.cydns.views import CydnsDetailView
from cyder.cydns.views import CydnsListView
from cyder.cydns.views import CydnsUpdateView
|
{
"content_hash": "847f9f4391a08f9f71ae0e36c7a618b7",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 45,
"avg_line_length": 39.375,
"alnum_prop": 0.8634920634920635,
"repo_name": "ngokevin/cyder",
"id": "2fa9334b7b6a99df95b3a6b112a0dc7a5b25e97f",
"size": "315",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cyder/cydns/nameserver/views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1449"
},
{
"name": "JavaScript",
"bytes": "47875"
},
{
"name": "Puppet",
"bytes": "6422"
},
{
"name": "Python",
"bytes": "1598241"
},
{
"name": "Ruby",
"bytes": "1437"
},
{
"name": "Shell",
"bytes": "4354"
}
],
"symlink_target": ""
}
|
from flexmock import flexmock, flexmock_teardown
from orator.connections import Connection
from orator.schema.grammars import MySQLSchemaGrammar
from orator.schema.blueprint import Blueprint
from orator.connectors import MySQLConnector
from ... import OratorTestCase
class MySQLSchemaGrammarTestCase(OratorTestCase):
def tearDown(self):
flexmock_teardown()
def test_basic_create(self):
blueprint = Blueprint('users')
blueprint.create()
blueprint.increments('id')
blueprint.string('email')
conn = self.get_connection()
conn.should_receive('get_config').once().with_args('charset').and_return('utf8')
conn.should_receive('get_config').once().with_args('collation').and_return('utf8_unicode_ci')
statements = blueprint.to_sql(conn, self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'CREATE TABLE `users` ('
'`id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY, '
'`email` VARCHAR(255) NOT NULL) '
'DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci',
statements[0]
)
blueprint = Blueprint('users')
blueprint.create()
blueprint.increments('id')
blueprint.string('email')
conn = self.get_connection()
conn.should_receive('get_config').and_return(None)
statements = blueprint.to_sql(conn, self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'CREATE TABLE `users` ('
'`id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY, '
'`email` VARCHAR(255) NOT NULL)',
statements[0]
)
def test_charset_collation_create(self):
blueprint = Blueprint('users')
blueprint.create()
blueprint.increments('id')
blueprint.string('email')
blueprint.charset = 'utf8mb4'
blueprint.collation = 'utf8mb4_unicode_ci'
conn = self.get_connection()
statements = blueprint.to_sql(conn, self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'CREATE TABLE `users` ('
'`id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY, '
'`email` VARCHAR(255) NOT NULL) '
'DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci',
statements[0]
)
def test_basic_create_with_prefix(self):
blueprint = Blueprint('users')
blueprint.create()
blueprint.increments('id')
blueprint.string('email')
grammar = self.get_grammar()
grammar.set_table_prefix('prefix_')
conn = self.get_connection()
conn.should_receive('get_config').and_return(None)
statements = blueprint.to_sql(conn, grammar)
self.assertEqual(1, len(statements))
self.assertEqual(
'CREATE TABLE `prefix_users` ('
'`id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY, '
'`email` VARCHAR(255) NOT NULL)',
statements[0]
)
def test_drop_table(self):
blueprint = Blueprint('users')
blueprint.drop()
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('DROP TABLE `users`', statements[0])
def test_drop_table_if_exists(self):
blueprint = Blueprint('users')
blueprint.drop_if_exists()
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('DROP TABLE IF EXISTS `users`', statements[0])
def test_drop_column(self):
blueprint = Blueprint('users')
blueprint.drop_column('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('ALTER TABLE `users` DROP `foo`', statements[0])
blueprint = Blueprint('users')
blueprint.drop_column('foo', 'bar')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('ALTER TABLE `users` DROP `foo`, DROP `bar`', statements[0])
def test_drop_primary(self):
blueprint = Blueprint('users')
blueprint.drop_primary('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('ALTER TABLE `users` DROP PRIMARY KEY', statements[0])
def test_drop_unique(self):
blueprint = Blueprint('users')
blueprint.drop_unique('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('ALTER TABLE `users` DROP INDEX foo', statements[0])
def test_drop_index(self):
blueprint = Blueprint('users')
blueprint.drop_index('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('ALTER TABLE `users` DROP INDEX foo', statements[0])
def test_drop_foreign(self):
blueprint = Blueprint('users')
blueprint.drop_foreign('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('ALTER TABLE `users` DROP FOREIGN KEY foo', statements[0])
def test_drop_timestamps(self):
blueprint = Blueprint('users')
blueprint.drop_timestamps()
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('ALTER TABLE `users` DROP `created_at`, DROP `updated_at`', statements[0])
def test_rename_table(self):
blueprint = Blueprint('users')
blueprint.rename('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('RENAME TABLE `users` TO `foo`', statements[0])
def test_adding_primary_key(self):
blueprint = Blueprint('users')
blueprint.primary('foo', 'bar')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual('ALTER TABLE `users` ADD PRIMARY KEY bar(`foo`)', statements[0])
def test_adding_foreign_key(self):
blueprint = Blueprint('users')
blueprint.foreign('order_id').references('id').on('orders')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
expected = [
'ALTER TABLE `users` ADD CONSTRAINT users_order_id_foreign '
'FOREIGN KEY (`order_id`) REFERENCES `orders` (`id`)'
]
self.assertEqual(expected, statements)
def test_adding_unique_key(self):
blueprint = Blueprint('users')
blueprint.unique('foo', 'bar')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD UNIQUE bar(`foo`)',
statements[0]
)
def test_adding_index(self):
blueprint = Blueprint('users')
blueprint.index(['foo', 'bar'], 'baz')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD INDEX baz(`foo`, `bar`)',
statements[0]
)
def test_adding_incrementing_id(self):
blueprint = Blueprint('users')
blueprint.increments('id')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY',
statements[0]
)
def test_adding_big_incrementing_id(self):
blueprint = Blueprint('users')
blueprint.big_increments('id')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `id` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY',
statements[0]
)
def test_adding_column_after_another(self):
blueprint = Blueprint('users')
blueprint.string('name').after('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `name` VARCHAR(255) NOT NULL AFTER `foo`',
statements[0]
)
def test_adding_string(self):
blueprint = Blueprint('users')
blueprint.string('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` VARCHAR(255) NOT NULL',
statements[0]
)
blueprint = Blueprint('users')
blueprint.string('foo', 100)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` VARCHAR(100) NOT NULL',
statements[0]
)
blueprint = Blueprint('users')
blueprint.string('foo', 100).nullable().default('bar')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` VARCHAR(100) NULL DEFAULT \'bar\'',
statements[0]
)
def test_adding_text(self):
blueprint = Blueprint('users')
blueprint.text('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TEXT NOT NULL',
statements[0]
)
def test_adding_big_integer(self):
blueprint = Blueprint('users')
blueprint.big_integer('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` BIGINT NOT NULL',
statements[0]
)
blueprint = Blueprint('users')
blueprint.big_integer('foo', True)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY',
statements[0]
)
def test_adding_integer(self):
blueprint = Blueprint('users')
blueprint.integer('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` INT NOT NULL',
statements[0]
)
blueprint = Blueprint('users')
blueprint.integer('foo', True)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` INT NOT NULL AUTO_INCREMENT PRIMARY KEY',
statements[0]
)
def test_adding_medium_integer(self):
blueprint = Blueprint('users')
blueprint.medium_integer('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` MEDIUMINT NOT NULL',
statements[0]
)
blueprint = Blueprint('users')
blueprint.medium_integer('foo', True)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` MEDIUMINT NOT NULL AUTO_INCREMENT PRIMARY KEY',
statements[0]
)
def test_adding_tiny_integer(self):
blueprint = Blueprint('users')
blueprint.tiny_integer('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TINYINT NOT NULL',
statements[0]
)
blueprint = Blueprint('users')
blueprint.tiny_integer('foo', True)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TINYINT NOT NULL AUTO_INCREMENT PRIMARY KEY',
statements[0]
)
def test_adding_small_integer(self):
blueprint = Blueprint('users')
blueprint.small_integer('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` SMALLINT NOT NULL',
statements[0]
)
blueprint = Blueprint('users')
blueprint.small_integer('foo', True)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` SMALLINT NOT NULL AUTO_INCREMENT PRIMARY KEY',
statements[0]
)
def test_adding_float(self):
blueprint = Blueprint('users')
blueprint.float('foo', 5, 2)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` DOUBLE(5, 2) NOT NULL',
statements[0]
)
def test_adding_double(self):
blueprint = Blueprint('users')
blueprint.double('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` DOUBLE NOT NULL',
statements[0]
)
def test_adding_double_with_precision(self):
blueprint = Blueprint('users')
blueprint.double('foo', 15, 8)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` DOUBLE(15, 8) NOT NULL',
statements[0]
)
def test_adding_decimal(self):
blueprint = Blueprint('users')
blueprint.decimal('foo', 5, 2)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` DECIMAL(5, 2) NOT NULL',
statements[0]
)
def test_adding_boolean(self):
blueprint = Blueprint('users')
blueprint.boolean('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TINYINT(1) NOT NULL',
statements[0]
)
def test_adding_enum(self):
blueprint = Blueprint('users')
blueprint.enum('foo', ['bar', 'baz'])
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` ENUM(\'bar\', \'baz\') NOT NULL',
statements[0]
)
def test_adding_date(self):
blueprint = Blueprint('users')
blueprint.date('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` DATE NOT NULL',
statements[0]
)
def test_adding_datetime(self):
blueprint = Blueprint('users')
blueprint.datetime('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` DATETIME NOT NULL',
statements[0]
)
def test_adding_time(self):
blueprint = Blueprint('users')
blueprint.time('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TIME NOT NULL',
statements[0]
)
def test_adding_timestamp_mysql_lt_564(self):
blueprint = Blueprint('users')
blueprint.timestamp('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 0, '')))
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TIMESTAMP NOT NULL',
statements[0]
)
def test_adding_timestamp_mysql_gte_564(self):
blueprint = Blueprint('users')
blueprint.timestamp('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 4, '')))
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TIMESTAMP(6) NOT NULL',
statements[0]
)
def test_adding_timestamp_with_current_mysql_lt_564(self):
blueprint = Blueprint('users')
blueprint.timestamp('foo').use_current()
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 0, '')))
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL',
statements[0]
)
def test_adding_timestamp_with_current_mysql_gte_564(self):
blueprint = Blueprint('users')
blueprint.timestamp('foo').use_current()
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 4, '')))
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL',
statements[0]
)
def test_adding_timestamps_mysql_lt_564(self):
blueprint = Blueprint('users')
blueprint.timestamps()
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 0, '')))
self.assertEqual(1, len(statements))
expected = [
'ALTER TABLE `users` ADD `created_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, '
'ADD `updated_at` TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL'
]
self.assertEqual(
expected[0],
statements[0]
)
def test_adding_timestamps_mysql_gte_564(self):
blueprint = Blueprint('users')
blueprint.timestamps()
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 4, '')))
self.assertEqual(1, len(statements))
expected = [
'ALTER TABLE `users` ADD `created_at` TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL, '
'ADD `updated_at` TIMESTAMP(6) DEFAULT CURRENT_TIMESTAMP(6) NOT NULL'
]
self.assertEqual(
expected[0],
statements[0]
)
def test_adding_timestamps_not_current_mysql_lt_564(self):
blueprint = Blueprint('users')
blueprint.timestamps(use_current=False)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 0, '')))
self.assertEqual(1, len(statements))
expected = [
'ALTER TABLE `users` ADD `created_at` TIMESTAMP NOT NULL, '
'ADD `updated_at` TIMESTAMP NOT NULL'
]
self.assertEqual(
expected[0],
statements[0]
)
def test_adding_timestamps_not_current_mysql_gte_564(self):
blueprint = Blueprint('users')
blueprint.timestamps(use_current=False)
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 4, '')))
self.assertEqual(1, len(statements))
expected = [
'ALTER TABLE `users` ADD `created_at` TIMESTAMP(6) NOT NULL, '
'ADD `updated_at` TIMESTAMP(6) NOT NULL'
]
self.assertEqual(
expected[0],
statements[0]
)
def test_adding_binary(self):
blueprint = Blueprint('users')
blueprint.binary('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` BLOB NOT NULL',
statements[0]
)
def test_adding_json(self):
blueprint = Blueprint('users')
blueprint.json('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar())
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` JSON NOT NULL',
statements[0]
)
def test_adding_json_mysql_56(self):
blueprint = Blueprint('users')
blueprint.json('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((5, 6, 0, '')))
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TEXT NOT NULL',
statements[0]
)
def test_adding_json_mariadb(self):
blueprint = Blueprint('users')
blueprint.json('foo')
statements = blueprint.to_sql(self.get_connection(), self.get_grammar((10, 6, 0, 'mariadb')))
self.assertEqual(1, len(statements))
self.assertEqual(
'ALTER TABLE `users` ADD `foo` TEXT NOT NULL',
statements[0]
)
def get_connection(self, version=None):
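        # 'version' mimics the (major, minor, patch, suffix) tuple returned by
        # MySQLConnector.get_server_version(); tests pass e.g. (5, 6, 0, '')
        # or (10, 6, 0, 'mariadb') to exercise version-specific SQL.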
if version is None:
version = (5, 7, 11, '')
connector = flexmock(MySQLConnector())
connector.should_receive('get_server_version').and_return(version)
conn = flexmock(Connection(connector))
return conn
def get_grammar(self, version=None):
return MySQLSchemaGrammar(self.get_connection(version))
|
{
"content_hash": "a5a5cc3dd67185f689a7d5d19615abe8",
"timestamp": "",
"source": "github",
"line_count": 658,
"max_line_length": 103,
"avg_line_length": 35.10334346504559,
"alnum_prop": 0.6007446532167288,
"repo_name": "Hanaasagi/sorator",
"id": "b4fe1667850e301b6c692542f38ce75569f08409",
"size": "23123",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/schema/grammars/test_mysql_grammar.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2023"
},
{
"name": "Python",
"bytes": "1070898"
}
],
"symlink_target": ""
}
|
import requests
import logging
class TgbotConnection:
REQUEST_TIMEOUT = 30
def __init__(self, token):
self.token = token
def apiurl(self, method):
return 'https://api.telegram.org/bot{}/{}'.format(self.token, method)
def makeRequest(self, reqname, **params):
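        # Call a Telegram Bot API method by name, retrying indefinitely on
        # connection errors, timeouts and empty responses; returns the
        # 'result' payload on success, or None when the API reports an error.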
retries = 0
while True:
retries += 1
logging.debug('>>> {}: {}'.format(reqname, params))
try:
response = requests.get(self.apiurl(reqname),
params=params, timeout=self.REQUEST_TIMEOUT)
except requests.exceptions.ConnectionError as ex:
logging.warning('Connection error ({}) for {} (try #{}), params: {}'.format(
ex, reqname, retries, params))
continue
except requests.exceptions.Timeout: # XXX install newer version
logging.warning('Timed out {} (try #{}), params: {}'.format(
reqname, retries, params))
continue
except requests.exceptions.ConnectTimeout: # XXX install newer version
logging.warning('Timed out {} (try #{}), params: {}'.format(
reqname, retries, params))
continue
response.encoding = 'utf-8'
# version mismatches in our installs
try:
json = response.json()
except TypeError:
json = response.json
logging.debug('<<< {}'.format(json))
# error 502 happens sometimes
if json is None:
logging.warning('none json response for {} (try #{})'.format(
reqname, retries))
continue
if not json['ok']:
return # tg changes these all the time. i don't even care anymore
            if json.get('description') == 'Error: PEER_ID_INVALID': # (FIXME: is this old format? the next one seems to be used, hmm?)
# happens for sendMessage sometimes. FIXME: what makes the peer invalid?
# return value can be ignored here for now
logging.error('FIXME: what is this?')
return
if json.get('description') == '[Error : 400 : PEER_ID_INVALID]':
logging.error('FIXME: what is this?')
return
if json.get('description') == '[Error]: PEER_ID_INVALID':
# happens for sendMessage sometimes. FIXME: what makes the peer invalid?
# return value can be ignored here for now
logging.error('FIXME: what is this?')
return
if json.get('description') == '[Error]: Bad Request: message not found':
# got this for cmdQuote self.conn.forwardMessage(target, response.adder['id'], response.msgid)
# return value can be ignored here for now
logging.error('FIXME: what is this?')
return
if json.get('description') == 'Error: Bot was kicked from a chat':
logging.warning('FIXME: handle this somehow?')
return
if json.get('description') == '[Error]: Bot was blocked by the user':
logging.warning('FIXME: handle this somehow?')
return
            if not json['ok']:
                # unrecognized error description: give up loudly
                raise RuntimeError('Bad request, response: {}'.format(json))
            return json['result']
def getMe(self):
return self.makeRequest('getMe')
def getUpdates(self, offset=None, limit=None, timeout=None):
# FIXME handle this stupid stuff like:
# {'error_code': 500, 'ok': False, 'description': 'Internal server error: restart'}
updates = self.makeRequest('getUpdates', offset=offset, limit=limit, timeout=timeout)
if updates is None:
return [] # ON ERROR RESUME NEXT :-D
return updates
def sendMessage(self, chat_id, text):
return self.makeRequest('sendMessage', chat_id=chat_id, text=text)
def forwardMessage(self, chat_id, from_id, msg_id):
return self.makeRequest('forwardMessage', chat_id=chat_id,
from_chat_id=from_id, message_id=msg_id)
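# Usage sketch (the token below is a placeholder; message/update fields follow
# the Telegram Bot API):
#   conn = TgbotConnection('123456:PLACEHOLDER-TOKEN')
#   print conn.getMe()
#   for update in conn.getUpdates(timeout=30):
#       msg = update.get('message')
#       if msg and 'text' in msg:
#           conn.sendMessage(msg['chat']['id'], msg['text'])  # simple echo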
|
{
"content_hash": "8f5628c6bc2b6fc3f7eff041d1634fe8",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 137,
"avg_line_length": 46.641304347826086,
"alnum_prop": 0.5422978326730366,
"repo_name": "sooda/askibot-tg",
"id": "bcbc758dd2bc22b90a9eb889f865ad7dcf582af6",
"size": "4291",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tgbot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "30742"
}
],
"symlink_target": ""
}
|
"""CGI-savvy HTTP Server.
This module builds on SimpleHTTPServer by implementing GET and POST
requests to cgi-bin scripts.
If the os.fork() function is not present (e.g. on Windows),
os.popen2() is used as a fallback, with slightly altered semantics; if
that function is not present either (e.g. on Macintosh), only Python
scripts are supported, and they are executed by the current process.
In all cases, the implementation is intentionally naive -- all
requests are executed synchronously.
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
-- it may execute arbitrary Python code or external programs.
Note that status code 200 is sent prior to execution of a CGI script, so
scripts cannot send other status codes such as 302 (redirect).
"""
__version__ = "0.4"
__all__ = ["CGIHTTPRequestHandler"]
import os
import sys
import urllib
import BaseHTTPServer
import SimpleHTTPServer
import select
import copy
class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""Complete HTTP server with GET, HEAD and POST commands.
GET and HEAD also support running CGI scripts.
The POST command is *only* implemented for CGI scripts.
"""
# Determine platform specifics
have_fork = hasattr(os, 'fork')
have_popen2 = hasattr(os, 'popen2')
have_popen3 = hasattr(os, 'popen3')
# Make rfile unbuffered -- we need to read one line and then pass
# the rest to a subprocess, so we can't use buffered input.
rbufsize = 0
def do_POST(self):
"""Serve a POST request.
This is only implemented for CGI scripts.
"""
if self.is_cgi():
self.run_cgi()
else:
self.send_error(501, "Can only POST to CGI scripts")
def send_head(self):
"""Version of send_head that support CGI scripts"""
if self.is_cgi():
return self.run_cgi()
else:
return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
def is_cgi(self):
"""Test whether self.path corresponds to a CGI script.
Returns True and updates the cgi_info attribute to the tuple
(dir, rest) if self.path requires running a CGI script.
Returns False otherwise.
If any exception is raised, the caller should assume that
self.path was rejected as invalid and act accordingly.
The default implementation tests whether the normalized url
path begins with one of the strings in self.cgi_directories
(and the next character is a '/' or the end of the string).
"""
collapsed_path = _url_collapse_path(urllib.unquote(self.path))
dir_sep = collapsed_path.find('/', 1)
head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
if head in self.cgi_directories:
self.cgi_info = head, tail
return True
return False
cgi_directories = ['/cgi-bin', '/htbin']
def is_executable(self, path):
"""Test whether argument path is an executable file."""
return executable(path)
def is_python(self, path):
"""Test whether argument path is a Python script."""
head, tail = os.path.splitext(path)
return tail.lower() in (".py", ".pyw")
def run_cgi(self):
"""Execute a CGI script."""
dir, rest = self.cgi_info
i = rest.find('/')
while i >= 0:
nextdir = rest[:i]
nextrest = rest[i+1:]
scriptdir = self.translate_path(nextdir)
if os.path.isdir(scriptdir):
dir, rest = nextdir, nextrest
i = rest.find('/')
else:
break
# find an explicit query string, if present.
i = rest.rfind('?')
if i >= 0:
rest, query = rest[:i], rest[i+1:]
else:
query = ''
# dissect the part after the directory name into a script name &
# a possible additional path, to be stored in PATH_INFO.
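        # Illustrative example (names are hypothetical): rest 'hello.py/extra/info'
        # dissects into script 'hello.py' and rest '/extra/info' (the PATH_INFO part).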
i = rest.find('/')
if i >= 0:
script, rest = rest[:i], rest[i:]
else:
script, rest = rest, ''
scriptname = dir + '/' + script
scriptfile = self.translate_path(scriptname)
if not os.path.exists(scriptfile):
self.send_error(404, "No such CGI script (%r)" % scriptname)
return
if not os.path.isfile(scriptfile):
self.send_error(403, "CGI script is not a plain file (%r)" %
scriptname)
return
ispy = self.is_python(scriptname)
if not ispy:
if not (self.have_fork or self.have_popen2 or self.have_popen3):
self.send_error(403, "CGI script is not a Python script (%r)" %
scriptname)
return
if not self.is_executable(scriptfile):
self.send_error(403, "CGI script is not executable (%r)" %
scriptname)
return
# Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
# XXX Much of the following could be prepared ahead of time!
env = copy.deepcopy(os.environ)
env['SERVER_SOFTWARE'] = self.version_string()
env['SERVER_NAME'] = self.server.server_name
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
env['SERVER_PROTOCOL'] = self.protocol_version
env['SERVER_PORT'] = str(self.server.server_port)
env['REQUEST_METHOD'] = self.command
uqrest = urllib.unquote(rest)
env['PATH_INFO'] = uqrest
env['PATH_TRANSLATED'] = self.translate_path(uqrest)
env['SCRIPT_NAME'] = scriptname
if query:
env['QUERY_STRING'] = query
host = self.address_string()
if host != self.client_address[0]:
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
authorization = self.headers.getheader("authorization")
if authorization:
authorization = authorization.split()
if len(authorization) == 2:
import base64, binascii
env['AUTH_TYPE'] = authorization[0]
if authorization[0].lower() == "basic":
try:
authorization = base64.decodestring(authorization[1])
except binascii.Error:
pass
else:
authorization = authorization.split(':')
if len(authorization) == 2:
env['REMOTE_USER'] = authorization[0]
# XXX REMOTE_IDENT
if self.headers.typeheader is None:
env['CONTENT_TYPE'] = self.headers.type
else:
env['CONTENT_TYPE'] = self.headers.typeheader
length = self.headers.getheader('content-length')
if length:
env['CONTENT_LENGTH'] = length
referer = self.headers.getheader('referer')
if referer:
env['HTTP_REFERER'] = referer
accept = []
for line in self.headers.getallmatchingheaders('accept'):
if line[:1] in "\t\n\r ":
accept.append(line.strip())
else:
accept = accept + line[7:].split(',')
env['HTTP_ACCEPT'] = ','.join(accept)
ua = self.headers.getheader('user-agent')
if ua:
env['HTTP_USER_AGENT'] = ua
co = filter(None, self.headers.getheaders('cookie'))
if co:
env['HTTP_COOKIE'] = ', '.join(co)
# XXX Other HTTP_* headers
# Since we're setting the env in the parent, provide empty
# values to override previously set values
for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
env.setdefault(k, "")
self.send_response(200, "Script output follows")
decoded_query = query.replace('+', ' ')
if self.have_fork:
# Unix -- fork as we should
args = [script]
if '=' not in decoded_query:
args.append(decoded_query)
nobody = nobody_uid()
self.wfile.flush() # Always flush before forking
pid = os.fork()
if pid != 0:
# Parent
pid, sts = os.waitpid(pid, 0)
# throw away additional data [see bug #427345]
while select.select([self.rfile], [], [], 0)[0]:
if not self.rfile.read(1):
break
if sts:
self.log_error("CGI script exit status %#x", sts)
return
# Child
try:
try:
os.setuid(nobody)
except os.error:
pass
os.dup2(self.rfile.fileno(), 0)
os.dup2(self.wfile.fileno(), 1)
os.execve(scriptfile, args, env)
except:
self.server.handle_error(self.request, self.client_address)
os._exit(127)
else:
# Non Unix - use subprocess
import subprocess
cmdline = [scriptfile]
if self.is_python(scriptfile):
interp = sys.executable
if interp.lower().endswith("w.exe"):
# On Windows, use python.exe, not pythonw.exe
interp = interp[:-5] + interp[-4:]
cmdline = [interp, '-u'] + cmdline
if '=' not in query:
cmdline.append(query)
self.log_message("command: %s", subprocess.list2cmdline(cmdline))
try:
nbytes = int(length)
except (TypeError, ValueError):
nbytes = 0
p = subprocess.Popen(cmdline,
stdin = subprocess.PIPE,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
env = env
)
if self.command.lower() == "post" and nbytes > 0:
data = self.rfile.read(nbytes)
else:
data = None
# throw away additional data [see bug #427345]
while select.select([self.rfile._sock], [], [], 0)[0]:
if not self.rfile._sock.recv(1):
break
stdout, stderr = p.communicate(data)
self.wfile.write(stdout)
if stderr:
self.log_error('%s', stderr)
p.stderr.close()
p.stdout.close()
status = p.returncode
if status:
self.log_error("CGI script exit status %#x", status)
else:
self.log_message("CGI script exited OK")
def _url_collapse_path(path):
"""
    Given a URL path, remove extra '/'s and '.' path elements, collapse
    any '..' references, and return the collapsed path.
    Implements something akin to RFC-2396 5.2 step 6 to parse relative paths.
    The utility of this function is limited to the is_cgi method and helps
    prevent some security attacks.
    Returns: The reconstituted path, which will always start with a '/'.
Raises: IndexError if too many '..' occur within the path.
"""
# Similar to os.path.split(os.path.normpath(path)) but specific to URL
# path semantics rather than local operating system semantics.
path_parts = path.split('/')
head_parts = []
for part in path_parts[:-1]:
if part == '..':
head_parts.pop() # IndexError if more '..' than prior parts
elif part and part != '.':
            head_parts.append(part)
if path_parts:
tail_part = path_parts.pop()
if tail_part:
if tail_part == '..':
head_parts.pop()
tail_part = ''
elif tail_part == '.':
tail_part = ''
else:
tail_part = ''
splitpath = ('/' + '/'.join(head_parts), tail_part)
collapsed_path = "/".join(splitpath)
return collapsed_path
nobody = None
def nobody_uid():
"""Internal routine to get nobody's uid"""
global nobody
if nobody:
return nobody
try:
import pwd
except ImportError:
return -1
try:
nobody = pwd.getpwnam('nobody')[2]
except KeyError:
nobody = 1 + max(map(lambda x: x[2], pwd.getpwall()))
return nobody
def executable(path):
"""Test for executable file."""
try:
st = os.stat(path)
except os.error:
return False
return st.st_mode & 0111 != 0
def test(HandlerClass = CGIHTTPRequestHandler,
ServerClass = BaseHTTPServer.HTTPServer):
SimpleHTTPServer.test(HandlerClass, ServerClass)
if __name__ == '__main__':
test()
|
{
"content_hash": "076b7cc790a98405661b876c472d3be4",
"timestamp": "",
"source": "github",
"line_count": 377,
"max_line_length": 79,
"avg_line_length": 34.745358090185675,
"alnum_prop": 0.5494312542942209,
"repo_name": "shiblon/pytour",
"id": "2acf913155098aeb0acfea77f033e289aa8ac0c9",
"size": "13099",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "static/js/pypyjs/pypy-nojit.js-0.3.1/lib/modules/CGIHTTPServer.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "195977"
},
{
"name": "HTML",
"bytes": "2110262"
},
{
"name": "JavaScript",
"bytes": "5106892"
},
{
"name": "Python",
"bytes": "15081380"
},
{
"name": "Shell",
"bytes": "1018"
}
],
"symlink_target": ""
}
|
import warnings
warnings.warn(
"The wagtail.wagtailcore.util module has been renamed. "
"Use wagtail.wagtailcore.utils instead.", DeprecationWarning)
from .utils import *
|
{
"content_hash": "569e000f1e2a16bd78bd31af2efe0220",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 65,
"avg_line_length": 25.857142857142858,
"alnum_prop": 0.7569060773480663,
"repo_name": "lojack/wagtail",
"id": "842fe24faeaf3d9370e2724c232774b297b77d79",
"size": "181",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wagtail/wagtailcore/util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "127495"
},
{
"name": "D",
"bytes": "2012"
},
{
"name": "JavaScript",
"bytes": "41818"
},
{
"name": "Python",
"bytes": "831307"
},
{
"name": "Shell",
"bytes": "8483"
}
],
"symlink_target": ""
}
|
import unittest
from unittest.mock import patch, call, Mock
import numpy as np
from pygwyfile._libgwyfile import ffi
from pygwyfile.gwyfile import GwyfileErrorCMsg
from pygwyfile.gwygraph import GwyGraphCurve, GwyGraphModel
class GwyGraphModel_init(unittest.TestCase):
"""Test constructor of GwyGraphModel class
"""
def setUp(self):
self.test_meta = {'ncurves': 2,
'title': 'Plot',
'top_label': 'Top label',
'left_label': 'Left label',
'right_label': 'Right label',
'bottom_label': 'Bottom label',
'x_unit': 'm',
'y_unit': 'm',
'x_min': 0.,
'x_min_set': True,
'x_max': 1.,
'x_max_set': True,
'y_min': None,
'y_min_set': False,
'y_max': None,
'y_max_set': False,
'x_is_logarithmic': False,
'y_is_logarithmic': False,
'label.visible': True,
'label.has_frame': True,
'label.reverse': False,
'label.frame_thickness': 1,
'label.position': 0,
'grid-type': 1}
self.test_curves = [Mock(spec=GwyGraphCurve),
Mock(spec=GwyGraphCurve)]
def test_init_with_curves_and_meta(self):
"""Test GwyGraphModel constructor if meta is defined
"""
graph = GwyGraphModel(curves=self.test_curves,
meta=self.test_meta)
self.assertEqual(graph.curves, self.test_curves)
self.assertDictEqual(graph.meta, self.test_meta)
def test_init_with_curves_without_meta(self):
"""Test GwyGraphModel constructor with default meta
"""
graph = GwyGraphModel(curves=self.test_curves)
self.assertEqual(graph.curves, self.test_curves)
self.assertDictEqual(graph.meta,
{'ncurves': 2,
'title': '',
'top_label': '',
'left_label': '',
'right_label': '',
'bottom_label': '',
'x_unit': '',
'y_unit': '',
'x_min': None,
'x_min_set': False,
'x_max': None,
'x_max_set': False,
'y_min': None,
'y_min_set': False,
'y_max': None,
'y_max_set': False,
'x_is_logarithmic': False,
'y_is_logarithmic': False,
'label.visible': True,
'label.has_frame': True,
'label.reverse': False,
'label.frame_thickness': 1,
'label.position': 0,
'grid-type': 1})
def test_raise_TypeError_if_curve_is_not_GwyGraphCurve(self):
"""Raise TypeError exception if curve is not GwyGraphCurve
instance
"""
self.assertRaises(TypeError,
GwyGraphModel,
curves=np.random.rand(10))
def test_raise_ValueError_if_curves_number_and_ncurves_different(self):
"""Raise ValueError if len(curves) is not equal to meta['ncurves']
"""
self.assertRaises(ValueError,
GwyGraphModel,
curves=[Mock(GwyGraphCurve)], # just one curve
meta=self.test_meta) # meta['ncurves'] = 2
class GwyGraphModel_from_gwy(unittest.TestCase):
"""Test from_gwy method of GwyGraphModel class
"""
@patch('pygwyfile.gwygraph.GwyGraphModel', autospec=True)
@patch('pygwyfile.gwygraph.GwyGraphCurve', autospec=True)
@patch.object(GwyGraphModel, '_get_curves')
@patch.object(GwyGraphModel, '_get_meta')
def test_arg_passing_to_other_methods(self,
mock_get_meta,
mock_get_curves,
mock_GwyGraphCurve,
mock_GwyGraphModel):
"""
"""
gwygraphmodel = Mock()
test_meta = {'ncurves': 2}
test_gwycurves = [Mock(), Mock()]
mock_get_meta.return_value = test_meta
mock_get_curves.return_value = test_gwycurves
graphmodel = Mock(spec=GwyGraphModel)
mock_GwyGraphModel.return_value = graphmodel
graph = GwyGraphModel.from_gwy(gwygraphmodel)
# get meta data from <GwyGraphModel*> object
mock_get_meta.assert_has_calls(
[call(gwygraphmodel)])
# get list of <GwyGraphModelCurve*> objects
mock_get_curves.assert_has_calls(
[call(gwygraphmodel, test_meta['ncurves'])])
# create list of GwyGraphCurves instances
mock_GwyGraphCurve.from_gwy.assert_has_calls(
[call(gwycurve) for gwycurve in test_gwycurves])
# create GwyGraphModel instance
mock_GwyGraphModel.assert_has_calls(
[call(curves=[mock_GwyGraphCurve.from_gwy.return_value
for gwycurve in test_gwycurves],
meta=test_meta)])
# return GwyGraphModel instance
self.assertEqual(graph, graphmodel)
class GwyGraphModel_get_meta(unittest.TestCase):
"""Test _get_meta method of GwyGraphModel class
"""
def setUp(self):
self.gwygraphmodel = Mock()
patcher_lib = patch('pygwyfile.gwygraph.lib',
autospec=True)
self.addCleanup(patcher_lib.stop)
self.mock_lib = patcher_lib.start()
def test_getting_number_of_curves(self):
"""
Test getting number of curves from graphmodel object
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._get_number_of_curves)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['ncurves'], 3)
def _get_number_of_curves(self, *args):
"""
Return 3 as a number of curves in graphmodel object
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
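        # Illustrative shape of the mocked variadic call (inferred from the slicing
        # above, not from the C API documentation): args is roughly
        # (gwygraphmodel, errorp, b"ncurves", ncurves_ptr, ..., ffi.NULL), i.e.
        # field names at even indices from 2 with their pointers immediately after.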
arg_dict['ncurves'][0] = 3
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_title_field_is_not_empty(self):
"""
'title' field in graphmodel object is not empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._title_is_not_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['title'], "test title")
def _title_is_not_empty(self, *args):
"""
Write "test title" C string to title field
"""
title = ffi.new("char[]", b"test title")
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['title'][0] = title
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_title_field_is_empty(self):
"""
'title' field in graphmodel object is empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._title_is_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['title'], '')
def _title_is_empty(self, *args):
"""
Write NULL to title field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['title'][0] = ffi.NULL
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_top_label_field_is_not_empty(self):
"""
'top_label' field in graphmodel object is not empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._top_label_is_not_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['top_label'], "test top label")
def _top_label_is_not_empty(self, *args):
"""
Write "test top label" C string to 'top_label' field
"""
top_label = ffi.new("char[]", b"test top label")
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['top_label'][0] = top_label
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_top_label_field_is_empty(self):
"""
'top_label' field in graphmodel object is empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._top_label_is_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['top_label'], '')
def _top_label_is_empty(self, *args):
"""
Write NULL to top_label field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['top_label'][0] = ffi.NULL
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_left_label_field_is_not_empty(self):
"""
'left_label' field in graphmodel object is not empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._left_label_is_not_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['left_label'], "test left label")
def _left_label_is_not_empty(self, *args):
"""
Write "test left label" C string to 'left_label' field
"""
left_label = ffi.new("char[]", b"test left label")
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['left_label'][0] = left_label
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_left_label_field_is_empty(self):
"""
'left_label' field in graphmodel object is empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._left_label_is_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['left_label'], '')
def _left_label_is_empty(self, *args):
"""
Write NULL to left_label field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['left_label'][0] = ffi.NULL
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_right_label_field_is_not_empty(self):
"""
'right_label' field in graphmodel object is not empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._right_label_is_not_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['right_label'], "test right label")
def _right_label_is_not_empty(self, *args):
"""
Write "test right label" C string to 'right_label' field
"""
right_label = ffi.new("char[]", b"test right label")
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['right_label'][0] = right_label
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_right_label_field_is_empty(self):
"""
'right_label' field in graphmodel object is empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._right_label_is_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['right_label'], '')
def _right_label_is_empty(self, *args):
"""
Write NULL to right_label field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['right_label'][0] = ffi.NULL
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_bottom_label_field_is_not_empty(self):
"""
'bottom_label' field in graphmodel object is not empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._bottom_label_is_not_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['bottom_label'], "test bottom label")
def _bottom_label_is_not_empty(self, *args):
"""
Write "test bottom label" C string to 'bottom_label' field
"""
bottom_label = ffi.new("char[]", b"test bottom label")
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['bottom_label'][0] = bottom_label
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_bottom_label_field_is_empty(self):
"""
'bottom_label' field in graphmodel object is empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._bottom_label_is_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['bottom_label'], '')
def _bottom_label_is_empty(self, *args):
"""
Write NULL to bottom_label field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['bottom_label'][0] = ffi.NULL
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_x_unit_field_is_not_empty(self):
"""
'x_unit' field in graphmodel object is not empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._x_unit_is_not_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['x_unit'], 'm')
def _x_unit_is_not_empty(self, *args):
"""
Write "m" C string to 'x_unit' field
"""
x_unit = ffi.new("char[]", b"m")
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['x_unit'][0] = x_unit
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_x_unit_field_is_empty(self):
"""
'x_unit' field in graphmodel object is empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._x_unit_is_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['x_unit'], '')
def _x_unit_is_empty(self, *args):
"""
Write NULL to x_unit field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['x_unit'][0] = ffi.NULL
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_y_unit_field_is_not_empty(self):
"""
'y_unit' field in graphmodel object is not empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._y_unit_is_not_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['y_unit'], 'm')
def _y_unit_is_not_empty(self, *args):
"""
Write "m" C string to 'y_unit' field
"""
y_unit = ffi.new("char[]", b"m")
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['y_unit'][0] = y_unit
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_y_unit_field_is_empty(self):
"""
'y_unit' field in graphmodel object is empty
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._y_unit_is_empty)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['y_unit'], '')
def _y_unit_is_empty(self, *args):
"""
Write NULL to y_unit field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['y_unit'][0] = ffi.NULL
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_x_min_set_is_true(self):
"""
Check metadata dictionary if 'x_min_set' is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._x_min_set_is_true)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['x_min_set'], True)
self.assertEqual(meta['x_min'], 0.)
def _x_min_set_is_true(self, *args):
"""
Write True in 'x_min_set' field and 0. in 'x_min' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
arg_dict['x_min_set'][0] = truep[0]
arg_dict['x_min'][0] = 0.
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_x_min_set_is_false(self):
"""
Check metadata dictionary if 'x_min_set' is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._x_min_set_is_false)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['x_min_set'], False)
self.assertIsNone(meta['x_min'])
def _x_min_set_is_false(self, *args):
"""
Write False in 'x_min_set' field and 0. in 'x_min' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
arg_dict['x_min_set'][0] = falsep[0]
arg_dict['x_min'][0] = 0.
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_x_max_set_is_true(self):
"""
Check metadata dictionary if 'x_max_set' is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._x_max_set_is_true)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['x_max_set'], True)
self.assertEqual(meta['x_max'], 0.)
def _x_max_set_is_true(self, *args):
"""
Write True in 'x_max_set' field and 0. in 'x_max' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
arg_dict['x_max_set'][0] = truep[0]
arg_dict['x_max'][0] = 0.
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_x_max_set_is_false(self):
"""
Check metadata dictionary if 'x_max_set' is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._x_max_set_is_false)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['x_max_set'], False)
self.assertIsNone(meta['x_max'])
def _x_max_set_is_false(self, *args):
"""
Write False in 'x_max_set' field and 0. in 'x_max' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
arg_dict['x_max_set'][0] = falsep[0]
arg_dict['x_max'][0] = 0.
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_y_min_set_is_true(self):
"""
Check metadata dictionary if 'y_min_set' is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._y_min_set_is_true)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['y_min_set'], True)
self.assertEqual(meta['y_min'], 0.)
def _y_min_set_is_true(self, *args):
"""
Write True in 'y_min_set' field and 0. in 'y_min' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
arg_dict['y_min_set'][0] = truep[0]
arg_dict['y_min'][0] = 0.
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_y_min_set_is_false(self):
"""
Check metadata dictionary if 'y_min_set' is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._y_min_set_is_false)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['y_min_set'], False)
self.assertIsNone(meta['y_min'])
def _y_min_set_is_false(self, *args):
"""
Write False in 'y_min_set' field and 0. in 'y_min' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
arg_dict['y_min_set'][0] = falsep[0]
arg_dict['y_min'][0] = 0.
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_y_max_set_is_true(self):
"""
Check metadata dictionary if 'y_max_set' is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._y_max_set_is_true)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['y_max_set'], True)
self.assertEqual(meta['y_max'], 0.)
def _y_max_set_is_true(self, *args):
"""
Write True in 'y_max_set' field and 0. in 'y_max' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
arg_dict['y_max_set'][0] = truep[0]
arg_dict['y_max'][0] = 0.
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_y_max_set_is_false(self):
"""
Check metadata dictionary if 'y_max_set' is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._y_max_set_is_false)
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['y_max_set'], False)
self.assertIsNone(meta['y_max'])
def _y_max_set_is_false(self, *args):
"""
Write False in 'y_max_set' field and 0. in 'y_max' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
arg_dict['y_max_set'][0] = falsep[0]
arg_dict['y_max'][0] = 0.
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_x_is_logarithmic_true(self):
"""
'x_is_logarithmic' field is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._x_is_logarithmic)
self.x_is_logarithmic = True
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['x_is_logarithmic'], True)
def test_x_is_logarithmic_false(self):
"""
'x_is_logarithmic' field is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._x_is_logarithmic)
self.x_is_logarithmic = False
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['x_is_logarithmic'], False)
def _x_is_logarithmic(self, *args):
"""
Write self.x_is_logarithmic in 'x_is_logarithmic' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
if self.x_is_logarithmic:
arg_dict['x_is_logarithmic'][0] = truep[0]
else:
arg_dict['x_is_logarithmic'][0] = falsep[0]
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_y_is_logarithmic_true(self):
"""
'y_is_logarithmic' field is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._y_is_logarithmic)
self.y_is_logarithmic = True
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['y_is_logarithmic'], True)
def test_y_is_logarithmic_false(self):
"""
'y_is_logarithmic' field is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._y_is_logarithmic)
self.y_is_logarithmic = False
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['y_is_logarithmic'], False)
def _y_is_logarithmic(self, *args):
"""
Write self.y_is_logarithmic in 'y_is_logarithmic' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
if self.y_is_logarithmic:
arg_dict['y_is_logarithmic'][0] = truep[0]
else:
arg_dict['y_is_logarithmic'][0] = falsep[0]
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_label_visible_is_true(self):
"""
'label.visible' field is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._label_visible)
self.label_visible = True
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['label.visible'], True)
def test_label_visible_is_false(self):
"""
'label.visible' field is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._label_visible)
self.label_visible = False
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['label.visible'], False)
def _label_visible(self, *args):
"""
Write self.label_visible in 'label.visible' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
if self.label_visible:
arg_dict['label.visible'][0] = truep[0]
else:
arg_dict['label.visible'][0] = falsep[0]
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_label_has_frame_is_true(self):
"""
'label.has_frame' field is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._label_has_frame)
self.label_has_frame = True
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['label.has_frame'], True)
def test_label_has_frame_is_false(self):
"""
'label.has_frame' field is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._label_has_frame)
self.label_has_frame = False
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['label.has_frame'], False)
def _label_has_frame(self, *args):
"""
Write self.label_has_frame in 'label.has_frame' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
if self.label_has_frame:
arg_dict['label.has_frame'][0] = truep[0]
else:
arg_dict['label.has_frame'][0] = falsep[0]
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_label_reverse_is_true(self):
"""
'label.reverse' field is True
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._label_reverse)
self.label_reverse = True
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['label.reverse'], True)
def test_label_reverse_is_false(self):
"""
'label.reverse' field is False
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._label_reverse)
self.label_reverse = False
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertIs(meta['label.reverse'], False)
def _label_reverse(self, *args):
"""
Write self.label_reverse in 'label.reverse' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
truep = ffi.new("bool*", True)
falsep = ffi.new("bool*", False)
if self.label_reverse:
arg_dict['label.reverse'][0] = truep[0]
else:
arg_dict['label.reverse'][0] = falsep[0]
        # C func returns true if the graphmodel object looks acceptable
return truep[0]
def test_label_frame_thickness(self):
"""
Check 'label.frame_thickness' field in metadata dictionary
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._label_frame_thickness)
self.label_frame_thickness = 1
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['label.frame_thickness'],
self.label_frame_thickness)
def _label_frame_thickness(self, *args):
"""
Write self.label_frame_thickness in 'label.frame_thickness' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['label.frame_thickness'][0] = self.label_frame_thickness
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_label_position(self):
"""
Check 'label.position' field in metadata dictionary
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._label_position)
self.label_position = 1
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['label.position'], self.label_position)
def _label_position(self, *args):
"""
Write self.label_position in 'label.position' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['label.position'][0] = self.label_position
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_grid_type(self):
"""
Check 'grid-type' field in metadata dictionary
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._grid_type)
self.grid_type = 1
meta = GwyGraphModel._get_meta(self.gwygraphmodel)
self.assertEqual(meta['grid-type'], self.grid_type)
def _grid_type(self, *args):
"""
Write self.grid_type in 'grid-type' field
"""
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['grid-type'][0] = self.grid_type
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
def test_raise_exception_if_graphmodel_object_looks_unacceptable(self):
"""
Raise GwyfileErrorCMsg if gwyfile_object_graphmodel_get returns False
"""
falsep = ffi.new("bool*", False)
self.mock_lib.gwyfile_object_graphmodel_get.return_value = (
falsep[0])
self.assertRaises(GwyfileErrorCMsg,
GwyGraphModel.from_gwy,
self.gwygraphmodel)
class GwyGraphModel_get_curves(unittest.TestCase):
"""
Test _get_curves method of GwyGraphModel class
"""
def setUp(self):
self.ncurves = 3 # number of curves in graphmodel object
self.curves_array = ffi.new("GwyfileObject*[]", self.ncurves)
self.gwygraphmodel = Mock()
patcher_lib = patch('pygwyfile.gwygraph.lib',
autospec=True)
self.addCleanup(patcher_lib.stop)
self.mock_lib = patcher_lib.start()
def test_raise_exception_if_graphmodel_object_looks_unacceptable(self):
"""
Raise GwyfileErrorCMsg if gwyfile_object_graphmodel_get returns False
"""
falsep = ffi.new("bool*", False)
self.mock_lib.gwyfile_object_graphmodel_get.return_value = falsep[0]
self.assertRaises(GwyfileErrorCMsg,
GwyGraphModel._get_curves,
self.gwygraphmodel,
self.ncurves)
def test_get_curves_array(self):
"""
Get array of curves (GwyfileObjects) from graphmodel object
"""
self.mock_lib.gwyfile_object_graphmodel_get.side_effect = (
self._side_effect)
curves = GwyGraphModel._get_curves(self.gwygraphmodel,
self.ncurves)
self.assertListEqual(curves, list(self.curves_array))
def _side_effect(self, *args):
"""
Check args of gwyfile_object_graphmodel_get func
and write self.curves_array in 'curves' field
"""
        # first arg is the GwyGraphModel object returned by get_gwyitem_object
self.assertEqual(args[0], self.gwygraphmodel)
# second arg is GwyfileError**
assert ffi.typeof(args[1]) == ffi.typeof(ffi.new("GwyfileError**"))
        # last arg is NULL
self.assertEqual(args[-1], ffi.NULL)
# combine fields names and fields pointers in one dictionary
arg_keys = [ffi.string(key).decode('utf-8') for key in args[2:-1:2]]
arg_pointers = [pointer for pointer in args[3:-1:2]]
arg_dict = dict(zip(arg_keys, arg_pointers))
arg_dict['curves'][0] = self.curves_array
        # C func returns true if the graphmodel object looks acceptable
truep = ffi.new("bool*", True)
return truep[0]
class GwyGraphModel_to_gwy(unittest.TestCase):
"""Tests for to_gwy method of GwyGraphModel class """
def setUp(self):
self.gwygraphmodel = Mock(spec=GwyGraphModel)
self.ncurves = 2
self.gwygraphmodel.curves = [GwyGraphCurve(np.random.rand(2, 10))
for curve in range(self.ncurves)]
self.gwygraphmodel.meta = {'title': 'Title',
'top_label': '',
'left_label': 'y',
'right_label': '',
'bottom_label': 'x',
'x_unit': 'm',
'y_unit': 'm',
'x_min': 0.,
'x_min_set': True,
'x_max': 1.,
'x_max_set': True,
'x_is_logarithmic': False,
'y_is_logarithmic': False,
'label.visible': True,
'label.has_frame': True,
'label.reverse': False,
'label.frame_thickness': 1,
'label.position': 0,
'grid-type': 1}
self.gwygraphmodel.to_gwy = GwyGraphModel.to_gwy
self.expected_return = Mock()
@patch('pygwyfile.gwygraph.lib', autospec=True)
def test_args_of_libgwyfile_func(self, mock_lib):
""" Test args of gwyfile_object_new_graphmodel """
mock_lib.gwyfile_object_new_graphmodel.side_effect = (
self._side_effect)
actual_return = self.gwygraphmodel.to_gwy(self.gwygraphmodel)
self.assertEqual(actual_return, self.expected_return)
def _side_effect(self, *args):
self.assertEqual(int(args[0]), self.ncurves)
self.assertEqual(ffi.string(args[1]), b"curves")
self.assertEqual(ffi.string(args[3]), b"title")
self.assertEqual(ffi.string(args[4]),
self.gwygraphmodel.meta['title'].encode('utf-8'))
self.assertEqual(ffi.string(args[5]), b"top_label")
self.assertEqual(
ffi.string(args[6]),
self.gwygraphmodel.meta['top_label'].encode('utf-8'))
self.assertEqual(ffi.string(args[7]), b"left_label")
self.assertEqual(
ffi.string(args[8]),
self.gwygraphmodel.meta['left_label'].encode('utf-8'))
self.assertEqual(ffi.string(args[9]), b"right_label")
self.assertEqual(
ffi.string(args[10]),
self.gwygraphmodel.meta['right_label'].encode('utf-8'))
self.assertEqual(ffi.string(args[11]), b"bottom_label")
self.assertEqual(
ffi.string(args[12]),
self.gwygraphmodel.meta['bottom_label'].encode('utf-8'))
self.assertEqual(ffi.string(args[13]), b"x_unit")
self.assertEqual(
ffi.string(args[14]),
self.gwygraphmodel.meta['x_unit'].encode('utf-8'))
self.assertEqual(ffi.string(args[15]), b"y_unit")
self.assertEqual(
ffi.string(args[16]),
self.gwygraphmodel.meta['y_unit'].encode('utf-8'))
self.assertEqual(ffi.string(args[17]), b"x_min")
self.assertEqual(float(args[18]), self.gwygraphmodel.meta['x_min'])
self.assertEqual(ffi.string(args[19]), b"x_min_set")
self.assertEqual(bool(args[20]), self.gwygraphmodel.meta['x_min_set'])
self.assertEqual(ffi.string(args[21]), b"x_max")
self.assertEqual(float(args[22]), self.gwygraphmodel.meta['x_max'])
self.assertEqual(ffi.string(args[23]), b"x_max_set")
self.assertEqual(bool(args[24]), self.gwygraphmodel.meta['x_max_set'])
self.assertEqual(ffi.string(args[25]), b"x_is_logarithmic")
self.assertEqual(bool(args[26]),
self.gwygraphmodel.meta['x_is_logarithmic'])
self.assertEqual(ffi.string(args[27]), b"y_is_logarithmic")
self.assertEqual(bool(args[28]),
self.gwygraphmodel.meta['y_is_logarithmic'])
self.assertEqual(ffi.string(args[29]), b'label.visible')
self.assertEqual(bool(args[30]),
self.gwygraphmodel.meta['label.visible'])
self.assertEqual(ffi.string(args[31]), b"label.has_frame")
self.assertEqual(bool(args[32]),
self.gwygraphmodel.meta['label.has_frame'])
self.assertEqual(ffi.string(args[33]), b"label.reverse")
self.assertEqual(bool(args[34]),
self.gwygraphmodel.meta['label.reverse'])
self.assertEqual(ffi.string(args[35]), b'label.frame_thickness')
self.assertEqual(int(args[36]),
self.gwygraphmodel.meta['label.frame_thickness'])
self.assertEqual(ffi.string(args[37]), b'label.position')
self.assertEqual(int(args[38]),
self.gwygraphmodel.meta['label.position'])
self.assertEqual(ffi.string(args[39]), b'grid-type')
self.assertEqual(int(args[40]),
self.gwygraphmodel.meta['grid-type'])
self.assertEqual(args[-1], ffi.NULL)
return self.expected_return
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "4f2aec73340bec2707375dea7c563ce2",
"timestamp": "",
"source": "github",
"line_count": 1324,
"max_line_length": 78,
"avg_line_length": 35.188066465256796,
"alnum_prop": 0.5640387215866406,
"repo_name": "dmitry-streltsov/gwy-postgresql",
"id": "23212eab7706eeebf71a56c3786c24e6521abef1",
"size": "46589",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_gwygraph.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "226444"
}
],
"symlink_target": ""
}
|
import json
import traceback
from typing import Dict, cast
import ansible_runner
import demistomock as demisto # noqa: F401
import ssh_agent_setup
from CommonServerPython import * # noqa: F401
# Dict to Markdown Converter adapted from https://github.com/PolBaladas/torsimany/
def dict2md(json_block, depth=0):
markdown = ""
if isinstance(json_block, dict):
markdown = parseDict(json_block, depth)
if isinstance(json_block, list):
markdown = parseList(json_block, depth)
return markdown
def parseDict(d, depth):
markdown = ""
for k in d:
if isinstance(d[k], (dict, list)):
markdown += addHeader(k, depth)
markdown += dict2md(d[k], depth + 1)
else:
markdown += buildValueChain(k, d[k], depth)
return markdown
def parseList(rawlist, depth):
markdown = ""
for value in rawlist:
if not isinstance(value, (dict, list)):
index = rawlist.index(value)
markdown += buildValueChain(index, value, depth)
else:
markdown += parseDict(value, depth)
return markdown
def buildHeaderChain(depth):
list_tag = '* '
htag = '#'
chain = list_tag * (bool(depth)) + htag * (depth + 1) + \
' value ' + (htag * (depth + 1) + '\n')
return chain
def buildValueChain(key, value, depth):
tab = " "
list_tag = '* '
chain = tab * (bool(depth - 1)) + list_tag + \
str(key) + ": " + str(value) + "\n"
return chain
def addHeader(value, depth):
chain = buildHeaderChain(depth)
chain = chain.replace('value', value.title())
return chain
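# A hedged usage sketch (the input dict is illustrative, not from the original):
#   dict2md({'host': 'localhost', 'facts': {'os': 'linux'}})
# renders nested dicts as '#'-framed headers and leaf values as bullets, roughly:
#     * host: localhost
#   # Facts #
#   * os: linux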
# Remove ansible branding from results
def rec_ansible_key_strip(obj):
if isinstance(obj, dict):
return {key.replace('ansible_', ''): rec_ansible_key_strip(val) for key, val in obj.items()}
return obj
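# Illustrative example (the input is hypothetical):
#   rec_ansible_key_strip({'ansible_facts': {'ansible_os_family': 'Debian'}})
# returns {'facts': {'os_family': 'Debian'}}; non-dict values pass through unchanged.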
# COMMAND FUNCTIONS
def generic_ansible(integration_name, command, args: Dict[str, Any]) -> CommandResults:
readable_output = ""
sshkey = ""
fork_count = 1 # default to executing against 1 host at a time
if args.get('concurrency'):
fork_count = cast(int, args.get('concurrency'))
inventory: Dict[str, dict] = {}
inventory['all'] = {}
inventory['all']['hosts'] = {}
inventory['all']['hosts']['localhost'] = {}
inventory['all']['hosts']['localhost']['ansible_connection'] = 'local'
module_args = ""
# build module args list
for arg_key, arg_value in args.items():
        # skip the hardcoded host arg, as it is not related to the module
if arg_key == 'host':
continue
module_args += "%s=\"%s\" " % (arg_key, arg_value)
    # If this isn't host based, then all the integration params will be used as command args
for arg_key, arg_value in demisto.params().items():
module_args += "%s=\"%s\" " % (arg_key, arg_value)
r = ansible_runner.run(inventory=inventory, host_pattern='all', module=command, quiet=True,
omit_event_data=True, ssh_key=sshkey, module_args=module_args, forks=fork_count)
results = []
for each_host_event in r.events:
# Troubleshooting
# demisto.log("%s: %s\n" % (each_host_event['event'], each_host_event))
if each_host_event['event'] in ["runner_on_ok", "runner_on_unreachable", "runner_on_failed"]:
# parse results
result = json.loads('{' + each_host_event['stdout'].split('{', 1)[1])
host = each_host_event['stdout'].split('|', 1)[0].strip()
status = each_host_event['stdout'].replace('=>', '|').split('|', 3)[1]
# if successful build outputs
if each_host_event['event'] == "runner_on_ok":
if 'fact' in command:
result = result['ansible_facts']
else:
if result.get(command) is not None:
result = result[command]
else:
result.pop("ansible_facts", None)
result = rec_ansible_key_strip(result)
if host != "localhost":
readable_output += "# %s - %s\n" % (host, status)
else:
                    # This integration is not host based
readable_output += "# %s\n" % status
readable_output += dict2md(result)
# add host and status to result
result['host'] = host
result['status'] = status
results.append(result)
if each_host_event['event'] == "runner_on_unreachable":
msg = "Host %s unreachable\nError Details: %s" % (host, result)
return_error(msg)
if each_host_event['event'] == "runner_on_failed":
msg = "Host %s failed running command\nError Details: %s" % (host, result)
return_error(msg)
    # This integration is not host based and always runs against localhost
results = results[0]
return CommandResults(
readable_output=readable_output,
outputs_prefix=integration_name + '.' + command,
outputs_key_field='',
outputs=results
)
# MAIN FUNCTION
def main() -> None:
"""main function, parses params and runs command functions
:return:
:rtype:
"""
# SSH Key integration requires ssh_agent to be running in the background
ssh_agent_setup.setup()
try:
if demisto.command() == 'test-module':
# This is the call made when pressing the integration Test button.
return_results('ok')
elif demisto.command() == 'vmware-about-info':
return_results(generic_ansible('vmwarev2', 'vmware_about_info', demisto.args()))
elif demisto.command() == 'vmware-category':
return_results(generic_ansible('vmwarev2', 'vmware_category', demisto.args()))
elif demisto.command() == 'vmware-category-info':
return_results(generic_ansible('vmwarev2', 'vmware_category_info', demisto.args()))
elif demisto.command() == 'vmware-cfg-backup':
return_results(generic_ansible('vmwarev2', 'vmware_cfg_backup', demisto.args()))
elif demisto.command() == 'vmware-cluster':
return_results(generic_ansible('vmwarev2', 'vmware_cluster', demisto.args()))
elif demisto.command() == 'vmware-cluster-drs':
return_results(generic_ansible('vmwarev2', 'vmware_cluster_drs', demisto.args()))
elif demisto.command() == 'vmware-cluster-ha':
return_results(generic_ansible('vmwarev2', 'vmware_cluster_ha', demisto.args()))
elif demisto.command() == 'vmware-cluster-info':
return_results(generic_ansible('vmwarev2', 'vmware_cluster_info', demisto.args()))
elif demisto.command() == 'vmware-cluster-vsan':
return_results(generic_ansible('vmwarev2', 'vmware_cluster_vsan', demisto.args()))
elif demisto.command() == 'vmware-content-deploy-template':
return_results(generic_ansible('vmwarev2', 'vmware_content_deploy_template', demisto.args()))
elif demisto.command() == 'vmware-content-library-info':
return_results(generic_ansible('vmwarev2', 'vmware_content_library_info', demisto.args()))
elif demisto.command() == 'vmware-content-library-manager':
return_results(generic_ansible('vmwarev2', 'vmware_content_library_manager', demisto.args()))
elif demisto.command() == 'vmware-datacenter':
return_results(generic_ansible('vmwarev2', 'vmware_datacenter', demisto.args()))
elif demisto.command() == 'vmware-datastore-cluster':
return_results(generic_ansible('vmwarev2', 'vmware_datastore_cluster', demisto.args()))
elif demisto.command() == 'vmware-datastore-info':
return_results(generic_ansible('vmwarev2', 'vmware_datastore_info', demisto.args()))
elif demisto.command() == 'vmware-datastore-maintenancemode':
return_results(generic_ansible('vmwarev2', 'vmware_datastore_maintenancemode', demisto.args()))
elif demisto.command() == 'vmware-dns-config':
return_results(generic_ansible('vmwarev2', 'vmware_dns_config', demisto.args()))
elif demisto.command() == 'vmware-drs-group':
return_results(generic_ansible('vmwarev2', 'vmware_drs_group', demisto.args()))
elif demisto.command() == 'vmware-drs-group-info':
return_results(generic_ansible('vmwarev2', 'vmware_drs_group_info', demisto.args()))
elif demisto.command() == 'vmware-drs-rule-info':
return_results(generic_ansible('vmwarev2', 'vmware_drs_rule_info', demisto.args()))
elif demisto.command() == 'vmware-dvs-host':
return_results(generic_ansible('vmwarev2', 'vmware_dvs_host', demisto.args()))
elif demisto.command() == 'vmware-dvs-portgroup':
return_results(generic_ansible('vmwarev2', 'vmware_dvs_portgroup', demisto.args()))
elif demisto.command() == 'vmware-dvs-portgroup-find':
return_results(generic_ansible('vmwarev2', 'vmware_dvs_portgroup_find', demisto.args()))
elif demisto.command() == 'vmware-dvs-portgroup-info':
return_results(generic_ansible('vmwarev2', 'vmware_dvs_portgroup_info', demisto.args()))
elif demisto.command() == 'vmware-dvswitch':
return_results(generic_ansible('vmwarev2', 'vmware_dvswitch', demisto.args()))
elif demisto.command() == 'vmware-dvswitch-lacp':
return_results(generic_ansible('vmwarev2', 'vmware_dvswitch_lacp', demisto.args()))
elif demisto.command() == 'vmware-dvswitch-nioc':
return_results(generic_ansible('vmwarev2', 'vmware_dvswitch_nioc', demisto.args()))
elif demisto.command() == 'vmware-dvswitch-pvlans':
return_results(generic_ansible('vmwarev2', 'vmware_dvswitch_pvlans', demisto.args()))
elif demisto.command() == 'vmware-dvswitch-uplink-pg':
return_results(generic_ansible('vmwarev2', 'vmware_dvswitch_uplink_pg', demisto.args()))
elif demisto.command() == 'vmware-evc-mode':
return_results(generic_ansible('vmwarev2', 'vmware_evc_mode', demisto.args()))
elif demisto.command() == 'vmware-folder-info':
return_results(generic_ansible('vmwarev2', 'vmware_folder_info', demisto.args()))
elif demisto.command() == 'vmware-guest':
return_results(generic_ansible('vmwarev2', 'vmware_guest', demisto.args()))
elif demisto.command() == 'vmware-guest-boot-info':
return_results(generic_ansible('vmwarev2', 'vmware_guest_boot_info', demisto.args()))
elif demisto.command() == 'vmware-guest-boot-manager':
return_results(generic_ansible('vmwarev2', 'vmware_guest_boot_manager', demisto.args()))
elif demisto.command() == 'vmware-guest-custom-attribute-defs':
return_results(generic_ansible('vmwarev2', 'vmware_guest_custom_attribute_defs', demisto.args()))
elif demisto.command() == 'vmware-guest-custom-attributes':
return_results(generic_ansible('vmwarev2', 'vmware_guest_custom_attributes', demisto.args()))
elif demisto.command() == 'vmware-guest-customization-info':
return_results(generic_ansible('vmwarev2', 'vmware_guest_customization_info', demisto.args()))
elif demisto.command() == 'vmware-guest-disk':
return_results(generic_ansible('vmwarev2', 'vmware_guest_disk', demisto.args()))
elif demisto.command() == 'vmware-guest-disk-info':
return_results(generic_ansible('vmwarev2', 'vmware_guest_disk_info', demisto.args()))
elif demisto.command() == 'vmware-guest-find':
return_results(generic_ansible('vmwarev2', 'vmware_guest_find', demisto.args()))
elif demisto.command() == 'vmware-guest-info':
return_results(generic_ansible('vmwarev2', 'vmware_guest_info', demisto.args()))
elif demisto.command() == 'vmware-guest-move':
return_results(generic_ansible('vmwarev2', 'vmware_guest_move', demisto.args()))
elif demisto.command() == 'vmware-guest-network':
return_results(generic_ansible('vmwarev2', 'vmware_guest_network', demisto.args()))
elif demisto.command() == 'vmware-guest-powerstate':
return_results(generic_ansible('vmwarev2', 'vmware_guest_powerstate', demisto.args()))
elif demisto.command() == 'vmware-guest-screenshot':
return_results(generic_ansible('vmwarev2', 'vmware_guest_screenshot', demisto.args()))
elif demisto.command() == 'vmware-guest-sendkey':
return_results(generic_ansible('vmwarev2', 'vmware_guest_sendkey', demisto.args()))
elif demisto.command() == 'vmware-guest-snapshot':
return_results(generic_ansible('vmwarev2', 'vmware_guest_snapshot', demisto.args()))
elif demisto.command() == 'vmware-guest-snapshot-info':
return_results(generic_ansible('vmwarev2', 'vmware_guest_snapshot_info', demisto.args()))
elif demisto.command() == 'vmware-guest-tools-upgrade':
return_results(generic_ansible('vmwarev2', 'vmware_guest_tools_upgrade', demisto.args()))
elif demisto.command() == 'vmware-guest-tools-wait':
return_results(generic_ansible('vmwarev2', 'vmware_guest_tools_wait', demisto.args()))
elif demisto.command() == 'vmware-guest-video':
return_results(generic_ansible('vmwarev2', 'vmware_guest_video', demisto.args()))
elif demisto.command() == 'vmware-guest-vnc':
return_results(generic_ansible('vmwarev2', 'vmware_guest_vnc', demisto.args()))
elif demisto.command() == 'vmware-host':
return_results(generic_ansible('vmwarev2', 'vmware_host', demisto.args()))
elif demisto.command() == 'vmware-host-acceptance':
return_results(generic_ansible('vmwarev2', 'vmware_host_acceptance', demisto.args()))
elif demisto.command() == 'vmware-host-active-directory':
return_results(generic_ansible('vmwarev2', 'vmware_host_active_directory', demisto.args()))
elif demisto.command() == 'vmware-host-capability-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_capability_info', demisto.args()))
elif demisto.command() == 'vmware-host-config-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_config_info', demisto.args()))
elif demisto.command() == 'vmware-host-config-manager':
return_results(generic_ansible('vmwarev2', 'vmware_host_config_manager', demisto.args()))
elif demisto.command() == 'vmware-host-datastore':
return_results(generic_ansible('vmwarev2', 'vmware_host_datastore', demisto.args()))
elif demisto.command() == 'vmware-host-dns-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_dns_info', demisto.args()))
elif demisto.command() == 'vmware-host-facts':
return_results(generic_ansible('vmwarev2', 'vmware_host_facts', demisto.args()))
elif demisto.command() == 'vmware-host-feature-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_feature_info', demisto.args()))
elif demisto.command() == 'vmware-host-firewall-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_firewall_info', demisto.args()))
elif demisto.command() == 'vmware-host-firewall-manager':
return_results(generic_ansible('vmwarev2', 'vmware_host_firewall_manager', demisto.args()))
elif demisto.command() == 'vmware-host-hyperthreading':
return_results(generic_ansible('vmwarev2', 'vmware_host_hyperthreading', demisto.args()))
elif demisto.command() == 'vmware-host-ipv6':
return_results(generic_ansible('vmwarev2', 'vmware_host_ipv6', demisto.args()))
elif demisto.command() == 'vmware-host-kernel-manager':
return_results(generic_ansible('vmwarev2', 'vmware_host_kernel_manager', demisto.args()))
elif demisto.command() == 'vmware-host-lockdown':
return_results(generic_ansible('vmwarev2', 'vmware_host_lockdown', demisto.args()))
elif demisto.command() == 'vmware-host-ntp':
return_results(generic_ansible('vmwarev2', 'vmware_host_ntp', demisto.args()))
elif demisto.command() == 'vmware-host-ntp-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_ntp_info', demisto.args()))
elif demisto.command() == 'vmware-host-package-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_package_info', demisto.args()))
elif demisto.command() == 'vmware-host-powermgmt-policy':
return_results(generic_ansible('vmwarev2', 'vmware_host_powermgmt_policy', demisto.args()))
elif demisto.command() == 'vmware-host-powerstate':
return_results(generic_ansible('vmwarev2', 'vmware_host_powerstate', demisto.args()))
elif demisto.command() == 'vmware-host-scanhba':
return_results(generic_ansible('vmwarev2', 'vmware_host_scanhba', demisto.args()))
elif demisto.command() == 'vmware-host-service-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_service_info', demisto.args()))
elif demisto.command() == 'vmware-host-service-manager':
return_results(generic_ansible('vmwarev2', 'vmware_host_service_manager', demisto.args()))
elif demisto.command() == 'vmware-host-snmp':
return_results(generic_ansible('vmwarev2', 'vmware_host_snmp', demisto.args()))
elif demisto.command() == 'vmware-host-ssl-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_ssl_info', demisto.args()))
elif demisto.command() == 'vmware-host-vmhba-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_vmhba_info', demisto.args()))
elif demisto.command() == 'vmware-host-vmnic-info':
return_results(generic_ansible('vmwarev2', 'vmware_host_vmnic_info', demisto.args()))
elif demisto.command() == 'vmware-local-role-info':
return_results(generic_ansible('vmwarev2', 'vmware_local_role_info', demisto.args()))
elif demisto.command() == 'vmware-local-role-manager':
return_results(generic_ansible('vmwarev2', 'vmware_local_role_manager', demisto.args()))
elif demisto.command() == 'vmware-local-user-info':
return_results(generic_ansible('vmwarev2', 'vmware_local_user_info', demisto.args()))
elif demisto.command() == 'vmware-local-user-manager':
return_results(generic_ansible('vmwarev2', 'vmware_local_user_manager', demisto.args()))
elif demisto.command() == 'vmware-maintenancemode':
return_results(generic_ansible('vmwarev2', 'vmware_maintenancemode', demisto.args()))
elif demisto.command() == 'vmware-migrate-vmk':
return_results(generic_ansible('vmwarev2', 'vmware_migrate_vmk', demisto.args()))
elif demisto.command() == 'vmware-object-role-permission':
return_results(generic_ansible('vmwarev2', 'vmware_object_role_permission', demisto.args()))
elif demisto.command() == 'vmware-portgroup':
return_results(generic_ansible('vmwarev2', 'vmware_portgroup', demisto.args()))
elif demisto.command() == 'vmware-portgroup-info':
return_results(generic_ansible('vmwarev2', 'vmware_portgroup_info', demisto.args()))
elif demisto.command() == 'vmware-resource-pool':
return_results(generic_ansible('vmwarev2', 'vmware_resource_pool', demisto.args()))
elif demisto.command() == 'vmware-resource-pool-info':
return_results(generic_ansible('vmwarev2', 'vmware_resource_pool_info', demisto.args()))
elif demisto.command() == 'vmware-tag':
return_results(generic_ansible('vmwarev2', 'vmware_tag', demisto.args()))
elif demisto.command() == 'vmware-tag-info':
return_results(generic_ansible('vmwarev2', 'vmware_tag_info', demisto.args()))
elif demisto.command() == 'vmware-tag-manager':
return_results(generic_ansible('vmwarev2', 'vmware_tag_manager', demisto.args()))
elif demisto.command() == 'vmware-target-canonical-info':
return_results(generic_ansible('vmwarev2', 'vmware_target_canonical_info', demisto.args()))
elif demisto.command() == 'vmware-vcenter-settings':
return_results(generic_ansible('vmwarev2', 'vmware_vcenter_settings', demisto.args()))
elif demisto.command() == 'vmware-vcenter-statistics':
return_results(generic_ansible('vmwarev2', 'vmware_vcenter_statistics', demisto.args()))
elif demisto.command() == 'vmware-vm-host-drs-rule':
return_results(generic_ansible('vmwarev2', 'vmware_vm_host_drs_rule', demisto.args()))
elif demisto.command() == 'vmware-vm-info':
return_results(generic_ansible('vmwarev2', 'vmware_vm_info', demisto.args()))
elif demisto.command() == 'vmware-vm-shell':
return_results(generic_ansible('vmwarev2', 'vmware_vm_shell', demisto.args()))
elif demisto.command() == 'vmware-vm-storage-policy-info':
return_results(generic_ansible('vmwarev2', 'vmware_vm_storage_policy_info', demisto.args()))
elif demisto.command() == 'vmware-vm-vm-drs-rule':
return_results(generic_ansible('vmwarev2', 'vmware_vm_vm_drs_rule', demisto.args()))
elif demisto.command() == 'vmware-vm-vss-dvs-migrate':
return_results(generic_ansible('vmwarev2', 'vmware_vm_vss_dvs_migrate', demisto.args()))
elif demisto.command() == 'vmware-vmkernel':
return_results(generic_ansible('vmwarev2', 'vmware_vmkernel', demisto.args()))
elif demisto.command() == 'vmware-vmkernel-info':
return_results(generic_ansible('vmwarev2', 'vmware_vmkernel_info', demisto.args()))
elif demisto.command() == 'vmware-vmkernel-ip-config':
return_results(generic_ansible('vmwarev2', 'vmware_vmkernel_ip_config', demisto.args()))
elif demisto.command() == 'vmware-vmotion':
return_results(generic_ansible('vmwarev2', 'vmware_vmotion', demisto.args()))
elif demisto.command() == 'vmware-vsan-cluster':
return_results(generic_ansible('vmwarev2', 'vmware_vsan_cluster', demisto.args()))
elif demisto.command() == 'vmware-vspan-session':
return_results(generic_ansible('vmwarev2', 'vmware_vspan_session', demisto.args()))
elif demisto.command() == 'vmware-vswitch':
return_results(generic_ansible('vmwarev2', 'vmware_vswitch', demisto.args()))
elif demisto.command() == 'vmware-vswitch-info':
return_results(generic_ansible('vmwarev2', 'vmware_vswitch_info', demisto.args()))
elif demisto.command() == 'vmware-vsphere-file':
return_results(generic_ansible('vmwarev2', 'vsphere_file', demisto.args()))
elif demisto.command() == 'vmware-vcenter-extension':
return_results(generic_ansible('vmwarev2', 'vcenter_extension', demisto.args()))
elif demisto.command() == 'vmware-vcenter-extension-info':
return_results(generic_ansible('vmwarev2', 'vcenter_extension_info', demisto.args()))
elif demisto.command() == 'vmware-vcenter-folder':
return_results(generic_ansible('vmwarev2', 'vcenter_folder', demisto.args()))
elif demisto.command() == 'vmware-vcenter-license':
return_results(generic_ansible('vmwarev2', 'vcenter_license', demisto.args()))
# Log exceptions and return errors
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
return_error(f'Failed to execute {demisto.command()} command.\nError:\n{str(e)}')
# ENTRY POINT
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
|
{
"content_hash": "6e756619dc9350c9565eac1f225cd84e",
"timestamp": "",
"source": "github",
"line_count": 423,
"max_line_length": 109,
"avg_line_length": 56.81323877068558,
"alnum_prop": 0.6312832889480693,
"repo_name": "demisto/content",
"id": "c62ddfcde20e40bca5ab0c76df0f397bf947ced5",
"size": "24032",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Packs/Ansible_Powered_Integrations/Integrations/VMwareV2/VMwareV2.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2146"
},
{
"name": "HTML",
"bytes": "205901"
},
{
"name": "JavaScript",
"bytes": "1584075"
},
{
"name": "PowerShell",
"bytes": "442288"
},
{
"name": "Python",
"bytes": "47881712"
},
{
"name": "Rich Text Format",
"bytes": "480911"
},
{
"name": "Shell",
"bytes": "108066"
},
{
"name": "YARA",
"bytes": "1185"
}
],
"symlink_target": ""
}
|
from os.path import dirname, join
from setuptools import setup, find_packages
with open(join(dirname(__file__), 'scrapy/VERSION'), 'rb') as f:
version = f.read().decode('ascii').strip()
setup(
name='Scrapy',
version=version,
url='http://scrapy.org',
description='A high-level Web Crawling and Screen Scraping framework',
long_description=open('README.rst').read(),
author='Scrapy developers',
maintainer='Pablo Hoffman',
maintainer_email='pablo@pablohoffman.com',
license='BSD',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': ['scrapy = scrapy.cmdline:execute']
},
classifiers=[
'Framework :: Scrapy',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[
'Twisted>=10.0.0',
'w3lib>=1.8.0',
'queuelib',
'lxml',
'pyOpenSSL',
'cssselect>=0.9',
'six>=1.5.2',
],
)
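# Illustrative usage (not part of the original file): with this setup script,
# a local editable install is typically done with
#
#     pip install -e .
#
# which also registers the `scrapy` console script declared in entry_points.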
|
{
"content_hash": "226fd7674e13db205f0446c1fd1d91e8",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 79,
"avg_line_length": 31.75,
"alnum_prop": 0.5964566929133859,
"repo_name": "ramiro/scrapy",
"id": "d463bccd9abfd97223584dbd5c13112ec35c09bc",
"size": "1524",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Groff",
"bytes": "2008"
},
{
"name": "HTML",
"bytes": "1809"
},
{
"name": "Python",
"bytes": "1237354"
},
{
"name": "Shell",
"bytes": "673"
}
],
"symlink_target": ""
}
|
import logging
import os
import sys
class ResultProcessorBase(object):
"""A result processor. Each defined result processor will
be asked if it can process new results"""
def __init__(self):
"""Init the result processor base
"""
self._log = logging.getLogger("ResultProc").getChild(self.__class__.__name__)
def process(self, result):
"""Process the result. Processing the result IS allowed to delete the
result as part of the processing. Creating new models/other changes is
also allowed/expected.
:param mongoengine.Document result: The result to process
"""
raise NotImplemented("Inheriting classes must implement the process function")
def can_process(self, result):
"""A query function to determine if this result processor can process
the result.
:returns: True/False
"""
raise NotImplemented("Inheriting classes must implement the can_process function")
|
{
"content_hash": "a9b5114ecc2dde290f31fd69e26a422f",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 90,
"avg_line_length": 33.3,
"alnum_prop": 0.6696696696696697,
"repo_name": "talus-framework/talus-master",
"id": "6d819795d5146373e4b5417c67cbb0aa356f53d7",
"size": "1041",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "watchers/result_processors/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "92865"
},
{
"name": "Shell",
"bytes": "14927"
}
],
"symlink_target": ""
}
|
"""Script to run feeder via command line
Usage:
yo-ci get <ci> <type> [--config=<path> -v]
yo-ci get <ci> <type> <name> [--config=<path> -v]
yo-ci --version
Arguments:
get Gets info for type
Options:
-h --help Show this screen.
-c --config=<path> Path to generator config file.
-w --ci=<string> transport to use (e.g. File)
-v --verbose a LOT of output
--version Display current version of yoci and exit
"""
from __future__ import absolute_import
from docopt import docopt
from yoci.yoci import init_logger
from yoci.yoci import _set_global_verbosity_level
from yoci.yoci import call_yoci
lgr = init_logger()
def ver_check():
import pkg_resources
version = None
try:
version = pkg_resources.get_distribution('yoci').version
except Exception as e:
print(e)
finally:
del pkg_resources
return version
def yoci_run(o):
    print(o)
if o['get']:
call_yoci(config=o.get('--config'),
ci=o.get('<ci>'),
req_type=o.get('<type>'),
name=o.get('<name>'),
verbose=o.get('--verbose'))
# elif o['get'] and o['<name>']:
# call_yoci(config=o.get('--config'),
# ci=o.get('<ci>'),
# req_type=o.get('<type>'),
# verbose=o.get('--verbose'))
def yoci(test_options=None):
"""Main entry point for script."""
version = ver_check()
options = test_options or docopt(__doc__, version=version)
_set_global_verbosity_level(options.get('--verbose'))
lgr.debug(options)
yoci_run(options)
def main():
yoci()
if __name__ == '__main__':
main()
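# Illustrative invocations (derived from the usage string above; the <ci> and
# <type> values shown are hypothetical):
#
#     yo-ci get jenkins builds --config=/path/to/config.yaml -v
#     yo-ci get jenkins build my-job
#     yo-ci --version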
|
{
"content_hash": "023d209fc94137ddf970555730524bd2",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 72,
"avg_line_length": 25.285714285714285,
"alnum_prop": 0.5440677966101695,
"repo_name": "cloudify-cosmo/yo-ci",
"id": "dc7539dc571011d487ddbb024c81ef761443e038",
"size": "1785",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yoci/cli.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "1151"
},
{
"name": "Python",
"bytes": "28374"
},
{
"name": "Ruby",
"bytes": "2356"
}
],
"symlink_target": ""
}
|
"""
Utilities for converting to/from a tuple of ords
(c) 2015 Matthew Cotton
"""
def ords(string):
"""Convert string to tuple of ints"""
return tuple(ord(c) for c in string)
def chars(tup):
"""Convert tuple of ints to string"""
return ''.join(chr(o) for o in tup)
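# Illustrative round trip (a minimal sketch, not part of the original module):
# ords() and chars() are inverses for single-byte strings, so converting back
# and forth reproduces the input.
#
#     >>> ords('abc')
#     (97, 98, 99)
#     >>> chars((97, 98, 99)) == 'abc'
#     True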
|
{
"content_hash": "28f35c6c3caf594f953ec07a98d5419d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 48,
"avg_line_length": 21.76923076923077,
"alnum_prop": 0.6501766784452296,
"repo_name": "MattCCS/PyGetch",
"id": "ce1fc7bbcc5d3382a105912339e124093fb3fe06",
"size": "283",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pygetch/utils/conversion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24612"
},
{
"name": "Shell",
"bytes": "1429"
}
],
"symlink_target": ""
}
|
def tags_as_list(tags) -> list:
if tags is None:
return []
if isinstance(tags, (list, tuple)):
return list(tags)
elif isinstance(tags, dict):
return sorted([
':'.join([str(k), str(v)]) if v else k
for k, v in tags.items()
])
raise ValueError('unexpected type for parameter tags')
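# Examples of the normalization above (an illustrative sketch, not from the
# original source): dict tags become sorted "key:value" strings, falsy values
# keep only the key, and list/tuple inputs simply pass through as lists.
#
#     tags_as_list({'env': 'prod', 'beta': None})  # -> ['beta', 'env:prod']
#     tags_as_list(('a', 'b'))                     # -> ['a', 'b']
#     tags_as_list(None)                           # -> []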
class MetricsError(Exception):
pass
|
{
"content_hash": "ad05c70ec3189b5963fa231920389758",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 58,
"avg_line_length": 26.266666666666666,
"alnum_prop": 0.565989847715736,
"repo_name": "managedbyq/mbq.metrics",
"id": "a49ccf71ff399044de19e0681eded4f5d8af7339",
"size": "394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mbq/metrics/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "201"
},
{
"name": "Python",
"bytes": "24214"
}
],
"symlink_target": ""
}
|
from django.core.checks import ( # pylint: disable=redefined-builtin
Tags,
Warning,
register,
)
from django.utils.module_loading import import_string
from axes.backends import AxesBackend
from axes.conf import settings
class Messages:
CACHE_INVALID = (
"You are using the django-axes cache handler for login attempt tracking."
" Your cache configuration is however invalid and will not work correctly with django-axes."
" This can leave security holes in your login systems as attempts are not tracked correctly."
" Reconfigure settings.AXES_CACHE and settings.CACHES per django-axes configuration documentation."
)
MIDDLEWARE_INVALID = (
"You do not have 'axes.middleware.AxesMiddleware' in your settings.MIDDLEWARE."
)
BACKEND_INVALID = "You do not have 'axes.backends.AxesBackend' or a subclass in your settings.AUTHENTICATION_BACKENDS."
SETTING_DEPRECATED = "You have a deprecated setting {deprecated_setting} configured in your project settings"
class Hints:
CACHE_INVALID = None
MIDDLEWARE_INVALID = None
BACKEND_INVALID = (
"AxesModelBackend was renamed to AxesBackend in django-axes version 5.0."
)
SETTING_DEPRECATED = None
class Codes:
CACHE_INVALID = "axes.W001"
MIDDLEWARE_INVALID = "axes.W002"
BACKEND_INVALID = "axes.W003"
SETTING_DEPRECATED = "axes.W004"
@register(Tags.security, Tags.caches, Tags.compatibility)
def axes_cache_check(app_configs, **kwargs): # pylint: disable=unused-argument
axes_handler = getattr(settings, "AXES_HANDLER", "")
axes_cache_key = getattr(settings, "AXES_CACHE", "default")
axes_cache_config = settings.CACHES.get(axes_cache_key, {})
axes_cache_backend = axes_cache_config.get("BACKEND", "")
axes_cache_backend_incompatible = [
"django.core.cache.backends.dummy.DummyCache",
"django.core.cache.backends.locmem.LocMemCache",
"django.core.cache.backends.filebased.FileBasedCache",
]
warnings = []
if axes_handler == "axes.handlers.cache.AxesCacheHandler":
if axes_cache_backend in axes_cache_backend_incompatible:
warnings.append(
Warning(
msg=Messages.CACHE_INVALID,
hint=Hints.CACHE_INVALID,
id=Codes.CACHE_INVALID,
)
)
return warnings
@register(Tags.security, Tags.compatibility)
def axes_middleware_check(app_configs, **kwargs): # pylint: disable=unused-argument
warnings = []
if "axes.middleware.AxesMiddleware" not in settings.MIDDLEWARE:
warnings.append(
Warning(
msg=Messages.MIDDLEWARE_INVALID,
hint=Hints.MIDDLEWARE_INVALID,
id=Codes.MIDDLEWARE_INVALID,
)
)
return warnings
@register(Tags.security, Tags.compatibility)
def axes_backend_check(app_configs, **kwargs): # pylint: disable=unused-argument
warnings = []
found = False
for name in settings.AUTHENTICATION_BACKENDS:
try:
backend = import_string(name)
except ModuleNotFoundError as e:
raise ModuleNotFoundError(
"Can not find module path defined in settings.AUTHENTICATION_BACKENDS"
) from e
except ImportError as e:
raise ImportError(
"Can not import backend class defined in settings.AUTHENTICATION_BACKENDS"
) from e
if issubclass(backend, AxesBackend):
found = True
break
if not found:
warnings.append(
Warning(
msg=Messages.BACKEND_INVALID,
hint=Hints.BACKEND_INVALID,
id=Codes.BACKEND_INVALID,
)
)
return warnings
@register(Tags.compatibility)
def axes_deprecation_check(app_configs, **kwargs): # pylint: disable=unused-argument
warnings = []
deprecated_settings = ["AXES_DISABLE_SUCCESS_ACCESS_LOG"]
for deprecated_setting in deprecated_settings:
try:
getattr(settings, deprecated_setting)
warnings.append(
Warning(
msg=Messages.SETTING_DEPRECATED.format(
deprecated_setting=deprecated_setting
),
hint=None,
id=Codes.SETTING_DEPRECATED,
)
)
except AttributeError:
pass
return warnings
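# Illustrative only (not part of django-axes): these checks surface through
# Django's system check framework, so a misconfigured project would report
# them roughly as follows (output format approximate):
#
#     $ python manage.py check
#     WARNINGS:
#     ?: (axes.W002) You do not have 'axes.middleware.AxesMiddleware' in your settings.MIDDLEWARE.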
|
{
"content_hash": "3d34305e22462be47d832fb8db90e8e5",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 123,
"avg_line_length": 32.00709219858156,
"alnum_prop": 0.6306226456902282,
"repo_name": "django-pci/django-axes",
"id": "fabc2c979cbcf0fb5847745c602e95eb8eb13070",
"size": "4513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "axes/checks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "5726"
},
{
"name": "Python",
"bytes": "41064"
}
],
"symlink_target": ""
}
|
f_in_trn = 'Data/images_train'
f_in_tst = 'Data/images_test'
f_in_sol = 'Data/train_solutions.csv'
f_in_flat_trn = 'Data/train_.csv'
f_in_flat_tst = 'Data/test_.csv'
f_out_trn = 'Data/train_32_deskew.csv'
f_out_tst = 'Data/test_32_deskew.csv'
f_out_subm = 'Submissions/ls_32_deskew.csv'
# Process images
from readData import readData
(Xtrn, Ytrn, Xtst) = readData(f_in_trn, f_in_tst, f_in_sol, augmenting=False)
from saveData import saveData
saveData((Xtrn, Xtst), (f_out_trn, f_out_tst), colfmt='%.18e')
# Load processed images from flat file, on disk
'''
from loadData import loadData
Xtrn = loadData(f_in_flat_trn, rowskip=0)
Xtst = loadData(f_in_flat_tst, rowskip=0)
tst = loadData(f_in_flat_tst, rowskip=0)
Ytrn = loadData(f_in_sol, rowskip=1)
'''
# Fit OLS
'''
from sklearn import linear_model
model = linear_model.LinearRegression()
model.fit(Xtrn, Ytrn[::, 1:])
Ytst = model.predict(Xtst)
'''
# Fit RF
'''
from sklearn.ensemble import RandomForestRegressor
model = RandomForestRegressor()
model.fit(Xtrn, Ytrn[::, 1:])
Ytst = model.predict(Xtst)
'''
# Fit Extra Trees (More random)
'''
seed = 0
from sklearn.ensemble import ExtraTreesRegressor
# n_estimators=10 : 0.14191 [sqrt] [32]
# n_estimators=10 : 0.14185 [sqrt] [32c]
# n_estimators=10 : 0.13998 [sqrt] [64]
# n_estimators=50 : 0.12859 [None] [64c]
# n_estimators=50 : 0.13590 [sqrt]
# n_estimators=50 : 0.13081 [None]
# n_estimators=500 : 0.12954 [None]
# n_estimators=500 : 0.12935 [None]
model = ExtraTreesRegressor(n_estimators=500, max_features=None,
random_state=seed, verbose=True,
oob_score=True, bootstrap=True,
n_jobs=1)
model.fit(Xtrn, Ytrn[::, 1:])
Ytst = model.predict(Xtst)
'''
# Fit gradient boosting
from sklearn.ensemble import GradientBoostingRegressor
model = GradientBoostingRegressor()
model.fit(Xtrn, Ytrn[::, 1:])
Ytst = model.predict(Xtst)
# Plot learning curve
'''
from plotLearningCurve import plotLearningCurve
plotLearningCurve(Xtrn, Ytrn, model)
'''
# Fit Ridge
'''
model = linear_model.RidgeCV(alphas = [0, 25, 50, 100])
model.fit(Xtrn, Ytrn[::, 1:])
my_alpha = model.alpha_
model = linear_model.Ridge(alpha = my_alpha)
model.fit(Xtrn, Ytrn[::, 1:])
Ytst = model.predict(Xtst)
'''
# Save submission to disk
from saveSubmission import *
saveSubmission(Ytst, f_in_tst, f_out_subm)
|
{
"content_hash": "b976960fe61d5f811a04566873e073c5",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 77,
"avg_line_length": 27.229885057471265,
"alnum_prop": 0.6817222456732799,
"repo_name": "mattdelhey/kaggle-galaxy",
"id": "13ecfb059cf21bf30eb7d1cafb658a7830d7a739",
"size": "2369",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "runExperiment.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "62279"
},
{
"name": "R",
"bytes": "1064"
},
{
"name": "TeX",
"bytes": "739"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function, unicode_literals
from botocore.exceptions import ClientError
from concurrent.futures import as_completed
from datetime import datetime
from c7n.actions import BaseAction, ModifyVpcSecurityGroupsAction
from c7n.filters.kms import KmsRelatedFilter
from c7n import query
from c7n.manager import resources
from c7n.tags import (
TagDelayedAction, RemoveTag, TagActionFilter, Tag, universal_augment)
from c7n.utils import (
local_session, chunks, type_schema, snapshot_identifier)
from c7n.filters.vpc import SecurityGroupFilter, SubnetFilter
@resources.register('dynamodb-table')
class Table(query.QueryResourceManager):
class resource_type(query.TypeInfo):
service = 'dynamodb'
arn_type = 'table'
enum_spec = ('list_tables', 'TableNames', None)
detail_spec = ("describe_table", "TableName", None, "Table")
id = 'TableName'
name = 'TableName'
date = 'CreationDateTime'
dimension = 'TableName'
config_type = 'AWS::DynamoDB::Table'
universal_taggable = object()
def get_source(self, source_type):
if source_type == 'describe':
return DescribeTable(self)
elif source_type == 'config':
return ConfigTable(self)
raise ValueError('invalid source %s' % source_type)
class ConfigTable(query.ConfigSource):
def load_resource(self, item):
resource = super(ConfigTable, self).load_resource(item)
resource['CreationDateTime'] = datetime.fromtimestamp(resource['CreationDateTime'] / 1000.0)
if 'LastUpdateToPayPerRequestDateTime' in resource['BillingModeSummary']:
resource['BillingModeSummary'][
'LastUpdateToPayPerRequestDateTime'] = datetime.fromtimestamp(
resource['BillingModeSummary']['LastUpdateToPayPerRequestDateTime'] / 1000.0)
sse_info = resource.pop('Ssedescription', None)
if sse_info is None:
return resource
resource['SSEDescription'] = sse_info
for k, r in (('KmsmasterKeyArn', 'KMSMasterKeyArn'),
('Ssetype', 'SSEType')):
if k in sse_info:
sse_info[r] = sse_info.pop(k)
return resource
class DescribeTable(query.DescribeSource):
def augment(self, resources):
return universal_augment(
self.manager,
super(DescribeTable, self).augment(resources))
class StatusFilter(object):
"""Filter tables by status"""
valid_states = ()
def filter_table_state(self, tables, states=None):
states = states or self.valid_states
orig_count = len(tables)
result = [t for t in tables if t['TableStatus'] in states]
self.log.info("%s %d of %d tables" % (
self.__class__.__name__, len(result), orig_count))
return result
def filter_backup_state(self, tables, states=None):
states = states or self.valid_states
orig_count = len(tables)
result = [t for t in tables if t['BackupStatus'] in states]
self.log.info("%s %d of %d tables" % (
self.__class__.__name__, len(result), orig_count))
return result
@Table.filter_registry.register('kms-key')
class KmsFilter(KmsRelatedFilter):
"""
    Filter a resource by its associated kms key and optionally the alias name
    of the kms key by using 'c7n:AliasName'
:example:
.. code-block:: yaml
policies:
- name: dynamodb-kms-key-filters
resource: dynamodb-table
filters:
- type: kms-key
key: c7n:AliasName
value: "^(alias/aws/dynamodb)"
op: regex
"""
RelatedIdsExpression = 'SSEDescription.KMSMasterKeyArn'
@Table.action_registry.register('delete')
class DeleteTable(BaseAction, StatusFilter):
"""Action to delete dynamodb tables
:example:
.. code-block:: yaml
policies:
- name: delete-empty-tables
resource: dynamodb-table
filters:
- TableSizeBytes: 0
actions:
- delete
"""
valid_status = ('ACTIVE',)
schema = type_schema('delete')
permissions = ("dynamodb:DeleteTable",)
def delete_table(self, client, table_set):
for t in table_set:
client.delete_table(TableName=t['TableName'])
def process(self, resources):
resources = self.filter_table_state(
resources, self.valid_status)
if not len(resources):
return
futures = []
client = local_session(self.manager.session_factory).client('dynamodb')
with self.executor_factory(max_workers=2) as w:
for table_set in chunks(resources, 20):
futures.append(w.submit(self.delete_table, client, table_set))
for f in as_completed(futures):
if f.exception():
self.log.error(
"Exception deleting dynamodb table set \n %s"
% (f.exception()))
@Table.action_registry.register('set-stream')
class SetStream(BaseAction, StatusFilter):
"""Action to enable/disable streams on table.
:example:
.. code-block:: yaml
policies:
- name: stream-update
resource: dynamodb-table
filters:
- TableName: 'test'
- TableStatus: 'ACTIVE'
actions:
- type: set-stream
state: True
stream_view_type: 'NEW_IMAGE'
"""
valid_status = ('ACTIVE',)
schema = type_schema('set-stream',
state={'type': 'boolean'},
stream_view_type={'type': 'string'})
permissions = ("dynamodb:UpdateTable",)
def process(self, tables):
tables = self.filter_table_state(
tables, self.valid_status)
if not len(tables):
self.log.warning("Table not in ACTIVE state.")
return
state = self.data.get('state')
type = self.data.get('stream_view_type')
stream_spec = {"StreamEnabled": state}
if self.data.get('stream_view_type') is not None:
stream_spec.update({"StreamViewType": type})
c = local_session(self.manager.session_factory).client('dynamodb')
with self.executor_factory(max_workers=2) as w:
futures = {w.submit(c.update_table,
TableName=t['TableName'],
StreamSpecification=stream_spec): t for t in tables}
for f in as_completed(futures):
t = futures[f]
if f.exception():
self.log.error(
"Exception updating dynamodb table set \n %s"
% (f.exception()))
continue
if self.data.get('stream_view_type') is not None:
stream_state = \
f.result()['TableDescription']['StreamSpecification']['StreamEnabled']
stream_type = \
f.result()['TableDescription']['StreamSpecification']['StreamViewType']
t['c7n:StreamState'] = stream_state
t['c7n:StreamType'] = stream_type
@Table.action_registry.register('backup')
class CreateBackup(BaseAction, StatusFilter):
"""Creates a manual backup of a DynamoDB table. Use of the optional
prefix flag will attach a user specified prefix. Otherwise,
the backup prefix will default to 'Backup'.
:example:
.. code-block:: yaml
policies:
- name: dynamodb-create-backup
resource: dynamodb-table
actions:
- type: backup
prefix: custom
"""
valid_status = ('ACTIVE',)
schema = type_schema('backup',
prefix={'type': 'string'})
permissions = ('dynamodb:CreateBackup',)
def process(self, resources):
resources = self.filter_table_state(
resources, self.valid_status)
if not len(resources):
return
c = local_session(self.manager.session_factory).client('dynamodb')
futures = {}
prefix = self.data.get('prefix', 'Backup')
with self.executor_factory(max_workers=2) as w:
for t in resources:
futures[w.submit(
c.create_backup,
BackupName=snapshot_identifier(
prefix, t['TableName']),
TableName=t['TableName'])] = t
for f in as_completed(futures):
t = futures[f]
if f.exception():
                    self.manager.log.warning(
                        "Could not complete DynamoDB backup for table: %s", t)
arn = f.result()['BackupDetails']['BackupArn']
t['c7n:BackupArn'] = arn
@resources.register('dynamodb-backup')
class Backup(query.QueryResourceManager):
class resource_type(query.TypeInfo):
service = 'dynamodb'
arn = 'BackupArn'
enum_spec = ('list_backups', 'BackupSummaries', None)
id = 'BackupArn'
name = 'BackupName'
date = 'BackupCreationDateTime'
@Backup.action_registry.register('delete')
class DeleteBackup(BaseAction, StatusFilter):
"""Deletes backups of a DynamoDB table
:example:
.. code-block:: yaml
policies:
- name: dynamodb-delete-backup
resource: dynamodb-backup
filters:
- type: value
key: BackupCreationDateTime
op: greater-than
value_type: age
value: 28
actions:
- type: delete
"""
valid_status = ('AVAILABLE',)
schema = type_schema('delete')
permissions = ('dynamodb:DeleteBackup',)
def process(self, backups):
backups = self.filter_backup_state(
backups, self.valid_status)
if not len(backups):
return
c = local_session(self.manager.session_factory).client('dynamodb')
for table_set in chunks(backups, 20):
self.process_dynamodb_backups(table_set, c)
def process_dynamodb_backups(self, table_set, c):
for t in table_set:
try:
c.delete_backup(
BackupArn=t['BackupArn'])
except ClientError as e:
if e.response['Error']['Code'] == 'ResourceNotFoundException':
self.log.warning("Could not complete DynamoDB backup deletion for table:%s", t)
continue
raise
@resources.register('dynamodb-stream')
class Stream(query.QueryResourceManager):
# Note stream management takes place on the table resource
class resource_type(query.TypeInfo):
service = 'dynamodbstreams'
permission_prefix = 'dynamodb'
# Note max rate of 5 calls per second
enum_spec = ('list_streams', 'Streams', None)
# Note max rate of 10 calls per second.
detail_spec = (
"describe_stream", "StreamArn", "StreamArn", "StreamDescription")
arn = id = 'StreamArn'
arn_type = 'stream'
name = 'TableName'
date = 'CreationDateTime'
dimension = 'TableName'
@resources.register('dax')
class DynamoDbAccelerator(query.QueryResourceManager):
class resource_type(query.TypeInfo):
service = 'dax'
arn_type = 'cluster'
enum_spec = ('describe_clusters', 'Clusters', None)
id = 'ClusterArn'
name = 'ClusterName'
config_type = 'AWS::DAX::Cluster'
permissions = ('dax:ListTags',)
def get_source(self, source_type):
if source_type == 'describe':
return DescribeDaxCluster(self)
elif source_type == 'config':
return query.ConfigSource(self)
raise ValueError('invalid source %s' % source_type)
def get_resources(self, ids, cache=True, augment=True):
"""Override in order to disable the augment for serverless policies.
        list_tags on dax resources always fails until the cluster has finished creating.
"""
return super(DynamoDbAccelerator, self).get_resources(ids, cache, augment=False)
class DescribeDaxCluster(query.DescribeSource):
def get_resources(self, ids, cache=True):
"""Retrieve dax resources for serverless policies or related resources
"""
client = local_session(self.manager.session_factory).client('dax')
return client.describe_clusters(ClusterNames=ids).get('Clusters')
def augment(self, clusters):
resources = super(DescribeDaxCluster, self).augment(clusters)
return list(filter(None, _dax_cluster_tags(
resources,
self.manager.session_factory,
self.manager.retry,
self.manager.log)))
def _dax_cluster_tags(tables, session_factory, retry, log):
client = local_session(session_factory).client('dax')
def process_tags(r):
try:
r['Tags'] = retry(
client.list_tags, ResourceName=r['ClusterArn'])['Tags']
return r
except (client.exceptions.ClusterNotFoundFault,
client.exceptions.InvalidClusterStateFault):
return None
return filter(None, list(map(process_tags, tables)))
DynamoDbAccelerator.filter_registry.register('marked-for-op', TagActionFilter)
@DynamoDbAccelerator.filter_registry.register('security-group')
class DaxSecurityGroupFilter(SecurityGroupFilter):
RelatedIdsExpression = "SecurityGroups[].SecurityGroupIdentifier"
@DynamoDbAccelerator.action_registry.register('tag')
class DaxTagging(Tag):
"""Action to create tag(s) on a resource
:example:
.. code-block:: yaml
policies:
- name: dax-cluster-tag
resource: dax
filters:
- "tag:target-tag": absent
actions:
- type: tag
key: target-tag
value: target-tag-value
"""
permissions = ('dax:TagResource',)
def process_resource_set(self, client, resources, tags):
mid = self.manager.resource_type.id
for r in resources:
try:
client.tag_resource(ResourceName=r[mid], Tags=tags)
except (client.exceptions.ClusterNotFoundFault,
client.exceptions.InvalidARNFault,
client.exceptions.InvalidClusterStateFault) as e:
self.log.warning('Exception tagging %s: \n%s', r['ClusterName'], e)
@DynamoDbAccelerator.action_registry.register('remove-tag')
class DaxRemoveTagging(RemoveTag):
"""Action to remove tag(s) on a resource
:example:
.. code-block:: yaml
policies:
- name: dax-remove-tag
resource: dax
filters:
- "tag:OutdatedTag": present
actions:
- type: remove-tag
tags: ["OutdatedTag"]
"""
permissions = ('dax:UntagResource',)
def process_resource_set(self, client, resources, tag_keys):
for r in resources:
try:
client.untag_resource(
ResourceName=r['ClusterArn'], TagKeys=tag_keys)
except (client.exceptions.ClusterNotFoundFault,
client.exceptions.TagNotFoundFault,
client.exceptions.InvalidClusterStateFault) as e:
self.log.warning('Exception removing tags on %s: \n%s', r['ClusterName'], e)
@DynamoDbAccelerator.action_registry.register('mark-for-op')
class DaxMarkForOp(TagDelayedAction):
"""Action to specify an action to occur at a later date
:example:
.. code-block:: yaml
policies:
- name: dax-mark-tag-compliance
resource: dax
filters:
- "tag:custodian_cleanup": absent
- "tag:OwnerName": absent
actions:
- type: mark-for-op
tag: custodian_cleanup
msg: "Missing tag 'OwnerName': {op}@{action_date}"
op: delete
days: 7
"""
@DynamoDbAccelerator.action_registry.register('delete')
class DaxDeleteCluster(BaseAction):
"""Action to delete a DAX cluster
:example:
.. code-block:: yaml
policies:
- name: dax-delete-cluster
resource: dax
filters:
- "tag:DeleteMe": present
actions:
- type: delete
"""
permissions = ('dax:DeleteCluster',)
schema = type_schema('delete')
def process(self, resources):
client = local_session(self.manager.session_factory).client('dax')
for r in resources:
try:
client.delete_cluster(ClusterName=r['ClusterName'])
except (client.exceptions.ClusterNotFoundFault,
client.exceptions.InvalidARNFault,
client.exceptions.InvalidClusterStateFault) as e:
                self.log.warning('Exception deleting %s: \n%s', r['ClusterName'], e)
@DynamoDbAccelerator.action_registry.register('update-cluster')
class DaxUpdateCluster(BaseAction):
"""Updates a DAX cluster configuration
:example:
.. code-block:: yaml
policies:
- name: dax-update-cluster
resource: dax
filters:
- ParameterGroup.ParameterGroupName: 'default.dax1.0'
actions:
- type: update-cluster
ParameterGroupName: 'testparamgroup'
"""
schema = {
'type': 'object',
'additionalProperties': False,
'properties': {
'type': {'enum': ['update-cluster']},
'Description': {'type': 'string'},
'PreferredMaintenanceWindow': {'type': 'string'},
'NotificationTopicArn': {'type': 'string'},
'NotificationTopicStatus': {'type': 'string'},
'ParameterGroupName': {'type': 'string'}
}
}
permissions = ('dax:UpdateCluster',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('dax')
params = dict(self.data)
params.pop('type')
for r in resources:
params['ClusterName'] = r['ClusterName']
try:
client.update_cluster(**params)
except (client.exceptions.ClusterNotFoundFault,
client.exceptions.InvalidClusterStateFault) as e:
self.log.warning(
'Exception updating dax cluster %s: \n%s',
r['ClusterName'], e)
@DynamoDbAccelerator.action_registry.register('modify-security-groups')
class DaxModifySecurityGroup(ModifyVpcSecurityGroupsAction):
permissions = ('dax:UpdateCluster',)
def process(self, resources):
client = local_session(self.manager.session_factory).client('dax')
groups = super(DaxModifySecurityGroup, self).get_groups(resources)
for idx, r in enumerate(resources):
client.update_cluster(
ClusterName=r['ClusterName'], SecurityGroupIds=groups[idx])
@DynamoDbAccelerator.filter_registry.register('subnet')
class DaxSubnetFilter(SubnetFilter):
"""Filters DAX clusters based on their associated subnet group
:example:
.. code-block:: yaml
policies:
- name: dax-no-auto-public
resource: dax
filters:
- type: subnet
key: MapPublicIpOnLaunch
value: False
"""
RelatedIdsExpression = ""
def get_related_ids(self, resources):
group_ids = set()
for r in resources:
group_ids.update(
[s['SubnetIdentifier'] for s in
self.groups[r['SubnetGroup']]['Subnets']])
return group_ids
def process(self, resources, event=None):
client = local_session(self.manager.session_factory).client('dax')
subnet_groups = client.describe_subnet_groups()['SubnetGroups']
self.groups = {s['SubnetGroupName']: s for s in subnet_groups}
return super(DaxSubnetFilter, self).process(resources)
|
{
"content_hash": "54dd29daa06a81681f25224a91d4f4e5",
"timestamp": "",
"source": "github",
"line_count": 628,
"max_line_length": 100,
"avg_line_length": 32.68312101910828,
"alnum_prop": 0.5811449451887941,
"repo_name": "kapilt/cloud-custodian",
"id": "bf3aa1437130c29473dccd0742680589c2b0d92c",
"size": "21115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "c7n/resources/dynamodb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "8163"
},
{
"name": "Go",
"bytes": "146630"
},
{
"name": "HTML",
"bytes": "31"
},
{
"name": "Makefile",
"bytes": "9971"
},
{
"name": "PowerShell",
"bytes": "1804"
},
{
"name": "Python",
"bytes": "5354902"
},
{
"name": "Shell",
"bytes": "13032"
},
{
"name": "Smarty",
"bytes": "359"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import frappe
@frappe.whitelist()
def get_app_list():
out = {}
installed = frappe.get_installed_apps()
for app in frappe.get_all_apps(True):
app_hooks = frappe.get_hooks(app_name=app)
if app_hooks.get('hide_in_installer'):
continue
out[app] = {}
for key in ("app_name", "app_title", "app_description", "app_icon",
"app_publisher", "app_version", "app_url", "app_color"):
val = app_hooks.get(key) or []
out[app][key] = val[0] if len(val) else ""
if app in installed:
out[app]["installed"] = 1
return out
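# Illustrative return shape (hypothetical app and values, not from the original
# source): one dict entry per visible app, hook values flattened to strings,
# and an "installed" flag for apps already installed on the current site.
#
#     {"frappe": {"app_name": "frappe", "app_title": "Frappe Framework",
#                 "app_description": "...", "app_icon": "...",
#                 "app_publisher": "...", "app_version": "...",
#                 "app_url": "...", "app_color": "...", "installed": 1}}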
|
{
"content_hash": "83573bd376f69139dc069546a2054edd",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 69,
"avg_line_length": 25.391304347826086,
"alnum_prop": 0.636986301369863,
"repo_name": "gangadhar-kadam/hrfrappe",
"id": "ade393014cf10090f3c35b3602de69447b9dc761",
"size": "688",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "frappe/core/page/applications/applications.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "105798"
},
{
"name": "JavaScript",
"bytes": "1458963"
},
{
"name": "Python",
"bytes": "714974"
}
],
"symlink_target": ""
}
|
from cursive import exception as cursive_exception
from cursive import signature_utils
import glance_store
import mock
from glance.common import exception
import glance.location
from glance.tests.unit import base as unit_test_base
from glance.tests.unit import utils as unit_test_utils
from glance.tests import utils
BASE_URI = 'http://storeurl.com/container'
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
UUID2 = '971ec09a-8067-4bc8-a91f-ae3557f1c4c7'
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
TENANT2 = '2c014f32-55eb-467d-8fcb-4bd706012f81'
TENANT3 = '228c6da5-29cd-4d67-9457-ed632e083fc0'
class ImageRepoStub(object):
def add(self, image):
return image
def save(self, image, from_state=None):
return image
class ImageStub(object):
def __init__(self, image_id, status=None, locations=None,
visibility=None, extra_properties=None):
self.image_id = image_id
self.status = status
self.locations = locations or []
self.visibility = visibility
self.size = 1
self.extra_properties = extra_properties or {}
def delete(self):
self.status = 'deleted'
def get_member_repo(self):
return FakeMemberRepo(self, [TENANT1, TENANT2])
class ImageFactoryStub(object):
def new_image(self, image_id=None, name=None, visibility='private',
min_disk=0, min_ram=0, protected=False, owner=None,
disk_format=None, container_format=None,
extra_properties=None, tags=None, **other_args):
return ImageStub(image_id, visibility=visibility,
extra_properties=extra_properties, **other_args)
class FakeMemberRepo(object):
def __init__(self, image, tenants=None):
self.image = image
self.factory = glance.domain.ImageMemberFactory()
self.tenants = tenants or []
def list(self, *args, **kwargs):
return [self.factory.new_image_member(self.image, tenant)
for tenant in self.tenants]
def add(self, member):
self.tenants.append(member.member_id)
def remove(self, member):
self.tenants.remove(member.member_id)
class TestStoreImage(utils.BaseTestCase):
def setUp(self):
locations = [{'url': '%s/%s' % (BASE_URI, UUID1),
'metadata': {}, 'status': 'active'}]
self.image_stub = ImageStub(UUID1, 'active', locations)
self.store_api = unit_test_utils.FakeStoreAPI()
self.store_utils = unit_test_utils.FakeStoreUtils(self.store_api)
super(TestStoreImage, self).setUp()
def test_image_delete(self):
image = glance.location.ImageProxy(self.image_stub, {},
self.store_api, self.store_utils)
location = image.locations[0]
self.assertEqual('active', image.status)
self.store_api.get_from_backend(location['url'], context={})
image.delete()
self.assertEqual('deleted', image.status)
self.assertRaises(glance_store.NotFound,
self.store_api.get_from_backend, location['url'], {})
def test_image_get_data(self):
image = glance.location.ImageProxy(self.image_stub, {},
self.store_api, self.store_utils)
self.assertEqual('XXX', image.get_data())
def test_image_get_data_from_second_location(self):
def fake_get_from_backend(self, location, offset=0,
chunk_size=None, context=None):
if UUID1 in location:
raise Exception('not allow download from %s' % location)
else:
return self.data[location]
image1 = glance.location.ImageProxy(self.image_stub, {},
self.store_api, self.store_utils)
self.assertEqual('XXX', image1.get_data())
# Multiple location support
context = glance.context.RequestContext(user=USER1)
(image2, image_stub2) = self._add_image(context, UUID2, 'ZZZ', 3)
location_data = image2.locations[0]
image1.locations.append(location_data)
self.assertEqual(2, len(image1.locations))
self.assertEqual(UUID2, location_data['url'])
self.stubs.Set(unit_test_utils.FakeStoreAPI, 'get_from_backend',
fake_get_from_backend)
# This time, image1.get_data() returns the data wrapped in a
        # LimitingReader|CooperativeReader pipeline, so we peek under
# the hood of those objects to get at the underlying string.
self.assertEqual('ZZZ', image1.get_data().data.fd)
image1.locations.pop(0)
self.assertEqual(1, len(image1.locations))
image2.delete()
def test_image_set_data(self):
context = glance.context.RequestContext(user=USER1)
image_stub = ImageStub(UUID2, status='queued', locations=[])
image = glance.location.ImageProxy(image_stub, context,
self.store_api, self.store_utils)
image.set_data('YYYY', 4)
self.assertEqual(4, image.size)
# NOTE(markwash): FakeStore returns image_id for location
self.assertEqual(UUID2, image.locations[0]['url'])
self.assertEqual('Z', image.checksum)
self.assertEqual('active', image.status)
def test_image_set_data_location_metadata(self):
context = glance.context.RequestContext(user=USER1)
image_stub = ImageStub(UUID2, status='queued', locations=[])
loc_meta = {'key': 'value5032'}
store_api = unit_test_utils.FakeStoreAPI(store_metadata=loc_meta)
store_utils = unit_test_utils.FakeStoreUtils(store_api)
image = glance.location.ImageProxy(image_stub, context,
store_api, store_utils)
image.set_data('YYYY', 4)
self.assertEqual(4, image.size)
location_data = image.locations[0]
self.assertEqual(UUID2, location_data['url'])
self.assertEqual(loc_meta, location_data['metadata'])
self.assertEqual('Z', image.checksum)
self.assertEqual('active', image.status)
image.delete()
self.assertEqual(image.status, 'deleted')
self.assertRaises(glance_store.NotFound,
self.store_api.get_from_backend,
image.locations[0]['url'], {})
def test_image_set_data_unknown_size(self):
context = glance.context.RequestContext(user=USER1)
image_stub = ImageStub(UUID2, status='queued', locations=[])
image = glance.location.ImageProxy(image_stub, context,
self.store_api, self.store_utils)
image.set_data('YYYY', None)
self.assertEqual(4, image.size)
# NOTE(markwash): FakeStore returns image_id for location
self.assertEqual(UUID2, image.locations[0]['url'])
self.assertEqual('Z', image.checksum)
self.assertEqual('active', image.status)
image.delete()
self.assertEqual(image.status, 'deleted')
self.assertRaises(glance_store.NotFound,
self.store_api.get_from_backend,
image.locations[0]['url'], context={})
@mock.patch('glance.location.LOG')
def test_image_set_data_valid_signature(self, mock_log):
context = glance.context.RequestContext(user=USER1)
extra_properties = {
'img_signature_certificate_uuid': 'UUID',
'img_signature_hash_method': 'METHOD',
'img_signature_key_type': 'TYPE',
'img_signature': 'VALID'
}
image_stub = ImageStub(UUID2, status='queued',
extra_properties=extra_properties)
self.stubs.Set(signature_utils, 'get_verifier',
unit_test_utils.fake_get_verifier)
image = glance.location.ImageProxy(image_stub, context,
self.store_api, self.store_utils)
image.set_data('YYYY', 4)
self.assertEqual('active', image.status)
mock_log.info.assert_called_once_with(
u'Successfully verified signature for image %s',
UUID2)
def test_image_set_data_invalid_signature(self):
context = glance.context.RequestContext(user=USER1)
extra_properties = {
'img_signature_certificate_uuid': 'UUID',
'img_signature_hash_method': 'METHOD',
'img_signature_key_type': 'TYPE',
'img_signature': 'INVALID'
}
image_stub = ImageStub(UUID2, status='queued',
extra_properties=extra_properties)
self.stubs.Set(signature_utils, 'get_verifier',
unit_test_utils.fake_get_verifier)
image = glance.location.ImageProxy(image_stub, context,
self.store_api, self.store_utils)
self.assertRaises(cursive_exception.SignatureVerificationError,
image.set_data,
'YYYY', 4)
def test_image_set_data_invalid_signature_missing_metadata(self):
context = glance.context.RequestContext(user=USER1)
extra_properties = {
'img_signature_hash_method': 'METHOD',
'img_signature_key_type': 'TYPE',
'img_signature': 'INVALID'
}
image_stub = ImageStub(UUID2, status='queued',
extra_properties=extra_properties)
self.stubs.Set(signature_utils, 'get_verifier',
unit_test_utils.fake_get_verifier)
image = glance.location.ImageProxy(image_stub, context,
self.store_api, self.store_utils)
image.set_data('YYYY', 4)
self.assertEqual(UUID2, image.locations[0]['url'])
self.assertEqual('Z', image.checksum)
# Image is still active, since invalid signature was ignored
self.assertEqual('active', image.status)
def _add_image(self, context, image_id, data, len):
image_stub = ImageStub(image_id, status='queued', locations=[])
image = glance.location.ImageProxy(image_stub, context,
self.store_api, self.store_utils)
image.set_data(data, len)
self.assertEqual(len, image.size)
# NOTE(markwash): FakeStore returns image_id for location
location = {'url': image_id, 'metadata': {}, 'status': 'active'}
self.assertEqual([location], image.locations)
self.assertEqual([location], image_stub.locations)
self.assertEqual('active', image.status)
return (image, image_stub)
def test_image_change_append_invalid_location_uri(self):
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
location_bad = {'url': 'unknown://location', 'metadata': {}}
self.assertRaises(exception.BadStoreUri,
image1.locations.append, location_bad)
image1.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
def test_image_change_append_invalid_location_metatdata(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
        # Using only one test rule here is enough to make sure
        # 'store.check_location_metadata()' can be triggered
        # in the Location proxy layer. For complete coverage of
        # 'store.check_location_metadata()', see the cases in
        # 'TestStoreMetaDataChecker' below.
location_bad = {'url': UUID3, 'metadata': b"a invalid metadata"}
self.assertRaises(glance_store.BackendException,
image1.locations.append, location_bad)
image1.delete()
image2.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
def test_image_change_append_locations(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
image1.locations.append(location3)
self.assertEqual([location2, location3], image_stub1.locations)
self.assertEqual([location2, location3], image1.locations)
image1.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image2.delete()
def test_image_change_pop_location(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
image1.locations.append(location3)
self.assertEqual([location2, location3], image_stub1.locations)
self.assertEqual([location2, location3], image1.locations)
image1.locations.pop()
self.assertEqual([location2], image_stub1.locations)
self.assertEqual([location2], image1.locations)
image1.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image2.delete()
def test_image_change_extend_invalid_locations_uri(self):
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
location_bad = {'url': 'unknown://location', 'metadata': {}}
self.assertRaises(exception.BadStoreUri,
image1.locations.extend, [location_bad])
image1.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
def test_image_change_extend_invalid_locations_metadata(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location_bad = {'url': UUID3, 'metadata': b"a invalid metadata"}
self.assertRaises(glance_store.BackendException,
image1.locations.extend, [location_bad])
image1.delete()
image2.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
def test_image_change_extend_locations(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
image1.locations.extend([location3])
self.assertEqual([location2, location3], image_stub1.locations)
self.assertEqual([location2, location3], image1.locations)
image1.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image2.delete()
def test_image_change_remove_location(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
location_bad = {'url': 'unknown://location', 'metadata': {}}
image1.locations.extend([location3])
image1.locations.remove(location2)
self.assertEqual([location3], image_stub1.locations)
self.assertEqual([location3], image1.locations)
self.assertRaises(ValueError,
image1.locations.remove, location_bad)
image1.delete()
image2.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
def test_image_change_delete_location(self):
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
del image1.locations[0]
self.assertEqual([], image_stub1.locations)
self.assertEqual(0, len(image1.locations))
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
image1.delete()
def test_image_change_insert_invalid_location_uri(self):
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
location_bad = {'url': 'unknown://location', 'metadata': {}}
self.assertRaises(exception.BadStoreUri,
image1.locations.insert, 0, location_bad)
image1.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
def test_image_change_insert_invalid_location_metadata(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location_bad = {'url': UUID3, 'metadata': b"a invalid metadata"}
self.assertRaises(glance_store.BackendException,
image1.locations.insert, 0, location_bad)
image1.delete()
image2.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
def test_image_change_insert_location(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
image1.locations.insert(0, location3)
self.assertEqual([location3, location2], image_stub1.locations)
self.assertEqual([location3, location2], image1.locations)
image1.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image2.delete()
def test_image_change_delete_locations(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location2 = {'url': UUID2, 'metadata': {}}
location3 = {'url': UUID3, 'metadata': {}}
image1.locations.insert(0, location3)
del image1.locations[0:100]
self.assertEqual([], image_stub1.locations)
self.assertEqual(0, len(image1.locations))
self.assertRaises(exception.BadStoreUri,
image1.locations.insert, 0, location2)
self.assertRaises(exception.BadStoreUri,
image2.locations.insert, 0, location3)
image1.delete()
image2.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
def test_image_change_adding_invalid_location_uri(self):
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
image_stub1 = ImageStub('fake_image_id', status='queued', locations=[])
image1 = glance.location.ImageProxy(image_stub1, context,
self.store_api, self.store_utils)
location_bad = {'url': 'unknown://location', 'metadata': {}}
self.assertRaises(exception.BadStoreUri,
image1.locations.__iadd__, [location_bad])
self.assertEqual([], image_stub1.locations)
self.assertEqual([], image1.locations)
image1.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
def test_image_change_adding_invalid_location_metadata(self):
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
image_stub2 = ImageStub('fake_image_id', status='queued', locations=[])
image2 = glance.location.ImageProxy(image_stub2, context,
self.store_api, self.store_utils)
location_bad = {'url': UUID2, 'metadata': b"a invalid metadata"}
self.assertRaises(glance_store.BackendException,
image2.locations.__iadd__, [location_bad])
self.assertEqual([], image_stub2.locations)
self.assertEqual([], image2.locations)
image1.delete()
image2.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
def test_image_change_adding_locations(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
image3 = glance.location.ImageProxy(image_stub3, context,
self.store_api, self.store_utils)
location2 = {'url': UUID2, 'metadata': {}}
location3 = {'url': UUID3, 'metadata': {}}
image3.locations += [location2, location3]
self.assertEqual([location2, location3], image_stub3.locations)
self.assertEqual([location2, location3], image3.locations)
image3.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image1.delete()
image2.delete()
def test_image_get_location_index(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
image3 = glance.location.ImageProxy(image_stub3, context,
self.store_api, self.store_utils)
location2 = {'url': UUID2, 'metadata': {}}
location3 = {'url': UUID3, 'metadata': {}}
image3.locations += [location2, location3]
self.assertEqual(1, image_stub3.locations.index(location3))
image3.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image1.delete()
image2.delete()
def test_image_get_location_by_index(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
image3 = glance.location.ImageProxy(image_stub3, context,
self.store_api, self.store_utils)
location2 = {'url': UUID2, 'metadata': {}}
location3 = {'url': UUID3, 'metadata': {}}
image3.locations += [location2, location3]
self.assertEqual(1, image_stub3.locations.index(location3))
self.assertEqual(location2, image_stub3.locations[0])
image3.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image1.delete()
image2.delete()
def test_image_checking_location_exists(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
image3 = glance.location.ImageProxy(image_stub3, context,
self.store_api, self.store_utils)
location2 = {'url': UUID2, 'metadata': {}}
location3 = {'url': UUID3, 'metadata': {}}
location_bad = {'url': 'unknown://location', 'metadata': {}}
image3.locations += [location2, location3]
self.assertIn(location3, image_stub3.locations)
self.assertNotIn(location_bad, image_stub3.locations)
image3.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image1.delete()
image2.delete()
def test_image_reverse_locations_order(self):
UUID3 = 'a8a61ec4-d7a3-11e2-8c28-000c29c27581'
self.assertEqual(2, len(self.store_api.data.keys()))
context = glance.context.RequestContext(user=USER1)
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
location2 = {'url': UUID2, 'metadata': {}}
location3 = {'url': UUID3, 'metadata': {}}
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
image3 = glance.location.ImageProxy(image_stub3, context,
self.store_api, self.store_utils)
image3.locations += [location2, location3]
image_stub3.locations.reverse()
self.assertEqual([location3, location2], image_stub3.locations)
self.assertEqual([location3, location2], image3.locations)
image3.delete()
self.assertEqual(2, len(self.store_api.data.keys()))
self.assertNotIn(UUID2, self.store_api.data.keys())
self.assertNotIn(UUID3, self.store_api.data.keys())
image1.delete()
image2.delete()
class TestStoreImageRepo(utils.BaseTestCase):
def setUp(self):
super(TestStoreImageRepo, self).setUp()
self.store_api = unit_test_utils.FakeStoreAPI()
store_utils = unit_test_utils.FakeStoreUtils(self.store_api)
self.image_stub = ImageStub(UUID1)
self.image = glance.location.ImageProxy(self.image_stub, {},
self.store_api, store_utils)
self.image_repo_stub = ImageRepoStub()
self.image_repo = glance.location.ImageRepoProxy(self.image_repo_stub,
{}, self.store_api,
store_utils)
patcher = mock.patch("glance.location._get_member_repo_for_store",
self.get_fake_member_repo)
patcher.start()
self.addCleanup(patcher.stop)
self.fake_member_repo = FakeMemberRepo(self.image, [TENANT1, TENANT2])
self.image_member_repo = glance.location.ImageMemberRepoProxy(
self.fake_member_repo,
self.image,
{}, self.store_api)
def get_fake_member_repo(self, image, context, db_api, store_api):
return FakeMemberRepo(self.image, [TENANT1, TENANT2])
def test_add_updates_acls(self):
self.image_stub.locations = [{'url': 'foo', 'metadata': {},
'status': 'active'},
{'url': 'bar', 'metadata': {},
'status': 'active'}]
self.image_stub.visibility = 'public'
self.image_repo.add(self.image)
self.assertTrue(self.store_api.acls['foo']['public'])
self.assertEqual([], self.store_api.acls['foo']['read'])
self.assertEqual([], self.store_api.acls['foo']['write'])
self.assertTrue(self.store_api.acls['bar']['public'])
self.assertEqual([], self.store_api.acls['bar']['read'])
self.assertEqual([], self.store_api.acls['bar']['write'])
def test_add_ignores_acls_if_no_locations(self):
self.image_stub.locations = []
self.image_stub.visibility = 'public'
self.image_repo.add(self.image)
self.assertEqual(0, len(self.store_api.acls))
def test_save_updates_acls(self):
self.image_stub.locations = [{'url': 'foo', 'metadata': {},
'status': 'active'}]
self.image_repo.save(self.image)
self.assertIn('foo', self.store_api.acls)
def test_add_fetches_members_if_private(self):
self.image_stub.locations = [{'url': 'glue', 'metadata': {},
'status': 'active'}]
self.image_stub.visibility = 'private'
self.image_repo.add(self.image)
self.assertIn('glue', self.store_api.acls)
acls = self.store_api.acls['glue']
self.assertFalse(acls['public'])
self.assertEqual([], acls['write'])
self.assertEqual([TENANT1, TENANT2], acls['read'])
def test_save_fetches_members_if_private(self):
self.image_stub.locations = [{'url': 'glue', 'metadata': {},
'status': 'active'}]
self.image_stub.visibility = 'private'
self.image_repo.save(self.image)
self.assertIn('glue', self.store_api.acls)
acls = self.store_api.acls['glue']
self.assertFalse(acls['public'])
self.assertEqual([], acls['write'])
self.assertEqual([TENANT1, TENANT2], acls['read'])
def test_member_addition_updates_acls(self):
self.image_stub.locations = [{'url': 'glug', 'metadata': {},
'status': 'active'}]
self.image_stub.visibility = 'private'
membership = glance.domain.ImageMembership(
UUID1, TENANT3, None, None, status='accepted')
self.image_member_repo.add(membership)
self.assertIn('glug', self.store_api.acls)
acls = self.store_api.acls['glug']
self.assertFalse(acls['public'])
self.assertEqual([], acls['write'])
self.assertEqual([TENANT1, TENANT2, TENANT3], acls['read'])
def test_member_removal_updates_acls(self):
self.image_stub.locations = [{'url': 'glug', 'metadata': {},
'status': 'active'}]
self.image_stub.visibility = 'private'
membership = glance.domain.ImageMembership(
UUID1, TENANT1, None, None, status='accepted')
self.image_member_repo.remove(membership)
self.assertIn('glug', self.store_api.acls)
acls = self.store_api.acls['glug']
self.assertFalse(acls['public'])
self.assertEqual([], acls['write'])
self.assertEqual([TENANT2], acls['read'])
class TestImageFactory(unit_test_base.StoreClearingUnitTest):
def setUp(self):
super(TestImageFactory, self).setUp()
store_api = unit_test_utils.FakeStoreAPI()
store_utils = unit_test_utils.FakeStoreUtils(store_api)
self.image_factory = glance.location.ImageFactoryProxy(
ImageFactoryStub(),
glance.context.RequestContext(user=USER1),
store_api,
store_utils)
def test_new_image(self):
image = self.image_factory.new_image()
self.assertIsNone(image.image_id)
self.assertIsNone(image.status)
self.assertEqual('private', image.visibility)
self.assertEqual([], image.locations)
def test_new_image_with_location(self):
locations = [{'url': '%s/%s' % (BASE_URI, UUID1),
'metadata': {}}]
image = self.image_factory.new_image(locations=locations)
self.assertEqual(locations, image.locations)
location_bad = {'url': 'unknown://location', 'metadata': {}}
self.assertRaises(exception.BadStoreUri,
self.image_factory.new_image,
locations=[location_bad])
class TestStoreMetaDataChecker(utils.BaseTestCase):
def test_empty(self):
glance_store.check_location_metadata({})
def test_unicode(self):
m = {'key': u'somevalue'}
glance_store.check_location_metadata(m)
def test_unicode_list(self):
m = {'key': [u'somevalue', u'2']}
glance_store.check_location_metadata(m)
def test_unicode_dict(self):
inner = {'key1': u'somevalue', 'key2': u'somevalue'}
m = {'topkey': inner}
glance_store.check_location_metadata(m)
def test_unicode_dict_list(self):
inner = {'key1': u'somevalue', 'key2': u'somevalue'}
m = {'topkey': inner, 'list': [u'somevalue', u'2'], 'u': u'2'}
glance_store.check_location_metadata(m)
def test_nested_dict(self):
inner = {'key1': u'somevalue', 'key2': u'somevalue'}
inner = {'newkey': inner}
inner = {'anotherkey': inner}
m = {'topkey': inner}
glance_store.check_location_metadata(m)
def test_simple_bad(self):
m = {'key1': object()}
self.assertRaises(glance_store.BackendException,
glance_store.check_location_metadata,
m)
def test_list_bad(self):
m = {'key1': [u'somevalue', object()]}
self.assertRaises(glance_store.BackendException,
glance_store.check_location_metadata,
m)
def test_nested_dict_bad(self):
inner = {'key1': u'somevalue', 'key2': object()}
inner = {'newkey': inner}
inner = {'anotherkey': inner}
m = {'topkey': inner}
self.assertRaises(glance_store.BackendException,
glance_store.check_location_metadata,
m)
|
{
"content_hash": "204cf81917543a3c79b49056ac7062a8",
"timestamp": "",
"source": "github",
"line_count": 901,
"max_line_length": 79,
"avg_line_length": 41.490566037735846,
"alnum_prop": 0.6053553754380333,
"repo_name": "rajalokan/glance",
"id": "4fabf4817f04151bdb839643e203941987186ee3",
"size": "38018",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "glance/tests/unit/test_store_image.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "673"
},
{
"name": "Mako",
"bytes": "431"
},
{
"name": "Python",
"bytes": "3840692"
}
],
"symlink_target": ""
}
|
"""
SARSA Agent
This file builds upon the same functions as the Q-learning agent (qlearning.py).
Here's a usage example:
from sarsa import SarsaAgent
agent = SarsaAgent(
alpha=0.1,epsilon=0.25,discount=0.99,
getLegalActions = lambda s: actions_from_that_state)
action = agent.getAction(state)
next_action = agent.getAction(next_state)
agent.update(state, action, next_state, next_action, reward)
agent.epsilon *= 0.99
"""
import random
import numpy as np
from collections import defaultdict
class SarsaAgent(object):
"""
Classical SARSA agent.
The two main methods are
- self.getAction(state) - returns agent's action in that state
    - self.update(state, action, nextState, nextAction, reward) - updates Q(state, action)
Instance variables you have access to
- self.epsilon (exploration prob)
- self.alpha (learning rate)
- self.discount (discount rate aka gamma)
"""
def __init__(self, alpha, epsilon, discount, getLegalActions):
"We initialize agent and Q-values here."
self.getLegalActions = getLegalActions
self._qValues = defaultdict(lambda: defaultdict(lambda: 0))
self.alpha = alpha
self.epsilon = epsilon
self.discount = discount
def getQValue(self, state, action):
"""
Returns Q(state,action)
"""
return self._qValues[state][action]
def setQValue(self, state, action, value):
"""
Sets the Qvalue for [state,action] to the given value
"""
self._qValues[state][action] = value
def getPolicy(self, state):
"""
Compute the best action to take in a state.
"""
possibleActions = self.getLegalActions(state)
# If there are no legal actions, return None
if len(possibleActions) == 0:
return None
"*** this code works exactly as Q-learning ***"
best_action = possibleActions[
np.argmax([self.getQValue(state, a) for a in possibleActions])]
return best_action
def getAction(self, state):
"""
Compute the action to take in the current state, including exploration.
"""
# Pick Action
possibleActions = self.getLegalActions(state)
action = None
# If there are no legal actions, return None
if len(possibleActions) == 0:
return None
# agent parameters:
epsilon = self.epsilon
"*** Epsilon-greedy strategy exactly as Q-learning ***"
if np.random.random() <= epsilon:
action = random.choice(possibleActions)
else:
action = self.getPolicy(state)
return action
def update(self, state, action, nextState, nextAction, reward):
"""
You should do your Q-Value update here
"""
# agent parameters
gamma = self.discount
learning_rate = self.alpha
"*** YOUR CODE HERE ***"
reference_qvalue = reward + gamma * self.getQValue(nextState, nextAction)
updated_qvalue = (1 - learning_rate) * self.getQValue(state, action) + \
learning_rate * reference_qvalue
self.setQValue(state, action, updated_qvalue)
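# Minimal usage sketch (illustrative only): assumes a toy problem where
# actions [0, 1] are always legal, showing one epsilon-greedy action choice
# followed by a single SARSA update.
if __name__ == "__main__":
    agent = SarsaAgent(alpha=0.1, epsilon=0.25, discount=0.99,
                       getLegalActions=lambda s: [0, 1])
    s, s_next, r = "s0", "s1", 1.0
    a = agent.getAction(s)
    a_next = agent.getAction(s_next)
    agent.update(s, a, s_next, a_next, r)
    # With all Q-values initialised to 0:
    # Q(s0, a) = (1 - 0.1) * 0 + 0.1 * (1.0 + 0.99 * 0) = 0.1
    print(agent.getQValue(s, a))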
|
{
"content_hash": "1d531253cca230d03781dcaf4639e0f9",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 91,
"avg_line_length": 29.824074074074073,
"alnum_prop": 0.6128531511952809,
"repo_name": "Scitator/rl-course-experiments",
"id": "ebfb9c137a749a880404525f377d24365f6237b1",
"size": "3221",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TD/sarsa.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "1232862"
},
{
"name": "Python",
"bytes": "156437"
}
],
"symlink_target": ""
}
|
"""Test template support in VTK-Python
VTK-python decides which template specializations
to wrap according to which ones are used in typedefs
and which ones appear as superclasses of other classes.
In addition, the wrappers are hard-coded to wrap the
vtkDenseArray and vtkSparseArray classes over a broad
range of types.
Created on May 29, 2011 by David Gobbi
"""
import sys
import exceptions
import vtk
from vtk.test import Testing
arrayTypes = ['char', 'int8', 'uint8', 'int16', 'uint16',
'int32', 'uint32', int, 'uint', 'int64', 'uint64',
'float32', float, str, 'unicode', vtk.vtkVariant]
arrayCodes = ['c', 'b', 'B', 'h', 'H',
'i', 'I', 'l', 'L', 'q', 'Q',
'f', 'd']
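# Illustrative note: each entry above is a valid key for selecting a wrapped
# specialization, e.g. vtk.vtkDenseArray['uint8'] or vtk.vtkDenseArray[float];
# the tests below iterate over both lists.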
class TestTemplates(Testing.vtkTest):
def testDenseArray(self):
"""Test vtkDenseArray template"""
for t in (arrayTypes + arrayCodes):
a = vtk.vtkDenseArray[t]()
a.Resize(1)
i = vtk.vtkArrayCoordinates(0)
if t in ['bool', '?']:
value = 1
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['float32', 'float64', 'float', 'f', 'd']:
value = 3.125
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['char', 'c']:
value = 'c'
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in [str, 'str', 'unicode']:
value = unicode("hello")
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['vtkVariant', vtk.vtkVariant]:
value = vtk.vtkVariant("world")
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
else:
value = 12
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
def testSparseArray(self):
"""Test vtkSparseArray template"""
for t in (arrayTypes + arrayCodes):
a = vtk.vtkSparseArray[t]()
a.Resize(1)
i = vtk.vtkArrayCoordinates(0)
if t in ['bool', '?']:
value = 0
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['float32', 'float64', 'float', 'f', 'd']:
value = 3.125
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['char', 'c']:
value = 'c'
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in [str, 'str', 'unicode']:
value = unicode("hello")
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
elif t in ['vtkVariant', vtk.vtkVariant]:
value = vtk.vtkVariant("world")
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
else:
value = 12
a.SetValue(i, value)
result = a.GetValue(i)
self.assertEqual(value, result)
def testArray(self):
"""Test array CreateArray"""
o = vtk.vtkArray.CreateArray(vtk.vtkArray.DENSE, vtk.VTK_DOUBLE)
self.assertEqual(o.__class__, vtk.vtkDenseArray[float])
def testVector(self):
"""Test vector templates"""
# make sure Rect inherits operators
r = vtk.vtkRectf(0, 0, 2, 2)
self.assertEqual(r[2], 2.0)
c = vtk.vtkColor4ub(0, 0, 0)
self.assertEqual(list(c), [0, 0, 0, 255])
e = vtk.vtkVector['float32', 3]([0.0, 1.0, 2.0])
self.assertEqual(list(e), [0.0, 1.0, 2.0])
i = vtk.vtkVector3['i'](0)
self.assertEqual(list(i), [0, 0, 0])
if __name__ == "__main__":
Testing.main([(TestTemplates, 'test')])
|
{
"content_hash": "48c9bdbd89b4b57eebd9b97f3b1ac758",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 72,
"avg_line_length": 36.225,
"alnum_prop": 0.5017253278122843,
"repo_name": "biddisco/VTK",
"id": "cdf349ce5dc3e72934052133b799be78cf9a67eb",
"size": "4369",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "Common/DataModel/Testing/Python/TestTemplates.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "37444"
},
{
"name": "C",
"bytes": "45542302"
},
{
"name": "C++",
"bytes": "60467840"
},
{
"name": "CSS",
"bytes": "157961"
},
{
"name": "Cuda",
"bytes": "28721"
},
{
"name": "GAP",
"bytes": "14120"
},
{
"name": "IDL",
"bytes": "4406"
},
{
"name": "Java",
"bytes": "184678"
},
{
"name": "JavaScript",
"bytes": "978324"
},
{
"name": "Objective-C",
"bytes": "121232"
},
{
"name": "Objective-C++",
"bytes": "101052"
},
{
"name": "Pascal",
"bytes": "3255"
},
{
"name": "Perl",
"bytes": "177007"
},
{
"name": "Python",
"bytes": "13262355"
},
{
"name": "Shell",
"bytes": "41929"
},
{
"name": "Tcl",
"bytes": "1894036"
}
],
"symlink_target": ""
}
|
"""Train simple phi model."""
import collections
import random
from absl import app
from absl import flags
from absl import logging
import numpy as np
import sonnet as snt
import tensorflow.compat.v1 as tf
import tree
from option_keyboard import scavenger
from option_keyboard import smart_module
FLAGS = flags.FLAGS
flags.DEFINE_integer("num_phis", 2, "Dimensionality of phis.")
flags.DEFINE_integer("num_train_steps", 2000, "Number of training steps.")
flags.DEFINE_integer("num_replay_steps", 500, "Number of replay steps.")
flags.DEFINE_integer("min_replay_size", 1000,
"Minimum replay size before starting training.")
flags.DEFINE_integer("num_train_repeats", 10, "Number of training repeats.")
flags.DEFINE_float("learning_rate", 3e-3, "Learning rate.")
flags.DEFINE_bool("use_random_tasks", False, "Use random tasks.")
flags.DEFINE_string("normalisation", "L2",
"Normalisation method for cumulant weights.")
flags.DEFINE_string("export_path", None, "Export path.")
StepOutput = collections.namedtuple("StepOutput",
["obs", "actions", "rewards", "next_obs"])
def collect_experience(env, num_episodes, verbose=False):
"""Collect experience."""
num_actions = env.action_spec().maximum + 1
observations = []
actions = []
rewards = []
next_observations = []
for _ in range(num_episodes):
timestep = env.reset()
episode_return = 0
while not timestep.last():
action = np.random.randint(num_actions)
observations.append(timestep.observation)
actions.append(action)
timestep = env.step(action)
rewards.append(timestep.observation["aux_tasks_reward"])
episode_return += timestep.reward
next_observations.append(timestep.observation)
if verbose:
logging.info("Total return for episode: %f", episode_return)
observation_spec = tree.map_structure(lambda _: None, observations[0])
def stack_observations(obs_list):
obs_list = [
np.stack(obs) for obs in zip(*[tree.flatten(obs) for obs in obs_list])
]
obs_dict = tree.unflatten_as(observation_spec, obs_list)
obs_dict.pop("aux_tasks_reward")
return obs_dict
observations = stack_observations(observations)
actions = np.array(actions, dtype=np.int32)
rewards = np.stack(rewards)
next_observations = stack_observations(next_observations)
return StepOutput(observations, actions, rewards, next_observations)
class PhiModel(snt.AbstractModule):
"""A model for learning phi."""
def __init__(self,
n_actions,
n_phis,
network_kwargs,
final_activation="sigmoid",
name="PhiModel"):
super(PhiModel, self).__init__(name=name)
self._n_actions = n_actions
self._n_phis = n_phis
self._network_kwargs = network_kwargs
self._final_activation = final_activation
def _build(self, observation, actions):
obs = observation["arena"]
n_outputs = self._n_actions * self._n_phis
flat_obs = snt.BatchFlatten()(obs)
net = snt.nets.MLP(**self._network_kwargs)(flat_obs)
net = snt.Linear(output_size=n_outputs)(net)
net = snt.BatchReshape((self._n_actions, self._n_phis))(net)
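    # Select, for each batch element, the phi vector of the action actually
    # taken: gather_nd with [batch_index, action] index pairs reduces the
    # (batch, n_actions, n_phis) tensor to (batch, n_phis).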
indices = tf.stack([tf.range(tf.shape(actions)[0]), actions], axis=1)
values = tf.gather_nd(net, indices)
if self._final_activation:
values = getattr(tf.nn, self._final_activation)(values)
return values
def create_ph(tensor):
return tf.placeholder(shape=(None,) + tensor.shape[1:], dtype=tensor.dtype)
def main(argv):
del argv
if FLAGS.use_random_tasks:
tasks = np.random.normal(size=(8, 2))
else:
tasks = [
[1.0, 0.0],
[0.0, 1.0],
[1.0, 1.0],
[-1.0, 1.0],
]
if FLAGS.normalisation == "L1":
tasks /= np.sum(np.abs(tasks), axis=-1, keepdims=True)
elif FLAGS.normalisation == "L2":
tasks /= np.linalg.norm(tasks, axis=-1, keepdims=True)
else:
raise ValueError("Unknown normlisation_method {}".format(
FLAGS.normalisation))
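  # Worked example (illustrative): under L2 normalisation the task [1.0, 1.0]
  # becomes roughly [0.7071, 0.7071]; under L1 it becomes [0.5, 0.5].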
logging.info("Tasks: %s", tasks)
env_config = dict(
arena_size=11,
num_channels=2,
max_num_steps=100,
num_init_objects=10,
object_priors=[1.0, 1.0],
egocentric=True,
default_w=None,
aux_tasks_w=tasks)
env = scavenger.Scavenger(**env_config)
num_actions = env.action_spec().maximum + 1
model_config = dict(
n_actions=num_actions,
n_phis=FLAGS.num_phis,
network_kwargs=dict(
output_sizes=(64, 128),
activate_final=True,
),
)
model = smart_module.SmartModuleExport(lambda: PhiModel(**model_config))
dummy_steps = collect_experience(env, num_episodes=10, verbose=True)
num_rewards = dummy_steps.rewards.shape[-1]
# Placeholders
steps_ph = tree.map_structure(create_ph, dummy_steps)
phis = model(steps_ph.obs, steps_ph.actions)
phis_to_rewards = snt.Linear(
num_rewards, initializers=dict(w=tf.zeros), use_bias=False)
preds = phis_to_rewards(phis)
loss_per_batch = tf.square(preds - steps_ph.rewards)
loss_op = tf.reduce_mean(loss_per_batch)
replay = []
# Optimizer and train op.
with tf.variable_scope("optimizer"):
optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate)
train_op = optimizer.minimize(loss_op)
# Add normalisation of weights in phis_to_rewards
if FLAGS.normalisation == "L1":
w_norm = tf.reduce_sum(tf.abs(phis_to_rewards.w), axis=0, keepdims=True)
elif FLAGS.normalisation == "L2":
w_norm = tf.norm(phis_to_rewards.w, axis=0, keepdims=True)
else:
raise ValueError("Unknown normlisation_method {}".format(
FLAGS.normalisation))
normalise_w = tf.assign(phis_to_rewards.w,
phis_to_rewards.w / tf.maximum(w_norm, 1e-6))
def filter_steps(steps):
mask = np.sum(np.abs(steps.rewards), axis=-1) > 0.1
nonzero_inds = np.where(mask)[0]
zero_inds = np.where(np.logical_not(mask))[0]
zero_inds = np.random.choice(
zero_inds, size=len(nonzero_inds), replace=False)
selected_inds = np.concatenate([nonzero_inds, zero_inds])
selected_steps = tree.map_structure(lambda x: x[selected_inds], steps)
return selected_steps, selected_inds
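  # filter_steps example (illustrative): if 30 collected steps have a nonzero
  # auxiliary reward, it returns those 30 plus 30 randomly sampled zero-reward
  # steps, keeping the regression targets roughly balanced.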
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
step = 0
while step < FLAGS.num_train_steps:
step += 1
steps_output = collect_experience(env, num_episodes=10)
selected_step_outputs, selected_inds = filter_steps(steps_output)
if len(replay) > FLAGS.min_replay_size:
# Do training.
for _ in range(FLAGS.num_train_repeats):
train_samples = random.choices(replay, k=128)
train_samples = tree.map_structure(
lambda *x: np.stack(x, axis=0), *train_samples)
train_samples = tree.unflatten_as(steps_ph, train_samples)
feed_dict = dict(
zip(tree.flatten(steps_ph), tree.flatten(train_samples)))
_, train_loss = sess.run([train_op, loss_op], feed_dict=feed_dict)
sess.run(normalise_w)
# Do evaluation.
if step % 50 == 0:
feed_dict = dict(
zip(tree.flatten(steps_ph), tree.flatten(selected_step_outputs)))
eval_loss = sess.run(loss_op, feed_dict=feed_dict)
logging.info("Step %d, train loss %f, eval loss %f, replay %s",
step, train_loss, eval_loss, len(replay))
print(sess.run(phis_to_rewards.get_variables())[0].T)
values = dict(step=step, train_loss=train_loss, eval_loss=eval_loss)
logging.info(values)
# Add to replay.
if step <= FLAGS.num_replay_steps:
def select_fn(ind):
return lambda x: x[ind]
for idx in range(len(selected_inds)):
replay.append(
tree.flatten(
tree.map_structure(select_fn(idx), selected_step_outputs)))
# Export trained model.
if FLAGS.export_path:
model.export(FLAGS.export_path, sess, overwrite=True)
if __name__ == "__main__":
tf.disable_v2_behavior()
app.run(main)
|
{
"content_hash": "5999f798b1f932fa573331f0cc741463",
"timestamp": "",
"source": "github",
"line_count": 252,
"max_line_length": 79,
"avg_line_length": 32.36507936507937,
"alnum_prop": 0.6402648357037763,
"repo_name": "deepmind/deepmind-research",
"id": "2b637deef563687b0d4efdb3aca9c3e12b7371b4",
"size": "8886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "option_keyboard/gpe_gpi_experiments/train_phi_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1002"
},
{
"name": "C++",
"bytes": "5765"
},
{
"name": "Jupyter Notebook",
"bytes": "12330730"
},
{
"name": "Lua",
"bytes": "76186"
},
{
"name": "OpenEdge ABL",
"bytes": "15630"
},
{
"name": "PureBasic",
"bytes": "8"
},
{
"name": "Python",
"bytes": "3419119"
},
{
"name": "Racket",
"bytes": "226692"
},
{
"name": "Shell",
"bytes": "84450"
},
{
"name": "Starlark",
"bytes": "3463"
}
],
"symlink_target": ""
}
|
import jsonschema
from mock import MagicMock
from mock import Mock
from mock import patch
from testtools import TestCase
from testtools.matchers import Is, Equals
from trove.cluster import models
from trove.cluster.models import Cluster
from trove.cluster.service import ClusterController
from trove.cluster import views
import trove.common.cfg as cfg
from trove.common import exception
from trove.common import strategy
from trove.common import utils
from trove.datastore import models as datastore_models
class TestClusterController(TestCase):
def setUp(self):
super(TestClusterController, self).setUp()
self.controller = ClusterController()
self.cluster = {
"cluster": {
"name": "products",
"datastore": {
"type": "mongodb",
"version": "2.4.10"
},
"instances": [
{
"flavorRef": "7",
"volume": {
"size": 1
},
},
{
"flavorRef": "7",
"volume": {
"size": 1
},
},
{
"flavorRef": "7",
"volume": {
"size": 1
},
},
{
"flavorRef": "7",
"volume": {
"size": 1
},
},
{
"flavorRef": "7",
"volume": {
"size": 1
},
}
]
}
}
self.add_shard = {
"add_shard": {}
}
def test_get_schema_create(self):
schema = self.controller.get_schema('create', self.cluster)
self.assertIsNotNone(schema)
self.assertTrue('cluster' in schema['properties'])
self.assertTrue('cluster')
def test_get_schema_action_add_shard(self):
schema = self.controller.get_schema('add_shard', self.add_shard)
self.assertIsNotNone(schema)
self.assertTrue('add_shard' in schema['properties'])
def test_validate_create(self):
body = self.cluster
schema = self.controller.get_schema('create', body)
validator = jsonschema.Draft4Validator(schema)
self.assertTrue(validator.is_valid(body))
def test_validate_add_shard(self):
body = self.add_shard
schema = self.controller.get_schema('add_shard', body)
validator = jsonschema.Draft4Validator(schema)
self.assertTrue(validator.is_valid(body))
def test_validate_create_blankname(self):
body = self.cluster
body['cluster']['name'] = " "
schema = self.controller.get_schema('create', body)
validator = jsonschema.Draft4Validator(schema)
self.assertFalse(validator.is_valid(body))
errors = sorted(validator.iter_errors(body), key=lambda e: e.path)
self.assertThat(len(errors), Is(1))
self.assertThat(errors[0].message,
Equals("' ' does not match '^.*[0-9a-zA-Z]+.*$'"))
def test_validate_create_blank_datastore(self):
body = self.cluster
body['cluster']['datastore']['type'] = ""
schema = self.controller.get_schema('create', body)
validator = jsonschema.Draft4Validator(schema)
self.assertFalse(validator.is_valid(body))
errors = sorted(validator.iter_errors(body), key=lambda e: e.path)
error_messages = [error.message for error in errors]
error_paths = [error.path.pop() for error in errors]
self.assertThat(len(errors), Is(2))
self.assertIn("'' is too short", error_messages)
self.assertIn("'' does not match '^.*[0-9a-zA-Z]+.*$'", error_messages)
self.assertIn("type", error_paths)
@patch.object(Cluster, 'create')
@patch.object(datastore_models, 'get_datastore_version')
def test_create_clusters_disabled(self,
mock_get_datastore_version,
mock_cluster_create):
body = self.cluster
tenant_id = Mock()
context = Mock()
req = Mock()
req.environ = MagicMock()
req.environ.get = Mock(return_value=context)
datastore_version = Mock()
datastore_version.manager = 'mysql'
mock_get_datastore_version.return_value = (Mock(), datastore_version)
self.assertRaises(exception.ClusterDatastoreNotSupported,
self.controller.create,
req,
body,
tenant_id)
@patch.object(Cluster, 'create')
@patch.object(utils, 'get_id_from_href')
@patch.object(datastore_models, 'get_datastore_version')
def test_create_clusters(self,
mock_get_datastore_version,
mock_id_from_href,
mock_cluster_create):
body = self.cluster
tenant_id = Mock()
context = Mock()
req = Mock()
req.environ = Mock()
req.environ.__getitem__ = Mock(return_value=context)
datastore_version = Mock()
datastore_version.manager = 'mongodb'
datastore = Mock()
mock_get_datastore_version.return_value = (datastore,
datastore_version)
instances = [{'volume_size': 1, 'flavor_id': '1234'},
{'volume_size': 1, 'flavor_id': '1234'},
{'volume_size': 1, 'flavor_id': '1234'},
{'volume_size': 1, 'flavor_id': '1234'},
{'volume_size': 1, 'flavor_id': '1234'}]
mock_id_from_href.return_value = '1234'
mock_cluster = Mock()
mock_cluster.instances = []
mock_cluster.datastore_version.manager = 'mongodb'
mock_cluster_create.return_value = mock_cluster
self.controller.create(req, body, tenant_id)
mock_cluster_create.assert_called_with(context, 'products',
datastore, datastore_version,
instances)
@patch.object(Cluster, 'load')
def test_show_cluster(self,
mock_cluster_load):
tenant_id = Mock()
id = Mock()
context = Mock()
req = Mock()
req.environ = Mock()
req.environ.__getitem__ = Mock(return_value=context)
mock_cluster = Mock()
mock_cluster.instances = []
mock_cluster.datastore_version.manager = 'mongodb'
mock_cluster_load.return_value = mock_cluster
self.controller.show(req, tenant_id, id)
mock_cluster_load.assert_called_with(context, id)
@patch.object(Cluster, 'load')
@patch.object(Cluster, 'load_instance')
def test_show_cluster_instance(self,
mock_cluster_load_instance,
mock_cluster_load):
tenant_id = Mock()
cluster_id = Mock()
instance_id = Mock()
context = Mock()
req = Mock()
req.environ = Mock()
req.environ.__getitem__ = Mock(return_value=context)
cluster = Mock()
mock_cluster_load.return_value = cluster
cluster.id = cluster_id
self.controller.show_instance(req, tenant_id, cluster_id, instance_id)
mock_cluster_load_instance.assert_called_with(context, cluster.id,
instance_id)
@patch.object(Cluster, 'load')
def test_delete_cluster(self, mock_cluster_load):
tenant_id = Mock()
cluster_id = Mock()
req = MagicMock()
cluster = Mock()
mock_cluster_load.return_value = cluster
self.controller.delete(req, tenant_id, cluster_id)
        self.assertTrue(cluster.delete.called)
class TestClusterControllerWithStrategy(TestCase):
def setUp(self):
super(TestClusterControllerWithStrategy, self).setUp()
self.controller = ClusterController()
self.cluster = {
"cluster": {
"name": "products",
"datastore": {
"type": "mongodb",
"version": "2.4.10"
},
"instances": [
{
"flavorRef": "7",
"volume": {
"size": 1
},
},
{
"flavorRef": "7",
"volume": {
"size": 1
},
},
{
"flavorRef": "7",
"volume": {
"size": 1
},
},
{
"flavorRef": "7",
"volume": {
"size": 1
},
},
{
"flavorRef": "7",
"volume": {
"size": 1
},
}
]
}
}
def tearDown(self):
super(TestClusterControllerWithStrategy, self).tearDown()
cfg.CONF.clear_override('cluster_support', group='mongodb')
cfg.CONF.clear_override('api_strategy', group='mongodb')
@patch.object(datastore_models, 'get_datastore_version')
@patch.object(models.Cluster, 'create')
def test_create_clusters_disabled(self,
mock_cluster_create,
mock_get_datastore_version):
cfg.CONF.set_override('cluster_support', False, group='mongodb')
body = self.cluster
tenant_id = Mock()
context = Mock()
req = Mock()
req.environ = MagicMock()
req.environ.get = Mock(return_value=context)
datastore_version = Mock()
datastore_version.manager = 'mongodb'
mock_get_datastore_version.return_value = (Mock(), datastore_version)
self.assertRaises(exception.TroveError, self.controller.create, req,
body, tenant_id)
@patch.object(views.ClusterView, 'data', return_value={})
@patch.object(datastore_models, 'get_datastore_version')
@patch.object(models.Cluster, 'create')
def test_create_clusters_enabled(self,
mock_cluster_create,
mock_get_datastore_version,
mock_cluster_view_data):
cfg.CONF.set_override('cluster_support', True, group='mongodb')
body = self.cluster
tenant_id = Mock()
context = Mock()
req = Mock()
req.environ = MagicMock()
req.environ.get = Mock(return_value=context)
datastore_version = Mock()
datastore_version.manager = 'mongodb'
mock_get_datastore_version.return_value = (Mock(), datastore_version)
mock_cluster = Mock()
mock_cluster.datastore_version.manager = 'mongodb'
mock_cluster_create.return_value = mock_cluster
self.controller.create(req, body, tenant_id)
@patch.object(models.Cluster, 'load')
def test_controller_action_no_strategy(self,
mock_cluster_load):
body = {'do_stuff2': {}}
tenant_id = Mock()
context = Mock()
id = Mock()
req = Mock()
req.environ = MagicMock()
req.environ.get = Mock(return_value=context)
cluster = Mock()
cluster.datastore_version.manager = 'mongodb'
mock_cluster_load.return_value = cluster
self.assertRaises(exception.TroveError, self.controller.action, req,
body, tenant_id, id)
@patch.object(strategy, 'load_api_strategy')
@patch.object(models.Cluster, 'load')
def test_controller_action_found(self,
mock_cluster_load,
mock_cluster_api_strategy):
body = {'do_stuff': {}}
tenant_id = Mock()
context = Mock()
id = Mock()
req = Mock()
req.environ = MagicMock()
req.environ.get = Mock(return_value=context)
cluster = Mock()
cluster.datastore_version.manager = 'mongodb'
mock_cluster_load.return_value = cluster
strat = Mock()
do_stuff_func = Mock()
strat.cluster_controller_actions = \
{'do_stuff': do_stuff_func}
mock_cluster_api_strategy.return_value = strat
self.controller.action(req, body, tenant_id, id)
self.assertEqual(1, do_stuff_func.call_count)
|
{
"content_hash": "0837e749c1f0a066db4b9f3a938b7fa1",
"timestamp": "",
"source": "github",
"line_count": 370,
"max_line_length": 79,
"avg_line_length": 35.78108108108108,
"alnum_prop": 0.50079311126218,
"repo_name": "changsimon/trove",
"id": "4e29f5562d10f76d677860d7ba323f7903bc2810",
"size": "13880",
"binary": false,
"copies": "1",
"ref": "refs/heads/bug/1347114-dev",
"path": "trove/tests/unittests/cluster/test_cluster_controller.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "21914"
},
{
"name": "JavaScript",
"bytes": "60526"
},
{
"name": "Python",
"bytes": "2620989"
},
{
"name": "Shell",
"bytes": "4771"
},
{
"name": "XSLT",
"bytes": "50542"
}
],
"symlink_target": ""
}
|
"""Utilities to manipulate TensorProtos."""
import numpy as np
from tensorboard.compat.proto import tensor_pb2
from tensorboard.compat.tensorflow_stub import dtypes, compat, tensor_shape
def ExtractBitsFromFloat16(x):
return np.asarray(x, dtype=np.float16).view(np.uint16).item()
def SlowAppendFloat16ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.half_val.extend(
[ExtractBitsFromFloat16(x) for x in proto_values]
)
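# Example (illustrative): ExtractBitsFromFloat16(1.0) == 0x3C00 == 15360;
# half_val stores the raw uint16 bit patterns of the float16 values.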
def ExtractBitsFromBFloat16(x):
return (
np.asarray(x, dtype=dtypes.bfloat16.as_numpy_dtype)
.view(np.uint16)
.item()
)
def SlowAppendBFloat16ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.half_val.extend(
[ExtractBitsFromBFloat16(x) for x in proto_values]
)
def SlowAppendFloat32ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.float_val.extend([x.item() for x in proto_values])
def SlowAppendFloat64ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.double_val.extend([x.item() for x in proto_values])
def SlowAppendIntArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.int_val.extend([x.item() for x in proto_values])
def SlowAppendInt64ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.int64_val.extend([x.item() for x in proto_values])
def SlowAppendQIntArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.int_val.extend([x[0].item() for x in proto_values])
def SlowAppendUInt32ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.uint32_val.extend([x.item() for x in proto_values])
def SlowAppendUInt64ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.uint64_val.extend([x.item() for x in proto_values])
def SlowAppendComplex64ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.scomplex_val.extend(
[v.item() for x in proto_values for v in [x.real, x.imag]]
)
def SlowAppendComplex128ArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.dcomplex_val.extend(
[v.item() for x in proto_values for v in [x.real, x.imag]]
)
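# Example (illustrative): the complex append functions interleave real and
# imaginary parts, so [1 + 2j, 3 + 4j] is stored as [1.0, 2.0, 3.0, 4.0].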
def SlowAppendObjectArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.string_val.extend([compat.as_bytes(x) for x in proto_values])
def SlowAppendBoolArrayToTensorProto(tensor_proto, proto_values):
tensor_proto.bool_val.extend([x.item() for x in proto_values])
_NP_TO_APPEND_FN = {
np.float16: SlowAppendFloat16ArrayToTensorProto,
np.float32: SlowAppendFloat32ArrayToTensorProto,
np.float64: SlowAppendFloat64ArrayToTensorProto,
np.int32: SlowAppendIntArrayToTensorProto,
np.int64: SlowAppendInt64ArrayToTensorProto,
np.uint8: SlowAppendIntArrayToTensorProto,
np.uint16: SlowAppendIntArrayToTensorProto,
np.uint32: SlowAppendUInt32ArrayToTensorProto,
np.uint64: SlowAppendUInt64ArrayToTensorProto,
np.int8: SlowAppendIntArrayToTensorProto,
np.int16: SlowAppendIntArrayToTensorProto,
np.complex64: SlowAppendComplex64ArrayToTensorProto,
np.complex128: SlowAppendComplex128ArrayToTensorProto,
np.object_: SlowAppendObjectArrayToTensorProto,
np.bool_: SlowAppendBoolArrayToTensorProto,
dtypes.qint8.as_numpy_dtype: SlowAppendQIntArrayToTensorProto,
dtypes.quint8.as_numpy_dtype: SlowAppendQIntArrayToTensorProto,
dtypes.qint16.as_numpy_dtype: SlowAppendQIntArrayToTensorProto,
dtypes.quint16.as_numpy_dtype: SlowAppendQIntArrayToTensorProto,
dtypes.qint32.as_numpy_dtype: SlowAppendQIntArrayToTensorProto,
# NOTE(touts): Intentionally no way to feed a DT_BFLOAT16.
}
BACKUP_DICT = {
dtypes.bfloat16.as_numpy_dtype: SlowAppendBFloat16ArrayToTensorProto
}
def GetFromNumpyDTypeDict(dtype_dict, dtype):
# NOTE: dtype_dict.get(dtype) always returns None.
for key, val in dtype_dict.items():
if key == dtype:
return val
for key, val in BACKUP_DICT.items():
if key == dtype:
return val
return None
def GetNumpyAppendFn(dtype):
    # numpy dtypes for strings are variable length. We cannot compare
    # dtype with a single constant (np.string does not exist) to decide
    # whether dtype is a "string" type. We need to compare dtype.type to be
    # sure it's a string type.
if dtype.type == np.string_ or dtype.type == np.unicode_:
return SlowAppendObjectArrayToTensorProto
return GetFromNumpyDTypeDict(_NP_TO_APPEND_FN, dtype)
def _GetDenseDimensions(list_of_lists):
"""Returns the inferred dense dimensions of a list of lists."""
if not isinstance(list_of_lists, (list, tuple)):
return []
elif not list_of_lists:
return [0]
else:
return [len(list_of_lists)] + _GetDenseDimensions(list_of_lists[0])
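# Examples (illustrative): _GetDenseDimensions([[1, 2, 3], [4, 5, 6]]) == [2, 3],
# _GetDenseDimensions([]) == [0], and _GetDenseDimensions(7) == [].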
def _FlattenToStrings(nested_strings):
if isinstance(nested_strings, (list, tuple)):
for inner in nested_strings:
for flattened_string in _FlattenToStrings(inner):
yield flattened_string
else:
yield nested_strings
_TENSOR_CONTENT_TYPES = frozenset(
[
dtypes.float32,
dtypes.float64,
dtypes.int32,
dtypes.uint8,
dtypes.int16,
dtypes.int8,
dtypes.int64,
dtypes.qint8,
dtypes.quint8,
dtypes.qint16,
dtypes.quint16,
dtypes.qint32,
dtypes.uint32,
dtypes.uint64,
]
)
class _Message(object):
def __init__(self, message):
self._message = message
def __repr__(self):
return self._message
def _FirstNotNone(l):
for x in l:
if x is not None:
return x
return None
def _NotNone(v):
if v is None:
return _Message("None")
else:
return v
def _FilterTuple(v):
if not isinstance(v, (list, tuple)):
return v
if isinstance(v, tuple):
if not any(isinstance(x, (list, tuple)) for x in v):
return None
if isinstance(v, list):
if not any(isinstance(x, (list, tuple)) for x in v):
return _FirstNotNone(
[None if isinstance(x, (list, tuple)) else x for x in v]
)
return _FirstNotNone([_FilterTuple(x) for x in v])
def _FilterInt(v):
if isinstance(v, (list, tuple)):
return _FirstNotNone([_FilterInt(x) for x in v])
return (
None
if isinstance(v, (compat.integral_types, tensor_shape.Dimension))
else _NotNone(v)
)
def _FilterFloat(v):
if isinstance(v, (list, tuple)):
return _FirstNotNone([_FilterFloat(x) for x in v])
return None if isinstance(v, compat.real_types) else _NotNone(v)
def _FilterComplex(v):
if isinstance(v, (list, tuple)):
return _FirstNotNone([_FilterComplex(x) for x in v])
return None if isinstance(v, compat.complex_types) else _NotNone(v)
def _FilterStr(v):
if isinstance(v, (list, tuple)):
return _FirstNotNone([_FilterStr(x) for x in v])
if isinstance(v, compat.bytes_or_text_types):
return None
else:
return _NotNone(v)
def _FilterBool(v):
if isinstance(v, (list, tuple)):
return _FirstNotNone([_FilterBool(x) for x in v])
return None if isinstance(v, bool) else _NotNone(v)
_TF_TO_IS_OK = {
dtypes.bool: [_FilterBool],
dtypes.complex128: [_FilterComplex],
dtypes.complex64: [_FilterComplex],
dtypes.float16: [_FilterFloat],
dtypes.float32: [_FilterFloat],
dtypes.float64: [_FilterFloat],
dtypes.int16: [_FilterInt],
dtypes.int32: [_FilterInt],
dtypes.int64: [_FilterInt],
dtypes.int8: [_FilterInt],
dtypes.qint16: [_FilterInt, _FilterTuple],
dtypes.qint32: [_FilterInt, _FilterTuple],
dtypes.qint8: [_FilterInt, _FilterTuple],
dtypes.quint16: [_FilterInt, _FilterTuple],
dtypes.quint8: [_FilterInt, _FilterTuple],
dtypes.string: [_FilterStr],
dtypes.uint16: [_FilterInt],
dtypes.uint8: [_FilterInt],
}
def _Assertconvertible(values, dtype):
# If dtype is None or not recognized, assume it's convertible.
if dtype is None or dtype not in _TF_TO_IS_OK:
return
fn_list = _TF_TO_IS_OK.get(dtype)
mismatch = _FirstNotNone([fn(values) for fn in fn_list])
if mismatch is not None:
raise TypeError(
"Expected %s, got %s of type '%s' instead."
% (dtype.name, repr(mismatch), type(mismatch).__name__)
)
def make_tensor_proto(values, dtype=None, shape=None, verify_shape=False):
"""Create a TensorProto.
Args:
values: Values to put in the TensorProto.
dtype: Optional tensor_pb2 DataType value.
shape: List of integers representing the dimensions of tensor.
verify_shape: Boolean that enables verification of a shape of values.
Returns:
A `TensorProto`. Depending on the type, it may contain data in the
"tensor_content" attribute, which is not directly useful to Python programs.
To access the values you should convert the proto back to a numpy ndarray
with `tensor_util.MakeNdarray(proto)`.
If `values` is a `TensorProto`, it is immediately returned; `dtype` and
`shape` are ignored.
Raises:
TypeError: if unsupported types are provided.
      ValueError: if arguments have inappropriate values or if verify_shape is
        True and the shape of values does not equal the shape argument.
make_tensor_proto accepts "values" of a python scalar, a python list, a
numpy ndarray, or a numpy scalar.
If "values" is a python scalar or a python list, make_tensor_proto
first convert it to numpy ndarray. If dtype is None, the
conversion tries its best to infer the right numpy data
type. Otherwise, the resulting numpy array has a convertible data
type with the given dtype.
In either case above, the numpy ndarray (either the caller provided
or the auto converted) must have the convertible type with dtype.
make_tensor_proto then converts the numpy array to a tensor proto.
If "shape" is None, the resulting tensor proto represents the numpy
array precisely.
Otherwise, "shape" specifies the tensor's shape and the numpy array
can not have more elements than what "shape" specifies.
"""
if isinstance(values, tensor_pb2.TensorProto):
return values
if dtype:
dtype = dtypes.as_dtype(dtype)
is_quantized = dtype in [
dtypes.qint8,
dtypes.quint8,
dtypes.qint16,
dtypes.quint16,
dtypes.qint32,
]
# We first convert value to a numpy array or scalar.
if isinstance(values, (np.ndarray, np.generic)):
if dtype:
nparray = values.astype(dtype.as_numpy_dtype)
else:
nparray = values
elif callable(getattr(values, "__array__", None)) or isinstance(
getattr(values, "__array_interface__", None), dict
):
# If a class has the __array__ method, or __array_interface__ dict, then it
# is possible to convert to numpy array.
nparray = np.asarray(values, dtype=dtype)
# This is the preferred way to create an array from the object, so replace
# the `values` with the array so that _FlattenToStrings is not run.
values = nparray
else:
if values is None:
raise ValueError("None values not supported.")
# if dtype is provided, forces numpy array to be the type
# provided if possible.
if dtype and dtype.is_numpy_compatible:
np_dt = dtype.as_numpy_dtype
else:
np_dt = None
# If shape is None, numpy.prod returns None when dtype is not set, but raises
# exception when dtype is set to np.int64
if shape is not None and np.prod(shape, dtype=np.int64) == 0:
nparray = np.empty(shape, dtype=np_dt)
else:
_Assertconvertible(values, dtype)
nparray = np.array(values, dtype=np_dt)
        # We need to pass in quantized values as tuples, so don't apply the
        # shape check to them.
if (
list(nparray.shape) != _GetDenseDimensions(values)
and not is_quantized
):
raise ValueError(
"""Argument must be a dense tensor: %s"""
""" - got shape %s, but wanted %s."""
% (values, list(nparray.shape), _GetDenseDimensions(values))
)
# python/numpy default float type is float64. We prefer float32 instead.
if (nparray.dtype == np.float64) and dtype is None:
nparray = nparray.astype(np.float32)
# python/numpy default int type is int64. We prefer int32 instead.
elif (nparray.dtype == np.int64) and dtype is None:
downcasted_array = nparray.astype(np.int32)
# Do not down cast if it leads to precision loss.
if np.array_equal(downcasted_array, nparray):
nparray = downcasted_array
# if dtype is provided, it must be convertible with what numpy
# conversion says.
numpy_dtype = dtypes.as_dtype(nparray.dtype)
if numpy_dtype is None:
raise TypeError("Unrecognized data type: %s" % nparray.dtype)
# If dtype was specified and is a quantized type, we convert
# numpy_dtype back into the quantized version.
if is_quantized:
numpy_dtype = dtype
if dtype is not None and (
not hasattr(dtype, "base_dtype")
or dtype.base_dtype != numpy_dtype.base_dtype
):
raise TypeError(
"Inconvertible types: %s vs. %s. Value is %s"
% (dtype, nparray.dtype, values)
)
# If shape is not given, get the shape from the numpy array.
if shape is None:
shape = nparray.shape
is_same_size = True
shape_size = nparray.size
else:
shape = [int(dim) for dim in shape]
shape_size = np.prod(shape, dtype=np.int64)
is_same_size = shape_size == nparray.size
if verify_shape:
if not nparray.shape == tuple(shape):
raise TypeError(
"Expected Tensor's shape: %s, got %s."
% (tuple(shape), nparray.shape)
)
if nparray.size > shape_size:
raise ValueError(
"Too many elements provided. Needed at most %d, but received %d"
% (shape_size, nparray.size)
)
tensor_proto = tensor_pb2.TensorProto(
dtype=numpy_dtype.as_datatype_enum,
tensor_shape=tensor_shape.as_shape(shape).as_proto(),
)
if is_same_size and numpy_dtype in _TENSOR_CONTENT_TYPES and shape_size > 1:
if nparray.size * nparray.itemsize >= (1 << 31):
raise ValueError(
"Cannot create a tensor proto whose content is larger than 2GB."
)
tensor_proto.tensor_content = nparray.tobytes()
return tensor_proto
# If we were not given values as a numpy array, compute the proto_values
# from the given values directly, to avoid numpy trimming nulls from the
# strings. Since values could be a list of strings, or a multi-dimensional
# list of lists that might or might not correspond to the given shape,
# we flatten it conservatively.
if numpy_dtype == dtypes.string and not isinstance(values, np.ndarray):
proto_values = _FlattenToStrings(values)
# At this point, values may be a list of objects that we could not
# identify a common type for (hence it was inferred as
# np.object/dtypes.string). If we are unable to convert it to a
# string, we raise a more helpful error message.
#
# Ideally, we'd be able to convert the elements of the list to a
# common type, but this type inference requires some thinking and
# so we defer it for now.
try:
str_values = [compat.as_bytes(x) for x in proto_values]
except TypeError:
raise TypeError(
"Failed to convert object of type %s to Tensor. "
"Contents: %s. Consider casting elements to a "
"supported type." % (type(values), values)
)
tensor_proto.string_val.extend(str_values)
return tensor_proto
# TensorFlow expects C order (a.k.a., eigen row major).
proto_values = nparray.ravel()
append_fn = GetNumpyAppendFn(proto_values.dtype)
if append_fn is None:
raise TypeError(
"Element type not supported in TensorProto: %s" % numpy_dtype.name
)
append_fn(tensor_proto, proto_values)
return tensor_proto
def make_ndarray(tensor):
"""Create a numpy ndarray from a tensor.
Create a numpy ndarray with the same shape and data as the tensor.
Args:
tensor: A TensorProto.
Returns:
A numpy array with the tensor contents.
Raises:
TypeError: if tensor has unsupported type.
"""
shape = [d.size for d in tensor.tensor_shape.dim]
num_elements = np.prod(shape, dtype=np.int64)
tensor_dtype = dtypes.as_dtype(tensor.dtype)
dtype = tensor_dtype.as_numpy_dtype
if tensor.tensor_content:
return (
np.frombuffer(tensor.tensor_content, dtype=dtype)
.copy()
.reshape(shape)
)
elif tensor_dtype == dtypes.float16 or tensor_dtype == dtypes.bfloat16:
# the half_val field of the TensorProto stores the binary representation
# of the fp16: we need to reinterpret this as a proper float16
if len(tensor.half_val) == 1:
tmp = np.array(tensor.half_val[0], dtype=np.uint16)
tmp.dtype = tensor_dtype.as_numpy_dtype
return np.repeat(tmp, num_elements).reshape(shape)
else:
tmp = np.fromiter(tensor.half_val, dtype=np.uint16)
tmp.dtype = tensor_dtype.as_numpy_dtype
return tmp.reshape(shape)
elif tensor_dtype == dtypes.float32:
if len(tensor.float_val) == 1:
return np.repeat(
np.array(tensor.float_val[0], dtype=dtype), num_elements
).reshape(shape)
else:
return np.fromiter(tensor.float_val, dtype=dtype).reshape(shape)
elif tensor_dtype == dtypes.float64:
if len(tensor.double_val) == 1:
return np.repeat(
np.array(tensor.double_val[0], dtype=dtype), num_elements
).reshape(shape)
else:
return np.fromiter(tensor.double_val, dtype=dtype).reshape(shape)
elif tensor_dtype in [
dtypes.int32,
dtypes.uint8,
dtypes.uint16,
dtypes.int16,
dtypes.int8,
dtypes.qint32,
dtypes.quint8,
dtypes.qint8,
dtypes.qint16,
dtypes.quint16,
]:
if len(tensor.int_val) == 1:
return np.repeat(
np.array(tensor.int_val[0], dtype=dtype), num_elements
).reshape(shape)
else:
return np.fromiter(tensor.int_val, dtype=dtype).reshape(shape)
elif tensor_dtype == dtypes.int64:
if len(tensor.int64_val) == 1:
return np.repeat(
np.array(tensor.int64_val[0], dtype=dtype), num_elements
).reshape(shape)
else:
return np.fromiter(tensor.int64_val, dtype=dtype).reshape(shape)
elif tensor_dtype == dtypes.string:
if len(tensor.string_val) == 1:
return np.repeat(
np.array(tensor.string_val[0], dtype=dtype), num_elements
).reshape(shape)
else:
return np.array(
[x for x in tensor.string_val], dtype=dtype
).reshape(shape)
elif tensor_dtype == dtypes.complex64:
it = iter(tensor.scomplex_val)
if len(tensor.scomplex_val) == 2:
return np.repeat(
np.array(
complex(tensor.scomplex_val[0], tensor.scomplex_val[1]),
dtype=dtype,
),
num_elements,
).reshape(shape)
else:
return np.array(
[complex(x[0], x[1]) for x in zip(it, it)], dtype=dtype
).reshape(shape)
elif tensor_dtype == dtypes.complex128:
it = iter(tensor.dcomplex_val)
if len(tensor.dcomplex_val) == 2:
return np.repeat(
np.array(
complex(tensor.dcomplex_val[0], tensor.dcomplex_val[1]),
dtype=dtype,
),
num_elements,
).reshape(shape)
else:
return np.array(
[complex(x[0], x[1]) for x in zip(it, it)], dtype=dtype
).reshape(shape)
elif tensor_dtype == dtypes.bool:
if len(tensor.bool_val) == 1:
return np.repeat(
np.array(tensor.bool_val[0], dtype=dtype), num_elements
).reshape(shape)
else:
return np.fromiter(tensor.bool_val, dtype=dtype).reshape(shape)
else:
raise TypeError("Unsupported tensor type: %s" % tensor.dtype)
|
{
"content_hash": "09429acbbfa5a6bf855232669181d652",
"timestamp": "",
"source": "github",
"line_count": 605,
"max_line_length": 85,
"avg_line_length": 34.897520661157024,
"alnum_prop": 0.6292331738739165,
"repo_name": "tensorflow/tensorboard",
"id": "013b62909f8e212a868be9cbd40a74bb9c2d044d",
"size": "21802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorboard/util/tensor_util.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16222"
},
{
"name": "Dockerfile",
"bytes": "1226"
},
{
"name": "HTML",
"bytes": "154824"
},
{
"name": "Java",
"bytes": "20643"
},
{
"name": "JavaScript",
"bytes": "11869"
},
{
"name": "Jupyter Notebook",
"bytes": "7697"
},
{
"name": "Python",
"bytes": "2922179"
},
{
"name": "Rust",
"bytes": "311041"
},
{
"name": "SCSS",
"bytes": "136834"
},
{
"name": "Shell",
"bytes": "36731"
},
{
"name": "Starlark",
"bytes": "541743"
},
{
"name": "TypeScript",
"bytes": "5930550"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.conf import settings
from django.test import TestCase, override_settings
from staticgen.exceptions import StaticgenError
from staticgen.staticgen_crawler import StaticgenCrawler
class TestStaticgenCrawler(TestCase):
def setUp(self):
self.crawler = StaticgenCrawler()
@override_settings(STATICGEN_SITEMAP_URL='sitemap.xml')
def test_get_sitemap_url_from_settings(self):
sitemap_url = self.crawler.get_sitemap_url()
self.assertEqual(sitemap_url, 'sitemap.xml')
def test_get_sitemap_url_autodiscover(self):
sitemap_url = self.crawler.get_sitemap_url()
self.assertEqual(sitemap_url, '/sitemap.xml')
@override_settings(ROOT_URLCONF=(), STATICGEN_FAIL_SILENTLY=False)
def test_get_sitemap_url_raises_error(self):
self.assertRaises(StaticgenError, self.crawler.get_sitemap_url)
def test_get_sitemap_links(self):
links = self.crawler.get_sitemap_links()
# should be 9 urls - for 9 posts urls in sitemap
self.assertEqual(len(links), 9)
def test_clean_urls_with_bad_urls(self):
bad_urls = (
'{media_url}user-upload.jpg'.format(media_url=settings.MEDIA_URL),
'{static_url}logo.jpg'.format(static_url=settings.STATIC_URL),
'#page-top',
'mailto:staticgen@example.com',
'http://twitter.com/staticgen/'
)
for url in bad_urls:
self.assertEqual(self.crawler.clean_url(url), None)
def test_clean_urls_with_good_url(self):
good_urls = (
('/staticgen/', '/staticgen/'),
('http://example.com/staticgen/', '//example.com/staticgen/'),
('https://example.com/staticgen/', '//example.com/staticgen/'),
('http://www.example.com/staticgen/', '//example.com/staticgen/'),
('https://www.example.com/staticgen/', '//example.com/staticgen/'),
)
for url, cleaned_url in good_urls:
self.assertEqual(self.crawler.clean_url(url), cleaned_url)
def test_get_pages(self):
# should be 9 posts detailview + 1 post listview + homepage + 1 redirect view = 12 pages
urls = self.crawler.get_urls()
self.assertEqual(len(urls), 12)
|
{
"content_hash": "2c80ef14e6558018b38c2e6bb23040f6",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 96,
"avg_line_length": 38,
"alnum_prop": 0.6429824561403509,
"repo_name": "mishbahr/django-staticgen",
"id": "c66d940363614f3812c9a8eb65e23c26c9794dd8",
"size": "2305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_staticgen_crawler.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "6277"
},
{
"name": "Makefile",
"bytes": "1249"
},
{
"name": "Python",
"bytes": "80640"
}
],
"symlink_target": ""
}
|
import textwrap
def wrap(string, max_width):
return textwrap.wrap(string, max_width)
if __name__ == '__main__':
string, max_width = input(), int(input())
print('\n'.join(wrap(string, max_width)))
|
{
"content_hash": "b2f6fbd1025e73652532e0517a938103",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 45,
"avg_line_length": 26.125,
"alnum_prop": 0.631578947368421,
"repo_name": "neiesc/Problem-solving",
"id": "9f172f3fc50fd3de25819d01538a1c607dff35b3",
"size": "295",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "HackerRank/Python/Strings/text-wrap.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "42048"
},
{
"name": "Elixir",
"bytes": "7313"
},
{
"name": "JavaScript",
"bytes": "1052"
},
{
"name": "Python",
"bytes": "58944"
},
{
"name": "Scala",
"bytes": "333"
},
{
"name": "Shell",
"bytes": "317"
}
],
"symlink_target": ""
}
|
from setuptools import setup
VERSION = "0.1.0"
setup(
name='techblog',
version=VERSION,
description="Blog for Coders and Photographers",
zip_safe=False,
license="MIT",
author="Will McGugan",
author_email="willmcgugan@gmail.com",
url="https://github.com/moyaproject/moya-techblog",
entry_points={
"console_scripts": [
'techblog = techblog:main'
]
}
)
|
{
"content_hash": "8de9f6d31e21fb85ae62e96a2961c089",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 55,
"avg_line_length": 19.40909090909091,
"alnum_prop": 0.6206088992974239,
"repo_name": "moyaproject/moya-techblog",
"id": "1dafa036d3e90c0bcceec24674af2c2720c35ab2",
"size": "427",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "144528"
},
{
"name": "HTML",
"bytes": "54165"
},
{
"name": "JavaScript",
"bytes": "9627"
},
{
"name": "Python",
"bytes": "1167"
},
{
"name": "Shell",
"bytes": "146"
}
],
"symlink_target": ""
}
|
from bitcoin import *
from txUtils import *
from jsonConn import *
from time import sleep
# Ptx=makePtx(script, privtopub(rKeyClient), uKeyServer, 50000)
# clientSig = multisign(Ptx, 0, script, rKeyClient)
# conn.jsend(Ptx)#
# conn.jsend(clientSig)#
def paymentSpin(redeemScript, conn, privkeyC, pubkeyS, increment):
amount = 0
i = 0
print 'inpaymentspin'
while i<10:
print 'Paid so far: ', amount
amount += increment
i+=1
# make a payment transaction with amount += increment
Ptx = makePtx(privtopub(privkeyC), pubkeyS, amount)
# partially sign it
clientSig = multisign(Ptx, 0, redeemScript, privkeyC)
# send Ptx
conn.jsend(Ptx)
# send client signature
sleep(.2)
conn.jsend(clientSig)
sleep(2)
print 'Total spent this time: %d' %amount
return amount
# Test
if __name__ == "__main__":
if len(sys.argv) > 1:
host = str(sys.argv[1])
else:
host = '192.168.12.1'
# private key of client
rKeyClient = 'KyLuCNsxddnqpdJW1Q3q2mQtkssThJUfcGq9hdCE8W72xYPD3He3' # 1CA1rufdFggCkd4kZQaff6NxZa1P9AfrrE
# connect
conn = JsonConn()
conn.connect((host,7879))
# HANDSHAKE
uKeyServer = exchangePubKey(privtopub(rKeyClient), conn)
#uKeyServer = '029cb4cbc58ff1b71b9aee78b0228313201e09d4a3bf263ed8e7e31709e49f7d28' # 1QKb78KGXGivbgMXJbVmUz6Tp9w6BAtYve
dep = balanceAddr(privtoaddr(rKeyClient)) - 15000
# building the Dtx
[Dtx, script] = makeDtx(rKeyClient, uKeyServer, dep)
# sign and send Dtx
DtxS = signAllIns(Dtx, rKeyClient)
print DtxS
conn.jsend(DtxS) #
conn.jsend(script)#
sleep(3)
# book keeping
scriptAddr = scriptaddr(script)
print("What remains in original address:")
print balanceAddr(privkey_to_address(rKeyClient))
print("The DEPOSIT address: ")
print scriptAddr
print("The DEPOSIT address has the following tx history: ")
print history(scriptAddr)
# PAY AS YOU CONSUME
paymentSpin(script, conn, rKeyClient, uKeyServer, 1000)
# close the channel
conn.close()
print 'Internet is overrated'
|
{
"content_hash": "471f5b784d926a8ae84a786f3478e0ec",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 120,
"avg_line_length": 26.24,
"alnum_prop": 0.7327235772357723,
"repo_name": "samnet/bybit",
"id": "07c61e2fae6b8b916605f1e32373205f22b3de87",
"size": "1968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "uml/unixClient.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "257"
},
{
"name": "Groff",
"bytes": "304"
},
{
"name": "Java",
"bytes": "681225"
},
{
"name": "Makefile",
"bytes": "516"
},
{
"name": "Python",
"bytes": "44150"
},
{
"name": "Shell",
"bytes": "60864"
}
],
"symlink_target": ""
}
|
from .models import User, get_todays_recent_offers
from flask import Flask, request, session, redirect, url_for, render_template, flash
app = Flask(__name__)
@app.route('/')
def index():
offers = get_todays_recent_offers()
return render_template('welcome.html', offers=offers)
@app.route('/register', methods=['GET','POST'])
def register():
if request.method == 'POST':
username = request.form['username']
password = request.form['password']
if len(username) < 1:
flash('* Please enter a fun username.')
elif len(password) < 5:
            flash('* Please enter a password with at least 5 characters.')
elif not User(username).register(password):
flash('* Sorry, that username already exists.')
else:
session['username'] = username
flash('Logged in.')
return redirect(url_for('index'))
return render_template('register.html')
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'POST':
username = request.form['username']
password = request.form['password']
if not User(username).verify_password(password):
flash('* Invalid log-in, please try again.')
else:
session['username'] = username
# flash('Logged in.')
return redirect(url_for('index'))
return render_template('login.html')
@app.route('/logout')
def logout():
session.pop('username', None)
flash('Logged out.')
return redirect(url_for('index'))
@app.route('/add_offer', methods=['POST'])
def add_offer():
title = request.form['title']
tags = request.form['tags']
text = request.form['text']
if not title or not tags or not text:
if not title:
flash('Please give your offer a title.')
if not tags:
flash('Please give your offer at least one tag.')
if not text:
flash('Please give your offer some details.')
else:
User(session['username']).add_offer(title, tags, text)
return redirect(url_for('index'))
@app.route('/like_offer/<offer_id>')
def like_offer(offer_id):
username = session.get('username')
if not username:
flash('You must be logged in to like an offer.')
return redirect(url_for('login'))
User(username).like_offer(offer_id)
flash('Liked offer.')
return redirect(request.referrer)
@app.route('/profile/<username>')
def profile(username):
logged_in_username = session.get('username')
user_being_viewed_username = username
user_being_viewed = User(user_being_viewed_username)
offers = user_being_viewed.get_recent_offers()
similar = []
common = []
if logged_in_username:
logged_in_user = User(logged_in_username)
if logged_in_user.username == user_being_viewed.username:
similar = logged_in_user.get_similar_users()
else:
common = logged_in_user.get_commonality_of_user(user_being_viewed)
return render_template(
'profile.html',
username=username,
offers=offers,
similar=similar,
common=common
)
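# Illustrative run sketch (added; not part of the original module). The views
# above use session and flash, which require a secret key to be configured
# before the app will serve requests; the key below is only a placeholder.
if __name__ == '__main__':
    app.secret_key = 'dev-only-placeholder'  # assumption: real deployments load this from config
    app.run(debug=True)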
|
{
"content_hash": "6605262b58f4216aa4e2435c7ab7d97c",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 84,
"avg_line_length": 29.546296296296298,
"alnum_prop": 0.6120338451895957,
"repo_name": "brennv/karma",
"id": "b92e164d8f78f2bb574126b92e40682458bb5e8b",
"size": "3191",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "graph_app/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3912"
},
{
"name": "HTML",
"bytes": "25418"
},
{
"name": "Python",
"bytes": "7699"
},
{
"name": "Shell",
"bytes": "5735"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from copy import copy
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.functional import curry
from django_statemodel.signals import save_timestamp_cache, set_default_state
OPTIONS_CLASS = "StateModelMeta"
OPTIONS_ATTR_NAME = "_statemodelmeta"
DONE_INITIALIZING = "_statemodel_done_initializing"
class StateModelBase(models.base.ModelBase):
def __new__(mcs, name, bases, attrs):
# Look at Options for information about the StateModel states
options = attrs.pop(OPTIONS_CLASS, None)
if options is None:
options = {}
else:
options = options.__dict__
# Get the field name for state from the meta options
state_field_name = options.get('state_field_name', 'state')
state_timestamps_field_name = options.get('state_timestamps_field_name',
'state_timestamps')
# state_map contains the mapping of states values to their attribute
# names.
state_map = options.get('state_map', [])
default_state = options.get('default_state')
add_states_to_model = options.get('add_states_to_model', True)
if state_map:
# default_state is either the first state in the map, or as
# overridden
if default_state is None:
default_state = state_map[0][0]
# Assign the states as attributes to the model
if add_states_to_model:
for key, value in state_map:
attrs[value] = key
# db_index boolean to add an index to the state field. Defaults to True
db_index = options.get('db_index', True)
# Check if we should store the timestamp as utc
use_utc = options.get('use_utc', True)
# Check if we allow None states
allow_none_state = options.get('allow_none_state', True)
if not allow_none_state and default_state is None:
raise ValueError("'allow_none_state' cannot be False while "
"'default_state' is set to None or 'state_map' "
"is undefined.")
# Get a Django field from the given model's _meta object
def get_field(model, field):
if hasattr(model, "_meta"):
try:
return model._meta.get_field(field)
except models.fields.FieldDoesNotExist:
return None
return None
# Check if any of the inherited models have the state field
parent_has_state = False
for parent in bases:
if bool(get_field(parent, state_field_name)):
parent_has_state = True
break
# Check if this is an abstract model
is_abstract_model = getattr(attrs.get('Meta', {}), "abstract", False)
# If this model is abstract and the state field isn't inherited, add it
if is_abstract_model and not parent_has_state:
attrs[state_field_name] = models.IntegerField(null=True,
db_index=db_index)
attrs[state_timestamps_field_name] = generic.GenericRelation(
StateTransitionTimestamp,
content_type_field='content_type',
object_id_field='content_id')
cls = super(StateModelBase, mcs).__new__(mcs, name, bases, attrs)
# Save the options for this model in an object attached to the model
options_cache = StateModelBase.StateModelOptions(
dict(state_map), default_state, use_utc, allow_none_state,
db_index, add_states_to_model, state_field_name,
state_timestamps_field_name)
setattr(cls, OPTIONS_ATTR_NAME, options_cache)
# Add signals to the inheriting models to save the state transitions
if not is_abstract_model:
models.signals.post_save.connect(save_timestamp_cache,
sender=cls)
models.signals.post_init.connect(set_default_state,
sender=cls)
state_field = get_field(cls, state_field_name)
if state_map and state_field:
# Set up the choices on the state field
state_field._choices = state_map
# Add in the django 'get_<field>_display' method. This is done
# in the django metaclass, which has run already, but needs choices
# to work.
setattr(cls, 'get_%s_display' % state_field.attname,
curry(cls._get_FIELD_display, field=state_field))
return cls
class StateModelOptions(object):
def __init__(self, state_map, default_state, use_utc, allow_none_state,
db_index, add_states_to_model, state_field_name,
state_timestamps_field_name):
self.state_map = copy(state_map)
self.default_state = default_state
self.use_utc = use_utc
self.allow_none_state = allow_none_state
self.db_index = db_index
self.add_states_to_model = add_states_to_model
self.state_field_name = state_field_name
self.state_timestamps_field_name = state_timestamps_field_name
self.state_timestamps_cache_name = \
"%s_cache" % state_timestamps_field_name
class StateTransitionTimestamp(models.Model):
state = models.IntegerField(
blank=False,
null=False,
help_text="The state of this transition")
state_time = models.DateTimeField(
blank=False,
null=False,
default=datetime.utcnow,
help_text="The time this state was entered")
content_type = models.ForeignKey(
ContentType,
blank=True,
null=True)
content_id = models.PositiveIntegerField(
blank=False,
null=False)
content = generic.GenericForeignKey(
ct_field="content_type",
fk_field="content_id")
def __unicode__(self):
return "%s: %s" % (self.state, self.state_time)
class StateModel(models.Model):
__metaclass__ = StateModelBase
class Meta:
abstract = True
def __setattr__(self, key, value):
meta_options = getattr(self, OPTIONS_ATTR_NAME)
# Check if we are setting the "state" field and that we are done
# initializing. Done initializing means the __init__ is finished.
if key == meta_options.state_field_name and \
getattr(self, DONE_INITIALIZING, False):
# Value can be a tuple of (<state>, <datetime object>)
if isinstance(value, (tuple, list)):
if len(value) != 2 or not isinstance(value[1], datetime):
raise ValueError("'%s' must be in the format: <state> or "
"(<state>, <datetime>)"
% meta_options.state_field_name)
timestamp = value[1]
value = value[0]
else:
# If no timestamp is given, set it to now
timestamp = datetime.utcnow() if meta_options.use_utc else \
datetime.now()
if not meta_options.allow_none_state and value is None:
raise ValueError("The given state value is None, and None "
"states are not allowed.")
if value not in meta_options.state_map and value is not None:
raise ValueError("The given state '%s' is not a valid state "
"listed in the statemap: '%s'."
% (value, meta_options.state_map))
# Don't update the state's timestamp if the state hasn't changed.
if value != getattr(self, meta_options.state_field_name):
# We store the timestamp in a cache until the model is saved.
# This way, we only update the state_timestamps once per save.
setattr(self,
meta_options.state_timestamps_cache_name,
StateTransitionTimestamp(state=value,
state_time=timestamp))
super(StateModel, self).__setattr__(key, value)
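# Illustrative usage sketch (added; the model below is hypothetical and not
# part of this module). A concrete Django model opts in by subclassing
# StateModel and declaring the StateModelMeta options class that
# StateModelBase consumes above.
#
#     class Order(StateModel):
#         class StateModelMeta:
#             state_map = [(0, "STATE_NEW"), (1, "STATE_PAID"), (2, "STATE_SHIPPED")]
#             default_state = 0
#
#     order = Order()
#     order.state = Order.STATE_PAID                              # timestamped "now"
#     order.state = (Order.STATE_SHIPPED, datetime(2015, 1, 1))   # explicit timestamp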
|
{
"content_hash": "b86cf9d990bae7664c8c231671f1369f",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 80,
"avg_line_length": 42.33980582524272,
"alnum_prop": 0.5575556065122679,
"repo_name": "MerchantAtlas/django-statemodel",
"id": "c118f53eb3d60ca4594eebed1168d955717f879b",
"size": "8722",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_statemodel/statemodel.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10057"
}
],
"symlink_target": ""
}
|
"""
Unit Tests for manila.share.rpcapi.
"""
import copy
from oslo_config import cfg
from oslo_serialization import jsonutils
from manila.common import constants
from manila import context
from manila.share import rpcapi as share_rpcapi
from manila import test
from manila.tests import db_utils
CONF = cfg.CONF
class ShareRpcAPITestCase(test.TestCase):
def setUp(self):
super(ShareRpcAPITestCase, self).setUp()
self.context = context.get_admin_context()
share = db_utils.create_share(
availability_zone=CONF.storage_availability_zone,
status=constants.STATUS_AVAILABLE
)
access = db_utils.create_access(share_id=share['id'])
snapshot = db_utils.create_snapshot(share_id=share['id'])
share_server = db_utils.create_share_server()
cg = {'id': 'fake_cg_id', 'host': 'fake_host'}
cgsnapshot = {'id': 'fake_cg_id'}
host = {'host': 'fake_host', 'capabilities': 1}
self.fake_share = jsonutils.to_primitive(share)
self.fake_access = jsonutils.to_primitive(access)
self.fake_snapshot = jsonutils.to_primitive(snapshot)
self.fake_share_server = jsonutils.to_primitive(share_server)
self.fake_cg = jsonutils.to_primitive(cg)
self.fake_cgsnapshot = jsonutils.to_primitive(cgsnapshot)
self.fake_host = jsonutils.to_primitive(host)
self.ctxt = context.RequestContext('fake_user', 'fake_project')
self.rpcapi = share_rpcapi.ShareAPI()
def test_serialized_share_has_id(self):
self.assertTrue('id' in self.fake_share)
def _test_share_api(self, method, rpc_method, **kwargs):
expected_retval = 'foo' if method == 'call' else None
target = {
"version": kwargs.pop('version', self.rpcapi.BASE_RPC_API_VERSION)
}
expected_msg = copy.deepcopy(kwargs)
if 'share' in expected_msg and method != 'get_migration_info':
share = expected_msg['share']
del expected_msg['share']
expected_msg['share_id'] = share['id']
if 'share_instance' in expected_msg:
share_instance = expected_msg.pop('share_instance', None)
expected_msg['share_instance_id'] = share_instance['id']
if 'cg' in expected_msg:
cg = expected_msg['cg']
del expected_msg['cg']
expected_msg['cg_id'] = cg['id']
if 'cgsnapshot' in expected_msg:
snap = expected_msg['cgsnapshot']
del expected_msg['cgsnapshot']
expected_msg['cgsnapshot_id'] = snap['id']
if 'access' in expected_msg:
access = expected_msg['access']
del expected_msg['access']
expected_msg['access_id'] = access['id']
if 'host' in expected_msg:
del expected_msg['host']
if 'snapshot' in expected_msg:
snapshot = expected_msg['snapshot']
del expected_msg['snapshot']
expected_msg['snapshot_id'] = snapshot['id']
if 'dest_host' in expected_msg:
del expected_msg['dest_host']
expected_msg['host'] = self.fake_host
if 'host' in kwargs:
host = kwargs['host']
elif 'cg' in kwargs:
host = kwargs['cg']['host']
elif 'share_instance' in kwargs:
host = kwargs['share_instance']['host']
elif 'share_server' in kwargs:
host = kwargs['share_server']['host']
else:
host = kwargs['share']['host']
target['server'] = host
target['topic'] = '%s.%s' % (CONF.share_topic, host)
self.fake_args = None
self.fake_kwargs = None
def _fake_prepare_method(*args, **kwds):
for kwd in kwds:
self.assertEqual(target[kwd], kwds[kwd])
return self.rpcapi.client
def _fake_rpc_method(*args, **kwargs):
self.fake_args = args
self.fake_kwargs = kwargs
if expected_retval:
return expected_retval
self.mock_object(self.rpcapi.client, "prepare", _fake_prepare_method)
self.mock_object(self.rpcapi.client, rpc_method, _fake_rpc_method)
retval = getattr(self.rpcapi, method)(self.ctxt, **kwargs)
self.assertEqual(expected_retval, retval)
expected_args = [self.ctxt, method]
for arg, expected_arg in zip(self.fake_args, expected_args):
self.assertEqual(expected_arg, arg)
for kwarg, value in self.fake_kwargs.items():
self.assertEqual(expected_msg[kwarg], value)
def test_create_share_instance(self):
self._test_share_api('create_share_instance',
rpc_method='cast',
version='1.4',
share_instance=self.fake_share,
host='fake_host1',
snapshot_id='fake_snapshot_id',
filter_properties=None,
request_spec=None)
def test_delete_share_instance(self):
self._test_share_api('delete_share_instance',
rpc_method='cast',
version='1.4',
share_instance=self.fake_share)
def test_allow_access(self):
self._test_share_api('allow_access',
rpc_method='cast',
version='1.4',
share_instance=self.fake_share,
access=self.fake_access)
def test_deny_access(self):
self._test_share_api('deny_access',
rpc_method='cast',
version='1.4',
share_instance=self.fake_share,
access=self.fake_access)
def test_create_snapshot(self):
self._test_share_api('create_snapshot',
rpc_method='cast',
share=self.fake_share,
snapshot=self.fake_snapshot)
def test_delete_snapshot(self):
self._test_share_api('delete_snapshot',
rpc_method='cast',
snapshot=self.fake_snapshot,
host='fake_host')
def test_delete_share_server(self):
self._test_share_api('delete_share_server',
rpc_method='cast',
share_server=self.fake_share_server)
def test_extend_share(self):
self._test_share_api('extend_share',
rpc_method='cast',
version='1.2',
share=self.fake_share,
new_size=123,
reservations={'fake': 'fake'})
def test_shrink_share(self):
self._test_share_api('shrink_share',
rpc_method='cast',
version='1.3',
share=self.fake_share,
new_size=123)
def test_create_consistency_group(self):
self._test_share_api('create_consistency_group',
version='1.5',
rpc_method='cast',
cg=self.fake_cg,
host='fake_host1')
def test_delete_consistency_group(self):
self._test_share_api('delete_consistency_group',
version='1.5',
rpc_method='cast',
cg=self.fake_cg)
def test_create_cgsnapshot(self):
self._test_share_api('create_cgsnapshot',
version='1.5',
rpc_method='cast',
cgsnapshot=self.fake_cgsnapshot,
host='fake_host1')
def test_delete_cgsnapshot(self):
self._test_share_api('delete_cgsnapshot',
version='1.5',
rpc_method='cast',
cgsnapshot=self.fake_cgsnapshot,
host='fake_host1')
def test_migrate_share(self):
fake_dest_host = self.Desthost()
self._test_share_api('migrate_share',
rpc_method='cast',
version='1.6',
share=self.fake_share,
dest_host=fake_dest_host,
force_host_copy='1')
def test_get_migration_info(self):
self._test_share_api('get_migration_info',
rpc_method='call',
version='1.6',
share_instance=self.fake_share,
share_server=self.fake_share_server)
def test_get_driver_migration_info(self):
self._test_share_api('get_driver_migration_info',
rpc_method='call',
version='1.6',
share_instance=self.fake_share,
share_server=self.fake_share_server)
class Desthost(object):
host = 'fake_host'
capabilities = 1
|
{
"content_hash": "d6d4333d07416e7bf1cb7bc30f20ab72",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 78,
"avg_line_length": 39.48101265822785,
"alnum_prop": 0.5082825691995297,
"repo_name": "scality/manila",
"id": "6f24dac823a9ca86b46d33b150461a8ef93b6a0f",
"size": "9960",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manila/tests/share/test_rpcapi.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "953"
},
{
"name": "Python",
"bytes": "5912966"
},
{
"name": "Shell",
"bytes": "46081"
}
],
"symlink_target": ""
}
|
import sys
import os
import argparse
import itertools
from fractions import Fraction as Frac
try:
# Python 3
from tkinter import *
from tkinter import filedialog
except ImportError:
# Python 2
from Tkinter import *
import tkFileDialog as filedialog
from level import Level, Coord, Tiles
class FuelChangeState(object):
def __init__(self, f):
self.fuel = Frac(f)
def __call__(self, lvl, x, y):
lvl.fuel[Coord(x,y)] = self.fuel
class TileChangeState(object):
def __init__(self, c):
self.tile = Tiles[c]
def __call__(self, lvl, x, y):
lvl[x,y] = self.tile
class StartChangeState(object):
def __call__(self, lvl, x, y):
lvl.start = Coord(x,y)
class EndChangeState(object):
def __call__(self, lvl, x, y):
lvl.end = Coord(x,y)
# Gui for inputting variables
class Editor(Frame):
def __init__(self, master):
'''
Creates all of the buttons and boxes for the GUI based on the rules provided
'''
# Get the root window
self.master = master
self.level = Level()
self.state = None
self.file_opt = options = {
'defaultextension': '.ilv',
'filetypes': [('Math Island Level', '.ilv')],
'parent': self.master,
'title': 'Math Island Level',
}
self.master.bind("<Escape>", lambda e:self.master.destroy())
header = Frame(self.master)
header.pack(side=TOP)
Button(header, text="Save", command=self.save).pack(side=LEFT)
Button(header, text="Load", command=self.load).pack(side=LEFT)
Button(header, text="Quit", command=self.master.destroy).pack(side=LEFT)
infoframe = Frame(self.master)
infoframe.pack(side=TOP)
Label(infoframe, text="Title: ").pack(side=LEFT)
self.title = StringVar()
Entry(infoframe, textvariable=self.title).pack(side=LEFT)
Label(infoframe, text="Text: ").pack(side=LEFT)
self.text = Text(infoframe, width=60, height=4)
self.text.pack()
fuelframe = Frame(self.master)
fuelframe.pack(side=BOTTOM)
for i in range(9):
Button(fuelframe, text="{0}/8".format(i),
command=lambda i=i: self.setstate(FuelChangeState(Frac(i,8)))
).pack(side=LEFT)
Button(fuelframe, text="Starting Fuel", command=self.setfuel).pack(side=LEFT)
fuelframe = Frame(self.master)
scrollbar = Scrollbar(fuelframe, orient=VERTICAL)
self.fuellist = Listbox(fuelframe, yscrollcommand=scrollbar.set)
scrollbar.config(command=self.fuellist.yview)
scrollbar.pack(side=RIGHT, fill=Y)
self.fuellist.pack(side=LEFT, fill=BOTH, expand=1)
fuelframe.pack(side=LEFT)
mapframe = Frame(self.master)
mapframe.pack(side=LEFT)
self.btns = [[None for i in range(self.level.width)]
for j in range(self.level.height)]
for y in range(self.level.height):
for x in range(self.level.width):
self.btns[x][y] = btn = Button(
mapframe, text=self.level[x, y].char,
command=lambda x=x, y=y: self.apply(x, y))
btn.grid(row=y, column=x, sticky=N+E+S+W)
sb = Button(mapframe, text="Start", command=lambda:self.setstate(StartChangeState()))
sb.grid(row=self.level.height, column=0, columnspan=self.level.width//2, sticky=N+E+S+W)
sb = Button(mapframe, text="End", command=lambda:self.setstate(EndChangeState()))
sb.grid(row=self.level.height, column=self.level.width//2, columnspan=self.level.width//2, sticky=N+E+S+W)
tileframe = Frame(self.master)
tileframe.pack(side=RIGHT)
getcost = lambda t: getattr(Tiles[t], "cost", None)
tiles = sorted(Tiles, key=getcost)
for i, c in enumerate(itertools.chain(('.','w'), tiles)):
btn = Button(tileframe,
text="{0}: {1}".format(c, getcost(c)),
command=lambda c=c: self.setstate(TileChangeState(c)))
btn.grid(column=(i//10), row=(i%10), sticky=N+E+S+W)
self.redraw()
'''
TODO: START/END/STARTFUEL BUTTONS
'''
def redraw(self):
self.level.clean()
ms = "({0.x}, {0.y}) = {1}"
self.fuellist.delete(0, END) # clear
self.fuellist.insert(END, "Start Fuel: {}".format(self.level.startfuel))
self.fuellist.insert(END, ms.format(self.level.start, "Start"))
self.fuellist.insert(END, ms.format(self.level.end, "End"))
for key, value in self.level.fuel.items():
self.fuellist.insert(END, ms.format(key, value))
for y in range(self.level.height):
for x in range(self.level.width):
self.btns[x][y].config(text=self.level[x,y].char)
def reset(self):
self.level = Level()
        self.redraw()
def setstate(self, state):
self.state = state
def setfuel(self):
if isinstance(self.state, FuelChangeState):
self.level.startfuel = self.state.fuel
self.redraw()
def apply(self, x, y):
if self.state:
self.state(self.level, x, y)
self.redraw()
def save(self):
self.level.title = self.title.get().strip()
self.level.text = self.text.get(1.0, END).strip()
fn = filedialog.asksaveasfilename(**self.file_opt)
if not fn: return
with open(fn, "w") as fi:
fi.write(repr(self.level))
def load(self):
fn = filedialog.askopenfilename(**self.file_opt)
if fn:
self.level = Level(fn)
self.title.set(self.level.title)
self.text.delete(1.0, END)
self.text.insert(1.0, self.level.text)
self.redraw()
def main():
root = Tk()
root.title('Math Island Level Editor')
app = Editor(root)
root.mainloop()
if __name__ == "__main__":
main()
|
{
"content_hash": "dcdf7e0c3701881ec7c32cfa332346de",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 114,
"avg_line_length": 34.112994350282484,
"alnum_prop": 0.5808214640609474,
"repo_name": "liam-middlebrook/math-island",
"id": "998ac72303ca069c68840dd8c9772b129a7dd83d",
"size": "6061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "LevelEditor.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "31169"
}
],
"symlink_target": ""
}
|
import tests.periodicities.period_test as per
per.buildModel((15 , 'S' , 1600));
|
{
"content_hash": "b9c5637ca01dac9f1eedc1d46b9c2212",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 45,
"avg_line_length": 20.75,
"alnum_prop": 0.7108433734939759,
"repo_name": "antoinecarme/pyaf",
"id": "619a26496d1006e99d105e750acd4b08d90a3c63",
"size": "83",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/periodicities/Second/Cycle_Second_1600_S_15.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("SVC_rbf" , "BreastCancer" , "sqlite")
|
{
"content_hash": "cf5569bb9025cf20999236b7cec17706",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 72,
"avg_line_length": 33.75,
"alnum_prop": 0.7703703703703704,
"repo_name": "antoinecarme/sklearn2sql_heroku",
"id": "bda290b403ca0d1e51ae1e41a1f0fb98c25a5ff6",
"size": "135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/classification/BreastCancer/ws_BreastCancer_SVC_rbf_sqlite_code_gen.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "507043"
},
{
"name": "Procfile",
"bytes": "37"
},
{
"name": "Python",
"bytes": "1021137"
},
{
"name": "R",
"bytes": "2521"
}
],
"symlink_target": ""
}
|
"""beanmachine-specific conversion code."""
from .inference_data import InferenceData
from .base import dict_to_dataset, requires
class BMConverter:
"""Encapsulate Bean Machine specific logic."""
def __init__(
self,
*,
sampler=None,
coords=None,
dims=None,
) -> None:
self.sampler = sampler
self.coords = coords
self.dims = dims
import beanmachine.ppl as bm
self.beanm = bm
if "posterior" in self.sampler.namespaces:
self.posterior = self.sampler.namespaces["posterior"].samples
if "posterior_predictive" in self.sampler.namespaces:
self.posterior_predictive = self.sampler.namespaces["posterior_predictive"].samples
if self.sampler.log_likelihoods is not None:
self.log_likelihoods = self.sampler.log_likelihoods
if self.sampler.observations is not None:
self.observations = self.sampler.observations
@requires("posterior")
def posterior_to_xarray(self):
"""Convert the posterior to an xarray dataset."""
data = {k: v.detach().cpu().numpy() for k, v in self.posterior.items()}
return dict_to_dataset(data, library=self.beanm, coords=self.coords, dims=self.dims)
@requires("posterior_predictive")
def posterior_predictive_to_xarray(self):
"""Convert posterior_predictive samples to xarray."""
data = {k: v.detach().cpu().numpy() for k, v in self.posterior_predictive.items()}
return dict_to_dataset(data, library=self.beanm, coords=self.coords, dims=self.dims)
@requires("log_likelihoods")
def log_likelihood_to_xarray(self):
data = {k: v.detach().cpu().numpy() for k, v in self.log_likelihoods.items()}
return dict_to_dataset(data, library=self.beanm, coords=self.coords, dims=self.dims)
@requires("observations")
def observed_data_to_xarray(self):
"""Convert observed data to xarray."""
data = {k: v.detach().cpu().numpy() for k, v in self.observations.items()}
return dict_to_dataset(
data, library=self.beanm, coords=self.coords, dims=self.dims, default_dims=[]
)
def to_inference_data(self):
"""Convert all available data to an InferenceData object."""
return InferenceData(
**{
"posterior": self.posterior_to_xarray(),
"posterior_predictive": self.posterior_predictive_to_xarray(),
"log_likelihood": self.log_likelihood_to_xarray(),
"observed_data": self.observed_data_to_xarray(),
}
)
def from_beanmachine(
sampler=None,
*,
coords=None,
dims=None,
):
"""Convert Bean Machine MonteCarloSamples object into an InferenceData object.
For a usage example read the
:ref:`Creating InferenceData section on from_beanmachine <creating_InferenceData>`
Parameters
----------
sampler : bm.MonteCarloSamples
Fitted MonteCarloSamples object from Bean Machine
coords : dict of {str : array-like}
Map of dimensions to coordinates
dims : dict of {str : list of str}
Map variable names to their coordinates
"""
return BMConverter(
sampler=sampler,
coords=coords,
dims=dims,
).to_inference_data()
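# Illustrative usage sketch (added; not part of this module). `samples` stands
# in for a fitted beanmachine.ppl MonteCarloSamples object from any inference
# run; the coords/dims values shown are placeholders.
#
#     idata = from_beanmachine(
#         samples,
#         coords={"school": ["A", "B", "C"]},
#         dims={"theta": ["school"]},
#     )
#     idata.posterior  # xarray Dataset with chain/draw/school dimensions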
|
{
"content_hash": "868f2d05547435d0265a72ef73fe44d0",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 95,
"avg_line_length": 33.74747474747475,
"alnum_prop": 0.6315474408859623,
"repo_name": "arviz-devs/arviz",
"id": "5247b1e86997358d55040adc7cc4f10851f9d0f5",
"size": "3341",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "arviz/data/io_beanmachine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5900"
},
{
"name": "Dockerfile",
"bytes": "1771"
},
{
"name": "HTML",
"bytes": "1343"
},
{
"name": "Jupyter Notebook",
"bytes": "641262"
},
{
"name": "Makefile",
"bytes": "688"
},
{
"name": "PowerShell",
"bytes": "2668"
},
{
"name": "Python",
"bytes": "1634423"
},
{
"name": "R",
"bytes": "248"
},
{
"name": "Shell",
"bytes": "7276"
},
{
"name": "TeX",
"bytes": "24620"
}
],
"symlink_target": ""
}
|
from .base import TestBase
class TestStore(TestBase):
async def test_store(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'INBOX')
transport.push_readline(
b'store1 STORE * +FLAGS (\\Seen)\r\n')
transport.push_write(
b'* 4 FETCH (FLAGS (\\Recent \\Seen))\r\n'
b'store1 OK STORE completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_store_silent(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'INBOX')
transport.push_readline(
b'store1 STORE * +FLAGS.SILENT (\\Seen)\r\n')
transport.push_write(
b'store1 OK STORE completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_uid_store(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'INBOX')
transport.push_readline(
b'store1 UID STORE * +FLAGS (\\Seen)\r\n')
transport.push_write(
b'* 4 FETCH (FLAGS (\\Recent \\Seen) UID 104)\r\n'
b'store1 OK UID STORE completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_store_add_recent(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'INBOX')
transport.push_readline(
b'store1 STORE 1 +FLAGS (\\Recent)\r\n')
transport.push_write(
b'* 1 FETCH (FLAGS (\\Seen))\r\n'
b'store1 OK STORE completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_store_remove_recent(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'INBOX')
transport.push_readline(
b'store1 STORE * -FLAGS (\\Recent)\r\n')
transport.push_write(
b'* 4 FETCH (FLAGS (\\Recent))\r\n'
b'store1 OK STORE completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_store_set_non_recent(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'INBOX')
transport.push_readline(
b'store1 STORE * FLAGS ()\r\n')
transport.push_write(
b'* 4 FETCH (FLAGS (\\Recent))\r\n'
b'store1 OK STORE completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_store_invalid(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'INBOX', 4, 1)
transport.push_readline(
b'store1 STORE * +FLAGS (\\Invalid)\r\n')
transport.push_write(
b'* 4 FETCH (FLAGS (\\Recent))\r\n'
b'store1 OK STORE completed.\r\n')
transport.push_logout()
await self.run(transport)
|
{
"content_hash": "db38297312ab081eedf10320ea4738ff",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 62,
"avg_line_length": 36.96551724137931,
"alnum_prop": 0.5892412935323383,
"repo_name": "icgood/pymap",
"id": "d59975a87c731a461c7714c2fd02e9a7a6b7a38e",
"size": "3217",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "test/server/test_store.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "390"
},
{
"name": "Lua",
"bytes": "15194"
},
{
"name": "Python",
"bytes": "857930"
},
{
"name": "Shell",
"bytes": "389"
}
],
"symlink_target": ""
}
|
'''
Created on Mar 28, 2015
Copyright 2015, Institute for Systems Biology.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author: michael
'''
import logging
import urllib
import util
def process_metadata_current(config, run_dir, log_name):
"""
return type:
barcode2term2value: for each sample barcode, finds the AliquotUUID and CENTER_CODE values and sets it as the DataCenterCode field
"""
log = logging.getLogger(log_name)
log.info('start processing metadata.current.txt')
barcode2term2value = {}
metadataURL = config['downloads']['metadata_current']
try:
# metadata = util.getURLData(metadataURL, 'metadata.current.txt', log)
metadata = urllib.urlopen(metadataURL)
contents = metadata.read()
lines = contents.split('\n')
util.post_run_file(run_dir, 'metadata.current.txt', contents)
except Exception as e:
log.exception('problem fetching metadata.current.txt')
if 'test' == config['mode']:
metadata = open('metadata.current.txt')
lines = metadata.read()
lines = lines.split('\n')
log.warning('using local copy for testing purposes')
else:
raise e
try:
column2term = config['metadata_locations']['metadata.current.txt']
headers = lines[0].split('\t')
column2index = {}
for column in column2term:
column2index[column] = headers.index(column)
except Exception as e:
log.exception('problem parsing metadata.current.txt header: %s' % (headers))
raise e
try:
for line in lines[1:]:
if not line:
continue
fields = line.split('\t')
term2value = {}
for column, term in column2term.iteritems():
term2value[term] = fields[column2index[column]]
barcode2term2value[fields[1]] = term2value
except Exception as e:
log.exception('problem parsing metadata.current.txt: %s' % (line))
raise e
log.info('finished processing metadata.current.txt')
return barcode2term2value
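# Illustrative shape of the return value (added; the barcode and values below
# are made up, and the exact term names depend on
# config['metadata_locations']['metadata.current.txt']):
#
#     {
#         "TCGA-AA-0000-01A-01D-0000-00": {
#             "AliquotUUID": "....",
#             "DataCenterCode": "07",
#         },
#         ...
#     }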
|
{
"content_hash": "c012c987e5d4d5be0654997b2f042867",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 137,
"avg_line_length": 36.04,
"alnum_prop": 0.6337402885682575,
"repo_name": "isb-cgc/ISB-CGC-data-proc",
"id": "0e91b2eee7c210b827a3f532e9b9eff12ce5f334",
"size": "2703",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "data_upload/util/process_metadata_current.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "6576"
},
{
"name": "Python",
"bytes": "1169886"
},
{
"name": "Shell",
"bytes": "1068"
}
],
"symlink_target": ""
}
|
"""Generates .rst files for .py files encountered in the SRC_DIR
:author: Eric Vasquez
:contact: eric.vasquez@calxeda.com
:copyright: (c) 2012-2013, Calxeda Inc.
"""
import os
import sys
import glob
# #
# The actual NAME of your API, as if you were to import it.
API_NAME = 'cxmanage_api'
# #
# SRC_DIR is one directory level up from the docs dir
# assuming a file structure like:
# +--API/
# |---src_file1.py
# |---src_fileN.py
# +---docs/
# |--Makefile
# +--generate_api_rst.py
SRC_DIR = os.path.dirname(os.path.abspath('.'))
RST_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'source')
# #
# Any files you don't want docs generated for (no extension needed)
#
# EXAMPLE: To exclude foo.py and bar.py ...
# BLACKLIST = ['foo', 'bar']
#
BLACKLIST = ['__init__']
# #
# Add any custom title you want for a specific python source file.
#
# EXAMPLE:
# TITLES = {'some_obscure_script_name' : 'Some Less Obscure Title'}
#
TITLES = {
'simg' : 'SIMG',
'crc32' : 'CRC32',
'ubootenv' : 'U-Boot Environment',
}
def get_source(source_dir):
"""Iterates recursively over dirs and gets all of the <filename>.py files.
:param source_dir: The (absolute) path to the source directory.
:type source_dir: string
:return: A mapping of file names to filename Titles.
:rtype: dictionary
"""
if (not source_dir.endswith('/')):
source_dir += '/'
source = {API_NAME : {}}
paths = glob.glob(os.path.join(source_dir, '*.py'))
for path in paths:
f_path, _ = os.path.splitext(path)
f_name = f_path.split(source_dir)[1]
if (not f_name in BLACKLIST):
if TITLES.has_key(f_name):
source[API_NAME][f_name] = TITLES[f_name]
else:
source[API_NAME][f_name] = f_name.title()
else:
print 'Skipping docgen for source file: %s' % path
return source
def parse_source(src_file):
"""Parses a given source file to get class and function names.
:param src_file: A file path. (abspath)
:type src_file: string
:return: a dictionary mapping class to methods and a list of functions
:rtype: dictionary
"""
def _get_object_name(line):
"""Takes a class, function or method declaration and gets the name."""
name = line.split()[1].split('(')[0].strip()
return name.rstrip(':')
#
# Parse Source
#
classes, functions = {}, []
with open(src_file, 'r') as file_:
class_ = None
lines = file_.readlines()
for line in lines:
if (line.startswith('class ')):
name = _get_object_name(line)
class_ = name
classes[name] = []
elif (line.startswith(' def ') and line.count('(')):
if (class_):
name = _get_object_name(line)
if (not name.startswith('_')):
classes[class_].append(name)
elif (line.startswith('def ') and line.count('(')):
functions.append(_get_object_name(line))
for class_name, function_names in classes.items():
classes[class_name] = sorted(list(set(function_names)))
return classes, sorted(list(set(functions)))
def main():
"""Entry point for this script."""
src_files = get_source(SRC_DIR)
for package, module in src_files.items():
for module_name, module_title in module.items():
doc = os.path.join(RST_DIR, '%s.rst' % module_name)
with open('%s' % doc, 'w') as rst_file:
print 'Generating Sphinx Docs for: %s' % doc
py_src = os.path.join(SRC_DIR, '%s.py' % module_name)
classes, functions = parse_source(py_src)
rst_file.write(module_title + '\n')
rst_file.write('_' * len(module_title) + '\n\n')
if (len(functions) > 0):
rst_file.write('.. currentmodule:: %s.%s\n\n' %
(package, module_name))
rst_file.write('.. autosummary::\n')
rst_file.write(' :nosignatures:\n\n')
for func in functions:
rst_file.write(' %s\n' % func)
rst_file.write('\n')
if (classes != {}):
rst_file.write('.. currentmodule:: %s.%s\n\n' %
(package, module_name))
rst_file.write('.. autosummary::\n')
rst_file.write(' :nosignatures:\n\n')
for class_name in classes.keys():
rst_file.write(' %s\n' % class_name)
rst_file.write('\n')
for class_name, function_list in classes.items():
if (len(function_list) > 0):
rst_file.write(' .. automethod::\n')
for function_name in function_list:
rst_file.write(' %s.%s\n' %
(class_name, function_name))
rst_file.write('\n\n')
for func in functions:
rst_file.write('.. autofunction:: %s\n' % func)
for class_name in classes.keys():
rst_file.write('.. autoclass:: %s\n' % class_name)
rst_file.write(' :members:\n')
if (module_name not in BLACKLIST):
rst_file.write(' :inherited-members:\n')
rst_file.write(' :show-inheritance:\n')
rst_file.write('\n')
# Go!
if __name__ == '__main__':
sys.exit(main())
# End of file: ./docs/generate_api_rst.py
|
{
"content_hash": "9e30d3172c3ec029ec4a0dd9d3a17726",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 78,
"avg_line_length": 34.122093023255815,
"alnum_prop": 0.5091156926222525,
"repo_name": "SilverLiningSystems/cxmanage-test",
"id": "776dabe9bfbec2db0d6d96c685bc73816f17165d",
"size": "5869",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "cxmanage_api/docs/generate_api_rst.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "5672"
},
{
"name": "Python",
"bytes": "416796"
},
{
"name": "Shell",
"bytes": "2366"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import os
BROKER_TRANSPORT = "memory"
#: Don't want log output when running suite.
CELERYD_HIJACK_ROOT_LOGGER = False
CELERY_RESULT_BACKEND = "cache"
CELERY_CACHE_BACKEND = "memory"
CELERY_RESULT_DBURI = "sqlite:///test.db"
CELERY_SEND_TASK_ERROR_EMAILS = False
CELERY_DEFAULT_QUEUE = "testcelery"
CELERY_DEFAULT_EXCHANGE = "testcelery"
CELERY_DEFAULT_ROUTING_KEY = "testcelery"
CELERY_QUEUES = {"testcelery": {"binding_key": "testcelery"}}
CELERYD_LOG_COLOR = False
# Tyrant results tests (only executed if installed and running)
TT_HOST = os.environ.get("TT_HOST") or "localhost"
TT_PORT = int(os.environ.get("TT_PORT") or 1978)
# Redis results tests (only executed if installed and running)
CELERY_REDIS_HOST = os.environ.get("REDIS_HOST") or "localhost"
CELERY_REDIS_PORT = int(os.environ.get("REDIS_PORT") or 6379)
CELERY_REDIS_DB = os.environ.get("REDIS_DB") or 0
CELERY_REDIS_PASSWORD = os.environ.get("REDIS_PASSWORD")
# Mongo results tests (only executed if installed and running)
CELERY_MONGODB_BACKEND_SETTINGS = {
"host": os.environ.get("MONGO_HOST") or "localhost",
"port": os.environ.get("MONGO_PORT") or 27017,
"database": os.environ.get("MONGO_DB") or "celery_unittests",
"taskmeta_collection": os.environ.get("MONGO_TASKMETA_COLLECTION") or
"taskmeta_collection",
}
if os.environ.get("MONGO_USER"):
CELERY_MONGODB_BACKEND_SETTINGS["user"] = os.environ.get("MONGO_USER")
if os.environ.get("MONGO_PASSWORD"):
CELERY_MONGODB_BACKEND_SETTINGS["password"] = \
os.environ.get("MONGO_PASSWORD")
|
{
"content_hash": "679b7d23a8896695fba9ee02a3ff0f96",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 74,
"avg_line_length": 36.20454545454545,
"alnum_prop": 0.7206528562460766,
"repo_name": "kumar303/rockit",
"id": "db53989257df5b6290c1580abe39fb6b8c70089a",
"size": "1593",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vendor-local/celery/tests/config.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "4587"
},
{
"name": "Puppet",
"bytes": "6677"
},
{
"name": "Python",
"bytes": "4139254"
},
{
"name": "Ruby",
"bytes": "1462"
},
{
"name": "Shell",
"bytes": "3065"
}
],
"symlink_target": ""
}
|
from wtforms.validators import ValidationError
class Unique(object):
# Shout out to exploreflask.com for this approach
def __init__(self, model, field, message=u'This element already exists.'):
self.model = model
self.field = field
self.message = message
def __call__(self, form, field):
check = self.model.query.filter(self.field == field.data).first()
if check:
if form.is_edit:
pass
else:
raise ValidationError(self.message)
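# Illustrative usage sketch (added; the form and model names are hypothetical
# and not part of this module). The validator is attached to a field, and the
# owning form exposes an `is_edit` flag so the uniqueness check is skipped
# when editing an existing record.
#
#     class PetForm(Form):
#         is_edit = False
#         name = StringField('Name', validators=[
#             Unique(Pet, Pet.name, message=u'A pet with that name already exists.')
#         ])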
|
{
"content_hash": "4f56d2d90df1b0b9cac3abcd6a7acb8a",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 78,
"avg_line_length": 31.705882352941178,
"alnum_prop": 0.601113172541744,
"repo_name": "chriswilley/leash",
"id": "eb157631ec166d6665a3828e395f95ab47697d59",
"size": "539",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leash/leash/validators.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13416"
},
{
"name": "HTML",
"bytes": "61525"
},
{
"name": "JavaScript",
"bytes": "15462"
},
{
"name": "Python",
"bytes": "89225"
}
],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1NetworkPolicySpec(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, ingress=None, pod_selector=None):
"""
V1beta1NetworkPolicySpec - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'ingress': 'list[V1beta1NetworkPolicyIngressRule]',
'pod_selector': 'V1LabelSelector'
}
self.attribute_map = {
'ingress': 'ingress',
'pod_selector': 'podSelector'
}
self._ingress = ingress
self._pod_selector = pod_selector
@property
def ingress(self):
"""
Gets the ingress of this V1beta1NetworkPolicySpec.
List of ingress rules to be applied to the selected pods. Traffic is allowed to a pod if namespace.networkPolicy.ingress.isolation is undefined and cluster policy allows it, OR if the traffic source is the pod's local node, OR if the traffic matches at least one ingress rule across all of the NetworkPolicy objects whose podSelector matches the pod. If this field is empty then this NetworkPolicy does not affect ingress isolation. If this field is present and contains at least one rule, this policy allows any traffic which matches at least one of the ingress rules in this list.
:return: The ingress of this V1beta1NetworkPolicySpec.
:rtype: list[V1beta1NetworkPolicyIngressRule]
"""
return self._ingress
@ingress.setter
def ingress(self, ingress):
"""
Sets the ingress of this V1beta1NetworkPolicySpec.
List of ingress rules to be applied to the selected pods. Traffic is allowed to a pod if namespace.networkPolicy.ingress.isolation is undefined and cluster policy allows it, OR if the traffic source is the pod's local node, OR if the traffic matches at least one ingress rule across all of the NetworkPolicy objects whose podSelector matches the pod. If this field is empty then this NetworkPolicy does not affect ingress isolation. If this field is present and contains at least one rule, this policy allows any traffic which matches at least one of the ingress rules in this list.
:param ingress: The ingress of this V1beta1NetworkPolicySpec.
:type: list[V1beta1NetworkPolicyIngressRule]
"""
self._ingress = ingress
@property
def pod_selector(self):
"""
Gets the pod_selector of this V1beta1NetworkPolicySpec.
Selects the pods to which this NetworkPolicy object applies. The array of ingress rules is applied to any pods selected by this field. Multiple network policies can select the same set of pods. In this case, the ingress rules for each are combined additively. This field is NOT optional and follows standard label selector semantics. An empty podSelector matches all pods in this namespace.
:return: The pod_selector of this V1beta1NetworkPolicySpec.
:rtype: V1LabelSelector
"""
return self._pod_selector
@pod_selector.setter
def pod_selector(self, pod_selector):
"""
Sets the pod_selector of this V1beta1NetworkPolicySpec.
Selects the pods to which this NetworkPolicy object applies. The array of ingress rules is applied to any pods selected by this field. Multiple network policies can select the same set of pods. In this case, the ingress rules for each are combined additively. This field is NOT optional and follows standard label selector semantics. An empty podSelector matches all pods in this namespace.
:param pod_selector: The pod_selector of this V1beta1NetworkPolicySpec.
:type: V1LabelSelector
"""
if pod_selector is None:
raise ValueError("Invalid value for `pod_selector`, must not be `None`")
self._pod_selector = pod_selector
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
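# Example (illustrative sketch; assumes the companion generated models
# V1LabelSelector and V1beta1NetworkPolicyIngressRule from the same package):
#
#   spec = V1beta1NetworkPolicySpec(
#       ingress=[V1beta1NetworkPolicyIngressRule()],
#       pod_selector=V1LabelSelector(match_labels={"role": "db"}))
#   print(spec.to_dict())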
|
{
"content_hash": "9b89cda81fc29d1a939dcd468310189f",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 590,
"avg_line_length": 41.73571428571429,
"alnum_prop": 0.6404244395002567,
"repo_name": "skuda/client-python",
"id": "cc4debca787a4718cc81519e149c8b79730309e3",
"size": "5860",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/client/models/v1beta1_network_policy_spec.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5907789"
},
{
"name": "Shell",
"bytes": "8195"
}
],
"symlink_target": ""
}
|
"""Utility functions for writing Webathena-based APIs in Bottle"""
import base64
import ccaches
import json
import moira
import os
import tempfile
from bottle import request
MOIRA_TIME_FORMAT = "%d-%b-%Y %H:%M:%S"
def webathena(function):
"""
A decorator that loads a Kerberos ticket from the base64 encoded "webathena"
    URL parameter and stores it in a ccache for the duration of request
    processing. This allows programs and libraries such as python-moira to
    authenticate.
Selected code borrowed from davidben's shellinabox example in the Webathena
source tree. https://github.com/davidben/webathena.
"""
def wrapped(*args, **kwargs):
# Extract credential from request
ticket_data = request.query["webathena"]
if not ticket_data:
raise KeyError("Missing Webathena ticket!")
credential = json.loads(base64.b64decode(ticket_data))
with tempfile.NamedTemporaryFile(prefix="webathena_ccache_") as ccache:
# Write credentials to a temporary krb5 ccache
ccache.write(ccaches.make_ccache(credential))
ccache.flush()
os.environ["KRB5CCNAME"] = ccache.name
# Run the inner function while in the with..as; return
return function(*args, **kwargs)
return wrapped
def moira_auth(client_name):
"""
A decorator that opens an authenticated Moira session before the wrapped
function is executed. Goes well with @webathena, above.
"""
def wrapper(function):
def wrapped(*args, **kwargs):
moira.connect()
moira.auth(client_name)
return function(*args, **kwargs)
return wrapped
return wrapper
def json_api(function):
"""
A decorator that automatically JSON-encodes output.
"""
def wrapped(*args, **kwargs):
result = function(*args, **kwargs)
return json.dumps(result)
return wrapped
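# Example usage (illustrative sketch; the route, Moira client name and query
# below are hypothetical):
#
#   from bottle import route
#
#   @route("/lists/<user>")
#   @webathena
#   @moira_auth("mailto")
#   @json_api
#   def lists_of(user):
#       return moira.query("get_lists_of_member", "USER", user)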
|
{
"content_hash": "3823c2d04b39cd1e5b19d34508791251",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 80,
"avg_line_length": 31.983606557377048,
"alnum_prop": 0.6617119425935418,
"repo_name": "btidor/mailto",
"id": "25d22cbd4be6f794b5fb2224b66ff51036a66295",
"size": "1951",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bottle_webathena.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "499"
},
{
"name": "CSS",
"bytes": "924"
},
{
"name": "HTML",
"bytes": "4132"
},
{
"name": "JavaScript",
"bytes": "31516"
},
{
"name": "Python",
"bytes": "12724"
}
],
"symlink_target": ""
}
|
from unittest import mock
from neutron_lib.services.trunk import constants
from neutron.services.trunk.seg_types import validators
from neutron.tests import base
class ValidatorsTestCase(base.BaseTestCase):
def test_add_validator_raises_keyerror_on_redefinition(self):
self.assertRaises(KeyError,
validators.add_validator,
constants.SEGMENTATION_TYPE_VLAN, mock.ANY)
def test_add_validator_add_new_type(self):
validators.add_validator('foo', lambda: None)
self.assertIn('foo', validators._supported)
def test_get_validator(self):
self.assertIsNotNone(validators.get_validator(
constants.SEGMENTATION_TYPE_VLAN))
def test_get_validator_raises_keyerror_on_missing_validator(self):
self.assertRaises(KeyError,
validators.get_validator, 'my_random_seg_type')
|
{
"content_hash": "d916570114286a984f901b60b4d49a5c",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 73,
"avg_line_length": 34.96153846153846,
"alnum_prop": 0.6831683168316832,
"repo_name": "mahak/neutron",
"id": "7dd8b3d79e8f0a304552b2551cd502f71af69832",
"size": "1455",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/services/trunk/seg_types/test_validators.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jinja",
"bytes": "2773"
},
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "15942116"
},
{
"name": "Ruby",
"bytes": "1257"
},
{
"name": "Shell",
"bytes": "83270"
}
],
"symlink_target": ""
}
|
from scrapy import Spider
from scrapy.exceptions import DropItem
from scrapy.conf import settings
import json
import csv
import codecs
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals
from scrapy.exporters import JsonLinesItemExporter
from scrapy.exporters import CsvItemExporter
from datetime import datetime
class JsonLinesExportPipeline(object):
nbLines = 0
nbFiles = 0
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
self.files = {}
def spider_opened(self, spider):
i = datetime.now()
file = codecs.open('%s_items_%s_%s.json' % (spider.name, self.nbFiles, i.strftime('%Y-%m-%dT%H-%M-%S')), 'w+b')
self.files[spider] = file
self.exporter = JsonLinesItemExporter(file, ensure_ascii=False)
self.exporter.start_exporting()
def process_item(self, item, spider):
        if self.nbLines >= 10000:
            # Roll over to a new output file every 10000 items.
            self.nbFiles = self.nbFiles + 1
            self.nbLines = 0
            self.exporter.finish_exporting()
            self.files[spider].close()
            i = datetime.now()
            file = codecs.open('%s_items_%s_%s.json' % (spider.name, self.nbFiles, i.strftime('%Y-%m-%dT%H-%M-%S')), 'w+b')
            self.files[spider] = file
            self.exporter = JsonLinesItemExporter(file, ensure_ascii=False)
            self.exporter.start_exporting()
else:
self.nbLines = self.nbLines + 1
self.exporter.export_item(item)
return item
def spider_closed(self, spider):
self.exporter.finish_exporting()
file = self.files.pop(spider)
file.close()
class CsvExportPipeline(object):
nbLines = 0
nbFiles = 0
def __init__(self):
dispatcher.connect(self.spider_opened, signals.spider_opened)
dispatcher.connect(self.spider_closed, signals.spider_closed)
#self.fields_to_export = [
# 'evo'
#]
self.files = {}
def spider_opened(self, spider):
file = codecs.open('%s_items_%s_%s.csv' % (spider.name, self.nbFiles, datetime.now().strftime('%Y-%m-%dT%H-%M-%S')), 'w+b')
self.files[spider] = file
self.csv_exporter = CsvItemExporter(file, quoting=csv.QUOTE_ALL)
#self.exporter.fields_to_export = ['names','stars','subjects','reviews']
self.csv_exporter.start_exporting()
def process_item(self, item, spider):
        if self.nbLines >= 10000:
            # Roll over to a new output file every 10000 items.
            self.nbFiles = self.nbFiles + 1
            self.nbLines = 0
            self.csv_exporter.finish_exporting()
            self.files[spider].close()
            file = codecs.open('%s_items_%s_%s.csv' % (spider.name, self.nbFiles, datetime.now().strftime('%Y-%m-%dT%H-%M-%S')), 'w+b')
            self.files[spider] = file
            self.csv_exporter = CsvItemExporter(file, quoting=csv.QUOTE_ALL)
            self.csv_exporter.start_exporting()
else:
self.nbLines = self.nbLines + 1
self.csv_exporter.export_item(item)
return item
def spider_closed(self, spider):
self.csv_exporter.finish_exporting()
file = self.files.pop(spider)
file.close()
"""
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
if item['url'] in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(item['url'])
return item
"""
|
{
"content_hash": "7eab700e15eb2babc5754b2fccf2b575",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 135,
"avg_line_length": 33.77777777777778,
"alnum_prop": 0.6148325358851675,
"repo_name": "Gachette/ProjetImmo",
"id": "7d7c5dd0f965634add966bae2c3b91511bffe42a",
"size": "3537",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Scraping/scrapybot/scrapybot/pipelines.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "94110"
}
],
"symlink_target": ""
}
|
"""This example gets all text ads for a given ad group. To add an ad, run
add_text_ads.py.
Tags: AdGroupAdService.get
Api: AdWordsOnly
"""
__author__ = 'api.kwinter@gmail.com (Kevin Winter)'
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import AdWordsClient
PAGE_SIZE = 500
ad_group_id = 'INSERT_AD_GROUP_ID_HERE'
def main(client, ad_group_id):
# Initialize appropriate service.
ad_group_ad_service = client.GetAdGroupAdService(
'https://adwords-sandbox.google.com', 'v201109_1')
# Construct selector and get all ads for a given ad group.
offset = 0
selector = {
'fields': ['Id', 'AdGroupId', 'Status'],
'predicates': [
{
'field': 'AdGroupId',
'operator': 'EQUALS',
'values': [ad_group_id]
},
{
'field': 'AdType',
'operator': 'EQUALS',
'values': ['TEXT_AD']
}
],
'paging': {
'startIndex': str(offset),
'numberResults': str(PAGE_SIZE)
}
}
more_pages = True
while more_pages:
page = ad_group_ad_service.Get(selector)[0]
# Display results.
if 'entries' in page:
for ad in page['entries']:
print ('Ad with id \'%s\', status \'%s\', and of type \'%s\' was found.'
% (ad['ad']['id'], ad['status'], ad['ad']['Ad_Type']))
else:
print 'No ads were found.'
offset += PAGE_SIZE
selector['paging']['startIndex'] = str(offset)
more_pages = offset < int(page['totalNumEntries'])
print
print ('Usage: %s units, %s operations' % (client.GetUnits(),
client.GetOperations()))
if __name__ == '__main__':
# Initialize client object.
client = AdWordsClient(path=os.path.join('..', '..', '..', '..', '..'))
main(client, ad_group_id)
|
{
"content_hash": "7ec4483675849429a775892e56004dbc",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 80,
"avg_line_length": 27.055555555555557,
"alnum_prop": 0.5487679671457906,
"repo_name": "nearlyfreeapps/python-googleadwords",
"id": "9338e0248591504927bc9192339aabd9e2ac442d",
"size": "2566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/adspygoogle/adwords/v201109_1/basic_operations/get_text_ads.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "10581"
},
{
"name": "Python",
"bytes": "1394721"
}
],
"symlink_target": ""
}
|
option_doctestglob = "README.txt"
|
{
"content_hash": "0d73ba7ec3671a38df3d7d7c83fac67a",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 33,
"avg_line_length": 34,
"alnum_prop": 0.7647058823529411,
"repo_name": "RonnyPfannschmidt/iniconfig",
"id": "f122ddde29a26a092d4cc3104d2d7137cdcb6789",
"size": "34",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "testing/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14244"
}
],
"symlink_target": ""
}
|
import sys
from collections import defaultdict
# rasmus libs
from rasmus import util
# compbio libs
from . import fasta, seqlib
from seqlib import *
#=============================================================================
# Alignment functions
def new_align(aln=None):
"""Makes a new alignment object based on the given object
given return
----- ------
dict FastaDict
other other
"""
if aln is None:
return fasta.FastaDict()
elif isinstance(aln, SeqDict):
return type(aln)()
else:
return fasta.FastaDict()
def mapalign(aln, keyfunc=lambda x: x, valfunc=lambda x: x):
"""Maps the keys and values of an alignment"""
aln2 = new_align(aln)
for key, val in aln.iteritems():
aln2[keyfunc(key)] = valfunc(val)
return aln2
def subalign(aln, cols):
"""Returns an alignment with a subset of the columns (cols)"""
return mapalign(aln, valfunc=lambda x: "".join(util.mget(x, cols)))
def remove_empty_columns(aln):
"""
Removes any column from an alignment 'aln' that contains only gaps
A new alignment is returned
"""
ind = []
seqs = aln.values()
for i in range(aln.alignlen()):
for seq in seqs:
if seq[i] != "-":
ind.append(i)
break
return subalign(aln, ind)
def remove_gapped_columns(aln):
"""Removes any column form an alignment 'aln' that contains a gap
A new alignment is returned
"""
cols = zip(* aln.values())
ind = util.find(lambda col: "-" not in col, cols)
return subalign(aln, ind)
def require_nseqs(aln, n):
"""
    Keep only columns with at least 'n' non-gapped sequences
"""
seqs = aln.values()
ind = [i for i in range(aln.alignlen())
if sum(1 for seq in seqs if seq[i] != "-") >= n]
return subalign(aln, ind)
def get_seq_overlap(seq1, seq2):
"""
Count number of overlapping bases between two gapped sequences
"""
overlap = 0
for i in range(len(seq1)):
if seq1[i] != "-" and seq2[i] != "-":
overlap += 1
return overlap
def calc_conservation_string(aln):
"""Returns a string of stars representing the conservation of an alignment"""
percids = calc_conservation(aln)
# find identity positions
identity = ""
for pid in percids:
if pid == 1:
identity += "*"
elif pid > .5:
identity += "."
else:
identity += " "
return identity
def calc_conservation(aln):
"""Returns a list of percent matching in each column of an alignment"""
length = len(aln.values()[0])
seqs = aln.values()
percids = []
# find identity positions
identity = ""
for i in xrange(length):
chars = util.hist_dict(util.cget(seqs, i))
if "-" in chars: del chars["-"]
if len(chars) == 0:
percids.append(0.0)
else:
pid = max(chars.values()) / float(len(aln))
percids.append(pid)
return percids
def print_align(aln, seqwidth = 59, spacing=2, extra=fasta.FastaDict(),
out=sys.stdout, order=None):
"""Pretty print an alignment"""
if order == None:
order = aln.keys()
namewidth = max(map(len, order)) + spacing
def mkname(name, namewidth):
name2 = name[:namewidth]
name2 += " " * (namewidth - len(name2))
return name2
identity = calc_conservation_string(aln)
# print alignment
for i in xrange(0, len(aln.values()[0]), seqwidth):
# print sequences
for name in order:
print >>out, "%s %s" % (mkname(name, namewidth),
aln[name][i:i+seqwidth])
# print extra
for name in extra.keys():
print >>out, "%s %s" % (mkname(name, namewidth),
extra[name][i:i+seqwidth])
# print identity
print >>out, (" "*namewidth) + " " + identity[i:i+seqwidth]
print >>out
def revtranslate_align(aaseqs, dnaseqs, check=False, trim=False):
"""Reverse translates aminoacid alignment into DNA alignment
Must supply original ungapped DNA.
"""
align = new_align(aaseqs)
for name, seq in aaseqs.iteritems():
try:
dna = dnaseqs[name].upper()
dnalen = len(dna)
aalen = sum(int(a != "-") for a in seq)
if len(dna) != aalen * 3:
if trim:
# make dna a multiple of three
dna = dna[:(len(dna) // 3) * 3]
if len(dna) > aalen * 3:
# trim dna
dna = dna[:aalen*3]
else:
# trim peptide to match nucleotide
j = 0
for i in xrange(len(seq)):
if seq[i] != '-':
j += 1
if j > len(dna) // 3:
seq = seq[:i] + "-" * (len(seq) - i)
break
aalen2 = sum(int(a != "-") for a in seq)
assert len(dna) == aalen2 * 3, (
len(dna), aalen2 * 3)
util.logger("trim dna (%d) and pep (%d)" %
(dnalen - len(dna), aalen - aalen2))
else:
# is last residue X?
for i in xrange(len(seq)-1, -1, -1):
if seq[i] == "-":
continue
if seq[i] == "X":
# repair
seq = seq[:i] + "-" * (len(seq)-i)
dna = dna[:-3] #-(len(dna) % 3)]
break
align[name] = revtranslate(seq, dna, check=check)
except TranslateError, e:
raise
return align
#=============================================================================
# four fold degeneracy
def mark_codon_pos(seq, pos=0):
"""
return the codon position for each base in a gapped sequence
codon
ATG
012
gaps are given codon pos -1
Ns are counted as bases
"""
codons = []
for base in seq:
if base != "-":
codons.append(pos)
pos = (pos + 1) % 3
else:
codons.append(-1)
return codons
def make_codon_pos_align(aln):
"""Get the codon position of every base in an alignment"""
def func(seq):
dct = {-1: "-",
0: "0",
1: "1",
2: "2"}
return "".join(util.mget(dct, mark_codon_pos(seq)))
return mapalign(aln, valfunc=func)
def find_aligned_codons(aln):
"""Returns the columns indices of the alignment that represent aligned
codons.
"""
ind = range(aln.alignlen())
# throw out codons with non mod 3 gaps
ind2 = []
for i in range(0, aln.alignlen(), 3):
bad = False
for key, val in aln.iteritems():
codon = val[i:i+3]
if "-" in codon and codon != "---":
bad = True
break
if not bad:
ind2.extend([i, i+1, i+2])
return ind2
def filter_aligned_codons(aln):
"""filters an alignment for only aligned codons"""
    ind = find_aligned_codons(aln)
return subalign(aln, ind)
def find_four_fold(aln):
"""Returns index of all columns in alignment that are completely
fourfold degenerate
Assumes that columns are already filtered for aligned codons
"""
# create peptide alignment
pepAln = mapalign(aln, valfunc=translate)
# find peptide conservation
pepcons = []
pep = []
for i in xrange(pepAln.alignlen()):
# get a column from the peptide alignment
col = [seq[i] for seq in pepAln.itervalues()]
# compute the histogram of the column.
# ignore gaps '-' and non-translated 'X'
hist = util.hist_dict(col)
if "-" in hist:
del hist["-"]
if "X" in hist:
del hist["X"]
# column is conserved if only one AA appears
if len(hist) == 1:
pepcons.append(True)
pep.append(hist.keys()[0])
else:
pepcons.append(False)
pep.append("X")
# find four-fold sites in conserved peptides
ind = []
for i in range(0, len(aln.values()[0]), 3):
# process only those columns that are conserved at the peptide level
if pepcons[i//3]:
degen = AA_DEGEN[pep[i//3]]
for j in range(3):
if degen[j] == 4:
ind.append(i+j)
return ind
def filter_four_fold(aln):
"""returns an alignment of only four-fold degenerate sites from an
alignment of coding sequences
This function performs the following steps:
1. remove all codon columns that don't have 0 or 3 gaps
2. keep all codon columns that code for identical AA
3. if the codon column codes for a 4D AA, then keep its 3rd position
"""
    aln_codons = filter_aligned_codons(aln)
ind = find_four_fold(aln_codons)
return subalign(aln_codons, ind)
def calc_four_fold_dist_matrix(aln):
names = aln.keys()
mat = []
# calc upper triangular
for i in range(len(names)):
mat.append([0.0] * (i+1))
for j in range(i+1, len(names)):
ind = find_four_fold(aln.get([names[i], names[j]]))
mismatches = 0
for k in ind:
if aln[names[i]][k] != aln[names[j]][k]:
mismatches += 1
if len(ind) == 0:
mat[-1].append(1.0)
else:
mat[-1].append(mismatches / float(len(ind)))
# make symmetric
for j in range(len(names)):
for i in range(j):
mat[j][i] = mat[i][j]
return mat
def find_degen(aln):
"""Determine the degeneracy of each column in an alignment"""
    codon_ind = find_aligned_codons(aln)
aln2 = subalign(aln, codon_ind)
pep_aln = mapalign(aln2, valfunc=translate)
pep = pep_aln.values()[0]
identies = calc_conservation(pep_aln)
degens = [-1] * aln.alignlen()
for i in range(0, len(codon_ind), 3):
if pep[i/3] == "X":
continue
degen = AA_DEGEN[pep[i/3]]
if identies[i/3] == 1.0:
for j in range(3):
degens[codon_ind[i+j]] = degen[j]
return degens
def make_degen_str(aln):
"""Returns a string containing the degeneracy for each column
in an alignment
"""
degens = find_degen(aln)
degenmap = {-1: " ",
0: "0",
1: "1",
2: "2",
3: "3",
4: "4"}
return "".join(util.mget(degenmap, degens))
def print_degen(aln, **args):
"""Pretty print an alignment with its degeneracy for each column"""
extra = fasta.FastaDict()
extra["DEGEN"] = make_degen_str(aln)
print_align(aln, extra=extra, **args)
#=============================================================================
# Position Specific Scoring Matrix (PSSM)
def align2pssm(aln, pseudocounts = {}):
pssm = []
denom = float(len(aln)) + sum(pseudocounts.values())
for i in xrange(aln.alignlen()):
freqs = defaultdict(lambda: 0)
for j in xrange(len(aln)):
freqs[aln[j][i]] += 1
for key in pseudocounts:
freqs[key] += pseudocounts[key]
for key in freqs:
freqs[key] = math.log(freqs[key] / denom, 2)
pssm.append(freqs)
return pssm
def pssmSeq(pssm, seq):
score = 0.0
for i in xrange(len(seq)):
score += pssm[i][seq[i]]
return score
#=============================================================================
# Coordinate conversions
#
# Coordinate systems
#
# 1. local
# 01234567
# ATGCTGCG
#
# 2. align
# 012222345567
# ATG---CTG-CG
#
# 3. global
# coordinate on chromosome on positive strand
#
# There should only be two kinds of indexing
# 1. 0-based, end exclusive (local/align coordinates)
# 2. 1-based, end inclusive (global coordinates)
#
class CoordConverter (object):
"""Converts between coordinate systems on a gapped sequence"""
def __init__(self, seq):
self.local2alignLookup = local2align(seq)
self.align2localLookup = align2local(seq)
def local2align(self, i, clamp=False):
if clamp:
return self.local2alignLookup[int(util.clamp(i, 0,
len(self.local2alignLookup)-1))]
else:
return self.local2alignLookup[i]
def align2local(self, i, clamp=False):
if clamp:
return self.align2localLookup[int(util.clamp(i, 0,
len(self.align2localLookup)-1))]
else:
return self.align2localLookup[i]
    def global2local(self, global_coord, start, end, strand):
        """Returns local coordinate in a global region"""
        return global2local(global_coord, start, end, strand)
    def local2global(self, local_coord, start, end, strand):
        """Return global coordinate within a region from a local coordinate"""
        return local2global(local_coord, start, end, strand)
def global2align(self, global_coord, start, end, strand):
local_coord = global2local(global_coord, start, end, strand)
# throw exception for out of bounds
        if local_coord < 0 or \
           local_coord >= len(self.local2alignLookup):
raise Exception("coordinate outside [start, end]")
return self.local2alignLookup[local_coord]
def align2global(self, align_coord, start, end, strand):
local_coord = self.align2localLookup[align_coord]
return local2global(local_coord, start, end, strand)
def local2align(seq):
"""
Returns list of indices of non-gap characters
'ATG---CTG-CG' ==> [0,1,2,6,7,8,10,11]
Used to go from local -> align space
"""
lookup = []
for i in xrange(len(seq)):
if seq[i] == "-": continue
lookup.append(i)
return lookup
def align2local(seq):
"""
Returns list such that
    'ATG---CTG-CG' ==> [0,1,2,2,2,2,3,4,5,5,6,7]
Used to go from align -> local space
"""
i = -1
lookup = []
for c in seq:
if c != "-":
i += 1
lookup.append(i)
return lookup
def global2local(global_coord, start, end, strand):
    """Returns local coordinate in a global region"""
    # swap if strands disagree
    if strand == 1:
        return global_coord - start
    else:
        return end - global_coord
def local2global(local_coord, start, end, strand):
"""Return global coordinate within a region from a local coordinate"""
# swap if strands disagree
if strand == 1:
return local_coord + start
else:
return end - local_coord
def global2align(global_coord, start, end, strand, alignLookup):
local_coord = global2local(global_coord, start, end, strand)
# throw exception for out of bounds
if local_coord < 0 or \
local_coord >= len(alignLookup):
raise Exception("coordinate outside [start, end]")
return alignLookup[local_coord]
def align2global(align_coord, start, end, strand, localLookup):
local_coord = localLookup[align_coord]
return local2global(local_coord, start, end, strand)
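# Worked example (illustrative sketch) using the gapped sequence 'ATG---CTG-CG':
#
#   conv = CoordConverter("ATG---CTG-CG")
#   conv.local2align(3)    # -> 6, the 4th base sits just after the first gap run
#   conv.align2local(7)    # -> 4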
#=============================================================================
# old code
'''
def findAlignCodons(aln):
"""find all columns of aligned codons"""
codonAln = mapalign(aln, valfunc=mark_codon_pos)
cols = map(util.hist_dict, zip(* codonAln.values()))
ind = []
codon = []
gaps = defaultdict(lambda: 0)
for i in range(len(cols)):
if len(cols[i]) == 1:
codon.append(i)
elif len(cols[i]) == 2 and -1 in cols[i]:
for key, val in aln.iteritems():
if val[i] == "-":
gaps[key] += 1
codon.append(i)
else:
codon = []
if len(codon) == 3:
if len(gaps) == 0 or \
util.unique([x % 3 for x in gaps.values()]) == [0]:
ind.extend(codon)
codon = []
for key in gaps:
gaps[key] = 0
return ind
def findFourFold(aln):
"""Returns index of all columns in alignment that are completely
fourfold degenerate
"""
aln = filterAlignCodons(aln)
pepAln = mapalign(aln, valfunc=translate)
pep = pepAln.values()[0]
# pep conservation
pepcons = []
for i in xrange(pepAln.alignlen()):
col = [seq[i] for seq in pepAln.itervalues()]
hist = util.hist_dict(col)
if "-" in hist:
del hist["-"]
if "X" in hist:
del hist["X"]
pepcons.append(len(hist) == 1)
ind = []
for i in range(0, len(aln.values()[0]), 3):
if pepcons[i//3]:
degen = AA_DEGEN[pep[i//3]]
for j in range(3):
if degen[j] == 4:
ind.append(i+j)
return ind
'''
|
{
"content_hash": "6498c65ea5ca8628c97ed56580b9b8d7",
"timestamp": "",
"source": "github",
"line_count": 687,
"max_line_length": 81,
"avg_line_length": 25.922852983988356,
"alnum_prop": 0.5104160817564153,
"repo_name": "mdrasmus/argweaver",
"id": "7a27b61a7a4bc50dc6d4d451249a1d4116ea106a",
"size": "17878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "argweaver/deps/compbio/alignlib.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "33565"
},
{
"name": "C++",
"bytes": "1600820"
},
{
"name": "CSS",
"bytes": "1085"
},
{
"name": "HTML",
"bytes": "2742"
},
{
"name": "JavaScript",
"bytes": "101979"
},
{
"name": "Makefile",
"bytes": "4330"
},
{
"name": "Python",
"bytes": "1105013"
},
{
"name": "Shell",
"bytes": "4816"
}
],
"symlink_target": ""
}
|
import urwid
import re
class SearchResultsLinesBuilder(object):
def __init__(self):
self.lines = []
self.current_filename = ''
def add_line(self, text):
if self.filter_out(text):
return
self.parse_text(text)
line = self.format_line(text)
self.decorate_and_append(line)
return self
def decorate_and_append(self, line, decoration='normal'):
decorated = self.decorate(line, decoration)
decorated.filename = self.current_filename
self.lines.append(decorated)
def filter_out(self, text):
if 'Binary' in text:
return True
return False
def parse_text(self, text):
filename_match = re.match('^([a-zA-Z_\/.]+):', text)
if filename_match:
this_filename = filename_match.group(1)
if this_filename != self.current_filename:
self.decorate_and_append("")
self.decorate_and_append(this_filename, 'filename')
self.current_filename = filename_match.group(1)
def format_line(self, text):
filecontents_match = re.match('^[a-zA-Z_\/.]+:\s*(.*)$', text)
if filecontents_match:
return filecontents_match.group(1)
return text
# return 'foo'
def build(self):
lines = self.lines
self.lines = []
return lines
def decorate(self, text, decoration='normal'):
line = urwid.Text(text)
new_map = lambda attr: urwid.AttrMap(line, attr, 'reveal focus')
return new_map(decoration)
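# Example usage (illustrative sketch; the input mimics "grep" style
# "filename: matching line" output):
#
#   builder = SearchResultsLinesBuilder()
#   builder.add_line("src/app.py: def main():")
#   builder.add_line("src/app.py:     run()")
#   widgets = builder.build()   # urwid.AttrMap widgets ready for a ListBox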
|
{
"content_hash": "35bbc0db418b8c040f933e82990881d7",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 72,
"avg_line_length": 29.962264150943398,
"alnum_prop": 0.5793450881612091,
"repo_name": "axs221/scout",
"id": "c366b2700fe3ec40056456ec9f57be0fe6f755f2",
"size": "1606",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/searchResultsLinesBuilder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11182"
}
],
"symlink_target": ""
}
|
from mhvdb2 import app
app.run()
|
{
"content_hash": "9b94ca8c299e04412bc802b9b030dbb0",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 22,
"avg_line_length": 16.5,
"alnum_prop": 0.7575757575757576,
"repo_name": "makehackvoid/mhvdb2",
"id": "f74d32d1ef804916556b035fe66052485a4b75a8",
"size": "33",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "runserver.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "535"
},
{
"name": "Dockerfile",
"bytes": "1347"
},
{
"name": "HTML",
"bytes": "18390"
},
{
"name": "Makefile",
"bytes": "297"
},
{
"name": "Python",
"bytes": "42057"
}
],
"symlink_target": ""
}
|
"""
Test breakpoint command with AT_comp_dir set to symbolic link.
"""
from __future__ import print_function
import os
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
_EXE_NAME = 'CompDirSymLink' # Must match Makefile
_SRC_FILE = 'relative.cpp'
_COMP_DIR_SYM_LINK_PROP = 'plugin.symbol-file.dwarf.comp-dir-symlink-paths'
class CompDirSymLinkTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break inside main().
self.line = line_number(
os.path.join(self.getSourceDir(), "main.cpp"),
'// Set break point at this line.')
@skipIf(hostoslist=["windows"])
def test_symlink_paths_set(self):
pwd_symlink = self.create_src_symlink()
self.doBuild(pwd_symlink)
self.runCmd(
"settings set %s %s" %
(_COMP_DIR_SYM_LINK_PROP, pwd_symlink))
src_path = self.getBuildArtifact(_SRC_FILE)
lldbutil.run_break_set_by_file_and_line(self, src_path, self.line)
@skipIf(hostoslist=no_match(["linux"]))
def test_symlink_paths_set_procselfcwd(self):
os.chdir(self.getBuildDir())
pwd_symlink = '/proc/self/cwd'
self.doBuild(pwd_symlink)
self.runCmd(
"settings set %s %s" %
(_COMP_DIR_SYM_LINK_PROP, pwd_symlink))
src_path = self.getBuildArtifact(_SRC_FILE)
# /proc/self/cwd points to a realpath form of current directory.
src_path = os.path.realpath(src_path)
lldbutil.run_break_set_by_file_and_line(self, src_path, self.line)
@skipIf(hostoslist=["windows"])
def test_symlink_paths_unset(self):
pwd_symlink = self.create_src_symlink()
self.doBuild(pwd_symlink)
self.runCmd('settings clear ' + _COMP_DIR_SYM_LINK_PROP)
src_path = self.getBuildArtifact(_SRC_FILE)
self.assertRaises(
AssertionError,
lldbutil.run_break_set_by_file_and_line,
self,
src_path,
self.line)
def create_src_symlink(self):
pwd_symlink = self.getBuildArtifact('pwd_symlink')
if os.path.exists(pwd_symlink):
os.unlink(pwd_symlink)
os.symlink(self.getBuildDir(), pwd_symlink)
self.addTearDownHook(lambda: os.remove(pwd_symlink))
return pwd_symlink
def doBuild(self, pwd_symlink):
self.build(None, None, {'PWD': pwd_symlink})
exe = self.getBuildArtifact(_EXE_NAME)
self.runCmd('file ' + exe, CURRENT_EXECUTABLE_SET)
|
{
"content_hash": "de55a3fafe0a6a7a71b2896be962160a",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 75,
"avg_line_length": 33.848101265822784,
"alnum_prop": 0.6278982797307404,
"repo_name": "llvm-mirror/lldb",
"id": "5b2c35fe5f76b19cfaa46f824a5bb75ae670798d",
"size": "2674",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "packages/Python/lldbsuite/test/functionalities/breakpoint/comp_dir_symlink/TestCompDirSymLink.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "131618"
},
{
"name": "C",
"bytes": "195293"
},
{
"name": "C++",
"bytes": "23346708"
},
{
"name": "CMake",
"bytes": "167302"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "50396"
},
{
"name": "Objective-C",
"bytes": "106956"
},
{
"name": "Objective-C++",
"bytes": "24806"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "3669886"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
}
|
"""
Implement a cost-proportional method of setting variable-specific rho values
for the progressive hedging algorithm only for first stage variables in a two
stage stochastic problem formulation. Automatically retrieve cost parameters
from the active objective function for those variables.
See CP(*) strategy described in Watson, J. P., & Woodruff, D. L. (2011).
Progressive hedging innovations for a class of stochastic mixed-integer
resource allocation problems. Computational Management Science.
Note, sympy is a pre-requisite. Install via `sudo pip install sympy`
Implementation notes-------------------------------------------------------
(Benjamin): This script is based on rhosetter.py, but modified to set Rho
values only for the variables contained in the first stage costs Expression.
    For medium to large scale problems setting Rho for every variable takes up
a significant amount of time, both in parsing the objective function with
sympify and in going through the scenario tree looking for the variable.
The progressive hedging algorithm only requires Rho values to be set (or
to have a default value) for variables located in branch nodes.
In this bilevel power grid planning problem example, first stage costs
include all investment in generation and transmission, while second stage
costs include operational expenses, such as variable O&M and fuel costs.
Therefore, Rho values must only be set for investment variables, which are
located in the root node. This sped up the rho setting process for a small-
medium sized system scale problem (the Chile grid) by a factor of 10. For
larger systems, the benefit increases.
TODO: Implement this in a more generalized way in order to support multistage
optimizations.
"""
import StringIO
from re import findall
from sympy import sympify
def ph_rhosetter_callback(ph, scenario_tree, scenario):
# This Rho coefficient is set to 1.0 to implement the CP(1.0) strategy
# that Watson & Woodruff report as a good trade off between convergence
# to the extensive form optimum and number of PH iterations.
rho_coefficient = 1.0
scenario_instance = scenario._instance
symbol_map = scenario_instance._ScenarioTreeSymbolMap
# This component name must match the expression used for first stage
# costs defined in the ReferenceModel.
FSCostsExpr = scenario_instance.find_component("InvestmentCost")
string_out = StringIO.StringIO()
FSCostsExpr.expr.to_string(ostream=string_out)
FSCostsExpr_as_str = string_out.getvalue()
string_out.close()
# Find indexed variables like BuildCap[2030, CA_LADWP] using a regular
# expression. See python documentation. The first part (?<=[^a-zA-Z])
    # ensures the search pattern is not preceded by a letter. The regex returns
    # two parts because I used two sets of parentheses. I don't care about the
    # second group that returns the indexed bits, just the larger part
pattern = "(?<=[^a-zA-Z])([a-zA-Z][a-zA-Z_0-9]*(\[[^]]*\])?)"
component_by_alias = {}
variable_list = findall(pattern, FSCostsExpr_as_str)
for (cname, index_as_str) in variable_list:
component = scenario_instance.find_component(cname)
alias = "x" + str(id(component))
component_by_alias[alias] = component
FSCostsExpr_as_str = FSCostsExpr_as_str.replace(cname, alias)
# After the variables+indexes have clean names,
# parse the equation with sympify
FSCostsExpr_parsed = sympify(FSCostsExpr_as_str)
for (alias, component) in component_by_alias.iteritems():
variable_id = symbol_map.getSymbol(component)
coefficient = FSCostsExpr_parsed.coeff(alias)
set_rho = False
# Replace the for loop in the rhosetter.py script for a single
# if statement to only set variables at the root node.
root_node = scenario_tree.findRootNode()
if variable_id in root_node._standard_variable_ids:
            ph.setRhoOneScenario(
                root_node,
                scenario,
                variable_id,
                coefficient * rho_coefficient)
            set_rho = True
        if not set_rho:
            print("Warning! Could not find tree node for variable {}; rho not set.".format(component.cname()))
|
{
"content_hash": "3fe075dc5066b8614593baa8ef87e3db",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 110,
"avg_line_length": 45.294736842105266,
"alnum_prop": 0.7125261445503137,
"repo_name": "OCM-Lab-PUC/switch-chile",
"id": "82f1b276208b0680329f02b9620be7846e2d0b2d",
"size": "4440",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/3zone_toy_stochastic_PySP/rhosetter-FS-only.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "644965"
},
{
"name": "Shell",
"bytes": "148"
}
],
"symlink_target": ""
}
|
from datetime import datetime
import turtle
def ensure_tuple(value):
if not isinstance(value, tuple):
value = (value, value)
return value
def grow(size, angle, depth, size_decrease = None, angle_decrease = None, worker = None):
size = ensure_tuple(size)
angle = ensure_tuple(angle)
if angle_decrease is None:
angle_decrease = lambda a: a
if size_decrease is None:
size_decrease = lambda x: (x[0]-1, x[1]-1)
if worker is None:
worker = turtle.Turtle()
if min(size) > depth:
worker.right(angle[1])
worker.forward(size[1])
grow(size_decrease(size), angle_decrease(angle), depth,\
size_decrease, angle_decrease, worker)
worker.left(180)
worker.forward(size[1])
worker.right(180 - sum(angle))
worker.forward(size[0])
grow(size_decrease(size), angle_decrease(angle), depth,\
size_decrease, angle_decrease, worker)
worker.left(180)
worker.forward(size[0])
worker.left(180 - angle[0])
def circle(n, step, action=None, worker=None):
if worker is None:
worker = turtle.Turtle()
pen = worker.pen()
angle = +1
if n < 0:
n = -1*n
angle = -1
for i in range(n):
worker.forward(step);
worker.right(angle*360/n)
if action is not None:
action(worker, i)
worker.pen(pen)
def change(worker, i, one, two):
if i % one == 0:
worker.width(worker.width() + 1)
if i % two == 0:
worker.pen(pendown = not worker.pen()['pendown'])
def staircase(worker=None):
    if worker is None:
        worker = turtle.Turtle()
    for i in range(100):
        worker.forward(1*((i+1)**1.2))
        worker.right(89)
def line(lengths, pendowns=True, widths=None, worker=None):
if worker is None:
worker = turtle.Turtle()
if not isinstance(lengths, tuple):
lengths = (lengths,)
if not isinstance(pendowns, tuple):
pendowns = (pendowns,)*len(lengths)
width = worker.width()
if widths is None:
widths = width
if not isinstance(widths, tuple):
widths = (widths,)*len(lengths)
length = 0
pen = worker.pen()
for index in range(len(lengths)):
length += lengths[index]
worker.pen(pendown=pendowns[index], pensize=widths[index])
worker.forward(lengths[index])
worker.penup()
worker.backward(length)
worker.pen(pen)
def clock(worker=None):
if worker is None:
worker = turtle.Turtle()
worker.right(3)
time = datetime.time(datetime.now())
for i in range(60):
worker.penup()
if i % 5 == 0:
worker.left(93)
line(20, worker = worker)
worker.right(93)
if i == int((time.hour % 12)*5 + 5*time.minute/60 + 0.5):
worker.right(87)
line((25, 40), (False, True), 3, worker = worker)
worker.left(87)
if i == time.minute:
worker.right(87)
line((10, 56), (False, True), 2, worker = worker)
worker.left(87)
if i == time.second:
worker.right(87)
line((5, 62), (False, True), 1, worker = worker)
worker.left(87)
worker.forward(6)
worker.right(6)
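# Example (illustrative sketch): draw an analog clock face for the current time
# and keep the window open until it is closed:
#
#   clock()
#   turtle.done()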
def spiral_new(worker=None):
    if worker is None:
        worker = turtle.Turtle()
    worker.color("red")
    heading = 0  # running heading, decreased after each arc
    for i in range(12):
        if i % 2 == 0:
            for j in range(10):
                worker.forward(5)
                worker.right(10)
            heading -= 40
        else:
            for j in range(5):
                worker.forward(5)
                worker.right(10)
            heading -= 20
        worker.setheading(heading)
        worker.penup()
        worker.goto(0, 0)
        worker.pendown()
def spiral_old(worker=None):
    if worker is None:
        worker = turtle.Turtle()
    for i in range(12):
        worker.color("red")
        if i % 2 == 0:
            for j in range(10):
                worker.forward(5)
                worker.right(10)
            for j in range(10):
                worker.left(10)
                worker.backward(5)
            worker.right(40)
        else:
            for j in range(5):
                worker.forward(5)
                worker.right(10)
            for j in range(5):
                worker.left(10)
                worker.backward(5)
            worker.right(20)
if __name__ == '__main__':
# demonstration of some complex patterns:
# switch angles each step, decrease size in half every second step
grow((128, 128, 10), # size of branches and hidden parameter (step count)
(30, 0), # angles (to be switched each step)
0, # when step counter reaches 0, turtle will stop
lambda x: (x[0]/(x[2] % 2 and [2] or [1])[0],
x[1]/(x[2] % 2 and [2] or [1])[0],
x[2] - 1),
lambda a: (a[1], a[0]))
alex = turtle.Turtle()
for i in range(10):
alex.left(36)
circle(36*2, 10, lambda turtle, i: change(turtle, i, 4, 3), worker=alex)
circle(-36*2, 10, lambda turtle, i: change(turtle, i, 4, 3), worker=alex)
|
{
"content_hash": "defdea79a0166ee0fc227ba7b127b7e6",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 89,
"avg_line_length": 29.05325443786982,
"alnum_prop": 0.5142566191446029,
"repo_name": "sukhmel/linux.scripts",
"id": "0d05a15dde9fb527b8e9239281c3e8ae4b9873f7",
"size": "4930",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/turtle_fractal.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "23453"
},
{
"name": "Shell",
"bytes": "10087"
},
{
"name": "VimL",
"bytes": "8000"
}
],
"symlink_target": ""
}
|
import random
import json
import sqlite3
import string
import os
import re
class database(object):
def __init__(self,dbase,fpath):
self.database = dbase
self.filepath = fpath
if not os.path.exists(self.database):
self.con = sqlite3.connect(self.database)
query = "create table bookmarks(id integer primary key autoincrement,folder varchar,name varchar,url varchar,date integer);"
self.sql(query)
SQL_FTS = "CREATE VIRTUAL TABLE search_index USING fts4(id INT, content);"
self.sql(SQL_FTS)
self.import_from_file()
self.con.commit()
else:
self.con = sqlite3.connect(self.database)
def _extract_data(self):
#path = "/home/alphawing/.config/chromium/Default/Bookmarks"
f = open(self.filepath,'rU')
a = json.load(f)
b = ''
rt = 'roots'
#f = open("./data/bookmark_data",'wb')
def recfunc(children,parent,s):
for item in children:
if item['type'] == 'url':
name = item['name']
url = item['url']
date = int(item['date_added'])
parent = parent
s.append((parent,name,url,date))
#f.write(s.encode('utf-8'))
elif item['type'] == 'folder':
recfunc(item['children'],item['name'],s)
bm = []
recfunc(a[rt]['bookmark_bar']['children'],'bookmark_bar',bm)
return bm
def _process_data(self,inp):
random.shuffle(inp)
tag = []
clas = []
data = []
punct = set(string.punctuation)
for tup in inp:
z = ''.join(x if x not in punct else " " for x in tup[0])
z = re.sub(r'[ ]+',r' ',z)
tag.append(z)
            clas.append(tup[1])
f = open("./data/bookmark_features","w")
f.write("\n".join(tag).encode("utf8"))
f = open("./data/bookmark_classes","w")
f.write("\n".join(clas).encode("utf8"))
def import_from_file(self):
data = self._extract_data()
query = "insert into bookmarks(folder,name,url,date) values(?,?,?,?)"
SQL_POPULATE = "insert into search_index (id,content) select id,folder || ' ' || name || ' ' || url from bookmarks"
for tup in data:
self.sql(query,tup)
#self.con.execute("insert into bookmarks(folder,name,url,date) values(?,?,?,?)",tup)
self.sql(SQL_POPULATE)
a = self.sqlselect("select * from search_index")
self.con.commit()
def get_features(self):
query = "select name ||' '|| url , folder from bookmarks"
a = self.sqlselect(query)
self._process_data(a)
def sql(self,SQL,param = None):
if param:
self.con.execute(SQL,param)
else:
self.con.execute(SQL)
def sqlselect(self,SQL,param = None):
if param:
a = self.con.execute(SQL,param).fetchall()
else:
a = self.con.execute(SQL).fetchall()
return a
def add(self,tup):
query = "insert into bookmarks(folder,name,url,date) values(?,?,?,?)"
SQL_POPULATE = "insert into search_index (id,content) select id,folder || ' ' || name || ' ' || url from bookmarks where date = ?"
self.sql(query,tup)
self.sql(SQL_POPULATE,(tup[3],))
def delete(self,tup = None):
query = "delete from bookmarks where name = ? and date = ?"
self.sql(query,tup)
def search(self,text):
SEARCH_FTS = "SELECT * FROM bookmarks WHERE id IN ( SELECT id FROM search_index WHERE content MATCH :Content)ORDER BY name "
return self.sqlselect(SEARCH_FTS,dict(Content = text))
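# Example usage (illustrative sketch; the Chromium bookmarks path varies per
# platform and profile):
#
#   db = database('bookmarks.sqlite',
#                 os.path.expanduser('~/.config/chromium/Default/Bookmarks'))
#   for row in db.search('python'):
#       print(row)   # (id, folder, name, url, date)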
|
{
"content_hash": "3ff87c910af1a6e7f0024e40ba20f26f",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 132,
"avg_line_length": 22.116438356164384,
"alnum_prop": 0.6401362650975534,
"repo_name": "alphawing/SmartBookmarks",
"id": "b150e67f33a45067f092490dbcfc4650793ad732",
"size": "3229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SmartBookmarks/bookmark_db.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "22584"
}
],
"symlink_target": ""
}
|
{
'name': 'Indian - Accounting',
'version': '1.0',
'description': """
Indian Accounting: Chart of Account.
====================================
Indian accounting chart and localization.
OpenERP allows Indian accounting to be managed by providing two formats of chart of accounts: Indian Chart of Accounts - Standard and Indian Chart of Accounts - Schedule VI.
Note: Schedule VI has been revised by the MCA and applies to all balance sheets prepared after
31st March, 2011. The revised format does away with the earlier two balance sheet format options;
only the vertical format is now permitted, which is supported by OpenERP.
""",
'author': ['OpenERP SA'],
'category': 'Localization/Account Charts',
'depends': [
'account',
'account_chart'
],
'demo': [],
'data': [
'l10n_in_tax_code_template.xml',
'l10n_in_standard_chart.xml',
'l10n_in_standard_tax_template.xml',
'l10n_in_schedule6_chart.xml',
'l10n_in_schedule6_tax_template.xml',
'l10n_in_wizard.xml',
],
'auto_install': False,
'installable': True,
'images': ['images/config_chart_l10n_in.jpeg','images/l10n_in_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
{
"content_hash": "1f4a655ef4825d47e50caa63a0394364",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 172,
"avg_line_length": 36.25714285714286,
"alnum_prop": 0.6493301812450749,
"repo_name": "diogocs1/comps",
"id": "8dd815022dd18650e7cb9e3f3e37aae573a5139d",
"size": "2248",
"binary": false,
"copies": "82",
"ref": "refs/heads/master",
"path": "web/addons/l10n_in/__openerp__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "701"
},
{
"name": "CSS",
"bytes": "856533"
},
{
"name": "HTML",
"bytes": "299671"
},
{
"name": "Java",
"bytes": "620166"
},
{
"name": "JavaScript",
"bytes": "5844302"
},
{
"name": "Makefile",
"bytes": "21002"
},
{
"name": "PHP",
"bytes": "14259"
},
{
"name": "Python",
"bytes": "10647376"
},
{
"name": "Ruby",
"bytes": "220"
},
{
"name": "Shell",
"bytes": "17746"
},
{
"name": "XSLT",
"bytes": "120278"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('blog', '0003_remove_post_content'),
]
operations = [
migrations.AddField(
model_name='post',
name='content',
field=models.TextField(default=django.utils.timezone.now),
preserve_default=False,
),
]
|
{
"content_hash": "45de5ff5bc1505f82734538634077b61",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 70,
"avg_line_length": 22.9,
"alnum_prop": 0.6135371179039302,
"repo_name": "takdeerkaur/takdeer.ca",
"id": "189632a96af011d5ec59fe029d987bedcde271ea",
"size": "531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blog/migrations/0004_post_content.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "128073"
},
{
"name": "HTML",
"bytes": "28614"
},
{
"name": "JavaScript",
"bytes": "43232"
},
{
"name": "PHP",
"bytes": "1076"
}
],
"symlink_target": ""
}
|
import itertools
import operator
import sys
if sys.version_info[0] != 3:
from functools import reduce
from functools import wraps
from peewee import *
from playhouse.tests.base import compiler
from playhouse.tests.base import database_initializer
from playhouse.tests.base import log_console
from playhouse.tests.base import ModelTestCase
from playhouse.tests.base import PeeweeTestCase
from playhouse.tests.base import skip_unless
from playhouse.tests.base import test_db
from playhouse.tests.models import *
compound_db = database_initializer.get_in_memory_database()
class CompoundBase(Model):
class Meta:
database = compound_db
class Alpha(CompoundBase):
alpha = IntegerField()
class Beta(CompoundBase):
beta = IntegerField()
other = IntegerField(default=0)
class Gamma(CompoundBase):
gamma = IntegerField()
other = IntegerField(default=1)
class TestCompoundSelectSQL(PeeweeTestCase):
def setUp(self):
super(TestCompoundSelectSQL, self).setUp()
compound_db.compound_select_parentheses = False # Restore default.
self.a1 = Alpha.select(Alpha.alpha).where(Alpha.alpha < 2)
self.a2 = Alpha.select(Alpha.alpha).where(Alpha.alpha > 5)
self.b1 = Beta.select(Beta.beta).where(Beta.beta < 3)
self.b2 = Beta.select(Beta.beta).where(Beta.beta > 4)
def test_simple_sql(self):
lhs = Alpha.select(Alpha.alpha)
rhs = Beta.select(Beta.beta)
sql, params = (lhs | rhs).sql()
self.assertEqual(sql, (
'SELECT "t1"."alpha" FROM "alpha" AS t1 UNION '
'SELECT "t2"."beta" FROM "beta" AS t2'))
sql, params = (
Alpha.select(Alpha.alpha) |
Beta.select(Beta.beta) |
Gamma.select(Gamma.gamma)).sql()
self.assertEqual(sql, (
'SELECT "t1"."alpha" FROM "alpha" AS t1 UNION '
'SELECT "t2"."beta" FROM "beta" AS t2 UNION '
'SELECT "t3"."gamma" FROM "gamma" AS t3'))
sql, params = (
Alpha.select(Alpha.alpha) |
(Beta.select(Beta.beta) |
Gamma.select(Gamma.gamma))).sql()
self.assertEqual(sql, (
'SELECT "t3"."alpha" FROM "alpha" AS t3 UNION '
'SELECT "t1"."beta" FROM "beta" AS t1 UNION '
'SELECT "t2"."gamma" FROM "gamma" AS t2'))
def test_simple_same_model(self):
queries = [Alpha.select(Alpha.alpha) for i in range(3)]
lhs = queries[0] | queries[1]
compound = lhs | queries[2]
sql, params = compound.sql()
self.assertEqual(sql, (
'SELECT "t1"."alpha" FROM "alpha" AS t1 UNION '
'SELECT "t2"."alpha" FROM "alpha" AS t2 UNION '
'SELECT "t3"."alpha" FROM "alpha" AS t3'))
lhs = queries[0]
compound = lhs | (queries[1] | queries[2])
sql, params = compound.sql()
self.assertEqual(sql, (
'SELECT "t3"."alpha" FROM "alpha" AS t3 UNION '
'SELECT "t1"."alpha" FROM "alpha" AS t1 UNION '
'SELECT "t2"."alpha" FROM "alpha" AS t2'))
def test_where_clauses(self):
sql, params = (self.a1 | self.a2).sql()
self.assertEqual(sql, (
'SELECT "t1"."alpha" FROM "alpha" AS t1 WHERE ("t1"."alpha" < ?) '
'UNION '
'SELECT "t2"."alpha" FROM "alpha" AS t2 WHERE ("t2"."alpha" > ?)'))
self.assertEqual(params, [2, 5])
sql, params = (self.a1 | self.b1).sql()
self.assertEqual(sql, (
'SELECT "t1"."alpha" FROM "alpha" AS t1 WHERE ("t1"."alpha" < ?) '
'UNION '
'SELECT "t2"."beta" FROM "beta" AS t2 WHERE ("t2"."beta" < ?)'))
self.assertEqual(params, [2, 3])
sql, params = (self.a1 | self.b1 | self.a2 | self.b2).sql()
self.assertEqual(sql, (
'SELECT "t1"."alpha" FROM "alpha" AS t1 WHERE ("t1"."alpha" < ?) '
'UNION '
'SELECT "t2"."beta" FROM "beta" AS t2 WHERE ("t2"."beta" < ?) '
'UNION '
'SELECT "t4"."alpha" FROM "alpha" AS t4 WHERE ("t4"."alpha" > ?) '
'UNION '
'SELECT "t3"."beta" FROM "beta" AS t3 WHERE ("t3"."beta" > ?)'))
self.assertEqual(params, [2, 3, 5, 4])
def test_outer_limit(self):
sql, params = (self.a1 | self.a2).limit(3).sql()
self.assertEqual(sql, (
'SELECT "t1"."alpha" FROM "alpha" AS t1 WHERE ("t1"."alpha" < ?) '
'UNION '
'SELECT "t2"."alpha" FROM "alpha" AS t2 WHERE ("t2"."alpha" > ?) '
'LIMIT 3'))
def test_union_in_from(self):
compound = (self.a1 | self.a2).alias('cq')
sql, params = Alpha.select(compound.c.alpha).from_(compound).sql()
self.assertEqual(sql, (
'SELECT "cq"."alpha" FROM ('
'SELECT "t1"."alpha" FROM "alpha" AS t1 WHERE ("t1"."alpha" < ?) '
'UNION '
'SELECT "t2"."alpha" FROM "alpha" AS t2 WHERE ("t2"."alpha" > ?)'
') AS cq'))
compound = (self.a1 | self.b1 | self.b2).alias('cq')
sql, params = Alpha.select(SQL('1')).from_(compound).sql()
self.assertEqual(sql, (
'SELECT 1 FROM ('
'SELECT "t1"."alpha" FROM "alpha" AS t1 WHERE ("t1"."alpha" < ?) '
'UNION '
'SELECT "t2"."beta" FROM "beta" AS t2 WHERE ("t2"."beta" < ?) '
'UNION '
'SELECT "t3"."beta" FROM "beta" AS t3 WHERE ("t3"."beta" > ?)'
') AS cq'))
self.assertEqual(params, [2, 3, 4])
def test_parentheses(self):
compound_db.compound_select_parentheses = True
sql, params = (self.a1 | self.a2).sql()
self.assertEqual(sql, (
'(SELECT "t1"."alpha" FROM "alpha" AS t1 '
'WHERE ("t1"."alpha" < ?)) '
'UNION '
'(SELECT "t2"."alpha" FROM "alpha" AS t2 '
'WHERE ("t2"."alpha" > ?))'))
self.assertEqual(params, [2, 5])
def test_multiple_with_parentheses(self):
compound_db.compound_select_parentheses = True
queries = [Alpha.select(Alpha.alpha) for i in range(3)]
lhs = queries[0] | queries[1]
compound = lhs | queries[2]
sql, params = compound.sql()
self.assertEqual(sql, (
'((SELECT "t1"."alpha" FROM "alpha" AS t1) UNION '
'(SELECT "t2"."alpha" FROM "alpha" AS t2)) UNION '
'(SELECT "t3"."alpha" FROM "alpha" AS t3)'))
lhs = queries[0]
compound = lhs | (queries[1] | queries[2])
sql, params = compound.sql()
self.assertEqual(sql, (
'(SELECT "t3"."alpha" FROM "alpha" AS t3) UNION '
'((SELECT "t1"."alpha" FROM "alpha" AS t1) UNION '
'(SELECT "t2"."alpha" FROM "alpha" AS t2))'))
def test_inner_limit(self):
compound_db.compound_select_parentheses = True
a1 = Alpha.select(Alpha.alpha).where(Alpha.alpha < 2).limit(2)
a2 = Alpha.select(Alpha.alpha).where(Alpha.alpha > 5).limit(4)
sql, params = (a1 | a2).limit(3).sql()
self.assertEqual(sql, (
'(SELECT "t1"."alpha" FROM "alpha" AS t1 WHERE ("t1"."alpha" < ?) '
'LIMIT 2) '
'UNION '
'(SELECT "t2"."alpha" FROM "alpha" AS t2 WHERE ("t2"."alpha" > ?) '
'LIMIT 4) '
'LIMIT 3'))
def test_union_subquery(self):
union = (Alpha.select(Alpha.alpha) |
Beta.select(Beta.beta))
query = Alpha.select().where(Alpha.alpha << union)
sql, params = query.sql()
self.assertEqual(sql, (
'SELECT "t1"."id", "t1"."alpha" '
'FROM "alpha" AS t1 WHERE ("t1"."alpha" IN ('
'SELECT "t1"."alpha" FROM "alpha" AS t1 '
'UNION '
'SELECT "t2"."beta" FROM "beta" AS t2))'))
class TestCompoundSelectQueries(ModelTestCase):
requires = [User, UniqueModel, OrderedModel]
# User -> username, UniqueModel -> name, OrderedModel -> title
test_values = {
User.username: ['a', 'b', 'c', 'd'],
OrderedModel.title: ['a', 'c', 'e'],
UniqueModel.name: ['b', 'd', 'e'],
}
def setUp(self):
super(TestCompoundSelectQueries, self).setUp()
for field, values in self.test_values.items():
for value in values:
field.model_class.create(**{field.name: value})
def requires_op(op):
def decorator(fn):
@wraps(fn)
def inner(self):
if op in test_db.compound_operations:
return fn(self)
else:
log_console('"%s" not supported, skipping %s' %
(op, fn.__name__))
return inner
return decorator
def assertValues(self, query, expected):
self.assertEqual(sorted(query.tuples()),
[(x,) for x in sorted(expected)])
def assertPermutations(self, op, expected):
fields = {
User: User.username,
UniqueModel: UniqueModel.name,
OrderedModel: OrderedModel.title,
}
for key in itertools.permutations(fields.keys(), 2):
if key in expected:
left, right = key
query = op(left.select(fields[left]).order_by(),
right.select(fields[right]).order_by())
# Ensure the sorted tuples returned from the query are equal
# to the sorted values we expected for this combination.
self.assertValues(query, expected[key])
@requires_op('UNION')
def test_union(self):
all_letters = ['a', 'b', 'c', 'd', 'e']
self.assertPermutations(operator.or_, {
(User, UniqueModel): all_letters,
(User, OrderedModel): all_letters,
(UniqueModel, User): all_letters,
(UniqueModel, OrderedModel): all_letters,
(OrderedModel, User): all_letters,
(OrderedModel, UniqueModel): all_letters,
})
@requires_op('UNION ALL')
    def test_union_all(self):
all_letters = ['a', 'b', 'c', 'd', 'e']
users = User.select(User.username)
uniques = UniqueModel.select(UniqueModel.name)
query = users.union_all(uniques)
results = [row[0] for row in query.tuples()]
self.assertEqual(sorted(results), ['a', 'b', 'b', 'c', 'd', 'd', 'e'])
@requires_op('UNION')
def test_union_from(self):
uq = (User
.select(User.username.alias('name'))
.where(User.username << ['a', 'b', 'd']))
oq = (OrderedModel
.select(OrderedModel.title.alias('name'))
.where(OrderedModel.title << ['a', 'b'])
.order_by())
iq = (UniqueModel
.select(UniqueModel.name.alias('name'))
.where(UniqueModel.name << ['c', 'd']))
union_q = (uq | oq | iq).alias('union_q')
query = (User
.select(union_q.c.name)
.from_(union_q)
.order_by(union_q.c.name.desc()))
self.assertEqual([row[0] for row in query.tuples()], ['d', 'b', 'a'])
@requires_op('UNION')
def test_union_count(self):
a = User.select().where(User.username == 'a')
c_and_d = User.select().where(User.username << ['c', 'd'])
self.assertEqual(a.count(), 1)
self.assertEqual(c_and_d.count(), 2)
union = a | c_and_d
self.assertEqual(union.wrapped_count(), 3)
overlapping = User.select() | c_and_d
self.assertEqual(overlapping.wrapped_count(), 4)
@requires_op('INTERSECT')
def test_intersect(self):
self.assertPermutations(operator.and_, {
(User, UniqueModel): ['b', 'd'],
(User, OrderedModel): ['a', 'c'],
(UniqueModel, User): ['b', 'd'],
(UniqueModel, OrderedModel): ['e'],
(OrderedModel, User): ['a', 'c'],
(OrderedModel, UniqueModel): ['e'],
})
@requires_op('EXCEPT')
def test_except(self):
self.assertPermutations(operator.sub, {
(User, UniqueModel): ['a', 'c'],
(User, OrderedModel): ['b', 'd'],
(UniqueModel, User): ['e'],
(UniqueModel, OrderedModel): ['b', 'd'],
(OrderedModel, User): ['e'],
(OrderedModel, UniqueModel): ['a', 'c'],
})
@requires_op('INTERSECT')
@requires_op('EXCEPT')
def test_symmetric_difference(self):
self.assertPermutations(operator.xor, {
(User, UniqueModel): ['a', 'c', 'e'],
(User, OrderedModel): ['b', 'd', 'e'],
(UniqueModel, User): ['a', 'c', 'e'],
(UniqueModel, OrderedModel): ['a', 'b', 'c', 'd'],
(OrderedModel, User): ['b', 'd', 'e'],
(OrderedModel, UniqueModel): ['a', 'b', 'c', 'd'],
})
def test_model_instances(self):
union = (User.select(User.username) |
UniqueModel.select(UniqueModel.name))
query = union.order_by(SQL('username').desc()).limit(3)
self.assertEqual([user.username for user in query],
['e', 'd', 'c'])
@requires_op('UNION')
@requires_op('INTERSECT')
def test_complex(self):
left = User.select(User.username).where(User.username << ['a', 'b'])
right = UniqueModel.select(UniqueModel.name).where(
UniqueModel.name << ['b', 'd', 'e'])
query = (left & right).order_by(SQL('1'))
self.assertEqual(list(query.dicts()), [{'username': 'b'}])
query = (left | right).order_by(SQL('1'))
self.assertEqual(list(query.dicts()), [
{'username': 'a'},
{'username': 'b'},
{'username': 'd'},
{'username': 'e'}])
@requires_op('UNION')
def test_union_subquery(self):
union = (User.select(User.username).where(User.username == 'a') |
UniqueModel.select(UniqueModel.name))
query = (User
.select(User.username)
.where(User.username << union)
.order_by(User.username.desc()))
self.assertEqual(list(query.dicts()), [
{'username': 'd'},
{'username': 'b'},
{'username': 'a'}])
@skip_unless(lambda: isinstance(test_db, PostgresqlDatabase))
class TestCompoundWithOrderLimit(ModelTestCase):
requires = [User]
def setUp(self):
super(TestCompoundWithOrderLimit, self).setUp()
for username in ['a', 'b', 'c', 'd', 'e', 'f']:
User.create(username=username)
def test_union_with_order_limit(self):
lhs = (User
.select(User.username)
.where(User.username << ['a', 'b', 'c']))
rhs = (User
.select(User.username)
.where(User.username << ['d', 'e', 'f']))
cq = (lhs.order_by(User.username.desc()).limit(2) |
rhs.order_by(User.username.desc()).limit(2))
results = [user.username for user in cq]
self.assertEqual(sorted(results), ['b', 'c', 'e', 'f'])
cq = cq.order_by(cq.c.username.desc()).limit(3)
results = [user.username for user in cq]
self.assertEqual(results, ['f', 'e', 'c'])
|
{
"content_hash": "c6d96405f13835cb9fb3150cc0e2f83f",
"timestamp": "",
"source": "github",
"line_count": 402,
"max_line_length": 79,
"avg_line_length": 38.014925373134325,
"alnum_prop": 0.5297081533830651,
"repo_name": "fuzeman/peewee",
"id": "627e46e809afaf6e2c931eeac936c9d149d2a1cf",
"size": "15282",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "playhouse/tests/test_compound_queries.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "903582"
},
{
"name": "Shell",
"bytes": "4502"
}
],
"symlink_target": ""
}
|
from tools.load import LoadMatrix
from sg import sg
lm=LoadMatrix()
traindat=lm.load_numbers('../data/fm_train_real.dat')
testdat=lm.load_numbers('../data/fm_test_real.dat')
train_label=lm.load_labels('../data/label_train_twoclass.dat')
parameter_list=[[traindat,testdat, train_label,10,2.1,1.2,1e-5,False],
[traindat,testdat,train_label,10,2.1,1.3,1e-4,False]]
def classifier_gpbtsvm (fm_train_real=traindat,fm_test_real=testdat,
label_train_twoclass=train_label,
size_cache=10, width=2.1,C=1.2,
epsilon=1e-5,use_bias=False):
sg('set_features', 'TRAIN', fm_train_real)
sg('set_kernel', 'GAUSSIAN', 'REAL', size_cache, width)
sg('set_labels', 'TRAIN', label_train_twoclass)
sg('new_classifier', 'GPBTSVM')
sg('svm_epsilon', epsilon)
sg('c', C)
sg('svm_use_bias', use_bias)
sg('train_classifier')
sg('set_features', 'TEST', fm_test_real)
result=sg('classify')
return result
if __name__=='__main__':
print('GPBTSVM')
classifier_gpbtsvm(*parameter_list[0])
|
{
"content_hash": "e3d081fa388aca9f48099dbaef695215",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 70,
"avg_line_length": 29.848484848484848,
"alnum_prop": 0.6954314720812182,
"repo_name": "Saurabh7/shogun",
"id": "ead63a86d792c66ac51d4e551c244079adae755f",
"size": "985",
"binary": false,
"copies": "22",
"ref": "refs/heads/master",
"path": "examples/undocumented/python_static/classifier_gpbtsvm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "104870"
},
{
"name": "C++",
"bytes": "11435353"
},
{
"name": "CMake",
"bytes": "213091"
},
{
"name": "Lua",
"bytes": "1204"
},
{
"name": "M",
"bytes": "10020"
},
{
"name": "Makefile",
"bytes": "452"
},
{
"name": "Matlab",
"bytes": "66047"
},
{
"name": "Perl",
"bytes": "31939"
},
{
"name": "Perl6",
"bytes": "15714"
},
{
"name": "Protocol Buffer",
"bytes": "1476"
},
{
"name": "Python",
"bytes": "431160"
},
{
"name": "R",
"bytes": "53362"
},
{
"name": "Ruby",
"bytes": "59"
},
{
"name": "Shell",
"bytes": "17074"
}
],
"symlink_target": ""
}
|
"""
Orthogonal Distance Regression (:mod:`scipy.odr`)
=================================================
Introduction
------------
Why Orthogonal Distance Regression (ODR)? Sometimes one has
measurement errors in the explanatory (a.k.a., "independent")
variable(s), not just the response (a.k.a., "dependent") variable(s).
Ordinary Least Squares (OLS) fitting procedures treat the data for
explanatory variables as fixed, i.e., not subject to error of any kind.
Furthermore, OLS procedures require that the response variables be an
explicit function of the explanatory variables; sometimes making the
equation explicit is impractical and/or introduces errors. ODR can
handle both of these cases with ease, and can even reduce to the OLS
case if that is sufficient for the problem.
ODRPACK is a FORTRAN-77 library for performing ODR with possibly
non-linear fitting functions. It uses a modified trust-region
Levenberg-Marquardt-type algorithm [1]_ to estimate the function
parameters. The fitting functions are provided by Python functions
operating on NumPy arrays. The required derivatives may be provided
by Python functions as well, or may be estimated numerically. ODRPACK
can do explicit or implicit ODR fits, or it can do OLS. Input and
output variables may be multi-dimensional. Weights can be provided to
account for different variances of the observations, and even
covariances between dimensions of the variables.
odr provides two interfaces: a single function, and a set of
high-level classes that wrap that function; please refer to their
docstrings for more information. While the docstring of the function
odr does not have a full explanation of its arguments, the classes do,
and arguments of the same name usually have the same requirements.
Furthermore, the user is urged to at least skim the `ODRPACK User's
Guide <http://docs.scipy.org/doc/external/odrpack_guide.pdf>`_ -
"Know Thy Algorithm."
Use
---
See the docstrings of `odr.odrpack` and the functions and classes for
usage instructions. The ODRPACK User's Guide (linked above) is also
quite helpful.
References
----------
.. [1] P. T. Boggs and J. E. Rogers, "Orthogonal Distance Regression,"
in "Statistical analysis of measurement error models and
applications: proceedings of the AMS-IMS-SIAM joint summer research
conference held June 10-16, 1989," Contemporary Mathematics,
vol. 112, pg. 186, 1990.
.. currentmodule:: scipy.odr
Modules
-------
.. autosummary::
:toctree: generated/
odrpack Python wrappers for FORTRAN77 ODRPACK.
models Model instances for use with odrpack.
Classes
-------
.. autosummary::
:toctree: generated/
ODR Gathers all info & manages the main fitting routine.
Data Stores the data to fit.
Model Stores information about the function to be fit.
Output
RealData Weights as actual std. dev.s and/or covariances.
odr_error
odr_stop
Functions
---------
.. autosummary::
:toctree: generated/
odr
"""
postpone_import = 1
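# Illustrative usage sketch (not part of the original info module): fits a
# straight line with the high-level interface described above; the model
# function, data values and starting estimate below are made up for the
# example and assume numpy and a built scipy.odr are importable.
if __name__ == "__main__":
    import numpy as np
    from scipy.odr import ODR, Model, Data

    def _linear(beta, x):
        # explicit model: beta holds the parameters, x the explanatory data
        return beta[0] * x + beta[1]

    _x = np.linspace(0.0, 10.0, 25)
    _y = 2.0 * _x + 1.0
    _output = ODR(Data(_x, _y), Model(_linear), beta0=[1.0, 0.0]).run()
    print(_output.beta)  # estimated parameters, expected to be close to [2, 1]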
|
{
"content_hash": "edb2d545cdc8f70e5c8895c666f36185",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 71,
"avg_line_length": 34.56818181818182,
"alnum_prop": 0.7350427350427351,
"repo_name": "scipy/scipy-svn",
"id": "e42eba0c3a51136cb5672f562337c88c7b1d89f7",
"size": "3042",
"binary": false,
"copies": "55",
"ref": "refs/heads/master",
"path": "scipy/odr/info.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "8532454"
},
{
"name": "C++",
"bytes": "6602032"
},
{
"name": "FORTRAN",
"bytes": "5895476"
},
{
"name": "Objective-C",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "4753723"
},
{
"name": "Shell",
"bytes": "1742"
}
],
"symlink_target": ""
}
|
# Hive Netius System
# Copyright (c) 2008-2020 Hive Solutions Lda.
#
# This file is part of Hive Netius System.
#
# Hive Netius System is free software: you can redistribute it and/or modify
# it under the terms of the Apache License as published by the Apache
# Foundation, either version 2.0 of the License, or (at your option) any
# later version.
#
# Hive Netius System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Apache License for more details.
#
# You should have received a copy of the Apache License along with
# Hive Netius System. If not, see <http://www.apache.org/licenses/>.
__author__ = "João Magalhães <joamag@hive.pt>"
""" The author(s) of the module """
__version__ = "1.0.0"
""" The version of the module """
__revision__ = "$LastChangedRevision$"
""" The revision number of the module """
__date__ = "$LastChangedDate$"
""" The last change date of the module """
__copyright__ = "Copyright (c) 2008-2020 Hive Solutions Lda."
""" The copyright for the module """
__license__ = "Apache License, Version 2.0"
""" The license for the module """
import os
import sys
import copy
import math
import uuid
import time
import struct
import hashlib
import netius.common
import netius.clients
REFRESH_TIME = 30.0
""" The default time in between refresh call in the torrent task
each refresh operation should perform some operations (eg: DHT
refresh, tracker re-retrieval, etc) """
ID_STRING = "NE1000"
""" Text value that is going to be used to identify the agent
of torrent against the other peers, should be a join of both
the abbreviation of the agent name and version """
BLOCK_SIZE = 16384
""" The typical size of block that is going to be retrieved
using the current torrent infra-structure, this value conditions
most of the torrent operations and should be defined carefully """
THRESHOLD_END = 10485760
""" The threshold value from which the task is considered to be
under the ending stage, from this stage on a new strategy for the
download may apply as it is more difficult to get blocks """
MAX_MISSING = 16
""" The maximum number of unmarked values to be displayed in missing,
this controls the size of the missing lists, note that this is only
a default value, that may be overridden at runtime """
PEER_PATHS = ("peers.txt", "~/peers.txt", "\\peers.txt")
""" The sequence defining the various paths that are going to be search
trying to find the (static) peers file with format host:ip in each line """
class Pieces(netius.Observable):
"""
Class that represents the logical structure of a file that is
divided into pieces and blocks as a hierarchy, this class is
responsible for the management of the data structures of such
data storage model.
A piece is considered to be the basic unit of a torrent file
    and each piece is composed of multiple blocks, note that the
number of blocks is fixed for all the pieces of a file.
"""
def __init__(self, length, number_pieces, number_blocks):
netius.Observable.__init__(self)
self.length = length
self.piece_length = number_blocks * BLOCK_SIZE
self.number_pieces = number_pieces
self.number_blocks = number_blocks
self.final_blocks = self.piece_blocks(self.number_pieces - 1)
self.bitfield = [True for _index in netius.legacy.xrange(number_pieces)]
self.mask = [True for _index in netius.legacy.xrange(self.total_blocks)]
def piece(self, index):
return self.bitfield[index]
def piece_blocks(self, index):
is_last = index == self.number_pieces - 1
if not is_last: return self.number_blocks
piece_size = self.piece_size(index)
number_blocks = math.ceil(piece_size / float(BLOCK_SIZE))
return int(number_blocks)
def piece_size(self, index):
is_last = index == self.number_pieces - 1
if not is_last: return self.number_blocks * BLOCK_SIZE
modulus = self.length % self.piece_length
if modulus == 0: return self.piece_length
return modulus
def block(self, index, begin):
base = index * self.number_blocks
block_index = begin // BLOCK_SIZE
return self.mask[base + block_index]
def block_size(self, index, begin):
block_index = begin // BLOCK_SIZE
is_last_piece = index == self.number_pieces - 1
if not is_last_piece: return BLOCK_SIZE
is_last_block = block_index == self.final_blocks - 1
if not is_last_block: return BLOCK_SIZE
piece_size = self.piece_size(index)
modulus = piece_size % BLOCK_SIZE
if modulus == 0: return BLOCK_SIZE
return modulus
def pop_block(self, bitfield, mark = True):
index = 0
result = self._and(bitfield, self.bitfield)
for bit in result:
if bit == True: break
index += 1
if index == len(result): return None
begin = self.update_block(index, mark = mark)
length = self.block_size(index, begin)
return (index, begin, length)
def push_block(self, index, begin):
self.mark_block(index, begin, value = True)
def mark_piece(self, index, value = False):
base = index * self.number_blocks
block_count = self.piece_blocks(index)
for block_index in netius.legacy.xrange(block_count):
self.mask[base + block_index] = value
self.bitfield[index] = value
def mark_block(self, index, begin, value = False):
base = index * self.number_blocks
block_index = begin // BLOCK_SIZE
self.mask[base + block_index] = value
self.trigger("block", self, index, begin)
self.update_piece(index)
def update_block(self, index, mark = True):
base = index * self.number_blocks
block_count = self.piece_blocks(index)
for block_index in netius.legacy.xrange(block_count):
state = self.mask[base + block_index]
if state == True: break
begin = block_index * BLOCK_SIZE
if mark: self.mark_block(index, begin)
return begin
def update_piece(self, index):
# calculates the base index value for the block sequence
# of the current piece (going to be used in access), then
        # determines the total number of blocks for the piece to update
# and then sets the initial piece state as false (not marked)
base = index * self.number_blocks
block_count = self.piece_blocks(index)
piece_state = False
# iterates over the complete set of blocks for the current
# piece trying to determine if it has already been completely
# unmarked (all the blocks unmarked accordingly)
for block_index in netius.legacy.xrange(block_count):
state = self.mask[base + block_index]
if state == False: continue
piece_state = True
break
# updates the state of the current piece in the bit field,
# note that the false value indicates that the piece has been
# unmarked (and this is considered the objective)
self.bitfield[index] = piece_state
if piece_state == True: return
# triggers the piece event indicating that a new piece has
# been completely unmarked according to rules
self.trigger("piece", self, index)
# iterates over the complete set of bit values in the (pieces)
# bit field to verify if the file has been completely unmarked
# in case it did not returns the control flow to caller
for bit in self.bitfield:
if bit == True: return
# triggers the complete event to any of the handlers indicating
# that the current torrent file has been completely unmarked
# and then no more pieces are pending to be unmarked
self.trigger("complete", self)
@property
def total_pieces(self):
return self.number_pieces
@property
def marked_pieces(self):
counter = 0
for bit in self.bitfield:
if bit == True: continue
counter += 1
return counter
@property
def missing_pieces(self, max_missing = MAX_MISSING):
missing_count = self.total_pieces - self.marked_pieces
if missing_count > max_missing: return []
missing = []
for index in netius.legacy.xrange(self.total_pieces):
bit = self.bitfield[index]
if bit == False: continue
missing.append(index)
return missing
@property
def total_blocks(self):
base_blocks = (self.number_pieces - 1) * self.number_blocks
return base_blocks + self.final_blocks
@property
def marked_blocks(self):
counter = 0
for bit in self.mask:
if bit == True: continue
counter += 1
return counter
@property
def missing_blocks(self, max_missing = MAX_MISSING):
missing_count = self.total_blocks - self.marked_blocks
if missing_count > max_missing: return []
missing = []
for index in netius.legacy.xrange(self.total_blocks):
bit = self.mask[index]
if bit == False: continue
missing.append(index)
return missing
def _and(self, first, second):
result = []
for _first, _second in zip(first, second):
if _first and _second: value = True
else: value = False
result.append(value)
return result
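# Hypothetical usage sketch (not referenced by the rest of this module): shows
# how unmarking every block of a piece flips the corresponding bit in the
# bitfield managed by the Pieces structure documented above, the sizes used
# here are made up for the example (two pieces of two blocks each)
def _pieces_usage_sketch():
    events = []
    pieces = Pieces(2 * 2 * BLOCK_SIZE, number_pieces = 2, number_blocks = 2)
    pieces.bind("piece", lambda _pieces, index: events.append(index))
    pieces.mark_block(0, 0)             # first block of piece zero stored
    pieces.mark_block(0, BLOCK_SIZE)    # second block stored, piece event fires
    return events, pieces.marked_pieces, pieces.marked_blocks  # ([0], 1, 2)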
class TorrentTask(netius.Observable):
"""
Describes a task (operation) that is going to be performed
using the peer to peer mesh network of the torrent protocol.
Each of the download operations should be able to be described
    by this task object (for later reference).
This should be considered the main interface to interact from
a developer point of view, as such the methods should represent
a proper easily described interface.
"""
def __init__(self, owner, target_path, torrent_path = None, info_hash = None):
netius.Observable.__init__(self)
self.owner = owner
self.target_path = target_path
self.torrent_path = torrent_path
self.info_hash = info_hash
self.start = time.time()
self.uploaded = 0
self.downloaded = 0
self.unchoked = 0
self.next_refresh = self.start + REFRESH_TIME
self.connections = []
self.peers = []
self.peers_m = {}
def load(self):
if self.torrent_path: self.info = self.load_info(self.torrent_path)
else: self.info = dict(info_hash = self.info_hash)
self.pieces_tracker()
self.peers_dht()
self.peers_tracker()
self.peers_file()
self.load_file()
self.load_pieces()
def unload(self):
self.owner = None
self.unload_file()
self.unload_pieces()
self.disconnect_peers()
def on_close(self, connection):
is_unchoked = connection.choked == netius.clients.UNCHOKED
self.connections.remove(connection)
self.unchoked -= 1 if is_unchoked else 0
def ticks(self):
if time.time() < self.next_refresh: return
self.refresh()
def refresh(self):
self.peers_dht()
self.peers_tracker()
self.connect_peers()
self.next_refresh = time.time() + REFRESH_TIME
self.trigger("refresh", self)
def on_choked(self, connection):
self.unchoked -= 1
def on_unchoked(self, connection):
self.unchoked += 1
def on_block(self, pieces, index, begin):
self.trigger("block", self, index, begin)
def on_piece(self, pieces, index):
try: self.verify_piece(index)
except netius.DataError:
self.refute_piece(index)
else:
self.confirm_piece(index)
self.trigger("piece", self, index)
def on_complete(self, pieces):
self.trigger("complete", self)
def on_dht(self, response):
# verifies if the response is valid and in case it's not
# returns immediately to avoid any erroneous parsing
if not response: return
# retrieves the payload for the response and then uses it
# to retrieves the nodes part of the response for parsing
# of the peers that are going to be added (to the task)
payload = response.get_payload()
nodes = payload.get("nodes", "")
# creates the list that will hold the final set of peers
# parsed from the nodes string, this is going to be used
# to extend the list of peers in the task
peers = []
# splits the current nodes list into a set of chunks of
# a pre-defined size and then iterates over all of them
# creating the proper peer dictionary for each of them
chunks = [chunk for chunk in netius.common.chunks(nodes, 26)]
for chunk in chunks:
chunk = netius.legacy.bytes(chunk)
peer_id, address, port = struct.unpack("!20sLH", chunk)
ip = netius.common.addr_to_ip4(address)
peer = dict(id = peer_id, ip = ip, port = port)
peers.append(peer)
# in case no valid peers have been parsed there's no need
# to continue with the processing, nothing to be done
if not peers: return
# extends the currently defined peers list in the current
# torrent task with the ones that have been discovered
self.extend_peers(peers)
# retrieves the reference to the host id from the request
# that originated the current response and then converts it
# into the proper string representation to be used in logging
request = response.request
host = request.host
# prints a debug message about the peer loading that has just occurred, this
# may be used for the purpose of development (and traceability)
self.owner.debug("Received %d peers from DHT peer '%s'" % (len(peers), host))
def on_tracker(self, client, parser, result):
# extracts the data (string) contents of the HTTP response and in case
# there're none of them continues the loop as there's nothing to be
# processed from this tracker response (invalid response)
data = result["data"]
if not data: return
# tries to decode the provided data from the tracker using the bencoder
# and extracts the peers part of the message to be processed
response = netius.common.bdecode(data)
peers = response["peers"]
# verifies if the provided peers part is not compact (already a dictionary)
# if that's the case there's nothing remaining to be done, otherwise extra
# processing must be done to
if isinstance(peers, dict): self.extend_peers(peers)
# need to normalize the peer structure by decoding the peers string into a
# set of address port sub strings (as defined in torrent specification)
else:
peers = [peer for peer in netius.common.chunks(peers, 6)]
for peer in peers:
peer = netius.legacy.bytes(peer)
address, port = struct.unpack("!LH", peer)
ip = netius.common.addr_to_ip4(address)
peer = dict(ip = ip, port = port)
self.add_peer(peer)
# prints a debug message about the peer loading that has just occurred, this
# may be used for the purpose of development (and traceability)
self.owner.debug("Received %d peers from '%s'" % (len(peers), parser.owner.base))
# refreshes the connection with the peers because new peers have been added
# to the current task and there may be new connections pending
self.connect_peers()
def load_info(self, torrent_path):
file = open(torrent_path, "rb")
try: data = file.read()
finally: file.close()
struct = netius.common.bdecode(data)
struct["info_hash"] = self.info_hash = netius.common.info_hash(struct)
return struct
def load_file(self):
if self._is_single(): return self.load_single()
else: return self.load_multiple()
def load_single(self):
# retrieves the length of the current (single file) and
# the name of the associated file
size = self.info["length"]
name = self.info["info"]["name"]
# runs the normalization process on the target path so that
# it may be used on a more flexible way
target_path = os.path.expanduser(self.target_path)
target_path = os.path.normpath(target_path)
# determines if the target path is a directory and if that's
# not the case creates the appropriate directories so that
        # they are available for the file stream creation
is_dir = os.path.isdir(target_path)
if not is_dir: os.makedirs(target_path)
# creates the "final" file path from the target path and the
# name of the file and then constructs a file stream with the
# path and the size information and opens it, note that the
# opening operation is expensive as it allocates the file
file_path = os.path.join(target_path, name)
self.file = netius.common.FileStream(file_path, size)
self.file.open()
def load_multiple(self):
files = self.info["files"]
size = self.info["length"]
name = self.info["info"]["name"]
target_path = os.path.expanduser(self.target_path)
target_path = os.path.normpath(target_path)
dir_path = os.path.join(target_path, name)
is_dir = os.path.isdir(dir_path)
if not is_dir: os.makedirs(dir_path)
self.file = netius.common.FilesStream(dir_path, size, files)
self.file.open()
def unload_file(self):
if not self.file: return
self.file.close()
self.file = None
def load_pieces(self):
length = self.info["length"]
number_pieces = self.info["number_pieces"]
number_blocks = self.info["number_blocks"]
self.requested = Pieces(length, number_pieces, number_blocks)
self.stored = Pieces(length, number_pieces, number_blocks)
self.stored.bind("block", self.on_block)
self.stored.bind("piece", self.on_piece)
self.stored.bind("complete", self.on_complete)
def unload_pieces(self):
if self.requested: self.requested.destroy()
if self.stored: self.stored.destroy()
self.requested = None
self.stored = None
def pieces_tracker(self):
info = self.info.get("info", {})
pieces = info.get("pieces", "")
length = info.get("length", None)
files = info.get("files", [])
piece_length = info.get("piece length", 1)
number_blocks = math.ceil(float(piece_length) / float(BLOCK_SIZE))
number_blocks = int(number_blocks)
pieces_l = [piece for piece in netius.common.chunks(pieces, 20)]
pieces_count = len(pieces_l)
files_length = sum(file["length"] for file in files)
self.info["pieces"] = pieces_l
self.info["length"] = length or files_length or pieces_count * piece_length
self.info["files"] = files
self.info["number_pieces"] = pieces_count
self.info["number_blocks"] = number_blocks
def set_data(self, data, index, begin):
# retrieves the current status of the block in the stored
# pieces structure and in case it's already stored returns
# immediately as this is a duplicated block setting, possible
# in the last part of the file retrieval (end game)
block = self.stored.block(index, begin)
if not block: return
# retrieves the size of a piece and uses that value together
# with the block begin offset to calculate the final file offset
# value to be passed to the write data operations (for handling)
piece_length = self.info["info"]["piece length"]
offset = index * piece_length + begin
self.write_data(data, offset)
# marks the current block as stored so that no other equivalent
# operation is performed (avoiding duplicated operations)
self.stored.mark_block(index, begin)
def write_data(self, data, offset):
# seek the proper file position (according to passed offset)
# and then writes the received data under that position,
# flushing the file contents afterwards to avoid file corruption
self.file.seek(offset)
self.file.write(data)
self.file.flush()
def set_dht(self, peer_t, port):
# tries to retrieve the peer associated with the provided peer tuple
# and in case it succeeds sets the proper DHT (port) value in the peer
        # so that it may later be used for DHT based operations
peer = self.peers_m.get(peer_t, None)
if not peer: return
peer["dht"] = port
def peers_dht(self):
if not self.info_hash: return
for peer in self.peers:
port = peer.get("dht", None)
if not port: continue
host = peer["ip"]
self.owner.dht_client.get_peers(
host = host,
port = port,
peer_id = self.owner.peer_id,
info_hash = self.info_hash,
callback = self.on_dht
)
self.owner.debug("Requested peers from DHT peer '%s'" % host)
def peers_tracker(self):
"""
Tries to retrieve as much information as possible about the
peers from the currently loaded tracker information.
        It's possible that no tracker information exists for the current
task and for such situations no state change will occur.
"""
# retrieves both the announce and the announce list structure from
# the current info dictionary and uses both of them to create the
# final list containing the various addresses of trackers, then
# iterates over each of the trackers to retrieve the information
# about the various peers associated with the torrent file
announce = self.info.get("announce", None)
announce_list = self.info.get("announce-list", [[announce]])
for tracker in announce_list:
# iterates over the complete set of tracker urls to try to retrieve
# the various trackers from each of them
for tracker_url in tracker:
# retrieves the first element of the tracker structure as the
# URL of it and then verifies that it references an HTTP based
# tracker (as that's the only one supported)
is_http = tracker_url.startswith(("http://", "https://"))
if not is_http: continue
# runs the get HTTP retrieval call (blocking call) so that it's
# possible to retrieve the contents for the announce of the tracker
# this is an asynchronous call and the on tracker callback will be
# called at the end of the process with the message
self.owner.http_client.get(
tracker_url,
params = dict(
info_hash = self.info_hash,
peer_id = self.owner.peer_id,
port = 6881,
uploaded = self.uploaded,
downloaded = self.downloaded,
left = self.left(),
compact = 1,
no_peer_id = 0,
event = "started",
numwant = 50,
key = self.owner.get_id()
),
on_result = self.on_tracker
)
# prints a debug message about the request for peer that was just
# performed in order to provide some debugging information
self.owner.debug("Requested peers using '%s'" % tracker_url)
def peers_file(self):
for path in PEER_PATHS:
path = os.path.expanduser(path)
path = os.path.normpath(path)
if not os.path.exists(path): continue
file = open(path, "r")
for line in file:
line = line.strip()
host, port = line.split(":", 1)
port = int(port)
peer = dict(ip = host, port = port)
self.add_peer(peer)
def connect_peers(self):
for peer in self.peers: self.connect_peer(peer)
def disconnect_peers(self):
connections = copy.copy(self.connections)
for connection in connections: connection.close(flush = True)
def connect_peer(self, peer):
if not peer["new"]: return
peer["new"] = False
self.owner.debug("Connecting to peer '%s:%d'" % (peer["ip"], peer["port"]))
connection = self.owner.client.peer(self, peer["ip"], peer["port"])
self.connections.append(connection)
connection.bind("close", self.on_close)
connection.bind("choked", self.on_choked)
connection.bind("unchoked", self.on_unchoked)
def info_string(self):
return "==== STATUS ====\n" +\
"peers := %d\n" % len(self.peers) +\
"connections := %d\n" % len(self.connections) +\
"choked := %d\n" % (len(self.connections) - self.unchoked) +\
"unchoked := %d\n" % self.unchoked +\
"pieces := %d/%d\n" % (self.stored.marked_pieces, self.stored.total_pieces) +\
"blocks := %d/%d\n" % (self.stored.marked_blocks, self.stored.total_blocks) +\
"pieces miss := %s\n" % self.stored.missing_pieces +\
"blocks miss := %s\n" % self.stored.missing_blocks +\
"percent := %.2f % %\n" % self.percent() +\
"left := %d/%d bytes\n" % (self.left(), self.info["length"]) +\
"speed := %s/s" % self.speed_s()
def left(self):
size = self.info["length"]
return size - self.downloaded
def speed(self):
"""
Retrieves a float number representing the global speed
of the task in bytes per second, this value is computed
using the original creation time of the task and so it
        may not represent the most accurate speed measurement.
:rtype: float
:return: The current speed of download, defined as bytes
per second from the original task creation time.
"""
current = time.time()
delta = current - self.start
bytes_second = self.downloaded / delta
return bytes_second
def speed_s(self):
return netius.common.size_round_unit(
self.speed(),
space = True,
reduce = False
)
def percent(self):
size = self.info["length"]
return float(self.downloaded) / float(size) * 100.0
def pop_block(self, bitfield):
left = self.left()
is_end = left < THRESHOLD_END
structure = self.stored if is_end else self.requested
if not structure: return None
return structure.pop_block(bitfield, mark = not is_end)
def push_block(self, index, begin):
if not self.requested: return
self.requested.push_block(index, begin)
def verify_piece(self, index):
self._verify_piece(index, self.file)
def confirm_piece(self, index):
piece_size = self.stored.piece_size(index)
self.downloaded += piece_size
def refute_piece(self, index):
self.requested.mark_piece(index, value = True)
self.stored.mark_piece(index, value = True)
self.owner.warning("Refuted piece '%d' (probably invalid)" % index)
def extend_peers(self, peers):
for peer in peers: self.add_peer(peer)
def add_peer(self, peer):
peer_t = (peer["ip"], peer["port"])
if peer_t in self.peers_m: return
peer["time"] = time.time()
peer["new"] = True
self.peers_m[peer_t] = peer
self.peers.append(peer)
def remove_peer(self, peer):
peer_t = (peer["ip"], peer["port"])
if not peer_t in self.peers_m: return
del self.peers_m[peer_t]
self.peers.remove(peer)
def _is_single(self):
files = self.info.get("files", [])
return False if files else True
def _verify_piece(self, index, file):
piece = self.info["pieces"][index]
piece_length = self.info["info"]["piece length"]
file.seek(index * piece_length)
pending = self.stored.piece_size(index)
hash = hashlib.sha1()
while True:
if pending == 0: break
count = BLOCK_SIZE if pending > BLOCK_SIZE else pending
data = file.read(count)
hash.update(data)
pending -= count
digest = hash.digest()
piece = netius.legacy.bytes(piece)
if digest == piece: return
raise netius.DataError("Verifying piece index '%d'" % index)
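# Hypothetical helper sketch (not referenced by the classes above): decodes a
# single entry of the 6 byte compact peer format that on_tracker handles, using
# the same struct layout (4 byte big endian address followed by a 2 byte port),
# calling it with struct.pack("!LH", 2130706433, 6881) should yield
# {'ip': '127.0.0.1', 'port': 6881}
def _decode_compact_peer(chunk):
    chunk = netius.legacy.bytes(chunk)
    address, port = struct.unpack("!LH", chunk)
    ip = netius.common.addr_to_ip4(address)
    return dict(ip = ip, port = port)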
class TorrentServer(netius.ContainerServer):
def __init__(self, *args, **kwargs):
netius.ContainerServer.__init__(self, *args, **kwargs)
self.peer_id = self._generate_id()
self.client = netius.clients.TorrentClient(
thread = False,
*args,
**kwargs
)
self.http_client = netius.clients.HTTPClient(
thread = False,
*args,
**kwargs
)
self.dht_client = netius.clients.DHTClient(
thread = False,
*args,
**kwargs
)
self.tasks = []
self.add_base(self.client)
self.add_base(self.http_client)
self.add_base(self.dht_client)
def cleanup(self):
netius.ContainerServer.cleanup(self)
self.cleanup_tasks()
self.client.destroy()
def ticks(self):
netius.ContainerServer.ticks(self)
for task in self.tasks: task.ticks()
def download(self, target_path, torrent_path = None, info_hash = None, close = False):
"""
Starts the "downloading" process of a torrent associated file
        using the defined peer to peer torrent strategy, with either
the provided torrent path as reference or just the info hash
of the file that is going to be downloaded.
Note that if only the info hash is provided a DHT based strategy
is going to be used to retrieve the peers list.
The returned value is the task entity representing the newly created
task for the downloading of the requested file, this object may be
used for the operations and listening of events.
:type target_path: String
:param target_path: The path to the directory that will be used to store
the binary information resulting from the download, this directory may also
be used to store some temporary information on state of download.
:type torrent_path: String
:param torrent_path: The path to the file that contains the torrent
information that is going to be used for file processing.
:type info_hash: String
:param info_hash: The info hash value of the file that is going
to be downloaded, may be used for magnet torrents (DHT).
:type close: bool
        :param close: If the server infra-structure should be closed (process ends)
at the end of the download, this is not the default behavior (multiple download).
:rtype: TorrentTask
:return: The torrent task object that represents the task that has been
created for downloading of the requested file.
"""
def on_complete(task):
owner = task.owner
self.remove_task(task)
if close: owner.close()
task = TorrentTask(
self,
target_path,
torrent_path = torrent_path,
info_hash = info_hash
)
task.load()
task.connect_peers()
task.bind("complete", on_complete)
self.tasks.append(task)
return task
def add_task(self, task):
self.tasks.append(task)
def remove_task(self, task):
task.unload()
self.tasks.remove(task)
def cleanup_tasks(self):
tasks = copy.copy(self.tasks)
for task in tasks: self.remove_task(task)
def _generate_id(self):
random = str(uuid.uuid4())
random = netius.legacy.bytes(random)
hash = hashlib.sha1(random)
digest = hash.hexdigest()
id = "-%s-%s" % (ID_STRING, digest[:12])
return id
if __name__ == "__main__":
import logging
if len(sys.argv) > 1: file_path = sys.argv[1]
else: file_path = "\\file.torrent"
def on_start(server):
task = server.download("~/Downloads", file_path, close = True)
task.bind("piece", on_piece)
task.bind("complete", on_complete)
def on_piece(task, index):
percent = task.percent()
speed_s = task.speed_s()
left = task.left()
percent = int(percent)
print(task.info_string())
print("[%d%%] - %d bytes (%s/s)" % (percent, left, speed_s))
def on_complete(task):
print("Download completed")
server = TorrentServer(level = logging.DEBUG)
server.bind("start", on_start)
server.serve(env = True)
else:
__path__ = []
|
{
"content_hash": "bc271e91f775e36a5447588b60884c16",
"timestamp": "",
"source": "github",
"line_count": 895,
"max_line_length": 95,
"avg_line_length": 38.67039106145251,
"alnum_prop": 0.5964172204565155,
"repo_name": "hivesolutions/netius",
"id": "7a6e9e168d4057a4dad04bc110587d39e9207fdf",
"size": "34656",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/netius/servers/torrent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1400497"
}
],
"symlink_target": ""
}
|
from setuptools import setup
requires = [
'bottle',
'bottle_mongo',
'pymongo'
]
setup(
name='kala',
version='0.5.1',
packages=[''],
url='https://github.com/cloudbuy/kala',
license='MIT',
author='Paul Etherton',
author_email='paul@pjetherton.co.uk',
description='Simple read-only REST API for mongoDB',
install_requires=requires,
entry_points={
'console_scripts': [
'kala = kala:main'
]
}
)
|
{
"content_hash": "d241c7a8f802705d24a657e7cb593513",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 56,
"avg_line_length": 19.833333333333332,
"alnum_prop": 0.5798319327731093,
"repo_name": "damoxc/kala",
"id": "0c7ccaa00d984de9c740f1c50263faa9a5490f62",
"size": "476",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3904"
}
],
"symlink_target": ""
}
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('comics', '0004_auto_20180314_2133'),
]
operations = [
migrations.AlterField(
model_name='issue',
name='gallery',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='gallery.Gallery'),
),
]
|
{
"content_hash": "346dce83a6ec010b4a79f198d38d089f",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 129,
"avg_line_length": 26.352941176470587,
"alnum_prop": 0.6294642857142857,
"repo_name": "evanepio/dotmanca",
"id": "6219d59fd6964dd49fd79219b9ea216df2d3e756",
"size": "497",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "comics/migrations/0005_auto_20181103_1730.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2704"
},
{
"name": "Dockerfile",
"bytes": "1874"
},
{
"name": "HTML",
"bytes": "12635"
},
{
"name": "Makefile",
"bytes": "192"
},
{
"name": "Python",
"bytes": "83822"
},
{
"name": "Shell",
"bytes": "860"
}
],
"symlink_target": ""
}
|
from ..servicecalls.validatelogin import ValidateLogin
class ConsumerCredentialManager:
userId = None
result = None
def __init__(self, username, password):
self.logged_in = False
self.username = username
self.password = password
self.userId = None
self.result = None
self.check_login()
    def check_login(self):
        try:
            validate_result = ValidateLogin(self.username, self.password)
            self.userId = validate_result.Client.ID
            self.result = validate_result
            self.logged_in = True
        except Exception:
            self.userId = None
|
{
"content_hash": "967211852dc76326b840f84f2f890509",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 73,
"avg_line_length": 26.6,
"alnum_prop": 0.5969924812030075,
"repo_name": "vacoj/MINDBODY-Django",
"id": "4d1ff100cc40936b300660a1c8e3b223201868f8",
"size": "666",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "web/session/login_manager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4686"
},
{
"name": "HTML",
"bytes": "43096"
},
{
"name": "JavaScript",
"bytes": "6232"
},
{
"name": "Python",
"bytes": "145902"
}
],
"symlink_target": ""
}
|
import base64
from django import template
from django.utils import translation
from django.utils.encoding import force_unicode
from django.conf import settings
import assets
register = template.Library()
@register.simple_tag()
def next_lang():
cur_language = translation.get_language()
if cur_language == "fr":
return "en"
#elif cur_language == "en":
# return "ar"
return "fr"
@register.filter('intchart')
def intchart(value):
orig = force_unicode(value)
new = orig.replace(",", ".")
if new == "0":
return "null"
if orig == new:
return new
else:
return intchart(new)
@register.filter('amount')
def amount(value):
orig = force_unicode(value)
new = orig.replace(",", ".")
if orig == new:
return new
else:
return intchart(new)
@register.simple_tag()
def assets_version():
return assets.VERSION
@register.simple_tag()
def tracking_code():
code = settings.TRACKING_CODE
if code:
return base64.decodestring(code)
return ""
|
{
"content_hash": "687bfc1be5a3dec481d0f3f0c6774742",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 47,
"avg_line_length": 19.29090909090909,
"alnum_prop": 0.6343072573044298,
"repo_name": "aert/assets",
"id": "96522e9a818740f673b14e7c4f2d38989d1b4f0d",
"size": "1061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "assets/association/templatetags/assets_tags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1000"
},
{
"name": "JavaScript",
"bytes": "8999"
},
{
"name": "Python",
"bytes": "58220"
},
{
"name": "Ruby",
"bytes": "1305"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'YwotTransfer'
db.create_table('marketing_ywottransfer', (
('id', self.gf('uuidfield.fields.UUIDField')(unique=True, max_length=32, primary_key=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('ywot_username', self.gf('django.db.models.fields.CharField')(unique=True, max_length=100)),
('ywot_password', self.gf('django.db.models.fields.CharField')(max_length=128, blank=True)),
('ywot_email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('valid_signature', self.gf('django.db.models.fields.CharField')(max_length=100)),
('transfer_status', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
('local_acct', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
))
db.send_create_signal('marketing', ['YwotTransfer'])
def backwards(self, orm):
# Deleting model 'YwotTransfer'
db.delete_table('marketing_ywottransfer')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'marketing.tumblrlead': {
'Meta': {'object_name': 'TumblrLead'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email_address': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'time_emailed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'time_last_scraped': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'tumblr_user': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'ywot_link_present': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'ywot_refers': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'ywot_username': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'ywot_world_name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'marketing.ywottransfer': {
'Meta': {'object_name': 'YwotTransfer'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'local_acct': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'transfer_status': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'valid_signature': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'ywot_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'ywot_password': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'ywot_username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
}
}
complete_apps = ['marketing']
|
{
"content_hash": "3572d059664a4070d7500e9642eb684e",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 182,
"avg_line_length": 73.6,
"alnum_prop": 0.5687929061784897,
"repo_name": "reverie/jotleaf.com",
"id": "0e87605580d5d27fe9b2c75058387deb7a058dbf",
"size": "7016",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jotleaf/marketing/migrations/0004_auto__add_ywottransfer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "260767"
},
{
"name": "CoffeeScript",
"bytes": "236120"
},
{
"name": "HTML",
"bytes": "133720"
},
{
"name": "JavaScript",
"bytes": "279484"
},
{
"name": "PHP",
"bytes": "865"
},
{
"name": "Python",
"bytes": "442635"
},
{
"name": "Shell",
"bytes": "1026"
}
],
"symlink_target": ""
}
|
__author__ = 'Ofner Mario'
from datetime import datetime
class Logger():
# --------------------------------------------------------------------------------
# - Initializations
# --------------------------------------------------------------------------------
global_log_level = "DEBUG"
log_microsec_enabled = True
timestamp_format_str = ""
def __init__(self):
        self.timestamp_format_str = "%Y-%m-%d %H:%M:%S"
if self.log_microsec_enabled:
self.timestamp_format_str += ".%f"
def current_timestamp(self):
current_ts = datetime.now()
return current_ts.strftime(self.timestamp_format_str)
def severity_check(self, _log_level):
# todo: implement different levels and dependencies
return True
    def log_level_shortcut(self, _log_level):
        if _log_level == "DEBUG":
            return "D"
        # fallback for levels that are not implemented yet so that
        # msg_constructor never has to concatenate a None value
        return _log_level[:1]
def msg_constructor(self, _message, _log_level):
# todo: limit log messages by length or sth.
log_message = self.current_timestamp() + " " + self.log_level_shortcut(_log_level) + ": " + str(_message)
return log_message
# --------------------------------------------------------------------------------
# - Logging
# --------------------------------------------------------------------------------
def print(self, _message, _log_level = None):
if _log_level is None:
_log_level = "DEBUG"
if self.severity_check(_log_level):
print(self.msg_constructor(_message, _log_level))
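# Hypothetical usage sketch (not part of the original module): creates a
# logger and emits two messages, only the "DEBUG" level is implemented above
# so both calls resolve to the "D" severity shortcut.
if __name__ == "__main__":
    log = Logger()
    log.print("service started")                # defaults to the DEBUG level
    log.print("configuration loaded", "DEBUG")  # explicit log level argument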
|
{
"content_hash": "d3e4ca851bab8506742f3883641a1286",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 113,
"avg_line_length": 28.925925925925927,
"alnum_prop": 0.46094750320102434,
"repo_name": "steelion/python-tools",
"id": "9c12e1ef29c77b1594804b85cef1f2c65afe516d",
"size": "1562",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mao/toolbox/logger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73888"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/ship/components/shield_generator/shared_shd_sfs_imperial_deluxe_standard.iff"
result.attribute_template_id = 8
result.stfName("space/space_item","shd_sfs_imperial_deluxe_standard_n")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "396808aa50b33e1b6108930586f71978",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 113,
"avg_line_length": 28.76923076923077,
"alnum_prop": 0.7272727272727273,
"repo_name": "anhstudios/swganh",
"id": "43907cd5273f8a784f6e89581d913447f6fc6ae2",
"size": "519",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/ship/components/shield_generator/shared_shd_sfs_imperial_deluxe_standard.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
bl_info = {
"name": "POV-Ray 3.7",
"author": "Campbell Barton, Silvio Falcinelli, Maurice Raybaud, Constantin Rahn, Bastien Montagne",
"version": (0, 0, 9),
"blender": (2, 5, 7),
"location": "Render > Engine > POV-Ray 3.7",
"description": "Basic POV-Ray 3.7 integration for blender",
"warning": "both POV-Ray 3.7 and this script are beta",
"wiki_url": "http://wiki.blender.org/index.php/Extensions:2.6/Py/"\
"Scripts/Render/POV-Ray",
"tracker_url": "https://projects.blender.org/tracker/index.php?"\
"func=detail&aid=23145",
"category": "Render"}
if "bpy" in locals():
import imp
imp.reload(ui)
imp.reload(render)
imp.reload(update_files)
else:
import bpy
from bpy.props import StringProperty, BoolProperty, IntProperty, FloatProperty, \
FloatVectorProperty, EnumProperty, PointerProperty
from . import ui
from . import render
from . import update_files
###############################################################################
# Scene POV properties.
###############################################################################
class RenderPovSettingsScene(bpy.types.PropertyGroup):
# File Options
tempfiles_enable = BoolProperty(
name="Enable Tempfiles",
description="Enable the OS-Tempfiles. Otherwise set the path where to save the files",
default=True)
deletefiles_enable = BoolProperty(
name="Delete files",
description="Delete files after rendering. Doesn't work with the image",
default=True)
scene_name = StringProperty(
name="Scene Name",
description="Name of POV-Ray scene to create. Empty name will use the name of " \
"the blend file",
default="", maxlen=1024)
scene_path = StringProperty(
name="Export scene path",
# description="Path to directory where the exported scene (POV and INI) is created", # Bug in POV-Ray RC3
description="Path to directory where the files are created",
default="", maxlen=1024, subtype="DIR_PATH")
renderimage_path = StringProperty(
name="Rendered image path",
description="Full path to directory where the rendered image is saved",
default="", maxlen=1024, subtype="DIR_PATH")
list_lf_enable = BoolProperty(
name="LF in lists",
description="Enable line breaks in lists (vectors and indices). Disabled: " \
"lists are exported in one line",
default=True)
# Not a real pov option, just to know if we should write
radio_enable = BoolProperty(
name="Enable Radiosity",
description="Enable POV-Rays radiosity calculation",
default=False)
radio_display_advanced = BoolProperty(
name="Advanced Options",
description="Show advanced options",
default=False)
media_enable = BoolProperty(
name="Enable Media",
description="Enable POV-Rays atmospheric media",
default=False)
media_samples = IntProperty(
name="Samples",
description="Number of samples taken from camera to first object " \
"encountered along ray path for media calculation",
min=1, max=100, default=35)
media_color = FloatVectorProperty(
name="Media Color", description="The atmospheric media color",
precision=4, step=0.01, min=0, soft_max=1,
default=(0.001, 0.001, 0.001), options={'ANIMATABLE'}, subtype='COLOR')
baking_enable = BoolProperty(
name="Enable Baking",
description="Enable POV-Rays texture baking",
default=False)
indentation_character = EnumProperty(
name="Indentation",
description="Select the indentation type",
items=(("0", "None", "No indentation"),
("1", "Tabs", "Indentation with tabs"),
("2", "Spaces", "Indentation with spaces")),
default="2")
indentation_spaces = IntProperty(
name="Quantity of spaces",
description="The number of spaces for indentation",
min=1, max=10, default=4)
comments_enable = BoolProperty(
name="Enable Comments",
description="Add comments to pov file",
default=True)
# Real pov options
command_line_switches = StringProperty(
name="Command Line Switches",
description="Command line switches consist of a + (plus) or - (minus) sign, followed " \
"by one or more alphabetic characters and possibly a numeric value",
default="", maxlen=500)
antialias_enable = BoolProperty(
name="Anti-Alias", description="Enable Anti-Aliasing",
default=True)
antialias_method = EnumProperty(
name="Method",
description="AA-sampling method. Type 1 is an adaptive, non-recursive, super-sampling "\
"method. Type 2 is an adaptive and recursive super-sampling method",
items=(("0", "non-recursive AA", "Type 1 Sampling in POV-Ray"),
("1", "recursive AA", "Type 2 Sampling in POV-Ray")),
default="1")
antialias_depth = IntProperty(
name="Antialias Depth", description="Depth of pixel for sampling",
min=1, max=9, default=3)
antialias_threshold = FloatProperty(
name="Antialias Threshold", description="Tolerance for sub-pixels",
min=0.0, max=1.0, soft_min=0.05, soft_max=0.5, default=0.1)
jitter_enable = BoolProperty(
name="Jitter",
        description="Enable jittering. Adds noise into the sampling process (avoid " \
                    "using jitter in animations)",
default=True)
jitter_amount = FloatProperty(
name="Jitter Amount", description="Amount of jittering",
min=0.0, max=1.0, soft_min=0.01, soft_max=1.0, default=1.0)
antialias_gamma = FloatProperty(
name="Antialias Gamma",
description="POV-Ray compares gamma-adjusted values for super sampling. Antialias " \
"Gamma sets the Gamma before comparison",
min=0.0, max=5.0, soft_min=0.01, soft_max=2.5, default=2.5)
max_trace_level = IntProperty(
name="Max Trace Level",
description="Number of reflections/refractions allowed on ray path",
min=1, max=256, default=5)
photon_spacing = FloatProperty(
name="Spacing",
        description="Average distance between photons on surfaces. Halve this to get " \
                    "four times as many surface photons",
min=0.001, max=1.000, soft_min=0.001, soft_max=1.000, default=0.005, precision=3)
photon_max_trace_level = IntProperty(
name="Max Trace Level",
description="Number of reflections/refractions allowed on ray path",
min=1, max=256, default=5)
photon_adc_bailout = FloatProperty(
name="ADC Bailout",
description="The adc_bailout for photons. Use adc_bailout = " \
"0.01 / brightest_ambient_object for good results",
min=0.0, max=1000.0, soft_min=0.0, soft_max=1.0, default=0.1, precision=3)
photon_gather_min = IntProperty(
name="Gather Min", description="Minimum number of photons gathered for each point",
min=1, max=256, default=20)
photon_gather_max = IntProperty(
name="Gather Max", description="Maximum number of photons gathered for each point",
min=1, max=256, default=100)
radio_adc_bailout = FloatProperty(
name="ADC Bailout",
description="The adc_bailout for radiosity rays. Use " \
"adc_bailout = 0.01 / brightest_ambient_object for good results",
min=0.0, max=1000.0, soft_min=0.0, soft_max=1.0, default=0.01, precision=3)
radio_always_sample = BoolProperty(
name="Always Sample",
description="Only use the data from the pretrace step and not gather " \
"any new samples during the final radiosity pass",
default=True)
radio_brightness = FloatProperty(
name="Brightness",
description="Amount objects are brightened before being returned " \
"upwards to the rest of the system",
min=0.0, max=1000.0, soft_min=0.0, soft_max=10.0, default=1.0)
radio_count = IntProperty(
name="Ray Count",
description="Number of rays for each new radiosity value to be calculated " \
"(halton sequence over 1600)",
min=1, max=10000, soft_max=1600, default=35)
radio_error_bound = FloatProperty(
name="Error Bound",
description="One of the two main speed/quality tuning values, " \
"lower values are more accurate",
min=0.0, max=1000.0, soft_min=0.1, soft_max=10.0, default=1.8)
radio_gray_threshold = FloatProperty(
name="Gray Threshold",
description="One of the two main speed/quality tuning values, " \
"lower values are more accurate",
min=0.0, max=1.0, soft_min=0, soft_max=1, default=0.0)
radio_low_error_factor = FloatProperty(
name="Low Error Factor",
description="Just enough samples is slightly blotchy. Low error changes error " \
"tolerance for less critical last refining pass",
min=0.0, max=1.0, soft_min=0.0, soft_max=1.0, default=0.5)
# max_sample - not available yet
radio_media = BoolProperty(
name="Media", description="Radiosity estimation can be affected by media",
default=False)
radio_minimum_reuse = FloatProperty(
name="Minimum Reuse",
description="Fraction of the screen width which sets the minimum radius of reuse " \
"for each sample point (At values higher than 2% expect errors)",
min=0.0, max=1.0, soft_min=0.1, soft_max=0.1, default=0.015, precision=3)
radio_nearest_count = IntProperty(
name="Nearest Count",
description="Number of old ambient values blended together to " \
"create a new interpolated value",
min=1, max=20, default=5)
radio_normal = BoolProperty(
name="Normals", description="Radiosity estimation can be affected by normals",
default=False)
radio_recursion_limit = IntProperty(
name="Recursion Limit",
        description="How many recursion levels are used to calculate " \
"the diffuse inter-reflection",
min=1, max=20, default=3)
radio_pretrace_start = FloatProperty(
name="Pretrace Start",
description="Fraction of the screen width which sets the size of the " \
"blocks in the mosaic preview first pass",
min=0.01, max=1.00, soft_min=0.02, soft_max=1.0, default=0.08)
radio_pretrace_end = FloatProperty(
name="Pretrace End",
description="Fraction of the screen width which sets the size of the blocks " \
"in the mosaic preview last pass",
min=0.001, max=1.00, soft_min=0.01, soft_max=1.00, default=0.04, precision=3)
###############################################################################
# Material POV properties.
###############################################################################
class RenderPovSettingsMaterial(bpy.types.PropertyGroup):
irid_enable = BoolProperty(
name="Enable Iridescence",
        description="Newton's thin film interference (like an oil slick on a puddle of " \
                    "water or the rainbow hues of a soap bubble)",
default=False)
mirror_use_IOR = BoolProperty(
name="Correct Reflection",
description="Use same IOR as raytrace transparency to calculate mirror reflections. " \
"More physically correct",
default=False)
mirror_metallic = BoolProperty(
name="Metallic Reflection",
        description="Mirror reflections get colored as diffuse (for metallic materials)",
default=False)
conserve_energy = BoolProperty(
name="Conserve Energy",
        description="Light transmitted is more correctly reduced by mirror reflections; " \
                    "also, the sum of diffuse and translucency is kept below one",
default=True)
irid_amount = FloatProperty(
name="amount",
description="Contribution of the iridescence effect to the overall surface color. " \
"As a rule of thumb keep to around 0.25 (25% contribution) or less, " \
"but experiment. If the surface is coming out too white, try lowering " \
"the diffuse and possibly the ambient values of the surface",
min=0.0, max=1.0, soft_min=0.01, soft_max=1.0, default=0.25)
irid_thickness = FloatProperty(
name="thickness",
description="A very thin film will have a high frequency of color changes while a " \
"thick film will have large areas of color",
min=0.0, max=1000.0, soft_min=0.1, soft_max=10.0, default=1)
irid_turbulence = FloatProperty(
name="turbulence", description="This parameter varies the thickness",
min=0.0, max=10.0, soft_min=0.000, soft_max=1.0, default=0)
interior_fade_color = FloatVectorProperty(
name="Fade Color", description="Color of filtered attenuation for transparent materials",
precision=4, step=0.01, min=0.0, soft_max=1.0,
default=(0, 0, 0), options={'ANIMATABLE'}, subtype='COLOR')
caustics_enable = BoolProperty(
name="Caustics",
        description="Use only fake refractive caustics (default) or photon-based " \
                    "reflective/refractive caustics",
default=True)
fake_caustics = BoolProperty(
        name="Fake Caustics", description="Use only (fast) fake refractive caustics",
default=True)
fake_caustics_power = FloatProperty(
name="Fake caustics power",
description="Values typically range from 0.0 to 1.0 or higher. Zero is no caustics. " \
"Low, non-zero values give broad hot-spots while higher values give " \
"tighter, smaller simulated focal points",
min=0.00, max=10.0, soft_min=0.00, soft_max=1.10, default=0.1)
photons_refraction = BoolProperty(
name="Refractive Photon Caustics", description="more physically correct",
default=False)
photons_dispersion = FloatProperty(
name="Chromatic Dispersion",
        description="Light passing through will be separated according to wavelength. " \
                    "This ratio of refractive indices for violet to red controls how much " \
                    "the colors are spread out. 1 = no dispersion; good values are 1.01 to 1.1",
min=1.0000, max=10.000, soft_min=1.0000, soft_max=1.1000, precision=4, default=1.0000)
photons_dispersion_samples = IntProperty(
name="Dispersion Samples", description="Number of color-steps for dispersion",
min=2, max=128, default=7)
photons_reflection = BoolProperty(
name="Reflective Photon Caustics",
description="Use this to make your Sauron's ring ;-P",
default=False)
refraction_type = EnumProperty(
items=[("0", "None", "use only reflective caustics"),
("1", "Fake Caustics", "use fake caustics"),
("2", "Photons Caustics", "use photons for refractive caustics")],
name="Refractive",
        description="Use fake caustics (fast) or true photons for refractive caustics",
default="1")
##################################CustomPOV Code############################
replacement_text = StringProperty(
name="Declared name:",
description="Type the declared name in custom POV code or an external " \
".inc it points at. texture {} expected",
default="")
###############################################################################
# Texture POV properties.
###############################################################################
class RenderPovSettingsTexture(bpy.types.PropertyGroup):
#Custom texture gamma
tex_gamma_enable = BoolProperty(
name="Enable custom texture gamma",
        description="Specify a custom gamma for which the texture has been precorrected, " \
                    "when the file format does not carry it and only if it differs from " \
                    "your OS's expected standard (see POV doc)",
default=False)
tex_gamma_value = FloatProperty(
name="Custom texture gamma",
        description="Value for which the file was issued, e.g. a raw photo is gamma 1.0",
min=0.45, max=5.00, soft_min=1.00, soft_max=2.50, default=1.00)
##################################CustomPOV Code############################
#Only DUMMIES below for now:
replacement_text = StringProperty(
name="Declared name:",
description="Type the declared name in custom POV code or an external .inc " \
"it points at. pigment {} expected",
default="")
###############################################################################
# Object POV properties.
###############################################################################
class RenderPovSettingsObject(bpy.types.PropertyGroup):
#Importance sampling
importance_value = FloatProperty(
name="Radiosity Importance",
description="Priority value relative to other objects for sampling radiosity rays. " \
"Increase to get more radiosity rays at comparatively small yet " \
"bright objects",
min=0.01, max=1.00, default=1.00)
#Collect photons
collect_photons = BoolProperty(
name="Receive Photon Caustics",
        description="Enable object to collect photons from other objects' caustics. Turn " \
"off for objects that don't really need to receive caustics (e.g. objects" \
" that generate caustics often don't need to show any on themselves)",
default=True)
#Photons spacing_multiplier
spacing_multiplier = FloatProperty(
name="Photons Spacing Multiplier",
        description="Multiplier value relative to global spacing of photons. " \
                    "Decrease by half to get 4x more photons at the surface of " \
                    "this object (or 8x the media photons specified in the globals)",
min=0.01, max=1.00, default=1.00)
##################################CustomPOV Code############################
#Only DUMMIES below for now:
replacement_text = StringProperty(
name="Declared name:",
description="Type the declared name in custom POV code or an external .inc " \
"it points at. Any POV shape expected e.g: isosurface {}",
default="")
###############################################################################
# Camera POV properties.
###############################################################################
class RenderPovSettingsCamera(bpy.types.PropertyGroup):
#DOF Toggle
dof_enable = BoolProperty(
        name="Depth Of Field", description="Enable POV-Ray Depth Of Field",
default=False)
#Aperture (Intensity of the Blur)
dof_aperture = FloatProperty(
name="Aperture",
        description="Similar to a real camera's aperture effect over focal blur (though not " \
                    "in physical units and independent of focal length). " \
"Increase to get more blur",
min=0.01, max=1.00, default=0.25)
#Aperture adaptive sampling
dof_samples_min = IntProperty(
name="Samples Min", description="Minimum number of rays to use for each pixel",
min=1, max=128, default=96)
dof_samples_max = IntProperty(
name="Samples Max", description="Maximum number of rays to use for each pixel",
min=1, max=128, default=128)
dof_variance = IntProperty(
name="Variance",
description="Minimum threshold (fractional value) for adaptive DOF sampling (up " \
"increases quality and render time). The value for the variance should " \
"be in the range of the smallest displayable color difference",
min=1, max=100000, soft_max=10000, default=256)
dof_confidence = FloatProperty(
name="Confidence",
description="Probability to reach the real color value. Larger confidence values " \
"will lead to more samples, slower traces and better images",
min=0.01, max=0.99, default=0.90)
##################################CustomPOV Code############################
#Only DUMMIES below for now:
replacement_text = StringProperty(
name="Texts in blend file",
description="Type the declared name in custom POV code or an external .inc " \
"it points at. camera {} expected",
default="")
###############################################################################
# Text POV properties.
###############################################################################
class RenderPovSettingsText(bpy.types.PropertyGroup):
custom_code = BoolProperty(
name="Custom Code",
description="Add this text at the top of the exported POV-Ray file",
default=False)
def register():
bpy.utils.register_module(__name__)
bpy.types.Scene.pov = PointerProperty(type=RenderPovSettingsScene)
bpy.types.Material.pov = PointerProperty(type=RenderPovSettingsMaterial)
bpy.types.Texture.pov = PointerProperty(type=RenderPovSettingsTexture)
bpy.types.Object.pov = PointerProperty(type=RenderPovSettingsObject)
bpy.types.Camera.pov = PointerProperty(type=RenderPovSettingsCamera)
bpy.types.Text.pov = PointerProperty(type=RenderPovSettingsText)
def unregister():
bpy.utils.unregister_module(__name__)
del bpy.types.Scene.pov
del bpy.types.Material.pov
del bpy.types.Texture.pov
del bpy.types.Object.pov
del bpy.types.Camera.pov
del bpy.types.Text.pov
if __name__ == "__main__":
register()
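# Hedged usage sketch (not part of the original add-on): once register() has run,
# every Scene carries a ``pov`` PointerProperty to RenderPovSettingsScene, so
# exporter code can read the settings declared above. The helper below is purely
# illustrative and is not referenced anywhere else in this file.
def _example_scene_indent(scene):
    """Resolve the scene's indentation settings into the literal string an
    exporter would prepend to nested POV statements."""
    if scene.pov.indentation_character == "1":
        return "\t"
    if scene.pov.indentation_character == "2":
        return " " * scene.pov.indentation_spaces
    return ""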
|
{
"content_hash": "c12acddd2e938d0907bf04de4cafc966",
"timestamp": "",
"source": "github",
"line_count": 511,
"max_line_length": 118,
"avg_line_length": 45.49706457925636,
"alnum_prop": 0.5725837670437438,
"repo_name": "kellpossible/VoxelEditor",
"id": "1b7299ecd94a012bc14320d7c4c5b0e3de3e54a2",
"size": "24064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ScriptResearch/render_povray/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "478983"
}
],
"symlink_target": ""
}
|
from .partials import *
INSTALLED_APPS += [
"debug_toolbar",
"django_extensions",
]
MIDDLEWARE_CLASSES += [
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
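# Hedged addition (assumption, not in the original settings): django-debug-toolbar
# only renders its panels for requests whose client address is listed in
# INTERNAL_IPS, so local development usually needs something like this as well.
INTERNAL_IPS = [
    "127.0.0.1",
]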
|
{
"content_hash": "fc130650707a35d00642210c919f640a",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 54,
"avg_line_length": 16,
"alnum_prop": 0.6818181818181818,
"repo_name": "jupiny/EnglishDiary",
"id": "3cf75fba94ae787547e7bcf4d695874b5328193a",
"size": "176",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "english_diary/english_diary/settings/development.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23180"
},
{
"name": "HTML",
"bytes": "49041"
},
{
"name": "JavaScript",
"bytes": "62217"
},
{
"name": "Makefile",
"bytes": "205"
},
{
"name": "Python",
"bytes": "74625"
}
],
"symlink_target": ""
}
|
"""
Drivers for san-stored volumes.
The unique thing about a SAN is that we don't expect that we can run the volume
controller on the SAN hardware. We expect to access it over SSH or some API.
"""
import base64
import httplib
import os
import paramiko
import random
import socket
import string
import uuid
from lxml import etree
from cinder import exception
from cinder import flags
from cinder.openstack.common import log as logging
from cinder.openstack.common import cfg
from cinder.openstack.common import jsonutils
from cinder import utils
import cinder.volume.driver
LOG = logging.getLogger(__name__)
san_opts = [
cfg.BoolOpt('san_thin_provision',
default=True,
help='Use thin provisioning for SAN volumes?'),
cfg.StrOpt('san_ip',
default='',
help='IP address of SAN controller'),
cfg.StrOpt('san_login',
default='admin',
help='Username for SAN controller'),
cfg.StrOpt('san_password',
default='',
help='Password for SAN controller'),
cfg.StrOpt('san_private_key',
default='',
help='Filename of private key to use for SSH authentication'),
cfg.StrOpt('san_clustername',
default='',
help='Cluster name to use for creating volumes'),
cfg.IntOpt('san_ssh_port',
default=22,
help='SSH port to use with SAN'),
cfg.BoolOpt('san_is_local',
default=False,
help='Execute commands locally instead of over SSH; '
'use if the volume service is running on the SAN device'),
cfg.StrOpt('san_zfs_volume_base',
default='rpool/',
help='The ZFS path under which to create zvols for volumes.'),
]
FLAGS = flags.FLAGS
FLAGS.register_opts(san_opts)
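# Hedged example (illustrative values only, not from this repository): with the
# options registered above, a cinder.conf for an SSH-managed SAN typically
# carries a stanza along these lines:
#
#   [DEFAULT]
#   san_ip = 192.0.2.10
#   san_login = admin
#   san_private_key = /etc/cinder/ssh/san_rsa
#   san_clustername = Cluster01
#   san_thin_provision = true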
class SanISCSIDriver(cinder.volume.driver.ISCSIDriver):
"""Base class for SAN-style storage volumes
    A SAN-style storage volume is 'different' because the volume controller
    probably won't run on it, so we need to access it over SSH or another
remote protocol.
"""
def __init__(self, *args, **kwargs):
super(SanISCSIDriver, self).__init__(*args, **kwargs)
self.run_local = FLAGS.san_is_local
def _build_iscsi_target_name(self, volume):
return "%s%s" % (FLAGS.iscsi_target_prefix, volume['name'])
def _connect_to_ssh(self):
ssh = paramiko.SSHClient()
#TODO(justinsb): We need a better SSH key policy
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
if FLAGS.san_password:
ssh.connect(FLAGS.san_ip,
port=FLAGS.san_ssh_port,
username=FLAGS.san_login,
password=FLAGS.san_password)
elif FLAGS.san_private_key:
privatekeyfile = os.path.expanduser(FLAGS.san_private_key)
# It sucks that paramiko doesn't support DSA keys
privatekey = paramiko.RSAKey.from_private_key_file(privatekeyfile)
ssh.connect(FLAGS.san_ip,
port=FLAGS.san_ssh_port,
username=FLAGS.san_login,
pkey=privatekey)
else:
msg = _("Specify san_password or san_private_key")
raise exception.InvalidInput(reason=msg)
return ssh
def _execute(self, *cmd, **kwargs):
if self.run_local:
return utils.execute(*cmd, **kwargs)
else:
check_exit_code = kwargs.pop('check_exit_code', None)
command = ' '.join(cmd)
return self._run_ssh(command, check_exit_code)
def _run_ssh(self, command, check_exit_code=True):
#TODO(justinsb): SSH connection caching (?)
ssh = self._connect_to_ssh()
#TODO(justinsb): Reintroduce the retry hack
ret = utils.ssh_execute(ssh, command, check_exit_code=check_exit_code)
ssh.close()
return ret
def ensure_export(self, context, volume):
"""Synchronously recreates an export for a logical volume."""
pass
def create_export(self, context, volume):
"""Exports the volume."""
pass
def remove_export(self, context, volume):
"""Removes an export for a logical volume."""
pass
def check_for_setup_error(self):
"""Returns an error if prerequisites aren't met."""
if not self.run_local:
if not (FLAGS.san_password or FLAGS.san_private_key):
raise exception.InvalidInput(
reason=_('Specify san_password or san_private_key'))
# The san_ip must always be set, because we use it for the target
if not (FLAGS.san_ip):
raise exception.InvalidInput(reason=_("san_ip must be set"))
def _collect_lines(data):
"""Split lines from data into an array, trimming them """
matches = []
for line in data.splitlines():
match = line.strip()
matches.append(match)
return matches
def _get_prefixed_values(data, prefix):
"""Collect lines which start with prefix; with trimming"""
matches = []
for line in data.splitlines():
line = line.strip()
if line.startswith(prefix):
match = line[len(prefix):]
match = match.strip()
matches.append(match)
return matches
class SolarisISCSIDriver(SanISCSIDriver):
"""Executes commands relating to Solaris-hosted ISCSI volumes.
Basic setup for a Solaris iSCSI server:
pkg install storage-server SUNWiscsit
svcadm enable stmf
svcadm enable -r svc:/network/iscsi/target:default
pfexec itadm create-tpg e1000g0 ${MYIP}
pfexec itadm create-target -t e1000g0
Then grant the user that will be logging on lots of permissions.
I'm not sure exactly which though:
zfs allow justinsb create,mount,destroy rpool
usermod -P'File System Management' justinsb
usermod -P'Primary Administrator' justinsb
Also make sure you can login using san_login & san_password/san_private_key
"""
def __init__(self, *cmd, **kwargs):
super(SolarisISCSIDriver, self).__init__(*cmd,
execute=self._execute,
**kwargs)
def _execute(self, *cmd, **kwargs):
new_cmd = ['pfexec']
new_cmd.extend(cmd)
return super(SolarisISCSIDriver, self)._execute(*new_cmd,
**kwargs)
def _view_exists(self, luid):
(out, _err) = self._execute('/usr/sbin/stmfadm',
'list-view', '-l', luid,
check_exit_code=False)
if "no views found" in out:
return False
if "View Entry:" in out:
return True
msg = _("Cannot parse list-view output: %s") % out
raise exception.VolumeBackendAPIException(data=msg)
def _get_target_groups(self):
"""Gets list of target groups from host."""
(out, _err) = self._execute('/usr/sbin/stmfadm', 'list-tg')
matches = _get_prefixed_values(out, 'Target group: ')
LOG.debug("target_groups=%s" % matches)
return matches
def _target_group_exists(self, target_group_name):
        return target_group_name in self._get_target_groups()
def _get_target_group_members(self, target_group_name):
(out, _err) = self._execute('/usr/sbin/stmfadm',
'list-tg', '-v', target_group_name)
matches = _get_prefixed_values(out, 'Member: ')
LOG.debug("members of %s=%s" % (target_group_name, matches))
return matches
def _is_target_group_member(self, target_group_name, iscsi_target_name):
return iscsi_target_name in (
self._get_target_group_members(target_group_name))
def _get_iscsi_targets(self):
(out, _err) = self._execute('/usr/sbin/itadm', 'list-target')
matches = _collect_lines(out)
# Skip header
if len(matches) != 0:
assert 'TARGET NAME' in matches[0]
matches = matches[1:]
targets = []
for line in matches:
items = line.split()
assert len(items) == 3
targets.append(items[0])
LOG.debug("_get_iscsi_targets=%s" % (targets))
return targets
def _iscsi_target_exists(self, iscsi_target_name):
return iscsi_target_name in self._get_iscsi_targets()
def _build_zfs_poolname(self, volume):
zfs_poolname = '%s%s' % (FLAGS.san_zfs_volume_base, volume['name'])
return zfs_poolname
def create_volume(self, volume):
"""Creates a volume."""
if int(volume['size']) == 0:
sizestr = '100M'
else:
sizestr = '%sG' % volume['size']
zfs_poolname = self._build_zfs_poolname(volume)
# Create a zfs volume
cmd = ['/usr/sbin/zfs', 'create']
if FLAGS.san_thin_provision:
cmd.append('-s')
cmd.extend(['-V', sizestr])
cmd.append(zfs_poolname)
self._execute(*cmd)
def _get_luid(self, volume):
zfs_poolname = self._build_zfs_poolname(volume)
zvol_name = '/dev/zvol/rdsk/%s' % zfs_poolname
(out, _err) = self._execute('/usr/sbin/sbdadm', 'list-lu')
lines = _collect_lines(out)
# Strip headers
if len(lines) >= 1:
if lines[0] == '':
lines = lines[1:]
if len(lines) >= 4:
assert 'Found' in lines[0]
assert '' == lines[1]
assert 'GUID' in lines[2]
assert '------------------' in lines[3]
lines = lines[4:]
for line in lines:
items = line.split()
assert len(items) == 3
if items[2] == zvol_name:
luid = items[0].strip()
return luid
msg = _('LUID not found for %(zfs_poolname)s. '
'Output=%(out)s') % locals()
raise exception.VolumeBackendAPIException(data=msg)
def _is_lu_created(self, volume):
luid = self._get_luid(volume)
return luid
def delete_volume(self, volume):
"""Deletes a volume."""
zfs_poolname = self._build_zfs_poolname(volume)
self._execute('/usr/sbin/zfs', 'destroy', zfs_poolname)
def local_path(self, volume):
# TODO(justinsb): Is this needed here?
escaped_group = FLAGS.volume_group.replace('-', '--')
escaped_name = volume['name'].replace('-', '--')
return "/dev/mapper/%s-%s" % (escaped_group, escaped_name)
def ensure_export(self, context, volume):
"""Synchronously recreates an export for a logical volume."""
#TODO(justinsb): On bootup, this is called for every volume.
# It then runs ~5 SSH commands for each volume,
# most of which fetch the same info each time
# This makes initial start stupid-slow
return self._do_export(volume, force_create=False)
def create_export(self, context, volume):
return self._do_export(volume, force_create=True)
def _do_export(self, volume, force_create):
# Create a Logical Unit (LU) backed by the zfs volume
zfs_poolname = self._build_zfs_poolname(volume)
if force_create or not self._is_lu_created(volume):
zvol_name = '/dev/zvol/rdsk/%s' % zfs_poolname
self._execute('/usr/sbin/sbdadm', 'create-lu', zvol_name)
luid = self._get_luid(volume)
iscsi_name = self._build_iscsi_target_name(volume)
target_group_name = 'tg-%s' % volume['name']
# Create a iSCSI target, mapped to just this volume
if force_create or not self._target_group_exists(target_group_name):
self._execute('/usr/sbin/stmfadm', 'create-tg', target_group_name)
        # Yes, we add the initiator before we create it!
# Otherwise, it complains that the target is already active
if force_create or not self._is_target_group_member(target_group_name,
iscsi_name):
self._execute('/usr/sbin/stmfadm',
'add-tg-member', '-g', target_group_name, iscsi_name)
if force_create or not self._iscsi_target_exists(iscsi_name):
self._execute('/usr/sbin/itadm', 'create-target', '-n', iscsi_name)
if force_create or not self._view_exists(luid):
self._execute('/usr/sbin/stmfadm',
'add-view', '-t', target_group_name, luid)
#TODO(justinsb): Is this always 1? Does it matter?
iscsi_portal_interface = '1'
iscsi_portal = FLAGS.san_ip + ":3260," + iscsi_portal_interface
db_update = {}
db_update['provider_location'] = ("%s %s" %
(iscsi_portal,
iscsi_name))
return db_update
def remove_export(self, context, volume):
"""Removes an export for a logical volume."""
# This is the reverse of _do_export
luid = self._get_luid(volume)
iscsi_name = self._build_iscsi_target_name(volume)
target_group_name = 'tg-%s' % volume['name']
if self._view_exists(luid):
self._execute('/usr/sbin/stmfadm', 'remove-view', '-l', luid, '-a')
if self._iscsi_target_exists(iscsi_name):
self._execute('/usr/sbin/stmfadm', 'offline-target', iscsi_name)
self._execute('/usr/sbin/itadm', 'delete-target', iscsi_name)
# We don't delete the tg-member; we delete the whole tg!
if self._target_group_exists(target_group_name):
self._execute('/usr/sbin/stmfadm', 'delete-tg', target_group_name)
if self._is_lu_created(volume):
self._execute('/usr/sbin/sbdadm', 'delete-lu', luid)
class HpSanISCSIDriver(SanISCSIDriver):
"""Executes commands relating to HP/Lefthand SAN ISCSI volumes.
We use the CLIQ interface, over SSH.
Rough overview of CLIQ commands used:
:createVolume: (creates the volume)
:getVolumeInfo: (to discover the IQN etc)
:getClusterInfo: (to discover the iSCSI target IP address)
:assignVolumeChap: (exports it with CHAP security)
The 'trick' here is that the HP SAN enforces security by default, so
normally a volume mount would need both to configure the SAN in the volume
layer and do the mount on the compute layer. Multi-layer operations are
not catered for at the moment in the cinder architecture, so instead we
share the volume using CHAP at volume creation time. Then the mount need
only use those CHAP credentials, so can take place exclusively in the
compute layer.
"""
def _cliq_run(self, verb, cliq_args):
"""Runs a CLIQ command over SSH, without doing any result parsing"""
cliq_arg_strings = []
for k, v in cliq_args.items():
cliq_arg_strings.append(" %s=%s" % (k, v))
cmd = verb + ''.join(cliq_arg_strings)
return self._run_ssh(cmd)
def _cliq_run_xml(self, verb, cliq_args, check_cliq_result=True):
"""Runs a CLIQ command over SSH, parsing and checking the output"""
cliq_args['output'] = 'XML'
(out, _err) = self._cliq_run(verb, cliq_args)
LOG.debug(_("CLIQ command returned %s"), out)
result_xml = etree.fromstring(out)
if check_cliq_result:
response_node = result_xml.find("response")
if response_node is None:
msg = (_("Malformed response to CLIQ command "
"%(verb)s %(cliq_args)s. Result=%(out)s") %
locals())
raise exception.VolumeBackendAPIException(data=msg)
result_code = response_node.attrib.get("result")
if result_code != "0":
msg = (_("Error running CLIQ command %(verb)s %(cliq_args)s. "
" Result=%(out)s") %
locals())
raise exception.VolumeBackendAPIException(data=msg)
return result_xml
def _cliq_get_cluster_info(self, cluster_name):
"""Queries for info about the cluster (including IP)"""
cliq_args = {}
cliq_args['clusterName'] = cluster_name
cliq_args['searchDepth'] = '1'
cliq_args['verbose'] = '0'
result_xml = self._cliq_run_xml("getClusterInfo", cliq_args)
return result_xml
def _cliq_get_cluster_vip(self, cluster_name):
"""Gets the IP on which a cluster shares iSCSI volumes"""
cluster_xml = self._cliq_get_cluster_info(cluster_name)
vips = []
for vip in cluster_xml.findall("response/cluster/vip"):
vips.append(vip.attrib.get('ipAddress'))
if len(vips) == 1:
return vips[0]
_xml = etree.tostring(cluster_xml)
msg = (_("Unexpected number of virtual ips for cluster "
" %(cluster_name)s. Result=%(_xml)s") %
locals())
raise exception.VolumeBackendAPIException(data=msg)
def _cliq_get_volume_info(self, volume_name):
"""Gets the volume info, including IQN"""
cliq_args = {}
cliq_args['volumeName'] = volume_name
result_xml = self._cliq_run_xml("getVolumeInfo", cliq_args)
# Result looks like this:
#<gauche version="1.0">
# <response description="Operation succeeded." name="CliqSuccess"
# processingTime="87" result="0">
# <volume autogrowPages="4" availability="online" blockSize="1024"
# bytesWritten="0" checkSum="false" clusterName="Cluster01"
# created="2011-02-08T19:56:53Z" deleting="false" description=""
# groupName="Group01" initialQuota="536870912" isPrimary="true"
# iscsiIqn="iqn.2003-10.com.lefthandnetworks:group01:25366:vol-b"
# maxSize="6865387257856" md5="9fa5c8b2cca54b2948a63d833097e1ca"
# minReplication="1" name="vol-b" parity="0" replication="2"
# reserveQuota="536870912" scratchQuota="4194304"
# serialNumber="9fa5c8b2cca54b2948a63d833097e1ca0000000000006316"
# size="1073741824" stridePages="32" thinProvision="true">
# <status description="OK" value="2"/>
# <permission access="rw"
# authGroup="api-34281B815713B78-(trimmed)51ADD4B7030853AA7"
# chapName="chapusername" chapRequired="true" id="25369"
# initiatorSecret="" iqn="" iscsiEnabled="true"
# loadBalance="true" targetSecret="supersecret"/>
# </volume>
# </response>
#</gauche>
# Flatten the nodes into a dictionary; use prefixes to avoid collisions
volume_attributes = {}
volume_node = result_xml.find("response/volume")
for k, v in volume_node.attrib.items():
volume_attributes["volume." + k] = v
status_node = volume_node.find("status")
        if status_node is not None:
            for k, v in status_node.attrib.items():
                volume_attributes["status." + k] = v
        # We only consider the first permission node
        permission_node = volume_node.find("permission")
        if permission_node is not None:
            for k, v in permission_node.attrib.items():
                volume_attributes["permission." + k] = v
LOG.debug(_("Volume info: %(volume_name)s => %(volume_attributes)s") %
locals())
return volume_attributes
def create_volume(self, volume):
"""Creates a volume."""
cliq_args = {}
cliq_args['clusterName'] = FLAGS.san_clustername
#TODO(justinsb): Should we default to inheriting thinProvision?
cliq_args['thinProvision'] = '1' if FLAGS.san_thin_provision else '0'
cliq_args['volumeName'] = volume['name']
if int(volume['size']) == 0:
cliq_args['size'] = '100MB'
else:
cliq_args['size'] = '%sGB' % volume['size']
self._cliq_run_xml("createVolume", cliq_args)
volume_info = self._cliq_get_volume_info(volume['name'])
cluster_name = volume_info['volume.clusterName']
iscsi_iqn = volume_info['volume.iscsiIqn']
#TODO(justinsb): Is this always 1? Does it matter?
cluster_interface = '1'
cluster_vip = self._cliq_get_cluster_vip(cluster_name)
iscsi_portal = cluster_vip + ":3260," + cluster_interface
model_update = {}
# NOTE(jdg): LH volumes always at lun 0 ?
model_update['provider_location'] = ("%s %s %s" %
(iscsi_portal,
iscsi_iqn,
0))
return model_update
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot."""
raise NotImplementedError()
def create_snapshot(self, snapshot):
"""Creates a snapshot."""
raise NotImplementedError()
def delete_volume(self, volume):
"""Deletes a volume."""
cliq_args = {}
cliq_args['volumeName'] = volume['name']
cliq_args['prompt'] = 'false' # Don't confirm
self._cliq_run_xml("deleteVolume", cliq_args)
def local_path(self, volume):
# TODO(justinsb): Is this needed here?
msg = _("local_path not supported")
raise exception.VolumeBackendAPIException(data=msg)
def initialize_connection(self, volume, connector):
"""Assigns the volume to a server.
Assign any created volume to a compute node/host so that it can be
used from that host. HP VSA requires a volume to be assigned
to a server.
This driver returns a driver_volume_type of 'iscsi'.
The format of the driver data is defined in _get_iscsi_properties.
Example return value:
{
'driver_volume_type': 'iscsi'
'data': {
'target_discovered': True,
'target_iqn': 'iqn.2010-10.org.openstack:volume-00000001',
                    'target_portal': '127.0.0.1:3260',
'volume_id': 1,
}
}
"""
cliq_args = {}
cliq_args['volumeName'] = volume['name']
cliq_args['serverName'] = connector['host']
self._cliq_run_xml("assignVolumeToServer", cliq_args)
iscsi_properties = self._get_iscsi_properties(volume)
return {
'driver_volume_type': 'iscsi',
'data': iscsi_properties
}
def terminate_connection(self, volume, connector):
"""Unassign the volume from the host."""
cliq_args = {}
cliq_args['volumeName'] = volume['name']
cliq_args['serverName'] = connector['host']
self._cliq_run_xml("unassignVolumeToServer", cliq_args)
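# Hedged illustration (helper not present in the driver): _cliq_run flattens its
# keyword dict into a single "verb key=value ..." string before shipping it over
# SSH, so a createVolume call ends up looking like the string returned here.
def _example_cliq_command(verb, cliq_args):
    return verb + ''.join(" %s=%s" % (k, v) for k, v in cliq_args.items())
# e.g. _example_cliq_command("createVolume",
#                            {"volumeName": "volume-1", "size": "1GB"})
# -> 'createVolume volumeName=volume-1 size=1GB'  (dict ordering may vary)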
|
{
"content_hash": "6aefe59f8f34785ee66d8b34cfe5aaf0",
"timestamp": "",
"source": "github",
"line_count": 636,
"max_line_length": 79,
"avg_line_length": 36.43396226415094,
"alnum_prop": 0.5772052477127568,
"repo_name": "tylertian/Openstack",
"id": "7c06d859fbcd03f98d0c1eff3d628250a2e8c928",
"size": "23852",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "openstack F/cinder/cinder/volume/san.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "239919"
},
{
"name": "JavaScript",
"bytes": "156942"
},
{
"name": "Python",
"bytes": "16949418"
},
{
"name": "Shell",
"bytes": "96743"
}
],
"symlink_target": ""
}
|
import copy
from biicode.common.exception import (BiiStoreException, NotInStoreException,
AlreadyInStoreException)
class MemStore(object):
    '''MemStore is the ABC for an in-memory store that can be used
    both for testing (instead of a real DB) and in production, for efficiency,
    as an intermediate caching store proxying a real store passed as a parameter
    to the constructor (which can be None for in-memory-only testing).
    Such a store must support the methods invoked, i.e. for read(x, 'cell') the
    underlying store must have a method called "read_cell(x)".
    The methods called "multi" append an 's' to the name and retrieve
    a dict {ID: RequestedObject}.
    The store always returns deep copies of objects, to really provide isolation
    and avoid by-reference modifications, when used as a final (non-proxying) store.
    Names passed as parameters MUST be the names of the supporting dicts that hold the data
'''
def __init__(self, store=None):
self._store = store
self._init_holders()
    def _get_item(self, item):
        """ This method differentiates between testing and production use. In production,
        the store is a proxy of self._store, so it does not need to deepcopy objects.
        For testing it is better to ensure things are copied, so that no referential
        problems are hidden
"""
if self._store:
return item
else:
return copy.deepcopy(item)
def _init_holders(self):
'''derived classes must implement this method, declaring collections
to be cached, i.e. self.user = {}. Collections in SINGULAR'''
raise NotImplementedError('Derived class must implement this')
def read(self, ID, name):
collection = getattr(self, name)
try:
return self._get_item(collection[ID])
except KeyError:
if self._store:
read_method = getattr(self._store, 'read_' + name)
item = read_method(ID)
collection[ID] = item
return item
else:
raise NotInStoreException('ID (%s) not found' % str(ID))
def read_multi(self, IDs, name):
'''returns a dict {ID: object}'''
result = {}
missing = set()
collection = getattr(self, name)
for ID in IDs:
try:
result[ID] = self._get_item(collection[ID])
except KeyError:
missing.add(ID)
if missing:
if self._store:
read_method = getattr(self._store, 'read_%ss' % name)
items = read_method(missing) # assumes that return {ID=>value}
collection.update(items)
result.update(items)
#else:
# raise NotInStoreException('IDs (%s) not found' % missing)
return result
def create(self, value, name, **kwargs):
'''create has an extra kwargs for extra options, as update_if_current
'''
ID = value.ID
if ID is None:
raise BiiStoreException('Object without ID %s, %s' % (value, name))
collection = getattr(self, name)
if ID in collection:
raise AlreadyInStoreException('Duplicate key %s in %s' % (ID, name))
if self._store:
create_method = getattr(self._store, 'create_' + name)
create_method(value, **kwargs)
collection[ID] = self._get_item(value)
return ID
def create_multi(self, values, name, **kwargs):
'''create has an extra kwargs for extra options, as update_if_current
'''
collection = getattr(self, name)
values_dict = {}
for value in values:
ID = value.ID
if ID is None:
raise BiiStoreException('Object without ID %s, %s' % (value, name))
if ID in collection:
raise BiiStoreException('Duplicate key %s in %s' % (ID, name))
values_dict[ID] = value
if self._store:
create_method = getattr(self._store, 'create_%ss' % name)
create_method(values, **kwargs)
collection.update(self._get_item(values_dict))
def update(self, value, name):
ID = value.ID
if ID is None:
raise BiiStoreException('Object without ID %s, %s' % (value, name))
collection = getattr(self, name)
if ID not in collection:
raise BiiStoreException('Non existing ID (%s) in update' % ID)
if self._store:
meth = getattr(self._store, 'update_' + name)
meth(value)
collection[ID] = self._get_item(value)
def upsert(self, value, name):
ID = value.ID
collection = getattr(self, name)
if ID is None:
raise BiiStoreException('Object without ID %s, %s' % (value, name))
if self._store:
meth = getattr(self._store, 'upsert_' + name)
meth(value)
collection[ID] = self._get_item(value)
def upsert_multi(self, values, name):
collection = getattr(self, name)
values_dict = {}
for value in values:
ID = value.ID
if ID is None:
raise BiiStoreException('Object without ID %s, %s' % (value, name))
values_dict[ID] = value
if self._store:
create_method = getattr(self._store, 'upsert_%ss' % name)
create_method(values)
collection.update(self._get_item(values_dict))
def delete_multi(self, ids, name):
        '''Use with caution :P, only for editing'''
collection = getattr(self, name)
if any(ID not in collection for ID in ids):
raise BiiStoreException('key error in : %s' % name)
if self._store:
meth = getattr(self._store, 'delete_%ss' % name)
meth(ids)
for ID in ids:
del collection[ID]
def delete(self, key, name):
        '''Use with caution :P, only for editing'''
collection = getattr(self, name)
if key not in collection:
raise BiiStoreException('key error in : %s' % collection)
if self._store:
meth = getattr(self._store, 'delete_%s' % name)
meth(key)
del collection[key]
def update_field(self, name, obj_id, field_name, value):
# Because:
# a) field_name is the serial representation of the field
# b) object must not have a setter for this field
raise NotImplementedError()
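# Hedged usage sketch (example class, not part of biicode): a concrete store
# only has to declare its holder dicts, named in singular, inside _init_holders;
# the generic read/create/update/delete machinery above does the rest.
class _ExampleMemStore(MemStore):
    def _init_holders(self):
        self.user = {}   # serves read(x, 'user') / proxies store.read_user(x)
        self.cell = {}   # serves read_multi(ids, 'cell') / store.read_cells(ids)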
|
{
"content_hash": "c47de677230ecd2c854e158e854d61e7",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 98,
"avg_line_length": 38.8235294117647,
"alnum_prop": 0.578030303030303,
"repo_name": "biicode/common",
"id": "5dbef60417c580544a7c39049515e66524708920",
"size": "6600",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "store/mem_store.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3157300"
},
{
"name": "C++",
"bytes": "4667113"
},
{
"name": "CMake",
"bytes": "25379"
},
{
"name": "FORTRAN",
"bytes": "3691"
},
{
"name": "Java",
"bytes": "4201"
},
{
"name": "JavaScript",
"bytes": "172849"
},
{
"name": "Makefile",
"bytes": "6333"
},
{
"name": "Objective-C",
"bytes": "826"
},
{
"name": "Python",
"bytes": "702276"
},
{
"name": "Shell",
"bytes": "645"
}
],
"symlink_target": ""
}
|
from __future__ import print_function, unicode_literals
import os, sys, argparse, contextlib, subprocess, locale, re
from . import my_shlex as shlex
USING_PYTHON2 = True if sys.version_info < (3, 0) else False
if not USING_PYTHON2:
basestring = str
sys_encoding = locale.getpreferredencoding()
_DEBUG = '_ARC_DEBUG' in os.environ
debug_stream = sys.stderr
def debug(*args):
if _DEBUG:
print(file=debug_stream, *args)
BASH_FILE_COMPLETION_FALLBACK = 79
BASH_DIR_COMPLETION_FALLBACK = 80
safe_actions = (argparse._StoreAction,
argparse._StoreConstAction,
argparse._StoreTrueAction,
argparse._StoreFalseAction,
argparse._AppendAction,
argparse._AppendConstAction,
argparse._CountAction)
from . import completers
from .my_argparse import IntrospectiveArgumentParser, action_is_satisfied, action_is_open
@contextlib.contextmanager
def mute_stdout():
stdout = sys.stdout
sys.stdout = open(os.devnull, 'w')
yield
sys.stdout = stdout
@contextlib.contextmanager
def mute_stderr():
stderr = sys.stderr
sys.stderr = open(os.devnull, 'w')
yield
sys.stderr.close()
sys.stderr = stderr
class ArgcompleteException(Exception):
pass
def split_line(line, point=None):
if point is None:
point = len(line)
lexer = shlex.shlex(line, posix=True, punctuation_chars=True)
words = []
def split_word(word):
# TODO: make this less ugly
point_in_word = len(word) + point - lexer.instream.tell()
if isinstance(lexer.state, basestring) and lexer.state in lexer.whitespace:
point_in_word += 1
if point_in_word > len(word):
debug("In trailing whitespace")
words.append(word)
word = ''
prefix, suffix = word[:point_in_word], word[point_in_word:]
prequote = ''
# posix
if lexer.state is not None and lexer.state in lexer.quotes:
prequote = lexer.state
# non-posix
#if len(prefix) > 0 and prefix[0] in lexer.quotes:
# prequote, prefix = prefix[0], prefix[1:]
first_colon_pos = lexer.first_colon_pos if ':' in word else None
return prequote, prefix, suffix, words, first_colon_pos
while True:
try:
word = lexer.get_token()
if word == lexer.eof:
# TODO: check if this is ever unsafe
# raise ArgcompleteException("Unexpected end of input")
return "", "", "", words, None
if lexer.instream.tell() >= point:
debug("word", word, "split, lexer state: '{s}'".format(s=lexer.state))
return split_word(word)
words.append(word)
except ValueError:
debug("word", lexer.token, "split (lexer stopped, state: '{s}')".format(s=lexer.state))
if lexer.instream.tell() >= point:
return split_word(lexer.token)
else:
raise ArgcompleteException("Unexpected internal state. Please report this bug at https://github.com/kislyuk/argcomplete/issues.")
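# Hedged illustration (approximate, not part of the module's API surface): for a
# completion request on the line "pip inst" with the cursor at the end,
# split_line returns roughly
#   ('', 'inst', '', ['pip'], None)
# i.e. no pending quote, the prefix under the cursor, no suffix, the words
# already completed, and no colon position to trim.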
def default_validator(completion, prefix):
return completion.startswith(prefix)
class CompletionFinder(object):
'''
Inherit from this class if you wish to override any of the stages below. Otherwise, use ``argcomplete.autocomplete()``
directly (it's a convenience instance of this class). It has the same signature as
:meth:`CompletionFinder.__call__()`.
'''
def __init__(self, argument_parser=None, always_complete_options=True, exclude=None, validator=None):
self._parser = argument_parser
self.always_complete_options = always_complete_options
self.exclude = exclude
if validator is None:
validator = default_validator
self.validator = validator
def __call__(self, argument_parser, always_complete_options=True, exit_method=os._exit, output_stream=None,
exclude=None, validator=None):
'''
:param argument_parser: The argument parser to autocomplete on
:type argument_parser: :class:`argparse.ArgumentParser`
:param always_complete_options: Whether or not to autocomplete options even if an option string opening character (normally ``-``) has not been entered
:type always_complete_options: boolean
:param exit_method: Method used to stop the program after printing completions. Defaults to :meth:`os._exit`. If you want to perform a normal exit that calls exit handlers, use :meth:`sys.exit`.
:type exit_method: callable
:param exclude: List of strings representing options to be omitted from autocompletion
:type exclude: iterable
:param validator: Function to filter all completions through before returning (called with two string arguments, completion and prefix; return value is evaluated as a boolean)
:type validator: callable
.. note:: If you are not subclassing CompletionFinder to override its behaviors, use ``argcomplete.autocomplete()`` directly. It has the same signature as this method.
Produces tab completions for ``argument_parser``. See module docs for more info.
Argcomplete only executes actions if their class is known not to have side effects. Custom action classes can be
added to argcomplete.safe_actions, if their values are wanted in the ``parsed_args`` completer argument, or their
execution is otherwise desirable.
'''
self.__init__(argument_parser, always_complete_options, exclude, validator)
if '_ARGCOMPLETE' not in os.environ:
# not an argument completion invocation
return
global debug_stream
try:
debug_stream = os.fdopen(9, 'w')
except:
debug_stream = sys.stderr
if output_stream is None:
try:
output_stream = os.fdopen(8, 'wb')
except:
debug("Unable to open fd 8 for writing, quitting")
exit_method(1)
# print("", stream=debug_stream)
# for v in 'COMP_CWORD', 'COMP_LINE', 'COMP_POINT', 'COMP_TYPE', 'COMP_KEY', '_ARGCOMPLETE_COMP_WORDBREAKS', 'COMP_WORDS':
# print(v, os.environ[v], stream=debug_stream)
ifs = os.environ.get('_ARGCOMPLETE_IFS', '\013')
if len(ifs) != 1:
debug("Invalid value for IFS, quitting [{v}]".format(v=ifs))
exit_method(1)
comp_line = os.environ['COMP_LINE']
comp_point = int(os.environ['COMP_POINT'])
# Adjust comp_point for wide chars
if USING_PYTHON2:
comp_point = len(comp_line[:comp_point].decode(sys_encoding))
else:
comp_point = len(comp_line.encode(sys_encoding)[:comp_point].decode(sys_encoding))
if USING_PYTHON2:
comp_line = comp_line.decode(sys_encoding)
cword_prequote, cword_prefix, cword_suffix, comp_words, first_colon_pos = split_line(comp_line, comp_point)
if os.environ['_ARGCOMPLETE'] == "2":
# Shell hook recognized the first word as the interpreter; discard it
comp_words.pop(0)
debug("\nLINE: '{l}'\nPREQUOTE: '{pq}'\nPREFIX: '{p}'".format(l=comp_line, pq=cword_prequote, p=cword_prefix),
"\nSUFFIX: '{s}'".format(s=cword_suffix),
"\nWORDS:", comp_words)
completions = self._get_completions(comp_words, cword_prefix, cword_prequote, first_colon_pos)
debug("\nReturning completions:", completions)
output_stream.write(ifs.join(completions).encode(sys_encoding))
output_stream.flush()
debug_stream.flush()
exit_method(0)
def _get_completions(self, comp_words, cword_prefix, cword_prequote, first_colon_pos):
active_parsers, parsed_args = self._patch_argument_parser()
try:
debug("invoking parser with", comp_words[1:])
with mute_stderr():
a = self._parser.parse_known_args(comp_words[1:], namespace=parsed_args)
debug("parsed args:", a)
except BaseException as e:
debug("\nexception", type(e), str(e), "while parsing args")
completions = self.collect_completions(active_parsers, parsed_args, cword_prefix, debug)
completions = self.filter_completions(completions)
completions = self.quote_completions(completions, cword_prequote, first_colon_pos)
return completions
def _patch_argument_parser(self):
'''
Since argparse doesn't support much introspection, we monkey-patch it to replace the parse_known_args method and
all actions with hooks that tell us which action was last taken or about to be taken, and let us have the parser
figure out which subparsers need to be activated (then recursively monkey-patch those).
We save all active ArgumentParsers to extract all their possible option names later.
'''
active_parsers = [self._parser]
parsed_args = argparse.Namespace()
visited_actions = []
def patch(parser):
parser.__class__ = IntrospectiveArgumentParser
for action in parser._actions:
# TODO: accomplish this with super
class IntrospectAction(action.__class__):
def __call__(self, parser, namespace, values, option_string=None):
debug('Action stub called on', self)
debug('\targs:', parser, namespace, values, option_string)
debug('\torig class:', self._orig_class)
debug('\torig callable:', self._orig_callable)
visited_actions.append(self)
if self._orig_class == argparse._SubParsersAction:
debug('orig class is a subparsers action: patching and running it')
active_subparser = self._name_parser_map[values[0]]
patch(active_subparser)
active_parsers.append(active_subparser)
self._orig_callable(parser, namespace, values, option_string=option_string)
elif self._orig_class in safe_actions:
self._orig_callable(parser, namespace, values, option_string=option_string)
if getattr(action, "_orig_class", None):
debug("Action", action, "already patched")
action._orig_class = action.__class__
action._orig_callable = action.__call__
action.__class__ = IntrospectAction
patch(self._parser)
debug("Active parsers:", active_parsers)
debug("Visited actions:", visited_actions)
debug("Parse result namespace:", parsed_args)
return active_parsers, parsed_args
def collect_completions(self, active_parsers, parsed_args, cword_prefix, debug):
'''
Visits the active parsers and their actions, executes their completers or introspects them to collect their
option strings. Returns the resulting completions as a list of strings.
This method is exposed for overriding in subclasses; there is no need to use it directly.
'''
completions = []
for parser in active_parsers:
debug("Examining parser", parser)
for action in parser._actions:
debug("Examining action", action)
if isinstance(action, argparse._SubParsersAction):
subparser_activated = False
for subparser in action._name_parser_map.values():
if subparser in active_parsers:
subparser_activated = True
if subparser_activated:
# Parent parser completions are not valid in the subparser, so flush them
completions = []
else:
completions += [subcmd for subcmd in action.choices.keys() if subcmd.startswith(cword_prefix)]
elif self.always_complete_options or (len(cword_prefix) > 0 and cword_prefix[0] in parser.prefix_chars):
completions += [option for option in action.option_strings if option.startswith(cword_prefix)]
debug("Active actions (L={l}): {a}".format(l=len(parser.active_actions), a=parser.active_actions))
# Only run completers if current word does not start with - (is not an optional)
if len(cword_prefix) == 0 or cword_prefix[0] not in parser.prefix_chars:
for active_action in parser.active_actions:
if not active_action.option_strings: # action is a positional
if action_is_satisfied(active_action) and not action_is_open(active_action):
debug("Skipping", active_action)
continue
debug("Activating completion for", active_action, active_action._orig_class)
#completer = getattr(active_action, 'completer', DefaultCompleter())
completer = getattr(active_action, 'completer', None)
if completer is None and active_action.choices is not None:
if not isinstance(active_action, argparse._SubParsersAction):
completer = completers.ChoicesCompleter(active_action.choices)
if completer:
if len(active_action.option_strings) > 0: # only for optionals
if not action_is_satisfied(active_action):
# This means the current action will fail to parse if the word under the cursor is not given
# to it, so give it exclusive control over completions (flush previous completions)
debug("Resetting completions because", active_action, "is unsatisfied")
completions = []
if callable(completer):
completions += [c for c in completer(prefix=cword_prefix, action=active_action,
parsed_args=parsed_args)
if self.validator(c, cword_prefix)]
else:
debug("Completer is not callable, trying the readline completer protocol instead")
for i in range(9999):
next_completion = completer.complete(cword_prefix, i)
if next_completion is None:
break
if self.validator(next_completion, cword_prefix):
completions.append(next_completion)
debug("Completions:", completions)
elif not isinstance(active_action, argparse._SubParsersAction):
debug("Completer not available, falling back")
try:
# TODO: what happens if completions contain newlines? How do I make compgen use IFS?
bashcomp_cmd = ['bash', '-c', "compgen -A file -- '{p}'".format(p=cword_prefix)]
completions += subprocess.check_output(bashcomp_cmd).decode(sys_encoding).splitlines()
except subprocess.CalledProcessError:
pass
return completions
def filter_completions(self, completions):
'''
Ensures collected completions are Unicode text, de-duplicates them, and excludes those specified by ``exclude``.
Returns the filtered completions as an iterable.
This method is exposed for overriding in subclasses; there is no need to use it directly.
'''
# On Python 2, we have to make sure all completions are unicode objects before we continue and output them.
# Otherwise, because python disobeys the system locale encoding and uses ascii as the default encoding, it will try
# to implicitly decode string objects using ascii, and fail.
if USING_PYTHON2:
for i in range(len(completions)):
if type(completions[i]) != unicode:
completions[i] = completions[i].decode(sys_encoding)
# De-duplicate completions and remove excluded ones
if self.exclude is None:
self.exclude = set()
seen = set(self.exclude)
return [c for c in completions if c not in seen and not seen.add(c)]
def quote_completions(self, completions, cword_prequote, first_colon_pos):
'''
If the word under the cursor started with a quote (as indicated by a nonempty ``cword_prequote``), escapes
occurrences of that quote character in the completions, and adds the quote to the beginning of each completion.
Otherwise, escapes all characters that bash splits words on (``COMP_WORDBREAKS``), and removes portions of
completions before the first colon if (``COMP_WORDBREAKS``) contains a colon.
If there is only one completion, and it doesn't end with a **continuation character** (``/``, ``:``, or ``=``),
adds a space after the completion.
This method is exposed for overriding in subclasses; there is no need to use it directly.
'''
comp_wordbreaks = os.environ.get('_ARGCOMPLETE_COMP_WORDBREAKS', os.environ.get('COMP_WORDBREAKS', " \t\"'@><=;|&(:."))
if USING_PYTHON2:
comp_wordbreaks = comp_wordbreaks.decode(sys_encoding)
punctuation_chars = '();<>|&!`'
for char in punctuation_chars:
if char not in comp_wordbreaks:
comp_wordbreaks += char
# If the word under the cursor was quoted, escape the quote char and add the leading quote back in.
# Otherwise, escape all COMP_WORDBREAKS chars.
if cword_prequote == '':
# Bash mangles completions which contain colons if COMP_WORDBREAKS contains a colon.
# This workaround has the same effect as __ltrim_colon_completions in bash_completion.
if ':' in comp_wordbreaks and first_colon_pos:
completions = [c[first_colon_pos+1:] for c in completions]
for wordbreak_char in comp_wordbreaks:
completions = [c.replace(wordbreak_char, '\\'+wordbreak_char) for c in completions]
else:
if cword_prequote == '"':
for char in '`$!':
completions = [c.replace(char, '\\'+char) for c in completions]
completions = [cword_prequote+c.replace(cword_prequote, '\\'+cword_prequote) for c in completions]
# Note: similar functionality in bash is turned off by supplying the "-o nospace" option to complete.
# We can't use that functionality because bash is not smart enough to recognize continuation characters (/) for
# which no space should be added.
continuation_chars = '=/:'
if len(completions) == 1 and completions[0][-1] not in continuation_chars:
if cword_prequote == '' and not completions[0].endswith(' '):
completions[0] += ' '
return completions
def complete(self, text, state):
'''
Alternate entry point for using the argcomplete completer in a readline-based REPL. See also
`rlcompleter <https://docs.python.org/2/library/rlcompleter.html#completer-objects>`_.
Usage:
.. code-block:: python
import argcomplete, argparse, readline
parser = argparse.ArgumentParser()
...
completer = argcomplete.CompletionFinder(parser)
readline.set_completer(completer.complete)
readline.parse_and_bind("tab: complete")
result = input("prompt> ")
(Use ``raw_input`` instead of ``input`` on Python 2, or use `eight <https://github.com/kislyuk/eight>`_).
'''
if state == 0:
cword_prequote, cword_prefix, cword_suffix, comp_words, first_colon_pos = split_line(text)
comp_words.insert(0, sys.argv[0])
self.matches = self._get_completions(comp_words, cword_prefix, cword_prequote, first_colon_pos)
if state < len(self.matches):
return self.matches[state]
else:
return None
autocomplete = CompletionFinder()
autocomplete.__doc__ = ''' Use this to access argcomplete. See :meth:`argcomplete.CompletionFinder.__call__()`. '''
def warn(*args):
'''
Prints **args** to standard error when running completions. This will interrupt the user's command line interaction;
use it to indicate an error condition that is preventing your completer from working.
'''
print("\n", file=debug_stream, *args)
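# Illustrative sketch, not part of the original module: a custom completer can
# call warn() to surface problems without corrupting the completion output.
# The ``config`` attribute checked here is a hypothetical parsed argument.
def _example_completer(prefix, parsed_args, **kwargs):
    if getattr(parsed_args, 'config', None) is None:
        warn("no --config value parsed yet; offering generic completions")
    return ["alpha", "beta"]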
|
{
"content_hash": "39dabe0ca208e69d7a461e16ff8bfb8a",
"timestamp": "",
"source": "github",
"line_count": 437,
"max_line_length": 202,
"avg_line_length": 48.25171624713959,
"alnum_prop": 0.6036232571374371,
"repo_name": "Plantain/sms-mailinglist",
"id": "210e08fb56295156b4665e76e82e07cb521add8f",
"size": "21250",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "lib/argcomplete/__init__.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Erlang",
"bytes": "1479"
},
{
"name": "Perl",
"bytes": "6919"
},
{
"name": "Python",
"bytes": "4968506"
},
{
"name": "R",
"bytes": "274"
},
{
"name": "Shell",
"bytes": "1540"
}
],
"symlink_target": ""
}
|
import hashlib
import logging
subprocess = None
LOG = logging.getLogger(__name__)
PKI_ANS1_PREFIX = 'MII'
def _ensure_subprocess():
# NOTE(vish): late loading subprocess so we can
# use the green version if we are in
# eventlet.
global subprocess
if not subprocess:
try:
from eventlet import patcher
if patcher.already_patched.get('os'):
from eventlet.green import subprocess
else:
import subprocess
except ImportError:
import subprocess # noqa
def cms_verify(formatted, signing_cert_file_name, ca_file_name):
"""Verifies the signature of the contents IAW CMS syntax.
:raises: subprocess.CalledProcessError
"""
_ensure_subprocess()
process = subprocess.Popen(["openssl", "cms", "-verify",
"-certfile", signing_cert_file_name,
"-CAfile", ca_file_name,
"-inform", "PEM",
"-nosmimecap", "-nodetach",
"-nocerts", "-noattr"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, err = process.communicate(formatted)
retcode = process.poll()
if retcode:
        # Do not log errors, as some are expected on the normal (non-error)
        # path; instead, catch them in the calling code and log them there.
# NOTE(dmllr): Python 2.6 compatibility:
# CalledProcessError did not have output keyword argument
e = subprocess.CalledProcessError(retcode, "openssl")
e.output = err
raise e
return output
def token_to_cms(signed_text):
copy_of_text = signed_text.replace('-', '/')
formatted = "-----BEGIN CMS-----\n"
line_length = 64
while len(copy_of_text) > 0:
if (len(copy_of_text) > line_length):
formatted += copy_of_text[:line_length]
copy_of_text = copy_of_text[line_length:]
else:
formatted += copy_of_text
copy_of_text = ""
formatted += "\n"
formatted += "-----END CMS-----\n"
return formatted
def verify_token(token, signing_cert_file_name, ca_file_name):
return cms_verify(token_to_cms(token),
signing_cert_file_name,
ca_file_name)
def is_ans1_token(token):
"""Determine if a token appears to be PKI-based.
thx to ayoung for sorting this out.
base64 decoded hex representation of MII is 3082
In [3]: binascii.hexlify(base64.b64decode('MII='))
Out[3]: '3082'
re: http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
    pg4: For tags from 0 to 30 the first octet is the identifier
pg10: Hex 30 means sequence, followed by the length of that sequence.
pg5: Second octet is the length octet
first bit indicates short or long form, next 7 bits encode the number
of subsequent octets that make up the content length octets as an
unsigned binary int
82 = 10000010 (first bit indicates long form)
0000010 = 2 octets of content length
so read the next 2 octets to get the length of the content.
In the case of a very large content length there could be a requirement to
have more than 2 octets to designate the content length, therefore
requiring us to check for MIM, MIQ, etc.
In [4]: base64.b64encode(binascii.a2b_hex('3083'))
Out[4]: 'MIM='
In [5]: base64.b64encode(binascii.a2b_hex('3084'))
Out[5]: 'MIQ='
Checking for MI would become invalid at 16 octets of content length
10010000 = 90
In [6]: base64.b64encode(binascii.a2b_hex('3090'))
Out[6]: 'MJA='
Checking for just M is insufficient
But we will only check for MII:
Max length of the content using 2 octets is 7FFF or 32767
It's not practical to support a token of this length or greater in http
therefore, we will check for MII only and ignore the case of larger tokens
"""
return token[:3] == PKI_ANS1_PREFIX
def cms_sign_text(text, signing_cert_file_name, signing_key_file_name):
"""Uses OpenSSL to sign a document.
Produces a Base64 encoding of a DER formatted CMS Document
http://en.wikipedia.org/wiki/Cryptographic_Message_Syntax
"""
_ensure_subprocess()
process = subprocess.Popen(["openssl", "cms", "-sign",
"-signer", signing_cert_file_name,
"-inkey", signing_key_file_name,
"-outform", "PEM",
"-nosmimecap", "-nodetach",
"-nocerts", "-noattr"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, err = process.communicate(text)
retcode = process.poll()
if retcode or "Error" in err:
LOG.error('Signing error: %s' % err)
raise subprocess.CalledProcessError(retcode, "openssl")
return output
def cms_sign_token(text, signing_cert_file_name, signing_key_file_name):
output = cms_sign_text(text, signing_cert_file_name, signing_key_file_name)
return cms_to_token(output)
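# Illustrative sketch, not part of the original module: signing a blob and
# verifying the resulting token should round-trip back to the original text.
# The certificate, key and CA file paths are hypothetical caller-supplied inputs.
def _example_sign_and_verify(text, cert_file, key_file, ca_file):
    token = cms_sign_token(text, cert_file, key_file)
    return verify_token(token, cert_file, ca_file)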
def cms_to_token(cms_text):
start_delim = "-----BEGIN CMS-----"
end_delim = "-----END CMS-----"
signed_text = cms_text
signed_text = signed_text.replace('/', '-')
signed_text = signed_text.replace(start_delim, '')
signed_text = signed_text.replace(end_delim, '')
signed_text = signed_text.replace('\n', '')
return signed_text
def cms_hash_token(token_id):
"""Hash PKI tokens.
return: for ans1_token, returns the hash of the passed in token
otherwise, returns what it was passed in.
"""
if token_id is None:
return None
if is_ans1_token(token_id):
hasher = hashlib.md5()
hasher.update(token_id)
return hasher.hexdigest()
else:
return token_id
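# Illustrative sketch, not part of the original module: only PKI-style tokens
# (those starting with the 'MII' ASN.1 prefix) are shortened to an MD5 hex
# digest by cms_hash_token(); anything else passes through untouched.
def _example_cms_hash_token():
    uuid_like = '0123456789abcdef0123456789abcdef'
    assert cms_hash_token(uuid_like) == uuid_like
    pki_like = PKI_ANS1_PREFIX + 'A' * 64         # hypothetical token body
    assert len(cms_hash_token(pki_like)) == 32    # length of an md5 hexdigest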
|
{
"content_hash": "5fd6e8b4833429e80339fb51ec12928d",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 79,
"avg_line_length": 34.6123595505618,
"alnum_prop": 0.5888654439214414,
"repo_name": "citrix-openstack-build/python-keystoneclient",
"id": "8bc24f97d89e2cc042844af472157e3a53731534",
"size": "6779",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "keystoneclient/common/cms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "648424"
},
{
"name": "Shell",
"bytes": "11335"
}
],
"symlink_target": ""
}
|
from rdkit import Chem,Geometry
from rdkit.Chem import AllChem
from rdkit.Chem.Subshape import SubshapeObjects
from rdkit.Chem.Subshape import BuilderUtils
from rdkit.six.moves import cPickle
import time
#-----------------------------------------------------------------------------
class SubshapeCombineOperations(object):
UNION=0
SUM=1
INTERSECT=2
#-----------------------------------------------------------------------------
class SubshapeBuilder(object):
gridDims=(20,15,10)
gridSpacing=0.5
winRad=3.0
nbrCount=7
terminalPtRadScale=0.75
fraction=0.25
stepSize=1.0
featFactory=None
def SampleSubshape(self,subshape1,newSpacing):
ogrid=subshape1.grid
rgrid = Geometry.UniformGrid3D(self.gridDims[0],self.gridDims[1],self.gridDims[2],
newSpacing)
for idx in range(rgrid.GetSize()):
l = rgrid.GetGridPointLoc(idx)
v = ogrid.GetValPoint(l)
rgrid.SetVal(idx,v)
res = SubshapeObjects.ShapeWithSkeleton()
res.grid = rgrid
    return res
def GenerateSubshapeShape(self,cmpd,confId=-1,addSkeleton=True,**kwargs):
shape = SubshapeObjects.ShapeWithSkeleton()
shape.grid=Geometry.UniformGrid3D(self.gridDims[0],self.gridDims[1],self.gridDims[2],
self.gridSpacing)
AllChem.EncodeShape(cmpd,shape.grid,ignoreHs=False,confId=confId)
if addSkeleton:
conf = cmpd.GetConformer(confId)
      self.GenerateSubshapeSkeleton(shape, conf, **kwargs)
return shape
def __call__(self,cmpd,**kwargs):
return self.GenerateSubshapeShape(cmpd,**kwargs)
def GenerateSubshapeSkeleton(self,shape,conf=None,terminalPtsOnly=False,skelFromConf=True):
if conf and skelFromConf:
pts = BuilderUtils.FindTerminalPtsFromConformer(conf,self.winRad,self.nbrCount)
else:
pts = BuilderUtils.FindTerminalPtsFromShape(shape,self.winRad,self.fraction)
pts = BuilderUtils.ClusterTerminalPts(pts,self.winRad,self.terminalPtRadScale)
BuilderUtils.ExpandTerminalPts(shape,pts,self.winRad)
if len(pts)<3:
raise ValueError('only found %d terminals, need at least 3'%len(pts))
if not terminalPtsOnly:
pts = BuilderUtils.AppendSkeletonPoints(shape.grid,pts,self.winRad,self.stepSize)
for i,pt in enumerate(pts):
BuilderUtils.CalculateDirectionsAtPoint(pt,shape.grid,self.winRad)
if conf and self.featFactory:
BuilderUtils.AssignMolFeatsToPoints(pts,conf.GetOwningMol(),self.featFactory,self.winRad)
shape.skelPts=pts
def CombineSubshapes(self,subshape1,subshape2,operation=SubshapeCombineOperations.UNION):
import copy
cs = copy.deepcopy(subshape1)
if operation==SubshapeCombineOperations.UNION:
cs.grid |= subshape2.grid
elif operation==SubshapeCombineOperations.SUM:
cs.grid += subshape2.grid
elif operation==SubshapeCombineOperations.INTERSECT:
cs.grid &= subshape2.grid
else:
raise ValueError('bad combination operation')
return cs
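  # Illustrative example (not part of the original code), assuming mol1/mol2
  # already have embedded 3D conformers:
  #   builder = SubshapeBuilder()
  #   shape1 = builder(mol1)
  #   shape2 = builder(mol2)
  #   union = builder.CombineSubshapes(shape1, shape2)              # default UNION
  #   overlap = builder.CombineSubshapes(shape1, shape2,
  #                                      SubshapeCombineOperations.INTERSECT)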
if __name__=='__main__':
from rdkit.Chem import AllChem,ChemicalFeatures
from rdkit.Chem.PyMol import MolViewer
#cmpd = Chem.MolFromSmiles('CCCc1cc(C(=O)O)ccc1')
#cmpd = Chem.AddHs(cmpd)
if 1:
cmpd = Chem.MolFromSmiles('C1=CC=C1C#CC1=CC=C1')
cmpd = Chem.AddHs(cmpd)
AllChem.EmbedMolecule(cmpd)
AllChem.UFFOptimizeMolecule(cmpd)
AllChem.CanonicalizeMol(cmpd)
print >>file('testmol.mol','w+'),Chem.MolToMolBlock(cmpd)
else:
cmpd = Chem.MolFromMolFile('testmol.mol')
builder=SubshapeBuilder()
if 1:
shape=builder.GenerateSubshapeShape(cmpd)
v = MolViewer()
if 1:
import tempfile
tmpFile = tempfile.mktemp('.grd')
v.server.deleteAll()
Geometry.WriteGridToFile(shape.grid,tmpFile)
time.sleep(1)
v.ShowMol(cmpd,name='testMol',showOnly=True)
v.server.loadSurface(tmpFile,'testGrid','',2.5)
v.server.resetCGO('*')
cPickle.dump(shape,file('subshape.pkl','w+'))
for i,pt in enumerate(shape.skelPts):
v.server.sphere(tuple(pt.location),.5,(1,0,1),'Pt-%d'%i)
if not hasattr(pt,'shapeDirs'): continue
momBeg = pt.location-pt.shapeDirs[0]
momEnd = pt.location+pt.shapeDirs[0]
v.server.cylinder(tuple(momBeg),tuple(momEnd),.1,(1,0,1),'v-%d'%i)
|
{
"content_hash": "c0f3e10cbd40fece25074de0ece46892",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 95,
"avg_line_length": 36.282051282051285,
"alnum_prop": 0.6855123674911661,
"repo_name": "soerendip42/rdkit",
"id": "75a8a32fb7d753ffbfb4e9855ac575763b70dd31",
"size": "4317",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "rdkit/Chem/Subshape/SubshapeBuilder.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "385"
},
{
"name": "C",
"bytes": "203258"
},
{
"name": "C#",
"bytes": "6745"
},
{
"name": "C++",
"bytes": "7168898"
},
{
"name": "CMake",
"bytes": "585758"
},
{
"name": "CSS",
"bytes": "4742"
},
{
"name": "FORTRAN",
"bytes": "7661"
},
{
"name": "HTML",
"bytes": "65468"
},
{
"name": "Java",
"bytes": "248620"
},
{
"name": "JavaScript",
"bytes": "11595"
},
{
"name": "LLVM",
"bytes": "27271"
},
{
"name": "Lex",
"bytes": "4508"
},
{
"name": "Makefile",
"bytes": "15443"
},
{
"name": "Objective-C",
"bytes": "299"
},
{
"name": "Python",
"bytes": "3045831"
},
{
"name": "QMake",
"bytes": "389"
},
{
"name": "SMT",
"bytes": "3010"
},
{
"name": "Shell",
"bytes": "8899"
},
{
"name": "Smarty",
"bytes": "5864"
},
{
"name": "Yacc",
"bytes": "49170"
}
],
"symlink_target": ""
}
|
from utils import calculate_bytecode
scenario_description = (
"Make a proposal to change the default proposal deposit, vote for it and "
"then assure that the DAO's proposal deposit did indeed change"
)
def run(ctx):
ctx.assert_scenario_ran('fuel')
bytecode = calculate_bytecode(
'changeProposalDeposit',
('uint256', ctx.args.deposit_new_value)
)
ctx.create_js_file(substitutions={
"dao_abi": ctx.dao_abi,
"dao_address": ctx.dao_address,
"proposal_deposit": ctx.args.proposal_deposit,
"transaction_bytecode": bytecode,
"debating_period": ctx.args.deposit_debate_seconds
}
)
print(
"Notice: Debate period is {} seconds so the test will wait "
"as much".format(ctx.args.proposal_debate_seconds)
)
ctx.execute(expected={
"deposit_after_vote": ctx.args.deposit_new_value
})
|
{
"content_hash": "8a3815083fd4b89b652806352847a7b6",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 78,
"avg_line_length": 29.903225806451612,
"alnum_prop": 0.6299892125134844,
"repo_name": "Spreadway/core",
"id": "33683b16fa868e93f65ecfae97287a87073d3c7b",
"size": "927",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "DAO-develop/tests/scenarios/deposit/run.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2950"
},
{
"name": "HTML",
"bytes": "5341"
},
{
"name": "JavaScript",
"bytes": "695229"
},
{
"name": "Jupyter Notebook",
"bytes": "11712"
},
{
"name": "Python",
"bytes": "102340"
},
{
"name": "TeX",
"bytes": "340903"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fieldsight', '0036_delete_timezone'),
]
operations = [
migrations.CreateModel(
name='Timezone',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('time_zone', models.CharField(max_length=255, null=True, blank=True)),
('country', models.CharField(max_length=255, null=True, blank=True)),
('country_code', models.CharField(max_length=255, null=True, blank=True)),
('offset_time', models.CharField(max_length=255, blank=True)),
],
),
]
|
{
"content_hash": "0630c6a7be5f93c3253df665d435ffe6",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 114,
"avg_line_length": 34.43478260869565,
"alnum_prop": 0.5858585858585859,
"repo_name": "awemulya/fieldsight-kobocat",
"id": "85d0fa277a2bf080f5cb8671c2a3389d80f0e8b4",
"size": "816",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "onadata/apps/fieldsight/migrations/0037_timezone.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "70153"
},
{
"name": "Dockerfile",
"bytes": "2462"
},
{
"name": "HTML",
"bytes": "1488442"
},
{
"name": "JavaScript",
"bytes": "674757"
},
{
"name": "Makefile",
"bytes": "2286"
},
{
"name": "Python",
"bytes": "5340355"
},
{
"name": "Shell",
"bytes": "16493"
}
],
"symlink_target": ""
}
|
"""
Key manager implementation for Barbican
"""
import array
import base64
import binascii
from barbicanclient import client as barbican_client
from barbicanclient.common import auth
from keystoneclient.v2_0 import client as keystone_client
from oslo.config import cfg
from cinder import exception
from cinder.keymgr import key as keymgr_key
from cinder.keymgr import key_mgr
from cinder.openstack.common import excutils
from cinder.openstack.common.gettextutils import _ # noqa
from cinder.openstack.common import log as logging
CONF = cfg.CONF
CONF.import_opt('encryption_auth_url', 'cinder.keymgr.key_mgr', group='keymgr')
CONF.import_opt('encryption_api_url', 'cinder.keymgr.key_mgr', group='keymgr')
LOG = logging.getLogger(__name__)
class BarbicanKeyManager(key_mgr.KeyManager):
"""Key Manager Interface that wraps the Barbican client API."""
def _create_connection(self, ctxt):
"""Creates a connection to the Barbican service.
:param ctxt: the user context for authentication
:return: a Barbican Connection object
:throws NotAuthorized: if the ctxt is None
"""
# Confirm context is provided, if not raise not authorized
if not ctxt:
msg = _("User is not authorized to use key manager.")
LOG.error(msg)
raise exception.NotAuthorized(msg)
try:
endpoint = CONF.keymgr.encryption_auth_url
keystone = keystone_client.Client(token=ctxt.auth_token,
endpoint=endpoint)
keystone_auth = auth.KeystoneAuthV2(keystone=keystone)
keystone_auth._barbican_url = CONF.keymgr.encryption_api_url
connection = barbican_client.Client(auth_plugin=keystone_auth)
return connection
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_("Error creating Barbican client: %s"), (e))
def create_key(self, ctxt, expiration=None, name='Cinder Volume Key',
payload_content_type='application/octet-stream', mode='CBC',
algorithm='AES', length=256):
"""Creates a key.
:param ctxt: contains information of the user and the environment
for the request (cinder/context.py)
:param expiration: the date the key will expire
:param name: a friendly name for the secret
:param payload_content_type: the format/type of the secret data
:param mode: the algorithm mode (e.g. CBC or CTR mode)
:param algorithm: the algorithm associated with the secret
:param length: the bit length of the secret
:return: the UUID of the new key
:throws Exception: if key creation fails
"""
connection = self._create_connection(ctxt)
try:
order_ref = connection.orders.create(name, payload_content_type,
algorithm, length, mode,
expiration)
order = connection.orders.get(order_ref)
secret_uuid = order.secret_ref.rpartition('/')[2]
return secret_uuid
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_("Error creating key: %s"), (e))
def store_key(self, ctxt, key, expiration=None, name='Cinder Volume Key',
payload_content_type='application/octet-stream',
payload_content_encoding='base64', algorithm='AES',
bit_length=256, mode='CBC', from_copy=False):
"""Stores (i.e., registers) a key with the key manager.
:param ctxt: contains information of the user and the environment for
the request (cinder/context.py)
:param key: the unencrypted secret data. Known as "payload" to the
barbicanclient api
:param expiration: the expiration time of the secret in ISO 8601
format
:param name: a friendly name for the key
:param payload_content_type: the format/type of the secret data
:param payload_content_encoding: the encoding of the secret data
:param algorithm: the algorithm associated with this secret key
:param bit_length: the bit length of this secret key
:param mode: the algorithm mode used with this secret key
:param from_copy: establishes whether the function is being used
to copy a key. In case of the latter, it does not
try to decode the key
:returns: the UUID of the stored key
:throws Exception: if key storage fails
"""
connection = self._create_connection(ctxt)
try:
if key.get_algorithm():
algorithm = key.get_algorithm()
if payload_content_type == 'text/plain':
payload_content_encoding = None
encoded_key = key.get_encoded()
elif (payload_content_type == 'application/octet-stream' and
not from_copy):
key_list = key.get_encoded()
string_key = ''.join(map(lambda byte: "%02x" % byte, key_list))
encoded_key = base64.b64encode(binascii.unhexlify(string_key))
else:
encoded_key = key.get_encoded()
secret_ref = connection.secrets.store(name, encoded_key,
payload_content_type,
payload_content_encoding,
algorithm, bit_length, mode,
expiration)
secret_uuid = secret_ref.rpartition('/')[2]
return secret_uuid
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_("Error storing key: %s"), (e))
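    # Worked example for the octet-stream branch above (illustrative only):
    # a key given as the byte list [1, 2, 255] becomes the hex string
    # '0102ff', is unhexlified back to raw bytes and then base64-encoded as
    # 'AQL/' before being handed to Barbican.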
def copy_key(self, ctxt, key_id):
"""Copies (i.e., clones) a key stored by barbican.
:param ctxt: contains information of the user and the environment for
the request (cinder/context.py)
:param key_id: the UUID of the key to copy
:return: the UUID of the key copy
:throws Exception: if key copying fails
"""
connection = self._create_connection(ctxt)
try:
secret_ref = self._create_secret_ref(key_id, connection)
meta = self._get_secret_metadata(ctxt, secret_ref)
con_type = meta.content_types['default']
secret_data = self._get_secret_data(ctxt, secret_ref,
payload_content_type=con_type)
key = keymgr_key.SymmetricKey(meta.algorithm, secret_data)
copy_uuid = self.store_key(ctxt, key, meta.expiration,
meta.name, con_type,
'base64',
meta.algorithm, meta.bit_length,
meta.mode, True)
return copy_uuid
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_("Error copying key: %s"), (e))
def _create_secret_ref(self, key_id, connection):
"""Creates the URL required for accessing a secret.
:param key_id: the UUID of the key to copy
:param connection: barbican key manager object
:return: the URL of the requested secret
"""
return connection.base_url + "/secrets/" + key_id
def _get_secret_data(self, ctxt, secret_ref,
payload_content_type='application/octet-stream'):
"""Retrieves the secret data given a secret_ref and content_type.
:param ctxt: contains information of the user and the environment for
the request (cinder/context.py)
:param secret_ref: URL to access the secret
:param payload_content_type: the format/type of the secret data
:returns: the secret data
:throws Exception: if data cannot be retrieved
"""
connection = self._create_connection(ctxt)
try:
generated_data = connection.secrets.decrypt(secret_ref,
payload_content_type)
if payload_content_type == 'application/octet-stream':
secret_data = base64.b64encode(generated_data)
else:
secret_data = generated_data
return secret_data
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_("Error getting secret data: %s"), (e))
def _get_secret_metadata(self, ctxt, secret_ref):
"""Creates the URL required for accessing a secret's metadata.
:param ctxt: contains information of the user and the environment for
the request (cinder/context.py)
:param secret_ref: URL to access the secret
:return: the secret's metadata
:throws Exception: if there is an error retrieving the data
"""
connection = self._create_connection(ctxt)
try:
return connection.secrets.get(secret_ref)
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_("Error getting secret metadata: %s"), (e))
def get_key(self, ctxt, key_id,
payload_content_type='application/octet-stream'):
"""Retrieves the specified key.
:param ctxt: contains information of the user and the environment for
the request (cinder/context.py)
:param key_id: the UUID of the key to retrieve
:param payload_content_type: The format/type of the secret data
:return: SymmetricKey representation of the key
:throws Exception: if key retrieval fails
"""
connection = self._create_connection(ctxt)
try:
secret_ref = self._create_secret_ref(key_id, connection)
secret_data = self._get_secret_data(ctxt, secret_ref,
payload_content_type)
if payload_content_type == 'application/octet-stream':
# convert decoded string to list of unsigned ints for each byte
secret = array.array('B',
base64.b64decode(secret_data)).tolist()
else:
secret = secret_data
meta = self._get_secret_metadata(ctxt, secret_ref)
key = keymgr_key.SymmetricKey(meta.algorithm, secret)
return key
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_("Error getting key: %s"), (e))
def delete_key(self, ctxt, key_id):
"""Deletes the specified key.
:param ctxt: contains information of the user and the environment for
the request (cinder/context.py)
:param key_id: the UUID of the key to delete
:throws Exception: if key deletion fails
"""
connection = self._create_connection(ctxt)
try:
secret_ref = self._create_secret_ref(key_id, connection)
connection.secrets.delete(secret_ref)
except Exception as e:
with excutils.save_and_reraise_exception():
LOG.error(_("Error deleting key: %s"), (e))
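# Illustrative sketch, not part of the original module: a typical lifecycle of
# a volume encryption key through this key manager.  ``ctxt`` is assumed to be
# a cinder RequestContext carrying a valid Keystone token.
def _example_key_lifecycle(ctxt):
    key_mgr = BarbicanKeyManager()
    key_id = key_mgr.create_key(ctxt)           # order a new 256-bit AES key
    key = key_mgr.get_key(ctxt, key_id)         # fetch it as a SymmetricKey
    copy_id = key_mgr.copy_key(ctxt, key_id)    # clone it under a new UUID
    key_mgr.delete_key(ctxt, copy_id)
    return key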
|
{
"content_hash": "0bc923a593614917d2b150cffb1ad826",
"timestamp": "",
"source": "github",
"line_count": 266,
"max_line_length": 79,
"avg_line_length": 43.597744360902254,
"alnum_prop": 0.5817883935500561,
"repo_name": "e0ne/cinder",
"id": "1ebcfd321d5395be19ccf521c1791c2845af9e22",
"size": "12272",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cinder/keymgr/barbican.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import collections
import capstone
from capstone import *
import pwndbg.memoize
import pwndbg.symbol
CS_OP_IMM
debug = False
groups = {v:k for k,v in globals().items() if k.startswith('CS_GRP_')}
ops = {v:k for k,v in globals().items() if k.startswith('CS_OP_')}
access = {v:k for k,v in globals().items() if k.startswith('CS_AC_')}
for value1, name1 in dict(access).items():
for value2, name2 in dict(access).items():
access.setdefault(value1 | value2, '%s | %s' % (name1, name2))
class DisassemblyAssistant(object):
# Registry of all instances, {architecture: instance}
assistants = {}
def __init__(self, architecture):
if architecture is not None:
self.assistants[architecture] = self
self.op_handlers = {
CS_OP_IMM: self.immediate,
CS_OP_REG: self.register,
CS_OP_MEM: self.memory
}
self.op_names = {
CS_OP_IMM: self.immediate_sz,
CS_OP_REG: self.register_sz,
CS_OP_MEM: self.memory_sz
}
@staticmethod
def enhance(instruction):
enhancer = DisassemblyAssistant.assistants.get(pwndbg.arch.current, generic_assistant)
enhancer.enhance_operands(instruction)
enhancer.enhance_symbol(instruction)
enhancer.enhance_conditional(instruction)
enhancer.enhance_next(instruction)
if debug:
print(enhancer.dump(instruction))
def enhance_conditional(self, instruction):
"""
Adds a ``condition`` field to the instruction.
If the instruction is always executed unconditionally, the value
of the field is ``None``.
If the instruction is executed conditionally, and we can be absolutely
sure that it will be executed, the value of the field is ``True``.
Generally, this implies that it is the next instruction to be executed.
In all other cases, it is set to ``False``.
"""
c = self.condition(instruction)
if c:
c = True
elif c is not None:
c = False
instruction.condition = c
def condition(self, instruction):
return False
def enhance_next(self, instruction):
"""
Adds a ``next`` field to the instruction.
By default, it is set to the address of the next linear
instruction.
If the instruction is a non-"call" branch and either:
- Is unconditional
- Is conditional, but is known to be taken
And the target can be resolved, it is set to the address
of the jump target.
"""
next_addr = None
if instruction.condition in (True, None):
next_addr = self.next(instruction)
instruction.target = None
instruction.target_const = None
instruction.next = None
if next_addr is None:
next_addr = instruction.address + instruction.size
instruction.target = self.next(instruction, call=True)
instruction.next = next_addr & pwndbg.arch.ptrmask
if instruction.target is None:
instruction.target = instruction.next
if instruction.operands and instruction.operands[0].int:
instruction.target_const = True
def next(self, instruction, call=False):
"""
Architecture-specific hook point for enhance_next.
"""
if CS_GRP_CALL in instruction.groups:
if not call:
return None
elif CS_GRP_JUMP not in instruction.groups:
return None
# At this point, all operands have been resolved.
# Assume only single-operand jumps.
if len(instruction.operands) != 1:
return None
# Memory operands must be dereferenced
op = instruction.operands[0]
addr = op.int
if addr:
addr &= pwndbg.arch.ptrmask
if op.type == CS_OP_MEM:
if addr is None:
addr = self.memory_sz(instruction, op)
addr = int(pwndbg.memory.poi(pwndbg.typeinfo.ppvoid, addr))
if op.type == CS_OP_REG:
addr = self.register(instruction, op)
# Evidently this can happen?
if addr is None:
return None
return int(addr)
def enhance_symbol(self, instruction):
"""
Adds a ``symbol`` and ``symbol_addr`` fields to the instruction.
If, after parsing all of the operands, there is exactly one
value which resolved to a named symbol, it will be set to
that value.
In all other cases, the value is ``None``.
"""
instruction.symbol = None
operands = [o for o in instruction.operands if o.symbol]
if len(operands) != 1:
return
o = operands[0]
instruction.symbol = o.symbol
instruction.symbol_addr = o.int
def enhance_operands(self, instruction):
"""
Enhances all of the operands in the instruction, by adding the following
fields:
operand.str:
String of this operand, as it should appear in the
disassembly.
operand.int:
Integer value of the operand, if it can be resolved.
operand.symbol:
Resolved symbol name for this operand.
"""
current = (instruction.address == pwndbg.regs.pc)
for i, op in enumerate(instruction.operands):
op.int = None
op.symbol = None
op.int = self.op_handlers.get(op.type, lambda *a: None)(instruction, op)
if op.int:
op.int &= pwndbg.arch.ptrmask
op.str = self.op_names.get(op.type, lambda *a: None)(instruction, op)
if op.int:
op.symbol = pwndbg.symbol.get(op.int)
def immediate(self, instruction, operand):
return operand.value.imm
def immediate_sz(self, instruction, operand):
value = operand.int
if abs(value) < 0x10:
return "%i" % value
return "%#x" % value
def register(self, instruction, operand):
if instruction.address != pwndbg.regs.pc:
return None
# # Don't care about registers which are only overwritten
# if operand.access & CS_AC_WRITE and not operand.access & CS_AC_READ:
# return None
reg = operand.value.reg
name = instruction.reg_name(reg)
return pwndbg.regs[name]
def register_sz(self, instruction, operand):
reg = operand.value.reg
return instruction.reg_name(reg).lower()
def memory(self, instruction, operand):
return None
def memory_sz(self, instruction, operand):
return None # raise NotImplementedError
def dump(self, instruction):
ins = instruction
rv = []
rv.append('%s %s' % (ins.mnemonic, ins.op_str))
for i, group in enumerate(ins.groups):
rv.append(' groups[%i] = %s' % (i, groups.get(group, group)))
rv.append(' next = %#x' % (ins.next))
rv.append(' condition = %r' % (ins.condition))
for i, op in enumerate(ins.operands):
rv.append(' operands[%i] = %s' % (i, ops.get(op.type, op.type)))
rv.append(' access = %s' % (access.get(op.access, op.access)))
if op.int is not None:
rv.append(' int = %#x' % (op.int))
if op.symbol is not None:
rv.append(' sym = %s' % (op.symbol))
if op.str is not None:
rv.append(' str = %s' % (op.str))
return '\n'.join(rv)
generic_assistant = DisassemblyAssistant(None)
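# Illustrative sketch, not part of the original module: an architecture-specific
# assistant registers itself by passing its architecture name to the base
# constructor, and DisassemblyAssistant.enhance() dispatches to it whenever
# pwndbg.arch.current matches.  A hypothetical subclass:
#
#   class ExampleAssistant(DisassemblyAssistant):
#       def condition(self, instruction):
#           return None      # treat every instruction as unconditional
#
#   ExampleAssistant('examplearch')   # now reachable via assistants['examplearch']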
|
{
"content_hash": "79bbe110a778356ce07ee2c5ac3f5991",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 94,
"avg_line_length": 29.90909090909091,
"alnum_prop": 0.5819402228976697,
"repo_name": "zachriggle/pwndbg",
"id": "1422cb6c7cdd0d34c40afab9263d0a200a4999a3",
"size": "7942",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pwndbg/disasm/arch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1746603"
},
{
"name": "Shell",
"bytes": "1812"
}
],
"symlink_target": ""
}
|
"""Compute API that proxies via Cells Service."""
from nova import availability_zones
from nova import block_device
from nova.cells import rpcapi as cells_rpcapi
from nova.cells import utils as cells_utils
from nova.compute import api as compute_api
from nova.compute import instance_types
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import vm_states
from nova import exception
from nova.openstack.common import excutils
check_instance_state = compute_api.check_instance_state
wrap_check_policy = compute_api.wrap_check_policy
check_policy = compute_api.check_policy
check_instance_lock = compute_api.check_instance_lock
def validate_cell(fn):
def _wrapped(self, context, instance, *args, **kwargs):
self._validate_cell(instance, fn.__name__)
return fn(self, context, instance, *args, **kwargs)
_wrapped.__name__ = fn.__name__
return _wrapped
class ComputeRPCAPINoOp(object):
def __getattr__(self, key):
def _noop_rpc_wrapper(*args, **kwargs):
return None
return _noop_rpc_wrapper
class SchedulerRPCAPIRedirect(object):
def __init__(self, cells_rpcapi_obj):
self.cells_rpcapi = cells_rpcapi_obj
def __getattr__(self, key):
def _noop_rpc_wrapper(*args, **kwargs):
return None
return _noop_rpc_wrapper
def run_instance(self, context, **kwargs):
self.cells_rpcapi.schedule_run_instance(context, **kwargs)
class ComputeRPCProxyAPI(compute_rpcapi.ComputeAPI):
"""Class used to substitute Compute RPC API that will proxy
via the cells manager to a compute manager in a child cell.
"""
def __init__(self, *args, **kwargs):
super(ComputeRPCProxyAPI, self).__init__(*args, **kwargs)
self.cells_rpcapi = cells_rpcapi.CellsAPI()
def cast(self, ctxt, msg, topic=None, version=None):
self._set_version(msg, version)
topic = self._get_topic(topic)
self.cells_rpcapi.proxy_rpc_to_manager(ctxt, msg, topic)
def call(self, ctxt, msg, topic=None, version=None, timeout=None):
self._set_version(msg, version)
topic = self._get_topic(topic)
return self.cells_rpcapi.proxy_rpc_to_manager(ctxt, msg, topic,
call=True,
timeout=timeout)
class ComputeCellsAPI(compute_api.API):
def __init__(self, *args, **kwargs):
super(ComputeCellsAPI, self).__init__(*args, **kwargs)
self.cells_rpcapi = cells_rpcapi.CellsAPI()
# Avoid casts/calls directly to compute
self.compute_rpcapi = ComputeRPCAPINoOp()
# Redirect scheduler run_instance to cells.
self.scheduler_rpcapi = SchedulerRPCAPIRedirect(self.cells_rpcapi)
def _cell_read_only(self, cell_name):
"""Is the target cell in a read-only mode?"""
# FIXME(comstud): Add support for this.
return False
def _validate_cell(self, instance, method):
cell_name = instance['cell_name']
if not cell_name:
raise exception.InstanceUnknownCell(
instance_uuid=instance['uuid'])
if self._cell_read_only(cell_name):
raise exception.InstanceInvalidState(
attr="vm_state",
instance_uuid=instance['uuid'],
state="temporary_readonly",
method=method)
def _cast_to_cells(self, context, instance, method, *args, **kwargs):
instance_uuid = instance['uuid']
cell_name = instance['cell_name']
if not cell_name:
raise exception.InstanceUnknownCell(instance_uuid=instance_uuid)
self.cells_rpcapi.cast_compute_api_method(context, cell_name,
method, instance_uuid, *args, **kwargs)
def _call_to_cells(self, context, instance, method, *args, **kwargs):
instance_uuid = instance['uuid']
cell_name = instance['cell_name']
if not cell_name:
raise exception.InstanceUnknownCell(instance_uuid=instance_uuid)
return self.cells_rpcapi.call_compute_api_method(context, cell_name,
method, instance_uuid, *args, **kwargs)
def _check_requested_networks(self, context, requested_networks):
"""Override compute API's checking of this. It'll happen in
child cell
"""
return
def _validate_image_href(self, context, image_href):
"""Override compute API's checking of this. It'll happen in
child cell
"""
return
def backup(self, context, instance, name, backup_type, rotation,
extra_properties=None, image_id=None):
"""Backup the given instance."""
image_meta = super(ComputeCellsAPI, self).backup(context,
instance, name, backup_type, rotation,
extra_properties=extra_properties, image_id=image_id)
image_id = image_meta['id']
self._cast_to_cells(context, instance, 'backup', name,
backup_type=backup_type, rotation=rotation,
extra_properties=extra_properties, image_id=image_id)
return image_meta
def snapshot(self, context, instance, name, extra_properties=None,
image_id=None):
"""Snapshot the given instance."""
image_meta = super(ComputeCellsAPI, self).snapshot(context,
instance, name, extra_properties=extra_properties,
image_id=image_id)
image_id = image_meta['id']
self._cast_to_cells(context, instance, 'snapshot',
name, extra_properties=extra_properties, image_id=image_id)
return image_meta
def create(self, *args, **kwargs):
"""We can use the base functionality, but I left this here just
for completeness.
"""
return super(ComputeCellsAPI, self).create(*args, **kwargs)
def update_state(self, context, instance, new_state):
"""Updates the state of a compute instance.
For example to 'active' or 'error'.
Also sets 'task_state' to None.
Used by admin_actions api
:param context: The security context
:param instance: The instance to update
:param new_state: A member of vm_state to change
the instance's state to,
eg. 'active'
"""
self.update(context, instance,
pass_on_state_change=True,
vm_state=new_state,
task_state=None)
def update(self, context, instance, pass_on_state_change=False, **kwargs):
"""
Update an instance.
:param pass_on_state_change: if true, the state change will be passed
on to child cells
"""
cell_name = instance['cell_name']
if cell_name and self._cell_read_only(cell_name):
raise exception.InstanceInvalidState(
attr="vm_state",
instance_uuid=instance['uuid'],
state="temporary_readonly",
method='update')
rv = super(ComputeCellsAPI, self).update(context,
instance, **kwargs)
kwargs_copy = kwargs.copy()
if not pass_on_state_change:
# We need to skip vm_state/task_state updates... those will
# happen via a _cast_to_cells when running a different
# compute api method
kwargs_copy.pop('vm_state', None)
kwargs_copy.pop('task_state', None)
if kwargs_copy:
try:
self._cast_to_cells(context, instance, 'update',
**kwargs_copy)
except exception.InstanceUnknownCell:
pass
return rv
def _local_delete(self, context, instance, bdms):
# This will get called for every delete in the API cell
# because _delete() in compute/api.py will not find a
# service when checking if it's up.
# We need to only take action if there's no cell_name. Our
# overrides of delete() and soft_delete() will take care of
# the rest.
cell_name = instance['cell_name']
if not cell_name:
return super(ComputeCellsAPI, self)._local_delete(context,
instance, bdms)
def soft_delete(self, context, instance):
self._handle_cell_delete(context, instance,
super(ComputeCellsAPI, self).soft_delete, 'soft_delete')
def delete(self, context, instance):
self._handle_cell_delete(context, instance,
super(ComputeCellsAPI, self).delete, 'delete')
def _handle_cell_delete(self, context, instance, method, method_name):
"""Terminate an instance."""
# We can't use the decorator because we have special logic in the
# case we don't know the cell_name...
cell_name = instance['cell_name']
if cell_name and self._cell_read_only(cell_name):
raise exception.InstanceInvalidState(
attr="vm_state",
instance_uuid=instance['uuid'],
state="temporary_readonly",
method=method_name)
method(context, instance)
try:
self._cast_to_cells(context, instance, method_name)
except exception.InstanceUnknownCell:
# If there's no cell, there's also no host... which means
# the instance was destroyed from the DB here. Let's just
# broadcast a message down to all cells and hope this ends
# up resolving itself... Worse case.. the instance will
# show back up again here.
delete_type = method_name == 'soft_delete' and 'soft' or 'hard'
self.cells_rpcapi.instance_delete_everywhere(context,
instance, delete_type)
@validate_cell
def restore(self, context, instance):
"""Restore a previously deleted (but not reclaimed) instance."""
super(ComputeCellsAPI, self).restore(context, instance)
self._cast_to_cells(context, instance, 'restore')
@validate_cell
def force_delete(self, context, instance):
"""Force delete a previously deleted (but not reclaimed) instance."""
super(ComputeCellsAPI, self).force_delete(context, instance)
self._cast_to_cells(context, instance, 'force_delete')
@validate_cell
def stop(self, context, instance, do_cast=True):
"""Stop an instance."""
super(ComputeCellsAPI, self).stop(context, instance)
if do_cast:
self._cast_to_cells(context, instance, 'stop', do_cast=True)
else:
return self._call_to_cells(context, instance, 'stop',
do_cast=False)
@validate_cell
def start(self, context, instance):
"""Start an instance."""
super(ComputeCellsAPI, self).start(context, instance)
self._cast_to_cells(context, instance, 'start')
@validate_cell
def reboot(self, context, instance, *args, **kwargs):
"""Reboot the given instance."""
super(ComputeCellsAPI, self).reboot(context, instance,
*args, **kwargs)
self._cast_to_cells(context, instance, 'reboot', *args,
**kwargs)
@validate_cell
def rebuild(self, context, instance, *args, **kwargs):
"""Rebuild the given instance with the provided attributes."""
super(ComputeCellsAPI, self).rebuild(context, instance, *args,
**kwargs)
self._cast_to_cells(context, instance, 'rebuild', *args, **kwargs)
@validate_cell
def evacuate(self, context, instance, *args, **kwargs):
"""Evacuate the given instance with the provided attributes."""
super(ComputeCellsAPI, self).evacuate(context, instance, *args,
**kwargs)
self._cast_to_cells(context, instance, 'evacuate', *args, **kwargs)
@check_instance_state(vm_state=[vm_states.RESIZED])
@validate_cell
def revert_resize(self, context, instance):
"""Reverts a resize, deleting the 'new' instance in the process."""
super(ComputeCellsAPI, self).revert_resize(context, instance)
self._cast_to_cells(context, instance, 'revert_resize')
@check_instance_state(vm_state=[vm_states.RESIZED])
@validate_cell
def confirm_resize(self, context, instance):
"""Confirms a migration/resize and deletes the 'old' instance."""
super(ComputeCellsAPI, self).confirm_resize(context, instance)
self._cast_to_cells(context, instance, 'confirm_resize')
@check_instance_state(vm_state=[vm_states.ACTIVE, vm_states.STOPPED],
task_state=[None])
@validate_cell
def resize(self, context, instance, flavor_id=None, *args, **kwargs):
"""Resize (ie, migrate) a running instance.
If flavor_id is None, the process is considered a migration, keeping
the original flavor_id. If flavor_id is not None, the instance should
be migrated to a new host and resized to the new flavor_id.
"""
super(ComputeCellsAPI, self).resize(context, instance,
flavor_id=flavor_id, *args,
**kwargs)
# NOTE(johannes): If we get to this point, then we know the
# specified flavor_id is valid and exists. We'll need to load
# it again, but that should be safe.
old_instance_type = instance_types.extract_instance_type(instance)
if not flavor_id:
new_instance_type = old_instance_type
else:
new_instance_type = instance_types.get_instance_type_by_flavor_id(
flavor_id, read_deleted="no")
# NOTE(johannes): Later, when the resize is confirmed or reverted,
# the superclass implementations of those methods will need access
# to a local migration record for quota reasons. We don't need
# source and/or destination information, just the old and new
# instance_types. Status is set to 'finished' since nothing else
# will update the status along the way.
self.db.migration_create(context.elevated(),
{'instance_uuid': instance['uuid'],
'old_instance_type_id': old_instance_type['id'],
'new_instance_type_id': new_instance_type['id'],
'status': 'finished'})
# FIXME(comstud): pass new instance_type object down to a method
# that'll unfold it
self._cast_to_cells(context, instance, 'resize', flavor_id=flavor_id,
*args, **kwargs)
@validate_cell
def add_fixed_ip(self, context, instance, *args, **kwargs):
"""Add fixed_ip from specified network to given instance."""
super(ComputeCellsAPI, self).add_fixed_ip(context, instance,
*args, **kwargs)
self._cast_to_cells(context, instance, 'add_fixed_ip',
*args, **kwargs)
@validate_cell
def remove_fixed_ip(self, context, instance, *args, **kwargs):
"""Remove fixed_ip from specified network to given instance."""
super(ComputeCellsAPI, self).remove_fixed_ip(context, instance,
*args, **kwargs)
self._cast_to_cells(context, instance, 'remove_fixed_ip',
*args, **kwargs)
@validate_cell
def pause(self, context, instance):
"""Pause the given instance."""
super(ComputeCellsAPI, self).pause(context, instance)
self._cast_to_cells(context, instance, 'pause')
@validate_cell
def unpause(self, context, instance):
"""Unpause the given instance."""
super(ComputeCellsAPI, self).unpause(context, instance)
self._cast_to_cells(context, instance, 'unpause')
def get_diagnostics(self, context, instance):
"""Retrieve diagnostics for the given instance."""
# FIXME(comstud): Cache this?
# Also: only calling super() to get state/policy checking
super(ComputeCellsAPI, self).get_diagnostics(context, instance)
return self._call_to_cells(context, instance, 'get_diagnostics')
@validate_cell
def suspend(self, context, instance):
"""Suspend the given instance."""
super(ComputeCellsAPI, self).suspend(context, instance)
self._cast_to_cells(context, instance, 'suspend')
@validate_cell
def resume(self, context, instance):
"""Resume the given instance."""
super(ComputeCellsAPI, self).resume(context, instance)
self._cast_to_cells(context, instance, 'resume')
@validate_cell
def rescue(self, context, instance, rescue_password=None):
"""Rescue the given instance."""
super(ComputeCellsAPI, self).rescue(context, instance,
rescue_password=rescue_password)
self._cast_to_cells(context, instance, 'rescue',
rescue_password=rescue_password)
@validate_cell
def unrescue(self, context, instance):
"""Unrescue the given instance."""
super(ComputeCellsAPI, self).unrescue(context, instance)
self._cast_to_cells(context, instance, 'unrescue')
@validate_cell
def set_admin_password(self, context, instance, password=None):
"""Set the root/admin password for the given instance."""
super(ComputeCellsAPI, self).set_admin_password(context, instance,
password=password)
self._cast_to_cells(context, instance, 'set_admin_password',
password=password)
@validate_cell
def inject_file(self, context, instance, *args, **kwargs):
"""Write a file to the given instance."""
super(ComputeCellsAPI, self).inject_file(context, instance, *args,
**kwargs)
self._cast_to_cells(context, instance, 'inject_file', *args, **kwargs)
@wrap_check_policy
@validate_cell
def get_vnc_console(self, context, instance, console_type):
"""Get a url to a VNC Console."""
if not instance['host']:
raise exception.InstanceNotReady(instance_id=instance['uuid'])
connect_info = self._call_to_cells(context, instance,
'get_vnc_connect_info', console_type)
self.consoleauth_rpcapi.authorize_console(context,
connect_info['token'], console_type, connect_info['host'],
connect_info['port'], connect_info['internal_access_path'],
instance_uuid=instance['uuid'])
return {'url': connect_info['access_url']}
@wrap_check_policy
@validate_cell
def get_spice_console(self, context, instance, console_type):
"""Get a url to a SPICE Console."""
if not instance['host']:
raise exception.InstanceNotReady(instance_id=instance['uuid'])
connect_info = self._call_to_cells(context, instance,
'get_spice_connect_info', console_type)
self.consoleauth_rpcapi.authorize_console(context,
connect_info['token'], console_type, connect_info['host'],
connect_info['port'], connect_info['internal_access_path'],
instance_uuid=instance['uuid'])
return {'url': connect_info['access_url']}
@validate_cell
def get_console_output(self, context, instance, *args, **kwargs):
"""Get console output for an an instance."""
# NOTE(comstud): Calling super() just to get policy check
super(ComputeCellsAPI, self).get_console_output(context, instance,
*args, **kwargs)
return self._call_to_cells(context, instance, 'get_console_output',
*args, **kwargs)
def lock(self, context, instance):
"""Lock the given instance."""
super(ComputeCellsAPI, self).lock(context, instance)
self._cast_to_cells(context, instance, 'lock')
def unlock(self, context, instance):
"""Unlock the given instance."""
        super(ComputeCellsAPI, self).unlock(context, instance)
self._cast_to_cells(context, instance, 'unlock')
@validate_cell
def reset_network(self, context, instance):
"""Reset networking on the instance."""
super(ComputeCellsAPI, self).reset_network(context, instance)
self._cast_to_cells(context, instance, 'reset_network')
@validate_cell
def inject_network_info(self, context, instance):
"""Inject network info for the instance."""
super(ComputeCellsAPI, self).inject_network_info(context, instance)
self._cast_to_cells(context, instance, 'inject_network_info')
@wrap_check_policy
@validate_cell
def attach_volume(self, context, instance, volume_id, device=None):
"""Attach an existing volume to an existing instance."""
if device and not block_device.match_device(device):
raise exception.InvalidDevicePath(path=device)
device = self.compute_rpcapi.reserve_block_device_name(
context, device=device, instance=instance, volume_id=volume_id)
try:
volume = self.volume_api.get(context, volume_id)
self.volume_api.check_attach(context, volume, instance=instance)
except Exception:
with excutils.save_and_reraise_exception():
self.db.block_device_mapping_destroy_by_instance_and_device(
context, instance['uuid'], device)
self._cast_to_cells(context, instance, 'attach_volume',
volume_id, device)
@validate_cell
def _detach_volume(self, context, instance, volume):
"""Detach a volume from an instance."""
self.volume_api.check_detach(context, volume)
self._cast_to_cells(context, instance, 'detach_volume',
volume)
@wrap_check_policy
@validate_cell
def associate_floating_ip(self, context, instance, address):
"""Makes calls to network_api to associate_floating_ip.
:param address: is a string floating ip address
"""
self._cast_to_cells(context, instance, 'associate_floating_ip',
address)
@validate_cell
def delete_instance_metadata(self, context, instance, key):
"""Delete the given metadata item from an instance."""
super(ComputeCellsAPI, self).delete_instance_metadata(context,
instance, key)
self._cast_to_cells(context, instance, 'delete_instance_metadata',
key)
@wrap_check_policy
@validate_cell
def update_instance_metadata(self, context, instance,
metadata, delete=False):
rv = super(ComputeCellsAPI, self).update_instance_metadata(context,
instance, metadata, delete=delete)
try:
self._cast_to_cells(context, instance,
'update_instance_metadata',
metadata, delete=delete)
except exception.InstanceUnknownCell:
pass
return rv
class HostAPI(compute_api.HostAPI):
"""HostAPI() class for cells.
Implements host management related operations. Works by setting the
RPC API used by the base class to proxy via the cells manager to the
compute manager in the correct cell. Hosts specified with cells will
need to be of the format 'path!to!cell@host'.
DB methods in the base class are also overridden to proxy via the
cells manager.
"""
def __init__(self):
super(HostAPI, self).__init__(rpcapi=ComputeRPCProxyAPI())
self.cells_rpcapi = cells_rpcapi.CellsAPI()
def _assert_host_exists(self, context, host_name):
"""Cannot check this in API cell. This will be checked in the
target child cell.
"""
pass
def service_get_all(self, context, filters=None, set_zones=False):
if filters is None:
filters = {}
if 'availability_zone' in filters:
zone_filter = filters.pop('availability_zone')
set_zones = True
else:
zone_filter = None
services = self.cells_rpcapi.service_get_all(context,
filters=filters)
if set_zones:
services = availability_zones.set_availability_zones(context,
services)
if zone_filter is not None:
services = [s for s in services
if s['availability_zone'] == zone_filter]
return services
def service_get_by_compute_host(self, context, host_name):
return self.cells_rpcapi.service_get_by_compute_host(context,
host_name)
def instance_get_all_by_host(self, context, host_name):
"""Get all instances by host. Host might have a cell prepended
to it, so we'll need to strip it out. We don't need to proxy
this call to cells, as we have instance information here in
the API cell.
"""
cell_name, host_name = cells_utils.split_cell_and_item(host_name)
instances = super(HostAPI, self).instance_get_all_by_host(context,
host_name)
if cell_name:
instances = [i for i in instances
if i['cell_name'] == cell_name]
return instances
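    # Illustrative example (not part of the original code): with the
    # 'path!to!cell@host' form described above,
    #   cells_utils.split_cell_and_item('parent!child@compute-1')
    # yields the cell path 'parent!child' and the host 'compute-1', and the
    # instance list is then filtered down to that cell.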
def task_log_get_all(self, context, task_name, beginning, ending,
host=None, state=None):
"""Return the task logs within a given range from cells,
optionally filtering by the host and/or state. For cells, the
host should be a path like 'path!to!cell@host'. If no @host
is given, only task logs from a particular cell will be returned.
"""
return self.cells_rpcapi.task_log_get_all(context,
task_name,
beginning,
ending,
host=host,
state=state)
def compute_node_get(self, context, compute_id):
"""Get a compute node from a particular cell by its integer ID.
compute_id should be in the format of 'path!to!cell@ID'.
"""
return self.cells_rpcapi.compute_node_get(context, compute_id)
def compute_node_get_all(self, context):
return self.cells_rpcapi.compute_node_get_all(context)
def compute_node_search_by_hypervisor(self, context, hypervisor_match):
return self.cells_rpcapi.compute_node_get_all(context,
hypervisor_match=hypervisor_match)
def compute_node_statistics(self, context):
return self.cells_rpcapi.compute_node_stats(context)
class InstanceActionAPI(compute_api.InstanceActionAPI):
"""InstanceActionAPI() class for cells."""
def __init__(self):
super(InstanceActionAPI, self).__init__()
self.cells_rpcapi = cells_rpcapi.CellsAPI()
def actions_get(self, context, instance):
return self.cells_rpcapi.actions_get(context, instance)
def action_get_by_request_id(self, context, instance, request_id):
return self.cells_rpcapi.action_get_by_request_id(context, instance,
request_id)
def action_events_get(self, context, instance, action_id):
return self.cells_rpcapi.action_events_get(context, instance,
action_id)
|
{
"content_hash": "5525eca08483b9babd499ce9a8727962",
"timestamp": "",
"source": "github",
"line_count": 651,
"max_line_length": 78,
"avg_line_length": 42.61136712749616,
"alnum_prop": 0.6045782263878875,
"repo_name": "zestrada/nova-cs498cc",
"id": "f5ded45ec018288c22834e900ab91fc60d7c4e52",
"size": "28377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/compute/cells_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "9215416"
},
{
"name": "Shell",
"bytes": "17117"
}
],
"symlink_target": ""
}
|
"""``iperf_cmd.py``
`iperf command parsing and building support`
"""
import itertools
from collections import OrderedDict
from argparse import ArgumentParser
from testlib.linux.commands.cmd_helper import Command, CommandHelper, ArgumentBuilder
from testlib.custom_exceptions import UnknownArguments, ArgumentsCollision
chain_it = itertools.chain.from_iterable
IPERF_GENERAL_OPTS = {
'format': {
'names': {'short': '-f', 'long': '--format'},
'help': '[kmKM] format to report: Kbits, Mbits, KBytes, MBytes',
'choices': 'kmKM',
},
'help': {
'names': {'short': '-h', 'long': '--help'},
'help': 'print a help synopsis',
'action': 'store_true',
},
'interval': {
'names': {'short': '-i', 'long': '--interval'},
'help': 'pause N seconds between periodic bandwidth reports',
# 'default': 10,
'type': int,
},
'len': {
'names': {'short': '-l', 'long': '--len'},
'help': '\\d+[KM] set length read/write buffer to N (default 8 KB)',
'default': '8K',
},
'print_mss': {
'names': {'short': '-m', 'long': '--print_mss'},
'help': 'print TCP maximum segment size (MTU - TCP/IP header)',
'action': 'store_true',
},
'output': {
'names': {'short': '-o', 'long': '--output'},
'help': 'output the report or error message to this specified file',
},
'port': {
'names': {'short': '-p', 'long': '--port'},
'help': 'set server port to listen on/connect to N (default 5001)',
'default': '5001',
'type': int,
},
'udp': {
'names': {'short': '-u', 'long': '--udp'},
'help': 'use UDP rather than TCP',
'action': 'store_true',
},
'window': {
'names': {'short': '-w', 'long': '--window'},
'help': '\\d+[KM] TCP window size (socket buffer size)',
},
'bind': {
'names': {'short': '-B', 'long': '--bind'},
'help': 'bind to <host>, an interface or multicast address',
},
'compatibility': {
'names': {'short': '-C', 'long': '--compatibility'},
'help': 'for use with older versions does not send extra messages',
'action': 'store_true',
},
'mss': {
'names': {'short': '-M', 'long': '--mss'},
'help': '\\d+ set TCP maximum segment size (MTU - 40 bytes)',
'default': 40,
'type': int,
},
'nodelay': {
'names': {'short': '-N', 'long': '--nodelay'},
'help': "set TCP no delay, disabling Nagle's Algorithm",
'action': 'store_true',
},
'version': {
'names': {'short': '-v', 'long': '--version'},
'help': 'print version information and quit',
'action': 'store_true',
},
'IPv6Version': {
'names': {'short': '-V', 'long': '--IPv6Version'},
'help': 'set the domain to IPv6',
'action': 'store_true',
},
'reportexclude': {
'names': {'short': '-X', 'long': '--reportexclude'},
'help': '[CDMSV] exclude C(connection) D(data) M(multicast) S(settings) V(server) reports',
'choices': 'CDMSV',
},
'reportstyle': {
        'names': {'short': '-y', 'long': '--reportstyle'},
'help': 'C|c if set to C or c report results as CSV',
'choices': 'Cc',
},
}
IPERF_SERVER_OPTS = {
'server': {
'names': {'short': '-s', 'long': '--server'},
'help': 'run in server mode',
'action': 'store_true',
},
'single_udp': {
'names': {'short': '-U', 'long': '--single_udp'},
'help': 'run in single threaded UDP mode',
'action': 'store_true',
},
'daemon': {
'names': {'short': '-D', 'long': '--daemon'},
'help': 'run the server as a daemon',
'action': 'store_true',
},
}
IPERF_CLIENT_OPTS = {
'bandwidth': {
'names': {'short': '-b', 'long': '--bandwidth'},
        'help': '\\d+[KM] set target bandwidth to N bits/sec (default 1 Mbit/sec). '
                'This setting requires UDP (-u)',
},
'client': {
'names': {'short': '-c', 'long': '--client'},
'help': 'run in client mode, connecting to <host>',
},
'dualtest': {
'names': {'short': '-d', 'long': '--dualtest'},
'help': 'do a bidirectional test simultaneously',
'action': 'store_true',
},
'num': {
'names': {'short': '-n', 'long': '--num'},
        'help': '\\d+[KM] number of bytes to transmit (instead of -t)',
},
'tradeoff': {
'names': {'short': '-r', 'long': '--tradeoff'},
'help': 'do a bidirectional test individually',
'action': 'store_true',
},
'time': {
'names': {'short': '-t', 'long': '--time'},
'help': 'time in seconds to transmit for (default 10 secs)',
'type': int,
},
'fileinput': {
'names': {'short': '-F', 'long': '--fileinput'},
'help': 'input the data to be transmitted from a file',
},
'stdin': {
'names': {'short': '-I', 'long': '--stdin'},
'help': 'input the data to be transmitted from stdin',
'action': 'store_true',
},
'listenport': {
'names': {'short': '-L', 'long': '--listenport'},
'help': 'port to receive bidirectional tests back on',
'type': int,
},
'parallel': {
'names': {'short': '-P', 'long': '--parallel'},
'help': 'number of parallel client threads to run',
'type': int,
},
'ttl': {
'names': {'short': '-T', 'long': '--ttl'},
'help': 'time-to-live, for multicast (default 1)',
'type': int,
},
'linux-congestion': {
'names': {'short': '-Z', 'long': '--linux-congestion'},
        'help': 'set TCP congestion control algorithm (Linux only)',
},
}
# specify the order of the output arguments when building up a command
_IPERF_ARGS_ORDERED = OrderedDict.fromkeys(
itertools.chain(['server', 'client'],
sorted(IPERF_SERVER_OPTS),
sorted(IPERF_CLIENT_OPTS),
sorted(IPERF_GENERAL_OPTS)))
class IperfArgumentBuilder(ArgumentBuilder):
"""
"""
ARGS_ORDERED = list(_IPERF_ARGS_ORDERED.keys())
@classmethod
def get_formatter(cls):
_formatter = {
'optional': cls.FORMATTER_BY_VALUE_MAP(
{
cls.__TRUE__: cls.FORMAT_KEY_BY_TAG('long'),
cls.__FALSE__: cls.FORMAT_NONE,
None: cls.FORMAT_NONE,
},
default=cls.FORMATTER_JOIN_KEY_VAL(
key=cls.FORMAT_KEY_BY_TAG('long'),
joiner=cls.FORMAT_ARG_APPEND_LIST,
val=cls.FORMAT_VAL_TRANSFORM(str),
),
),
'positional': cls.FORMAT_VAL_TRANSFORM(str),
}
return _formatter
def __init__(self):
super(IperfArgumentBuilder, self).__init__(args_order=self.ARGS_ORDERED,
args_formatter=self.get_formatter())
class CmdIperfHelper(CommandHelper):
"""
"""
@classmethod
def check_args(cls, **kwargs):
"""Input command arguments checking API
"""
__kwargs = cls._encode_args(**kwargs)
return cls._check_args(**__kwargs)
@classmethod
def _check_args(cls, **__kwargs):
kwargs = cls._decode_args(**__kwargs)
is_server = kwargs.get('server', None)
is_client = kwargs.get('client', None)
if (is_server and is_client) or (not is_server and not is_client):
raise ArgumentsCollision(server=is_server, client=is_client)
if is_server:
return cls._check_server_args(**__kwargs)
return cls._check_client_args(**__kwargs)
# TODO maybe put these outside the class to avoid name mangling?
@classmethod
def _check_server_args(cls, __server=False, __single_udp=None, __daemon=None, **__kwargs):
assert __server
return cls._check_general_args(**__kwargs)
@classmethod
def _check_client_args(cls, __client=None, __bandwidth=None, __dualtest=None, __num=None,
__stdin=None, __tradeoff=None, __time=None, __fileinput=None,
__listenport=None, __parallel=None, __ttl=None, __linux_congestion=None,
**__kwargs):
assert __client
return cls._check_general_args(**__kwargs)
@classmethod
def _check_general_args(cls, __format='m', __help=False, __interval=10, __len='8K',
__print_mss=False, __output=None, __port=5001, __udp=False,
__window=None, __bind=None, __compatibility=False, __mss=40,
__nodelay=False, __version=False, __IPv6Version=False,
__reportexclude=None, __reportstyle=None, **__kwargs):
if __kwargs:
raise UnknownArguments(**cls._decode_args(**__kwargs))
return True
IPERF_PARSER = ArgumentParser(prog='iperf', conflict_handler='resolve')
IPERF_BUILDER = IperfArgumentBuilder()
_params_dict = dict(dict(IPERF_GENERAL_OPTS, **IPERF_SERVER_OPTS), **IPERF_CLIENT_OPTS)
iperf_cmd_kwargs = {
'arg_parser': IPERF_PARSER,
'params': _params_dict,
'arg_builder': IPERF_BUILDER,
'default_list': [],
}
IPERF_CMD_HELPER = CmdIperfHelper(**iperf_cmd_kwargs)
class CmdIperf(Command):
CMD_HELPER = IPERF_CMD_HELPER
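# Illustrative sketch (not part of the original module): the option tables
# above are plain dicts, so they can drive argparse directly. This is roughly
# what CommandHelper is assumed to do with 'params'; the real wiring lives in
# testlib.linux.commands.cmd_helper and may differ.
def _build_demo_parser():
    demo = ArgumentParser(prog='iperf-demo', conflict_handler='resolve')
    for name, spec in _params_dict.items():
        # everything except the option strings maps straight onto add_argument()
        kwargs = {key: val for key, val in spec.items() if key != 'names'}
        demo.add_argument(spec['names']['short'], spec['names']['long'],
                          dest=name, **kwargs)
    return demo
# e.g. _build_demo_parser().parse_args(['-c', '10.0.0.1', '-u', '-t', '30'])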
|
{
"content_hash": "32eb7e483b408d2557047f7098285446",
"timestamp": "",
"source": "github",
"line_count": 282,
"max_line_length": 99,
"avg_line_length": 33.698581560283685,
"alnum_prop": 0.5167841734189204,
"repo_name": "orestkreminskyi/taf",
"id": "f7ded935aa02b80a392293b89a9d4c057e6ee61f",
"size": "10097",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "taf/testlib/linux/iperf/iperf_cmd.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "6745"
},
{
"name": "JavaScript",
"bytes": "1771"
},
{
"name": "Python",
"bytes": "3869203"
},
{
"name": "Shell",
"bytes": "3146"
},
{
"name": "Tcl",
"bytes": "68098"
},
{
"name": "XSLT",
"bytes": "41538"
}
],
"symlink_target": ""
}
|
import os
import sys
import time
import logging
def application_id():
from google.appengine.api import app_identity
try:
result = app_identity.get_application_id()
except AttributeError:
result = None
if not result:
# Apparently we aren't running live, probably inside a management command
from google.appengine.api import appinfo
info = appinfo.LoadSingleAppInfo(open(os.path.join(find_project_root(), "app.yaml")))
result = "dev~" + info.application
os.environ['APPLICATION_ID'] = result
result = app_identity.get_application_id()
return result
def appengine_on_path():
try:
from google.appengine.api import apiproxy_stub_map
apiproxy_stub_map # Silence pylint
return True
except ImportError:
return False
def on_production():
return 'SERVER_SOFTWARE' in os.environ and not os.environ['SERVER_SOFTWARE'].startswith("Development")
def datastore_available():
from google.appengine.api import apiproxy_stub_map
return bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
def in_testing():
return "test" in sys.argv
import collections
import functools
class memoized(object):
def __init__(self, func, *args):
self.func = func
self.cache = {}
self.args = args
def __call__(self, *args):
args = self.args or args
if not isinstance(args, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args)
if args in self.cache:
return self.cache[args]
else:
value = self.func(*args)
self.cache[args] = value
return value
def __repr__(self):
'''Return the function's docstring.'''
return self.func.__doc__
def __get__(self, obj, objtype):
'''Support instance methods.'''
return functools.partial(self.__call__, obj)
@memoized
def find_project_root():
"""Traverse the filesystem upwards and return the directory containing app.yaml"""
path = os.path.dirname(os.path.abspath(__file__))
app_yaml_path = os.environ.get('DJANGAE_APP_YAML_LOCATION', None)
# If the DJANGAE_APP_YAML_LOCATION variable is setup, will try to locate
# it from there.
if (app_yaml_path is not None and
os.path.exists(os.path.join(app_yaml_path, "app.yaml"))):
return app_yaml_path
# Failing that, iterates over the parent folders until it finds it,
# failing when it gets to the root
while True:
if os.path.exists(os.path.join(path, "app.yaml")):
return path
else:
parent = os.path.dirname(path)
if parent == path: # Filesystem root
break
else:
path = parent
raise RuntimeError("Unable to locate app.yaml. Did you add it to skip_files?")
def get_in_batches(queryset, batch_size=10):
""" prefetches the queryset in batches """
start = 0
if batch_size < 1:
raise Exception("batch_size must be > 0")
end = batch_size
while True:
batch = [x for x in queryset[start:end]]
for y in batch:
yield y
if len(batch) < batch_size:
break
start += batch_size
end += batch_size
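# Illustrative only (MyModel and process are hypothetical): stream a large
# queryset in batches of 100 so the datastore never has to materialise the
# whole result set at once.
#
#   for obj in get_in_batches(MyModel.objects.all(), batch_size=100):
#       process(obj)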
def retry_until_successful(func, *args, **kwargs):
return retry(func, *args, _retries=float('inf'), **kwargs)
def retry(func, *args, **kwargs):
from google.appengine.api import datastore_errors
from google.appengine.runtime import apiproxy_errors
from google.appengine.runtime import DeadlineExceededError
from djangae.db.transaction import TransactionFailedError
retries = kwargs.pop('_retries', 3)
i = 0
try:
timeout_ms = 100
while True:
try:
i += 1
return func(*args, **kwargs)
except (datastore_errors.Error, apiproxy_errors.Error, TransactionFailedError), exc:
logging.info("Retrying function: %s(%s, %s) - %s", str(func), str(args), str(kwargs), str(exc))
time.sleep(timeout_ms / 1000000.0)
timeout_ms *= 2
if i > retries:
raise exc
except DeadlineExceededError:
logging.error("Timeout while running function: %s(%s, %s)", str(func), str(args), str(kwargs))
raise
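# Illustrative only ('entity' is hypothetical): retry() re-runs the callable on
# datastore/transaction errors with a doubling backoff, _retries caps the number
# of attempts, and retry_until_successful() never gives up.
#
#   retry(entity.save, _retries=5)
#   retry_until_successful(entity.save)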
def djangae_webapp(request_handler):
""" Decorator for wrapping a webapp2.RequestHandler to work with
    the django wsgi handler"""
def request_handler_wrapper(request, *args, **kwargs):
from webapp2 import Request, Response, WSGIApplication
from django.http import HttpResponse
class Route:
handler_method = request.method.lower()
req = Request(request.environ)
req.route = Route()
req.route_args = args
req.route_kwargs = kwargs
req.app = WSGIApplication()
response = Response()
view_func = request_handler(req, response)
view_func.dispatch()
django_response = HttpResponse(response.body, status=int(str(response.status).split(" ")[0]))
for header, value in response.headers.iteritems():
django_response[header] = value
return django_response
return request_handler_wrapper
|
{
"content_hash": "10c8ccc81316f9d605d6400011324af2",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 111,
"avg_line_length": 29.994444444444444,
"alnum_prop": 0.6169661048342285,
"repo_name": "stucox/djangae",
"id": "2291e94d0638b0ef7d25bf7488315bc44890c432",
"size": "5399",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "djangae/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "277"
},
{
"name": "Python",
"bytes": "474522"
},
{
"name": "Shell",
"bytes": "175"
}
],
"symlink_target": ""
}
|
""" Backend that defers to another server for access control """
from .base import IAccessBackend
class RemoteAccessBackend(IAccessBackend):
"""
This backend allows you to defer all user auth and permissions to a remote
server. It requires the ``requests`` package.
"""
def __init__(self, request=None, settings=None, server=None, auth=None,
**kwargs):
super(RemoteAccessBackend, self).__init__(request, **kwargs)
self._settings = settings
self.server = server
self.auth = auth
@classmethod
def configure(cls, settings):
kwargs = super(RemoteAccessBackend, cls).configure(settings)
kwargs['settings'] = settings
kwargs['server'] = settings['auth.backend_server']
auth = None
user = settings.get('auth.user')
if user is not None:
password = settings.get('auth.password')
auth = (user, password)
kwargs['auth'] = auth
return kwargs
def _req(self, uri, params=None):
""" Hit a server endpoint and return the json response """
try:
import requests
except ImportError: # pragma: no cover
raise ImportError("You must 'pip install requests' before using "
"the remote server access backend")
response = requests.get(self.server + uri, params=params,
auth=self.auth)
response.raise_for_status()
return response.json()
def verify_user(self, username, password):
uri = self._settings.get('auth.uri.verify', '/verify')
params = {'username': username, 'password': password}
return self._req(uri, params)
def _get_password_hash(self, username):
# We don't have to do anything here because we overrode 'verify_user'
pass
def groups(self, username=None):
uri = self._settings.get('auth.uri.groups', '/groups')
params = {}
if username is not None:
params['username'] = username
return self._req(uri, params)
def group_members(self, group):
uri = self._settings.get('auth.uri.group_members', '/group_members')
params = {'group': group}
return self._req(uri, params)
def is_admin(self, username):
uri = self._settings.get('auth.uri.admin', '/admin')
params = {'username': username}
return self._req(uri, params)
def group_permissions(self, package, group=None):
uri = self._settings.get('auth.uri.group_permissions',
'/group_permissions')
params = {'package': package}
if group is not None:
params['group'] = group
return self._req(uri, params)
def user_permissions(self, package, username=None):
uri = self._settings.get('auth.uri.user_permissions',
'/user_permissions')
params = {'package': package}
if username is not None:
params['username'] = username
return self._req(uri, params)
def user_package_permissions(self, username):
uri = self._settings.get('auth.uri.user_package_permissions',
'/user_package_permissions')
params = {'username': username}
return self._req(uri, params)
def group_package_permissions(self, group):
uri = self._settings.get('auth.uri.group_package_permissions',
'/group_package_permissions')
params = {'group': group}
return self._req(uri, params)
def user_data(self, username=None):
uri = self._settings.get('auth.uri.user_data',
'/user_data')
params = None
if username is not None:
params = {'username': username}
return self._req(uri, params)
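# Illustrative config sketch (ini-style), built only from the settings keys read
# above: auth.backend_server is the one required key, the auth.uri.* entries
# default to the paths shown, and selecting this backend at all is configured
# elsewhere.
#
#   auth.backend_server = https://auth.example.com
#   auth.user = pypicloud
#   auth.password = secret
#   auth.uri.verify = /verify
#   auth.uri.groups = /groups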
|
{
"content_hash": "aa6ce3a3d642e1e61f7f8ffc5aed9652",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 78,
"avg_line_length": 37.114285714285714,
"alnum_prop": 0.5783936361303567,
"repo_name": "rubikloud/pypicloud",
"id": "7eaa37d227568ceec8d4c84249ca4298e72c94b5",
"size": "3897",
"binary": false,
"copies": "3",
"ref": "refs/heads/rubikloud",
"path": "pypicloud/access/remote.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "397"
},
{
"name": "HTML",
"bytes": "24656"
},
{
"name": "JavaScript",
"bytes": "26246"
},
{
"name": "Python",
"bytes": "287319"
},
{
"name": "Shell",
"bytes": "2143"
}
],
"symlink_target": ""
}
|
import sys
import os
import random
import gzip
import pickle
import optparse
import time
from optparse import OptionParser
def main():
options = parse_args()
app = TestApplication(options)
app.run()
chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789.,;:-'
class TestApplication(object):
def __init__(self, options):
self.options = options
random.seed(options.seed)
def run(self):
n = self.options.words
for i in range(n):
print(self.generate_random_word())
def generate_random_word(self):
        n = random.randint(1, self.options.maxlength)
s = ''
for i in range(n):
s += random.choice(chars)
return s
def parse_args():
parser = OptionParser()
parser.add_option(
"--max-words", dest='words', type=int, default=50000, metavar='N',
help="maximum number of words generated/loaded"
)
parser.add_option(
"--random", dest='random', action='store_true', default=False,
help="generate random words"
)
parser.add_option(
"--seed", dest='seed', type=int, default=0, metavar='INT',
help="random seed"
)
parser.add_option(
"--random-max-len", dest='maxlength', type=int, default=100, metavar='K',
help="maximum count of characters in a word"
)
(options, rest) = parser.parse_args()
return options
if __name__ == '__main__':
main()
|
{
"content_hash": "ad5977919975193725779aa8dbb02d4e",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 81,
"avg_line_length": 20.625,
"alnum_prop": 0.6074074074074074,
"repo_name": "pombredanne/pyahocorasick",
"id": "541d9a6bb92e82d7280c0f2188df5cdbf22f464b",
"size": "1485",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/generate_random_words.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "980"
},
{
"name": "C",
"bytes": "160031"
},
{
"name": "C++",
"bytes": "2464"
},
{
"name": "Makefile",
"bytes": "1362"
},
{
"name": "Python",
"bytes": "111411"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
}
|
'''@file decode.py
this file will use the model to decode a set of data'''
import sys
import os
import cPickle as pickle
sys.path.append(os.getcwd())
from six.moves import configparser
import tensorflow as tf
from nabu.neuralnetworks.recognizer import Recognizer
from nabu.neuralnetworks.models.model import Model
def decode(expdir, testing=False):
'''does everything for decoding
args:
expdir: the experiments directory
testing: if true only the graph will be created for debugging purposes
'''
#read the database config file
database_cfg = configparser.ConfigParser()
database_cfg.read(os.path.join(expdir, 'database.conf'))
#read the recognizer config file
recognizer_cfg = configparser.ConfigParser()
recognizer_cfg.read(os.path.join(expdir, 'recognizer.cfg'))
if testing:
model_cfg = configparser.ConfigParser()
model_cfg.read(os.path.join(expdir, 'model.cfg'))
trainer_cfg = configparser.ConfigParser()
trainer_cfg.read(os.path.join(expdir, 'trainer.cfg'))
model = Model(
conf=model_cfg,
trainlabels=int(trainer_cfg.get('trainer', 'trainlabels')),
constraint=None)
else:
#load the model
with open(os.path.join(expdir, 'model', 'model.pkl'), 'rb') as fid:
model = pickle.load(fid)
#create the recognizer
recognizer = Recognizer(
model=model,
conf=recognizer_cfg,
dataconf=database_cfg,
expdir=expdir)
if testing:
return
#do the recognition
recognizer.recognize()
if __name__ == '__main__':
tf.app.flags.DEFINE_string('expdir', 'expdir',
                               'the experiments directory that was used for'
' training'
)
FLAGS = tf.app.flags.FLAGS
decode(FLAGS.expdir, False)
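# Illustrative invocation (the expdir is assumed to come from a prior training
# run that wrote database.conf, recognizer.cfg and model/model.pkl):
#
#   python nabu/scripts/decode.py --expdir /path/to/expdir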
|
{
"content_hash": "06740f348cb3635e583f01368070b694",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 78,
"avg_line_length": 29.184615384615384,
"alnum_prop": 0.6309963099630996,
"repo_name": "vrenkens/Nabu-asr",
"id": "16d222ee012955671b279fa0b60deff85f5ef388",
"size": "1897",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nabu/scripts/decode.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "203240"
},
{
"name": "Shell",
"bytes": "700"
}
],
"symlink_target": ""
}
|
from django.db import models
from django.core.exceptions import ImproperlyConfigured
from actstream.registry import register, registry
from actstream.tests.base import ActivityBaseTestCase
from actstream.runtests.testapp_nested.models import my_model
try:
from django.apps import apps
except ImportError:
pass
else:
apps.all_models.pop('testapp_not_installed', None)
class NotInstalledModel(models.Model):
text = models.TextField()
class Meta:
app_label = 'testapp_not_installed'
class TestAppNestedTests(ActivityBaseTestCase):
def test_registration(self):
self.assertIn(my_model.NestedModel, registry)
def test_not_installed(self):
self.assertRaises(ImproperlyConfigured, register, NotInstalledModel)
|
{
"content_hash": "94795542b2fe5e084d6b1a94bb3c0dd3",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 76,
"avg_line_length": 26.310344827586206,
"alnum_prop": 0.7667103538663171,
"repo_name": "jrsupplee/django-activity-stream",
"id": "7c60f57f3638b1220feedcc1bcfbe9f4805294c5",
"size": "763",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "actstream/runtests/testapp_nested/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "3119"
},
{
"name": "Makefile",
"bytes": "464"
},
{
"name": "Python",
"bytes": "107812"
}
],
"symlink_target": ""
}
|
from distutils.core import setup, Extension
module1 = Extension(
'dummy',
sources=['dummy.cpp', 'SerialUtils.cc', 'StringUtils.cc'],
extra_compile_args=["-O3"]
)
setup(
name='Dummy',
version='1.0',
description='This is a dummy package',
ext_modules=[module1]
)
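# Illustrative build command (standard distutils usage, run from this
# directory); it compiles dummy.cpp and the helper sources into an importable
# 'dummy' extension module:
#
#   python setup.py build_ext --inplace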
|
{
"content_hash": "d3bac6d7d0c5a24ace696d994f49012b",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 62,
"avg_line_length": 20.785714285714285,
"alnum_prop": 0.6426116838487973,
"repo_name": "elzaggo/pydoop",
"id": "f1986d1704d246abe49b828e7ed1ced24eff4680",
"size": "900",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "test/timings/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "202110"
},
{
"name": "C++",
"bytes": "157645"
},
{
"name": "Emacs Lisp",
"bytes": "153"
},
{
"name": "Java",
"bytes": "180329"
},
{
"name": "Makefile",
"bytes": "3322"
},
{
"name": "Python",
"bytes": "514013"
},
{
"name": "Shell",
"bytes": "18476"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Static()
result.template = "object/static/structure/dantooine/shared_dant_large_mudhut_no_flora_05m.iff"
result.attribute_template_id = -1
result.stfName("obj_n","unknown_object")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "235f8972f2c8d0734ed16483e87786d5",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 96,
"avg_line_length": 25,
"alnum_prop": 0.7015384615384616,
"repo_name": "obi-two/Rebelion",
"id": "f46e704f55570ceb8577845e8572d94dbde367ab",
"size": "470",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "data/scripts/templates/object/static/structure/dantooine/shared_dant_large_mudhut_no_flora_05m.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11818"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2293610"
},
{
"name": "CMake",
"bytes": "39727"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7499185"
},
{
"name": "SQLPL",
"bytes": "41864"
}
],
"symlink_target": ""
}
|
"""Train keras CNN on the CIFAR10 small images dataset.
The model comes from: https://zhuanlan.zhihu.com/p/29214791,
and it gets to about 87% validation accuracy in 100 epochs.
Note that the script requires a machine with 4 GPUs. You
can set {"gpu": 0} to use CPUs for training, although
it is less efficient.
"""
from __future__ import print_function
import argparse
import numpy as np
import tensorflow as tf
from tensorflow.python.keras.datasets import cifar10
from tensorflow.python.keras.layers import Input, Dense, Dropout, Flatten
from tensorflow.python.keras.layers import Convolution2D, MaxPooling2D
from tensorflow.python.keras.models import Model
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator
import ray
from ray.tune import grid_search, run, sample_from
from ray.tune import Trainable
from ray.tune.schedulers import PopulationBasedTraining
num_classes = 10
class Cifar10Model(Trainable):
def _read_data(self):
# The data, split between train and test sets:
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Convert class vectors to binary class matrices.
y_train = tf.keras.utils.to_categorical(y_train, num_classes)
y_test = tf.keras.utils.to_categorical(y_test, num_classes)
x_train = x_train.astype("float32")
x_train /= 255
x_test = x_test.astype("float32")
x_test /= 255
return (x_train, y_train), (x_test, y_test)
def _build_model(self, input_shape):
x = Input(shape=(32, 32, 3))
y = x
y = Convolution2D(
filters=64,
kernel_size=3,
strides=1,
padding="same",
activation="relu",
kernel_initializer="he_normal")(y)
y = Convolution2D(
filters=64,
kernel_size=3,
strides=1,
padding="same",
activation="relu",
kernel_initializer="he_normal")(y)
y = MaxPooling2D(pool_size=2, strides=2, padding="same")(y)
y = Convolution2D(
filters=128,
kernel_size=3,
strides=1,
padding="same",
activation="relu",
kernel_initializer="he_normal")(y)
y = Convolution2D(
filters=128,
kernel_size=3,
strides=1,
padding="same",
activation="relu",
kernel_initializer="he_normal")(y)
y = MaxPooling2D(pool_size=2, strides=2, padding="same")(y)
y = Convolution2D(
filters=256,
kernel_size=3,
strides=1,
padding="same",
activation="relu",
kernel_initializer="he_normal")(y)
y = Convolution2D(
filters=256,
kernel_size=3,
strides=1,
padding="same",
activation="relu",
kernel_initializer="he_normal")(y)
y = MaxPooling2D(pool_size=2, strides=2, padding="same")(y)
y = Flatten()(y)
y = Dropout(self.config["dropout"])(y)
y = Dense(
units=10, activation="softmax", kernel_initializer="he_normal")(y)
model = Model(inputs=x, outputs=y, name="model1")
return model
def _setup(self, config):
self.train_data, self.test_data = self._read_data()
x_train = self.train_data[0]
model = self._build_model(x_train.shape[1:])
opt = tf.keras.optimizers.Adadelta(
lr=self.config["lr"], decay=self.config["decay"])
model.compile(
loss="categorical_crossentropy",
optimizer=opt,
metrics=["accuracy"])
self.model = model
def _train(self):
x_train, y_train = self.train_data
x_test, y_test = self.test_data
aug_gen = ImageDataGenerator(
# set input mean to 0 over the dataset
featurewise_center=False,
# set each sample mean to 0
samplewise_center=False,
# divide inputs by dataset std
featurewise_std_normalization=False,
# divide each input by its std
samplewise_std_normalization=False,
# apply ZCA whitening
zca_whitening=False,
# randomly rotate images in the range (degrees, 0 to 180)
rotation_range=0,
# randomly shift images horizontally (fraction of total width)
width_shift_range=0.1,
# randomly shift images vertically (fraction of total height)
height_shift_range=0.1,
# randomly flip images
horizontal_flip=True,
# randomly flip images
vertical_flip=False,
)
aug_gen.fit(x_train)
gen = aug_gen.flow(
x_train, y_train, batch_size=self.config["batch_size"])
self.model.fit_generator(
generator=gen,
steps_per_epoch=50000 // self.config["batch_size"],
epochs=self.config["epochs"],
validation_data=None)
# loss, accuracy
_, accuracy = self.model.evaluate(x_test, y_test, verbose=0)
return {"mean_accuracy": accuracy}
def _save(self, checkpoint_dir):
file_path = checkpoint_dir + "/model"
self.model.save_weights(file_path)
return file_path
def _restore(self, path):
self.model.load_weights(path)
def _stop(self):
        # If needed, save your model on exit.
# saved_path = self.model.save(self.logdir)
# print("save model at: ", saved_path)
pass
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--smoke-test", action="store_true", help="Finish quickly for testing")
args, _ = parser.parse_known_args()
train_spec = {
"resources_per_trial": {
"cpu": 1,
"gpu": 1
},
"stop": {
"mean_accuracy": 0.80,
"training_iteration": 30,
},
"config": {
"epochs": 1,
"batch_size": 64,
"lr": grid_search([10**-4, 10**-5]),
"decay": sample_from(lambda spec: spec.config.lr / 100.0),
"dropout": grid_search([0.25, 0.5]),
},
"num_samples": 4,
}
if args.smoke_test:
train_spec["config"]["lr"] = 10**-4
train_spec["config"]["dropout"] = 0.5
ray.init()
pbt = PopulationBasedTraining(
time_attr="training_iteration",
metric="mean_accuracy",
mode="max",
perturbation_interval=10,
hyperparam_mutations={
"dropout": lambda _: np.random.uniform(0, 1),
})
run(Cifar10Model, name="pbt_cifar10", scheduler=pbt, **train_spec)
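# Illustrative invocations; --smoke-test (parsed above) pins lr and dropout to
# a single value instead of the full grid for a quick end-to-end check:
#
#   python pbt_tune_cifar10_with_keras.py --smoke-test
#   python pbt_tune_cifar10_with_keras.py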
|
{
"content_hash": "6ea664a311f32c5a2f77a0743675e778",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 79,
"avg_line_length": 31.66822429906542,
"alnum_prop": 0.5669175151246865,
"repo_name": "atumanov/ray",
"id": "c1a1765bc9f4fac4ea3d45dbff522f41f2d71549",
"size": "6823",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/ray/tune/examples/pbt_tune_cifar10_with_keras.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "20715"
},
{
"name": "C++",
"bytes": "1036803"
},
{
"name": "CSS",
"bytes": "9262"
},
{
"name": "Dockerfile",
"bytes": "3411"
},
{
"name": "HTML",
"bytes": "32704"
},
{
"name": "Java",
"bytes": "517715"
},
{
"name": "JavaScript",
"bytes": "8178"
},
{
"name": "Jupyter Notebook",
"bytes": "1610"
},
{
"name": "Python",
"bytes": "3081422"
},
{
"name": "Ruby",
"bytes": "956"
},
{
"name": "Shell",
"bytes": "76928"
},
{
"name": "Smarty",
"bytes": "955"
}
],
"symlink_target": ""
}
|
# Copyright 2003 Dave Abrahams
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# This tests the core rule for enumerating the variable names in a module.
import BoostBuild
t = BoostBuild.Tester(pass_toolset=0)
t.write("file.jam", """\
module foo
{
rule bar { }
var1 = x y ;
var2 = fubar ;
}
expected = var1 var2 ;
names = [ VARNAMES foo ] ;
if $(names) in $(expected) && $(expected) in $(names)
{
# everything OK
}
else
{
EXIT expected to find variables $(expected:J=", ") in module foo,
but found $(names:J=", ") instead. ;
}
DEPENDS all : xx ;
NOTFILE xx ;
""")
t.run_build_system(["-ffile.jam"], status=0)
t.cleanup()
|
{
"content_hash": "ef8b14b9406f57fc9323d749a821cfdf",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 81,
"avg_line_length": 21.216216216216218,
"alnum_prop": 0.621656050955414,
"repo_name": "flingone/frameworks_base_cmds_remoted",
"id": "a9b8d4b259ea0555654f788325028aa6789d76ba",
"size": "804",
"binary": false,
"copies": "6",
"ref": "refs/heads/matchstick-kk",
"path": "libs/boost/tools/build/test/core_varnames.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "178795"
},
{
"name": "Bison",
"bytes": "10393"
},
{
"name": "C",
"bytes": "2935304"
},
{
"name": "C#",
"bytes": "41850"
},
{
"name": "C++",
"bytes": "159845641"
},
{
"name": "CMake",
"bytes": "99536"
},
{
"name": "CSS",
"bytes": "246918"
},
{
"name": "Cuda",
"bytes": "26521"
},
{
"name": "FORTRAN",
"bytes": "1387"
},
{
"name": "Gnuplot",
"bytes": "2361"
},
{
"name": "Groff",
"bytes": "8174"
},
{
"name": "HTML",
"bytes": "145196591"
},
{
"name": "IDL",
"bytes": "15"
},
{
"name": "JavaScript",
"bytes": "136112"
},
{
"name": "Lua",
"bytes": "4202"
},
{
"name": "Makefile",
"bytes": "1004791"
},
{
"name": "Max",
"bytes": "37424"
},
{
"name": "Objective-C",
"bytes": "2979"
},
{
"name": "Objective-C++",
"bytes": "214"
},
{
"name": "PHP",
"bytes": "60249"
},
{
"name": "Perl",
"bytes": "35563"
},
{
"name": "Perl6",
"bytes": "2130"
},
{
"name": "Python",
"bytes": "1914013"
},
{
"name": "QML",
"bytes": "613"
},
{
"name": "QMake",
"bytes": "7148"
},
{
"name": "Rebol",
"bytes": "372"
},
{
"name": "Ruby",
"bytes": "2814"
},
{
"name": "Shell",
"bytes": "403506"
},
{
"name": "Tcl",
"bytes": "1205"
},
{
"name": "TeX",
"bytes": "13819"
},
{
"name": "XSLT",
"bytes": "770994"
}
],
"symlink_target": ""
}
|