repo_name stringlengths 5 100 | ref stringlengths 12 67 | path stringlengths 4 244 | copies stringlengths 1 8 | content stringlengths 0 1.05M ⌀ |
|---|---|---|---|---|
dbaxa/django | refs/heads/master | tests/select_for_update/tests.py | 123 | from __future__ import unicode_literals
import threading
import time
from multiple_database.routers import TestRouter
from django.db import DatabaseError, connection, router, transaction
from django.test import (
TransactionTestCase, override_settings, skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import Person
# We need to set settings.DEBUG to True so we can capture the output SQL
# to examine.
# We need to set settings.DEBUG to True so we can capture the output SQL
# to examine.
@override_settings(DEBUG=True)
class SelectForUpdateTests(TransactionTestCase):
    """
    Tests for ``QuerySet.select_for_update()``.

    ``DEBUG=True`` (via the class decorator) makes the connection record
    executed queries in ``connection.queries`` so the tests can inspect the
    generated SQL for the FOR UPDATE clause. A second, independent database
    connection (``self.new_connection``) is used to hold row locks so that
    blocking behaviour can be exercised from worker threads.
    """
    available_apps = ['select_for_update']

    def setUp(self):
        # This is executed in autocommit mode so that code in
        # run_select_for_update can see this data.
        self.person = Person.objects.create(name='Reinhardt')
        # We need another database connection in transaction to test that one
        # connection issuing a SELECT ... FOR UPDATE will block.
        self.new_connection = connection.copy()

    def tearDown(self):
        try:
            self.end_blocking_transaction()
        except (DatabaseError, AttributeError):
            # The blocking transaction was either never started or has
            # already been ended by the test itself.
            pass
        self.new_connection.close()

    def start_blocking_transaction(self):
        """
        Take a FOR UPDATE lock on all Person rows using the second
        connection. end_blocking_transaction() must be called afterwards.
        """
        self.new_connection.set_autocommit(False)
        # Start a blocking transaction. At some point,
        # end_blocking_transaction() should be called.
        self.cursor = self.new_connection.cursor()
        sql = 'SELECT * FROM %(db_table)s %(for_update)s;' % {
            'db_table': Person._meta.db_table,
            'for_update': self.new_connection.ops.for_update_sql(),
        }
        self.cursor.execute(sql, ())
        self.cursor.fetchone()

    def end_blocking_transaction(self):
        # Roll back the blocking transaction.
        self.new_connection.rollback()
        self.new_connection.set_autocommit(True)

    def has_for_update_sql(self, tested_connection, nowait=False):
        """
        Return True if the most recently executed query on
        ``tested_connection`` contains the backend's FOR UPDATE clause.
        """
        for_update_sql = tested_connection.ops.for_update_sql(nowait)
        sql = tested_connection.queries[-1]['sql']
        # Idiomatic containment test instead of bool(sql.find(...) > -1).
        return for_update_sql in sql

    @skipUnlessDBFeature('has_select_for_update')
    def test_for_update_sql_generated(self):
        """
        Test that the backend's FOR UPDATE variant appears in
        generated SQL when select_for_update is invoked.
        """
        with transaction.atomic():
            list(Person.objects.all().select_for_update())
        self.assertTrue(self.has_for_update_sql(connection))

    @skipUnlessDBFeature('has_select_for_update_nowait')
    def test_for_update_sql_generated_nowait(self):
        """
        Test that the backend's FOR UPDATE NOWAIT variant appears in
        generated SQL when select_for_update is invoked.
        """
        with transaction.atomic():
            list(Person.objects.all().select_for_update(nowait=True))
        self.assertTrue(self.has_for_update_sql(connection, nowait=True))

    @skipUnlessDBFeature('has_select_for_update_nowait')
    def test_nowait_raises_error_on_block(self):
        """
        If nowait is specified, we expect an error to be raised rather
        than blocking.
        """
        self.start_blocking_transaction()
        status = []
        thread = threading.Thread(
            target=self.run_select_for_update,
            args=(status,),
            kwargs={'nowait': True},
        )
        thread.start()
        time.sleep(1)
        thread.join()
        self.end_blocking_transaction()
        self.assertIsInstance(status[-1], DatabaseError)

    @skipIfDBFeature('has_select_for_update_nowait')
    @skipUnlessDBFeature('has_select_for_update')
    def test_unsupported_nowait_raises_error(self):
        """
        If a SELECT...FOR UPDATE NOWAIT is run on a database backend
        that supports FOR UPDATE but not NOWAIT, then we should find
        that a DatabaseError is raised.
        """
        self.assertRaises(
            DatabaseError,
            list,
            Person.objects.all().select_for_update(nowait=True)
        )

    @skipUnlessDBFeature('has_select_for_update')
    def test_for_update_requires_transaction(self):
        """
        Test that a TransactionManagementError is raised
        when a select_for_update query is executed outside of a transaction.
        """
        with self.assertRaises(transaction.TransactionManagementError):
            list(Person.objects.all().select_for_update())

    @skipUnlessDBFeature('has_select_for_update')
    def test_for_update_requires_transaction_only_in_execution(self):
        """
        Test that no TransactionManagementError is raised
        when select_for_update is invoked outside of a transaction -
        only when the query is executed.
        """
        people = Person.objects.all().select_for_update()
        with self.assertRaises(transaction.TransactionManagementError):
            list(people)

    def run_select_for_update(self, status, nowait=False):
        """
        Utility method that runs a SELECT FOR UPDATE against all
        Person instances. After the select_for_update, it attempts
        to update the name of the only record, save, and commit.

        This function expects to run in a separate thread.
        """
        status.append('started')
        try:
            # We need to enter transaction management again, as this is done on
            # per-thread basis
            with transaction.atomic():
                people = list(
                    Person.objects.all().select_for_update(nowait=nowait)
                )
                people[0].name = 'Fred'
                people[0].save()
        except DatabaseError as e:
            status.append(e)
        finally:
            # This method is run in a separate thread. It uses its own
            # database connection. Close it without waiting for the GC.
            connection.close()

    @skipUnlessDBFeature('has_select_for_update')
    @skipUnlessDBFeature('supports_transactions')
    def test_block(self):
        """
        Check that a thread running a select_for_update that
        accesses rows being touched by a similar operation
        on another connection blocks correctly.
        """
        # First, let's start the transaction in our thread.
        self.start_blocking_transaction()

        # Now, try it again using the ORM's select_for_update
        # facility. Do this in a separate thread.
        status = []
        thread = threading.Thread(
            target=self.run_select_for_update, args=(status,)
        )

        # The thread should immediately block, but we'll sleep
        # for a bit to make sure.
        thread.start()
        sanity_count = 0
        while len(status) != 1 and sanity_count < 10:
            sanity_count += 1
            time.sleep(1)
        if sanity_count >= 10:
            raise ValueError('Thread did not run and block')

        # Check the person hasn't been updated. Since this isn't
        # using FOR UPDATE, it won't block.
        p = Person.objects.get(pk=self.person.pk)
        self.assertEqual('Reinhardt', p.name)

        # When we end our blocking transaction, our thread should
        # be able to continue.
        self.end_blocking_transaction()
        thread.join(5.0)

        # Check the thread has finished. Assuming it has, we should
        # find that it has updated the person's name.
        # is_alive() is the PEP 8 spelling; the camelCase alias isAlive()
        # was removed in Python 3.9.
        self.assertFalse(thread.is_alive())

        # We must commit the transaction to ensure that MySQL gets a fresh read,
        # since by default it runs in REPEATABLE READ mode
        transaction.commit()

        p = Person.objects.get(pk=self.person.pk)
        self.assertEqual('Fred', p.name)

    @skipUnlessDBFeature('has_select_for_update')
    def test_raw_lock_not_available(self):
        """
        Check that running a raw query which can't obtain a FOR UPDATE lock
        raises the correct exception
        """
        self.start_blocking_transaction()

        def raw(status):
            # Worker: attempt a raw FOR UPDATE NOWAIT query while the rows
            # are locked; record the resulting DatabaseError.
            try:
                list(
                    Person.objects.raw(
                        'SELECT * FROM %s %s' % (
                            Person._meta.db_table,
                            connection.ops.for_update_sql(nowait=True)
                        )
                    )
                )
            except DatabaseError as e:
                status.append(e)
            finally:
                # This method is run in a separate thread. It uses its own
                # database connection. Close it without waiting for the GC.
                connection.close()

        status = []
        thread = threading.Thread(target=raw, kwargs={'status': status})
        thread.start()
        time.sleep(1)
        thread.join()
        self.end_blocking_transaction()
        self.assertIsInstance(status[-1], DatabaseError)

    @skipUnlessDBFeature('has_select_for_update')
    @override_settings(DATABASE_ROUTERS=[TestRouter()])
    def test_select_for_update_on_multidb(self):
        # select_for_update() implies a write, so the router must route the
        # queryset to the database selected for writes.
        query = Person.objects.select_for_update()
        self.assertEqual(router.db_for_write(Person), query.db)

    @skipUnlessDBFeature('has_select_for_update')
    def test_select_for_update_with_get(self):
        with transaction.atomic():
            person = Person.objects.select_for_update().get(name='Reinhardt')
        self.assertEqual(person.name, 'Reinhardt')
|
withorwithoutgod/tacozmq | refs/heads/master | cherrypy/test/test_encoding.py | 12 |
import gzip
import sys
import cherrypy
from cherrypy._cpcompat import BytesIO, IncompleteRead, ntob, ntou
# Fixture strings used throughout the encoding tests.
# europoundUnicode: two non-ASCII characters (euro-ish control char + pound sign).
europoundUnicode = ntou('\x80\xa3')
# A unicode sentence containing CJK characters ('escape' tells ntou to apply
# unicode-escape decoding on Python 2).
sing = ntou("\u6bdb\u6cfd\u4e1c: Sing, Little Birdie?", 'escape')
# The same sentence pre-encoded in the two charsets the tests exercise.
sing8 = sing.encode('utf-8')
sing16 = sing.encode('utf-16')
from cherrypy.test import helper
class EncodingTests(helper.CPWebCase):
    """Functional tests for CherryPy's encode, decode and gzip tools."""

    def setup_server():
        # Build the test application tree. Called once by the test helper;
        # note it is converted to a staticmethod below, hence no ``self``.
        class Root:
            def index(self, param):
                # The decode tool should hand the handler the exact unicode
                # value that was sent in the query string / form body.
                assert param == europoundUnicode, "%r != %r" % (param, europoundUnicode)
                yield europoundUnicode
            index.exposed = True

            def mao_zedong(self):
                return sing
            mao_zedong.exposed = True

            def utf8(self):
                # Response encoding pinned to utf-8 via _cp_config below.
                return sing8
            utf8.exposed = True
            utf8._cp_config = {'tools.encode.encoding': 'utf-8'}

            def cookies_and_headers(self):
                # if the headers have non-ascii characters and a cookie has
                # any part which is unicode (even ascii), the response
                # should not fail.
                cherrypy.response.cookie['candy'] = 'bar'
                cherrypy.response.cookie['candy']['domain'] = 'cherrypy.org'
                cherrypy.response.headers['Some-Header'] = 'My d\xc3\xb6g has fleas'
                return 'Any content'
            cookies_and_headers.exposed = True

            def reqparams(self, *args, **kwargs):
                # Echo the decoded request params as "key: value" pairs so
                # tests can assert on exactly what was decoded.
                return ntob(', ').join([": ".join((k, v)).encode('utf8')
                                        for k, v in cherrypy.request.params.items()])
            reqparams.exposed = True

            def nontext(self, *args, **kwargs):
                # Non-text content type; encode tool behaviour is controlled
                # by the text_only/add_charset config below.
                cherrypy.response.headers['Content-Type'] = 'application/binary'
                return '\x00\x01\x02\x03'
            nontext.exposed = True
            nontext._cp_config = {'tools.encode.text_only': False,
                                  'tools.encode.add_charset': True,
                                  }

        class GZIP:
            def index(self):
                yield "Hello, world"
            index.exposed = True

            def noshow(self):
                # Test for ticket #147, where yield showed no exceptions (content-
                # encoding was still gzip even though traceback wasn't zipped).
                raise IndexError()
                yield "Here be dragons"  # intentionally unreachable: makes this a generator
            noshow.exposed = True
            # Turn encoding off so the gzip tool is the one doing the collapse.
            noshow._cp_config = {'tools.encode.on': False}

            def noshow_stream(self):
                # Test for ticket #147, where yield showed no exceptions (content-
                # encoding was still gzip even though traceback wasn't zipped).
                raise IndexError()
                yield "Here be dragons"  # intentionally unreachable: makes this a generator
            noshow_stream.exposed = True
            noshow_stream._cp_config = {'response.stream': True}

        class Decode:
            def extra_charset(self, *args, **kwargs):
                return ', '.join([": ".join((k, v))
                                  for k, v in cherrypy.request.params.items()])
            extra_charset.exposed = True
            # utf-16 is tried first, then the normal default charsets.
            extra_charset._cp_config = {
                'tools.decode.on': True,
                'tools.decode.default_encoding': ['utf-16'],
            }

            def force_charset(self, *args, **kwargs):
                return ', '.join([": ".join((k, v))
                                  for k, v in cherrypy.request.params.items()])
            force_charset.exposed = True
            # utf-16 is forced: no fallback charsets are attempted.
            force_charset._cp_config = {
                'tools.decode.on': True,
                'tools.decode.encoding': 'utf-16',
            }

        root = Root()
        root.gzip = GZIP()
        root.decode = Decode()
        cherrypy.tree.mount(root, config={'/gzip': {'tools.gzip.on': True}})
    setup_server = staticmethod(setup_server)

    def test_query_string_decoding(self):
        europoundUtf8 = europoundUnicode.encode('utf-8')
        self.getPage(ntob('/?param=') + europoundUtf8)
        self.assertBody(europoundUtf8)
        # Encoded utf8 query strings MUST be parsed correctly.
        # Here, q is the POUND SIGN U+00A3 encoded in utf8 and then %HEX
        self.getPage("/reqparams?q=%C2%A3")
        # The return value will be encoded as utf8.
        self.assertBody(ntob("q: \xc2\xa3"))
        # Query strings that are incorrectly encoded MUST raise 404.
        # Here, q is the POUND SIGN U+00A3 encoded in latin1 and then %HEX
        self.getPage("/reqparams?q=%A3")
        self.assertStatus(404)
        self.assertErrorPage(404,
            "The given query string could not be processed. Query "
            "strings for this resource must be encoded with 'utf8'.")

    def test_urlencoded_decoding(self):
        # Test the decoding of an application/x-www-form-urlencoded entity.
        europoundUtf8 = europoundUnicode.encode('utf-8')
        body = ntob("param=") + europoundUtf8
        self.getPage('/', method='POST',
                     headers=[("Content-Type", "application/x-www-form-urlencoded"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertBody(europoundUtf8)
        # Encoded utf8 entities MUST be parsed and decoded correctly.
        # Here, q is the POUND SIGN U+00A3 encoded in utf8
        body = ntob("q=\xc2\xa3")
        self.getPage('/reqparams', method='POST',
                     headers=[("Content-Type", "application/x-www-form-urlencoded"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertBody(ntob("q: \xc2\xa3"))
        # ...and in utf16, which is not in the default attempt_charsets list:
        body = ntob("\xff\xfeq\x00=\xff\xfe\xa3\x00")
        self.getPage('/reqparams', method='POST',
                     headers=[("Content-Type", "application/x-www-form-urlencoded;charset=utf-16"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertBody(ntob("q: \xc2\xa3"))
        # Entities that are incorrectly encoded MUST raise 400.
        # Here, q is the POUND SIGN U+00A3 encoded in utf16, but
        # the Content-Type incorrectly labels it utf-8.
        body = ntob("\xff\xfeq\x00=\xff\xfe\xa3\x00")
        self.getPage('/reqparams', method='POST',
                     headers=[("Content-Type", "application/x-www-form-urlencoded;charset=utf-8"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertStatus(400)
        self.assertErrorPage(400,
            "The request entity could not be decoded. The following charsets "
            "were attempted: ['utf-8']")

    def test_decode_tool(self):
        # An extra charset should be tried first, and succeed if it matches.
        # Here, we add utf-16 as a charset and pass a utf-16 body.
        body = ntob("\xff\xfeq\x00=\xff\xfe\xa3\x00")
        self.getPage('/decode/extra_charset', method='POST',
                     headers=[("Content-Type", "application/x-www-form-urlencoded"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertBody(ntob("q: \xc2\xa3"))
        # An extra charset should be tried first, and continue to other default
        # charsets if it doesn't match.
        # Here, we add utf-16 as a charset but still pass a utf-8 body.
        body = ntob("q=\xc2\xa3")
        self.getPage('/decode/extra_charset', method='POST',
                     headers=[("Content-Type", "application/x-www-form-urlencoded"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertBody(ntob("q: \xc2\xa3"))
        # An extra charset should error if force is True and it doesn't match.
        # Here, we force utf-16 as a charset but still pass a utf-8 body.
        body = ntob("q=\xc2\xa3")
        self.getPage('/decode/force_charset', method='POST',
                     headers=[("Content-Type", "application/x-www-form-urlencoded"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertErrorPage(400,
            "The request entity could not be decoded. The following charsets "
            "were attempted: ['utf-16']")

    def test_multipart_decoding(self):
        # Test the decoding of a multipart entity when the charset (utf16) is
        # explicitly given.
        body = ntob('\r\n'.join(['--X',
                                 'Content-Type: text/plain;charset=utf-16',
                                 'Content-Disposition: form-data; name="text"',
                                 '',
                                 '\xff\xfea\x00b\x00\x1c c\x00',
                                 '--X',
                                 'Content-Type: text/plain;charset=utf-16',
                                 'Content-Disposition: form-data; name="submit"',
                                 '',
                                 '\xff\xfeC\x00r\x00e\x00a\x00t\x00e\x00',
                                 '--X--']))
        self.getPage('/reqparams', method='POST',
                     headers=[("Content-Type", "multipart/form-data;boundary=X"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertBody(ntob("text: ab\xe2\x80\x9cc, submit: Create"))

    def test_multipart_decoding_no_charset(self):
        # Test the decoding of a multipart entity when the charset (utf8) is
        # NOT explicitly given, but is in the list of charsets to attempt.
        body = ntob('\r\n'.join(['--X',
                                 'Content-Disposition: form-data; name="text"',
                                 '',
                                 '\xe2\x80\x9c',
                                 '--X',
                                 'Content-Disposition: form-data; name="submit"',
                                 '',
                                 'Create',
                                 '--X--']))
        self.getPage('/reqparams', method='POST',
                     headers=[("Content-Type", "multipart/form-data;boundary=X"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertBody(ntob("text: \xe2\x80\x9c, submit: Create"))

    def test_multipart_decoding_no_successful_charset(self):
        # Test the decoding of a multipart entity when the charset (utf16) is
        # NOT explicitly given, and is NOT in the list of charsets to attempt.
        body = ntob('\r\n'.join(['--X',
                                 'Content-Disposition: form-data; name="text"',
                                 '',
                                 '\xff\xfea\x00b\x00\x1c c\x00',
                                 '--X',
                                 'Content-Disposition: form-data; name="submit"',
                                 '',
                                 '\xff\xfeC\x00r\x00e\x00a\x00t\x00e\x00',
                                 '--X--']))
        self.getPage('/reqparams', method='POST',
                     headers=[("Content-Type", "multipart/form-data;boundary=X"),
                              ("Content-Length", str(len(body))),
                              ],
                     body=body),
        self.assertStatus(400)
        self.assertErrorPage(400,
            "The request entity could not be decoded. The following charsets "
            "were attempted: ['us-ascii', 'utf-8']")

    def test_nontext(self):
        # With text_only off and add_charset on, even a binary content type
        # gets a charset parameter appended.
        self.getPage('/nontext')
        self.assertHeader('Content-Type', 'application/binary;charset=utf-8')
        self.assertBody('\x00\x01\x02\x03')

    def testEncoding(self):
        # Default encoding should be utf-8
        self.getPage('/mao_zedong')
        self.assertBody(sing8)
        # Ask for utf-16.
        self.getPage('/mao_zedong', [('Accept-Charset', 'utf-16')])
        self.assertHeader('Content-Type', 'text/html;charset=utf-16')
        self.assertBody(sing16)
        # Ask for multiple encodings. ISO-8859-1 should fail, and utf-16
        # should be produced.
        self.getPage('/mao_zedong', [('Accept-Charset',
                                      'iso-8859-1;q=1, utf-16;q=0.5')])
        self.assertBody(sing16)
        # The "*" value should default to our default_encoding, utf-8
        self.getPage('/mao_zedong', [('Accept-Charset', '*;q=1, utf-7;q=.2')])
        self.assertBody(sing8)
        # Only allow iso-8859-1, which should fail and raise 406.
        self.getPage('/mao_zedong', [('Accept-Charset', 'iso-8859-1, *;q=0')])
        self.assertStatus("406 Not Acceptable")
        self.assertInBody("Your client sent this Accept-Charset header: "
                          "iso-8859-1, *;q=0. We tried these charsets: "
                          "iso-8859-1.")
        # Ask for x-mac-ce, which should be unknown. See ticket #569.
        self.getPage('/mao_zedong', [('Accept-Charset',
                                      'us-ascii, ISO-8859-1, x-mac-ce')])
        self.assertStatus("406 Not Acceptable")
        self.assertInBody("Your client sent this Accept-Charset header: "
                          "us-ascii, ISO-8859-1, x-mac-ce. We tried these "
                          "charsets: ISO-8859-1, us-ascii, x-mac-ce.")
        # Test the 'encoding' arg to encode.
        self.getPage('/utf8')
        self.assertBody(sing8)
        self.getPage('/utf8', [('Accept-Charset', 'us-ascii, ISO-8859-1')])
        self.assertStatus("406 Not Acceptable")

    def testGzip(self):
        # Build the expected gzip output locally for comparison.
        zbuf = BytesIO()
        zfile = gzip.GzipFile(mode='wb', fileobj=zbuf, compresslevel=9)
        zfile.write(ntob("Hello, world"))
        zfile.close()

        self.getPage('/gzip/', headers=[("Accept-Encoding", "gzip")])
        self.assertInBody(zbuf.getvalue()[:3])
        self.assertHeader("Vary", "Accept-Encoding")
        self.assertHeader("Content-Encoding", "gzip")

        # Test when gzip is denied.
        self.getPage('/gzip/', headers=[("Accept-Encoding", "identity")])
        self.assertHeader("Vary", "Accept-Encoding")
        self.assertNoHeader("Content-Encoding")
        self.assertBody("Hello, world")

        self.getPage('/gzip/', headers=[("Accept-Encoding", "gzip;q=0")])
        self.assertHeader("Vary", "Accept-Encoding")
        self.assertNoHeader("Content-Encoding")
        self.assertBody("Hello, world")

        self.getPage('/gzip/', headers=[("Accept-Encoding", "*;q=0")])
        self.assertStatus(406)
        self.assertNoHeader("Content-Encoding")
        self.assertErrorPage(406, "identity, gzip")

        # Test for ticket #147
        self.getPage('/gzip/noshow', headers=[("Accept-Encoding", "gzip")])
        self.assertNoHeader('Content-Encoding')
        self.assertStatus(500)
        self.assertErrorPage(500, pattern="IndexError\n")

        # In this case, there's nothing we can do to deliver a
        # readable page, since 1) the gzip header is already set,
        # and 2) we may have already written some of the body.
        # The fix is to never stream yields when using gzip.
        if (cherrypy.server.protocol_version == "HTTP/1.0" or
                getattr(cherrypy.server, "using_apache", False)):
            self.getPage('/gzip/noshow_stream',
                         headers=[("Accept-Encoding", "gzip")])
            self.assertHeader('Content-Encoding', 'gzip')
            self.assertInBody('\x1f\x8b\x08\x00')
        else:
            # The wsgiserver will simply stop sending data, and the HTTP client
            # will error due to an incomplete chunk-encoded stream.
            self.assertRaises((ValueError, IncompleteRead), self.getPage,
                              '/gzip/noshow_stream',
                              headers=[("Accept-Encoding", "gzip")])

    def test_UnicodeHeaders(self):
        self.getPage('/cookies_and_headers')
        self.assertBody('Any content')
|
marctc/wagtail | refs/heads/master | wagtail/wagtailsites/urls.py | 10 | from django.conf.urls import url
from wagtail.wagtailsites import views
# URL routes for the Wagtail sites admin: list, create, edit and delete
# views. The (\d+) capture group is the site's primary key, passed
# positionally to the view.
urlpatterns = [
    url(r'^$', views.index, name='wagtailsites_index'),
    url(r'^new/$', views.create, name='wagtailsites_create'),
    url(r'^(\d+)/$', views.edit, name='wagtailsites_edit'),
    url(r'^(\d+)/delete/$', views.delete, name='wagtailsites_delete'),
]
|
AlphaSmartDog/DeepLearningNotes | refs/heads/master | Note-6 A3CNet/Note-6.4 HS300指数增强/sonnet/python/modules/rnn_core_test.py | 9 | # Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Recurrent cores in sonnet."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import mock
import numpy as np
import sonnet as snt
from sonnet.testing import parameterized
import tensorflow as tf
from tensorflow.python.util import nest
# Shared fixtures for the test cases below.
BATCH_SIZE = 5
# A nested trainability mask matching the nested state structure.
MASK_TUPLE = (True, (False, True))
# State sizes used to parameterize tests: a nested tuple and a single element.
_state_size_tuple = (3, (4, 5))
_state_size_element = 6
# Use patch to instantiate RNNCore: clearing __abstractmethods__ lets the
# abstract base class be constructed directly for testing.
@mock.patch.multiple(snt.RNNCore, __abstractmethods__=set())
class RNNCoreTest(tf.test.TestCase, parameterized.ParameterizedTestCase):
    """Tests for the initial-state machinery of snt.RNNCore."""

    @parameterized.Parameters(
        (False, False, _state_size_tuple),
        (False, True, _state_size_tuple),
        (True, False, _state_size_tuple),
        (True, True, _state_size_tuple),
        (False, False, _state_size_element),
        (False, True, _state_size_element),
        (True, False, _state_size_element),
        (True, True, _state_size_element))
    def testInitialStateTuple(self, trainable, use_custom_initial_value,
                              state_size):
        # Checks structure, shape and value of initial_state() for every
        # combination of trainability / custom initializer / state structure.
        batch_size = 6

        # Set the attribute to the class since it we can't set properties of
        # abstract classes
        snt.RNNCore.state_size = state_size
        flat_state_size = nest.flatten(state_size)
        core = snt.RNNCore(name="dummy_core")

        if use_custom_initial_value:
            # Every flat state component is initialized to the constant 2.
            flat_initializer = [tf.constant_initializer(2)] * len(flat_state_size)
            trainable_initializers = nest.pack_sequence_as(
                structure=state_size, flat_sequence=flat_initializer)
        else:
            trainable_initializers = None

        initial_state = core.initial_state(
            batch_size, dtype=tf.float32, trainable=trainable,
            trainable_initializers=trainable_initializers)

        # The returned state must mirror the nested structure of state_size.
        nest.assert_same_structure(initial_state, state_size)
        flat_initial_state = nest.flatten(initial_state)

        for state, size in zip(flat_initial_state, flat_state_size):
            self.assertEqual(state.get_shape(), [batch_size, size])

        with self.test_session() as sess:
            tf.global_variables_initializer().run()
            flat_initial_state_value = sess.run(flat_initial_state)
            for value, size in zip(flat_initial_state_value, flat_state_size):
                expected_initial_state = np.empty([batch_size, size])
                if not trainable:
                    # Non-trainable initial state defaults to zeros.
                    expected_initial_state.fill(0)
                elif use_custom_initial_value:
                    expected_initial_state.fill(2)
                else:
                    # Trainable default initializer: every row of the batch
                    # must be the same (tiled) learned row.
                    value_row = value[0]
                    expected_initial_state = np.tile(value_row, (batch_size, 1))
                self.assertAllClose(value, expected_initial_state)

    @parameterized.Parameters(
        (False, _state_size_tuple),
        (True, _state_size_tuple),
        (False, _state_size_element),
        (True, _state_size_element))
    def testRegularizers(self, trainable, state_size):
        # Regularizers must be registered in the graph collection only when
        # the initial state is trainable.
        batch_size = 6

        # Set the attribute to the class since it we can't set properties of
        # abstract classes
        snt.RNNCore.state_size = state_size
        flat_state_size = nest.flatten(state_size)
        core = snt.RNNCore(name="dummy_core")

        flat_regularizer = ([tf.contrib.layers.l1_regularizer(scale=0.5)] *
                            len(flat_state_size))
        trainable_regularizers = nest.pack_sequence_as(
            structure=state_size, flat_sequence=flat_regularizer)

        core.initial_state(batch_size, dtype=tf.float32, trainable=trainable,
                           trainable_regularizers=trainable_regularizers)

        graph_regularizers = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
        if not trainable:
            self.assertFalse(graph_regularizers)
        else:
            for i in range(len(flat_state_size)):
                self.assertRegexpMatches(
                    graph_regularizers[i].name, ".*l1_regularizer.*")
class TrainableInitialState(tf.test.TestCase,
                            parameterized.ParameterizedTestCase):
    """Tests for snt.TrainableInitialState."""

    @parameterized.Parameters((True, MASK_TUPLE), (True, None), (False, False),
                              (False, None))
    def testInitialStateComputation(self, tuple_state, mask):
        # The module must return the provided values, and only the components
        # marked trainable by ``mask`` may change when variables are updated.
        if tuple_state:
            initial_state = (tf.fill([BATCH_SIZE, 6], 2),
                             (tf.fill([BATCH_SIZE, 7], 3),
                              tf.fill([BATCH_SIZE, 8], 4)))
        else:
            initial_state = tf.fill([BATCH_SIZE, 9], 10)

        trainable_state_module = snt.TrainableInitialState(initial_state, mask=mask)
        trainable_state = trainable_state_module()

        nest.assert_same_structure(initial_state, trainable_state)
        flat_initial_state = nest.flatten(initial_state)
        flat_trainable_state = nest.flatten(trainable_state)
        if mask is not None:
            flat_mask = nest.flatten(mask)
        else:
            # No mask means every component is trainable.
            flat_mask = (True,) * len(flat_initial_state)

        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())

            # Check all variables are initialized correctly and return a state that
            # has the same as it is provided.
            # (NOTE: the loop variables deliberately shadow the outer
            # initial_state/trainable_state names.)
            for trainable_state, initial_state in zip(flat_trainable_state,
                                                      flat_initial_state):
                self.assertAllEqual(sess.run(trainable_state), sess.run(initial_state))

            # Change the value of all the trainable variables to ones.
            for variable in tf.trainable_variables():
                sess.run(tf.assign(variable, tf.ones_like(variable)))

            # Check that the values of the initial_states have changed if and only if
            # they are trainable.
            for trainable_state, initial_state, mask in zip(flat_trainable_state,
                                                            flat_initial_state,
                                                            flat_mask):
                trainable_state_value = sess.run(trainable_state)
                initial_state_value = sess.run(initial_state)
                if mask:
                    expected_value = np.ones_like(initial_state_value)
                else:
                    expected_value = initial_state_value
                self.assertAllEqual(trainable_state_value, expected_value)

    def testBadArguments(self):
        # A non-boolean entry in the mask must be rejected with TypeError.
        initial_state = (tf.random_normal([BATCH_SIZE, 6]),
                         (tf.random_normal([BATCH_SIZE, 7]),
                          tf.random_normal([BATCH_SIZE, 8])))
        with self.assertRaises(TypeError):
            snt.TrainableInitialState(initial_state, mask=(True, (False, "foo")))

        snt.TrainableInitialState(initial_state, mask=(True, (False, True)))()
        with self.test_session() as sess:
            with self.assertRaises(tf.errors.InvalidArgumentError):
                # Check that the class checks that the elements of initial_state have
                # identical rows.
                sess.run(tf.global_variables_initializer())
# Run the test suite when executed directly.
if __name__ == "__main__":
    tf.test.main()
|
daoluan/decode-Django | refs/heads/master | Django-1.5.1/django/utils/translation/__init__.py | 110 | """
Internationalization support.
"""
from __future__ import unicode_literals
from django.utils.encoding import force_text
from django.utils.functional import lazy
from django.utils import six
# Public API of this module: both bytestring (gettext*) and unicode
# (ugettext*) variants are exposed, plus their lazy counterparts.
__all__ = [
    'activate', 'deactivate', 'override', 'deactivate_all',
    'get_language', 'get_language_from_request',
    'get_language_info', 'get_language_bidi',
    'check_for_language', 'to_locale', 'templatize', 'string_concat',
    'gettext', 'gettext_lazy', 'gettext_noop',
    'ugettext', 'ugettext_lazy', 'ugettext_noop',
    'ngettext', 'ngettext_lazy',
    'ungettext', 'ungettext_lazy',
    'pgettext', 'pgettext_lazy',
    'npgettext', 'npgettext_lazy',
]
# Here be dragons, so a short explanation of the logic won't hurt:
# We are trying to solve two problems: (1) access settings, in particular
# settings.USE_I18N, as late as possible, so that modules can be imported
# without having to first configure Django, and (2) if some other code creates
# a reference to one of these functions, don't break that reference when we
# replace the functions with their real counterparts (once we do access the
# settings).
class Trans(object):
    """
    The purpose of this class is to store the actual translation function upon
    receiving the first call to that function. After this is done, changes to
    USE_I18N will have no effect to which function is served upon request. If
    your tests rely on changing USE_I18N, you can delete all the functions
    from _trans.__dict__.

    Note that storing the function with setattr will have a noticeable
    performance effect, as access to the function goes the normal path,
    instead of using __getattr__.
    """

    def __getattr__(self, real_name):
        # Deferred settings access: only the first attribute lookup touches
        # settings, so importing this module never requires configuration.
        from django.conf import settings
        if settings.USE_I18N:
            from django.utils.translation import trans_real as trans
        else:
            from django.utils.translation import trans_null as trans
        # Cache the resolved function on the instance so subsequent lookups
        # bypass __getattr__ entirely.
        setattr(self, real_name, getattr(trans, real_name))
        return getattr(trans, real_name)

_trans = Trans()

# The Trans class is no more needed, so remove it from the namespace.
del Trans
# Thin delegators: each public function forwards to the lazily-resolved
# implementation held by _trans (trans_real or trans_null, depending on
# settings.USE_I18N at first use).

def gettext_noop(message):
    # Mark a string for translation without translating it now.
    return _trans.gettext_noop(message)

ugettext_noop = gettext_noop

def gettext(message):
    return _trans.gettext(message)

def ngettext(singular, plural, number):
    return _trans.ngettext(singular, plural, number)

def ugettext(message):
    return _trans.ugettext(message)

def ungettext(singular, plural, number):
    return _trans.ungettext(singular, plural, number)

def pgettext(context, message):
    return _trans.pgettext(context, message)

def npgettext(context, singular, plural, number):
    return _trans.npgettext(context, singular, plural, number)

# Lazy variants: the translation is deferred until the result is used, so
# the language active at render time (not definition time) is applied.
gettext_lazy = lazy(gettext, str)
ngettext_lazy = lazy(ngettext, str)
ugettext_lazy = lazy(ugettext, six.text_type)
ungettext_lazy = lazy(ungettext, six.text_type)
pgettext_lazy = lazy(pgettext, six.text_type)
npgettext_lazy = lazy(npgettext, six.text_type)

def activate(language):
    # Install the given language as the active translation for this thread.
    return _trans.activate(language)

def deactivate():
    # Remove the active translation for this thread.
    return _trans.deactivate()
class override(object):
    """
    Context manager that temporarily activates ``language`` (or deactivates
    all translation when ``language`` is None) and restores the previous
    state on exit.
    """

    def __init__(self, language, deactivate=False):
        self.language = language
        # If True, deactivate translation on exit instead of restoring
        # the previously active language.
        self.deactivate = deactivate
        # NOTE(review): the previous language is captured at construction
        # time, not in __enter__ — reusing or delaying use of one instance
        # may restore a stale language. Confirm this is intended.
        self.old_language = get_language()

    def __enter__(self):
        if self.language is not None:
            activate(self.language)
        else:
            deactivate_all()

    def __exit__(self, exc_type, exc_value, traceback):
        if self.deactivate:
            deactivate()
        else:
            activate(self.old_language)
# More thin delegators to the lazily-selected _trans implementation.

def get_language():
    return _trans.get_language()

def get_language_bidi():
    # True if the active language is written right-to-left.
    return _trans.get_language_bidi()

def check_for_language(lang_code):
    return _trans.check_for_language(lang_code)

def to_locale(language):
    return _trans.to_locale(language)

def get_language_from_request(request, check_path=False):
    return _trans.get_language_from_request(request, check_path)

def get_language_from_path(path):
    return _trans.get_language_from_path(path)

def templatize(src, origin=None):
    return _trans.templatize(src, origin)

def deactivate_all():
    return _trans.deactivate_all()
def _string_concat(*strings):
    """
    Lazy variant of string concatenation, needed for translations that are
    constructed from multiple parts.
    """
    return ''.join(force_text(piece) for piece in strings)

string_concat = lazy(_string_concat, six.text_type)
def get_language_info(lang_code):
    """
    Return the LANG_INFO dict (name, bidi flag, etc.) for ``lang_code``.

    Raises KeyError with a descriptive message for unknown codes.
    """
    from django.conf.locale import LANG_INFO
    try:
        return LANG_INFO[lang_code]
    except KeyError:
        # Re-raise with a clearer message than the bare language code.
        raise KeyError("Unknown language code %r." % lang_code)
|
rubencabrera/odoo | refs/heads/8.0 | addons/l10n_be_hr_payroll_account/__init__.py | 430 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
maxwell-demon/grpc | refs/heads/master | src/python/grpcio/grpc/_adapter/_types.py | 4 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import abc
import collections
import enum
from grpc._cython import cygrpc
# The enums below re-export cygrpc constants as Python (Int)Enums so the rest
# of the adapter can use symbolic names instead of raw cython values.
class GrpcChannelArgumentKeys(enum.Enum):
  """Mirrors keys used in grpc_channel_args for GRPC-specific arguments."""
  SSL_TARGET_NAME_OVERRIDE = 'grpc.ssl_target_name_override'
@enum.unique
class CallError(enum.IntEnum):
  """Mirrors grpc_call_error in the C core."""
  OK = cygrpc.CallError.ok
  ERROR = cygrpc.CallError.error
  ERROR_NOT_ON_SERVER = cygrpc.CallError.not_on_server
  ERROR_NOT_ON_CLIENT = cygrpc.CallError.not_on_client
  ERROR_ALREADY_ACCEPTED = cygrpc.CallError.already_accepted
  ERROR_ALREADY_INVOKED = cygrpc.CallError.already_invoked
  ERROR_NOT_INVOKED = cygrpc.CallError.not_invoked
  ERROR_ALREADY_FINISHED = cygrpc.CallError.already_finished
  ERROR_TOO_MANY_OPERATIONS = cygrpc.CallError.too_many_operations
  ERROR_INVALID_FLAGS = cygrpc.CallError.invalid_flags
  ERROR_INVALID_METADATA = cygrpc.CallError.invalid_metadata
@enum.unique
class StatusCode(enum.IntEnum):
  """Mirrors grpc_status_code in the C core."""
  OK = cygrpc.StatusCode.ok
  CANCELLED = cygrpc.StatusCode.cancelled
  UNKNOWN = cygrpc.StatusCode.unknown
  INVALID_ARGUMENT = cygrpc.StatusCode.invalid_argument
  DEADLINE_EXCEEDED = cygrpc.StatusCode.deadline_exceeded
  NOT_FOUND = cygrpc.StatusCode.not_found
  ALREADY_EXISTS = cygrpc.StatusCode.already_exists
  PERMISSION_DENIED = cygrpc.StatusCode.permission_denied
  RESOURCE_EXHAUSTED = cygrpc.StatusCode.resource_exhausted
  FAILED_PRECONDITION = cygrpc.StatusCode.failed_precondition
  ABORTED = cygrpc.StatusCode.aborted
  OUT_OF_RANGE = cygrpc.StatusCode.out_of_range
  UNIMPLEMENTED = cygrpc.StatusCode.unimplemented
  INTERNAL = cygrpc.StatusCode.internal
  UNAVAILABLE = cygrpc.StatusCode.unavailable
  DATA_LOSS = cygrpc.StatusCode.data_loss
  UNAUTHENTICATED = cygrpc.StatusCode.unauthenticated
@enum.unique
class OpWriteFlags(enum.IntEnum):
  """Mirrors defined write-flag constants in the C core."""
  WRITE_BUFFER_HINT = cygrpc.WriteFlag.buffer_hint
  WRITE_NO_COMPRESS = cygrpc.WriteFlag.no_compress
@enum.unique
class OpType(enum.IntEnum):
  """Mirrors grpc_op_type in the C core."""
  SEND_INITIAL_METADATA = cygrpc.OperationType.send_initial_metadata
  SEND_MESSAGE = cygrpc.OperationType.send_message
  SEND_CLOSE_FROM_CLIENT = cygrpc.OperationType.send_close_from_client
  SEND_STATUS_FROM_SERVER = cygrpc.OperationType.send_status_from_server
  RECV_INITIAL_METADATA = cygrpc.OperationType.receive_initial_metadata
  RECV_MESSAGE = cygrpc.OperationType.receive_message
  RECV_STATUS_ON_CLIENT = cygrpc.OperationType.receive_status_on_client
  RECV_CLOSE_ON_SERVER = cygrpc.OperationType.receive_close_on_server
@enum.unique
class EventType(enum.IntEnum):
  """Mirrors grpc_completion_type in the C core."""
  QUEUE_SHUTDOWN = cygrpc.CompletionType.queue_shutdown
  QUEUE_TIMEOUT = cygrpc.CompletionType.queue_timeout
  OP_COMPLETE = cygrpc.CompletionType.operation_complete
@enum.unique
class ConnectivityState(enum.IntEnum):
  """Mirrors grpc_connectivity_state in the C core."""
  IDLE = cygrpc.ConnectivityState.idle
  CONNECTING = cygrpc.ConnectivityState.connecting
  READY = cygrpc.ConnectivityState.ready
  TRANSIENT_FAILURE = cygrpc.ConnectivityState.transient_failure
  FATAL_FAILURE = cygrpc.ConnectivityState.fatal_failure
class Status(collections.namedtuple(
    'Status', [
        'code',
        'details',
    ])):
  """The end status of a GRPC call.
  Attributes:
    code (StatusCode): the status code with which the call ended.
    details (str): human-readable text accompanying the status code.
  """
class CallDetails(collections.namedtuple(
    'CallDetails', [
        'method',
        'host',
        'deadline',
    ])):
  """Provides information to the server about the client's call.
  Attributes:
    method (str): the method name the client invoked.
    host (str): the host the client addressed.
    deadline (float): absolute deadline in seconds from the Python epoch.
  """
class OpArgs(collections.namedtuple(
    'OpArgs', [
        'type',
        'initial_metadata',
        'trailing_metadata',
        'message',
        'status',
        'write_flags',
    ])):
  """Arguments passed into a GRPC operation.

  Instances are normally built through the static factory methods below,
  which fill the fields that do not apply to an operation type with None.

  Attributes:
    type (OpType): the kind of operation these arguments describe.
    initial_metadata (sequence of 2-sequence of str): Only valid if type ==
      OpType.SEND_INITIAL_METADATA, else is None.
    trailing_metadata (sequence of 2-sequence of str): Only valid if type ==
      OpType.SEND_STATUS_FROM_SERVER, else is None.
    message (bytes): Only valid if type == OpType.SEND_MESSAGE, else is None.
    status (Status): Only valid if type == OpType.SEND_STATUS_FROM_SERVER, else
      is None.
    write_flags (int): a bit OR'ing of 0 or more OpWriteFlags values.
  """

  @staticmethod
  def send_initial_metadata(initial_metadata):
    """Args for sending the initial metadata of a call."""
    return OpArgs(OpType.SEND_INITIAL_METADATA, initial_metadata, None, None, None, 0)

  @staticmethod
  def send_message(message, flags):
    """Args for sending one message with the given write flags."""
    return OpArgs(OpType.SEND_MESSAGE, None, None, message, None, flags)

  @staticmethod
  def send_close_from_client():
    """Args for half-closing the call from the client side."""
    return OpArgs(OpType.SEND_CLOSE_FROM_CLIENT, None, None, None, None, 0)

  @staticmethod
  def send_status_from_server(trailing_metadata, status_code, status_details):
    """Args for terminating the call from the server side."""
    return OpArgs(OpType.SEND_STATUS_FROM_SERVER, None, trailing_metadata, None, Status(status_code, status_details), 0)

  @staticmethod
  def recv_initial_metadata():
    """Args for receiving the peer's initial metadata."""
    # A stray trailing semicolon was removed here; harmless, but inconsistent
    # with the sibling constructors.
    return OpArgs(OpType.RECV_INITIAL_METADATA, None, None, None, None, 0)

  @staticmethod
  def recv_message():
    """Args for receiving one message."""
    return OpArgs(OpType.RECV_MESSAGE, None, None, None, None, 0)

  @staticmethod
  def recv_status_on_client():
    """Args for receiving the final call status on the client."""
    return OpArgs(OpType.RECV_STATUS_ON_CLIENT, None, None, None, None, 0)

  @staticmethod
  def recv_close_on_server():
    """Args for learning on the server whether the call was cancelled."""
    return OpArgs(OpType.RECV_CLOSE_ON_SERVER, None, None, None, None, 0)
# Read-side counterpart of OpArgs: one OpResult per completed operation.
class OpResult(collections.namedtuple(
    'OpResult', [
        'type',
        'initial_metadata',
        'trailing_metadata',
        'message',
        'status',
        'cancelled',
    ])):
  """Results received from a GRPC operation.
  Attributes:
    type (OpType): the type of the operation that produced this result.
    initial_metadata (sequence of 2-sequence of str): Only valid if type ==
      OpType.RECV_INITIAL_METADATA, else is None.
    trailing_metadata (sequence of 2-sequence of str): Only valid if type ==
      OpType.RECV_STATUS_ON_CLIENT, else is None.
    message (bytes): Only valid if type == OpType.RECV_MESSAGE, else is None.
    status (Status): Only valid if type == OpType.RECV_STATUS_ON_CLIENT, else
      is None.
    cancelled (bool): Only valid if type == OpType.RECV_CLOSE_ON_SERVER, else
      is None.
  """
class Event(collections.namedtuple(
    'Event', [
        'type',
        'tag',
        'call',
        'call_details',
        'results',
        'success',
    ])):
  """An event received from a GRPC completion queue.
  Attributes:
    type (EventType): the type of this event.
    tag (object): the tag supplied when the corresponding batch was started.
    call (Call): The Call object associated with this event (if there is one,
      else None).
    call_details (CallDetails): The call details associated with the
      server-side call (if there is such information, else None).
    results (list of OpResult): the results of the batch's operations.
    success (bool): whether the batch's operations completed successfully.
  """
class CompletionQueue:
  # Python 2 style ABC declaration; under Python 3 this attribute is ignored.
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def __init__(self):
    pass
  def __iter__(self):
    """This class may be iterated over.
    This is the equivalent of calling next() repeatedly with an absolute
    deadline of None (i.e. no deadline).
    """
    return self
  @abc.abstractmethod
  def next(self, deadline=float('+inf')):
    """Get the next event on this completion queue.
    Args:
      deadline (float): absolute deadline in seconds from the Python epoch, or
        None for no deadline.
    Returns:
      Event: the next event to become available, e.g. a completed batch.
    """
    pass
  @abc.abstractmethod
  def shutdown(self):
    """Begin the shutdown process of this completion queue.
    Note that this does not immediately destroy the completion queue.
    Nevertheless, user code should not pass it around after invoking this.
    """
    return None
class Call:
  # Python 2 style ABC declaration; under Python 3 this attribute is ignored.
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def start_batch(self, ops, tag):
    """Start a batch of operations.
    Args:
      ops (sequence of OpArgs): the operations to run as one batch.
      tag (object): opaque value returned with the batch's completion Event.
    Returns:
      CallError: whether the batch was accepted by the core.
    """
    return CallError.ERROR
  @abc.abstractmethod
  def cancel(self, code=None, details=None):
    """Cancel the call.
    Args:
      code (int): Status code to cancel with (on the server side). If
        specified, so must `details`.
      details (str): Status details to cancel with (on the server side). If
        specified, so must `code`.
    Returns:
      CallError: whether the cancellation was accepted.
    """
    return CallError.ERROR
  @abc.abstractmethod
  def peer(self):
    """Get the peer of this call.
    Returns:
      str: the peer of this call.
    """
    return None
  # NOTE(review): deliberately not abstract, so implementations without
  # per-call credential support inherit this no-op — confirm intent.
  def set_credentials(self, creds):
    """Set per-call credentials.
    Args:
      creds (CallCredentials): Credentials to be set for this call.
    """
    return None
class Channel:
  # Python 2 style ABC declaration; under Python 3 this attribute is ignored.
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def __init__(self, target, args, credentials=None):
    """Initialize a Channel.
    Args:
      target (str): address of the server to connect to.
      args (sequence of 2-sequence of str, (str|integer)): channel arguments.
      credentials (ChannelCredentials): If None, create an insecure channel,
        else create a secure channel using the client credentials.
    """
  @abc.abstractmethod
  def create_call(self, completion_queue, method, host, deadline=float('+inf')):
    """Create a call from this channel.
    Args:
      completion_queue (CompletionQueue): queue on which the call's events
        will be placed.
      method (str): the method to invoke.
      host (str): host to address.
      deadline (float): absolute deadline in seconds from the Python epoch, or
        None for no deadline.
    Returns:
      Call: call object associated with this Channel and passed parameters.
    """
    return None
  @abc.abstractmethod
  def check_connectivity_state(self, try_to_connect):
    """Check and optionally repair the connectivity state of the channel.
    Args:
      try_to_connect (bool): whether or not to try to connect the channel if
        disconnected.
    Returns:
      ConnectivityState: state of the channel at the time of this invocation.
    """
    return None
  @abc.abstractmethod
  def watch_connectivity_state(self, last_observed_state, deadline,
                               completion_queue, tag):
    """Watch for connectivity state changes from the last_observed_state.
    Args:
      last_observed_state (ConnectivityState): state previously observed.
      deadline (float): absolute deadline for the watch.
      completion_queue (CompletionQueue): queue to notify on.
      tag (object): opaque value returned with the notification Event.
    """
  @abc.abstractmethod
  def target(self):
    """Get the target of this channel.
    Returns:
      str: the target of this channel.
    """
    return None
class Server:
  # Python 2 style ABC declaration; under Python 3 this attribute is ignored.
  __metaclass__ = abc.ABCMeta
  @abc.abstractmethod
  def __init__(self, completion_queue, args):
    """Initialize a server.
    Args:
      completion_queue (CompletionQueue): queue the server reports events on.
      args (sequence of 2-sequence of str, (str|integer)): server arguments.
    """
  @abc.abstractmethod
  def add_http2_port(self, address, credentials=None):
    """Adds an HTTP/2 address+port to the server.
    Args:
      address (str): address and port to listen on, e.g. "[::]:50051".
      credentials (ServerCredentials): If None, create an insecure port, else
        create a secure port using the server credentials.
    """
  @abc.abstractmethod
  def start(self):
    """Starts the server."""
  @abc.abstractmethod
  def shutdown(self, tag=None):
    """Shuts down the server. Does not immediately destroy the server.
    Args:
      tag (object): if not None, have the server place an event on its
        completion queue notifying it when this server has completely shut down.
    """
  @abc.abstractmethod
  def request_call(self, completion_queue, tag):
    """Requests a call from the server on the server's completion queue.
    Args:
      completion_queue (CompletionQueue): Completion queue for the call. May be
        the same as the server's completion queue.
      tag (object): opaque value returned with the new-call Event.
    """
|
wzbozon/statsmodels | refs/heads/master | statsmodels/graphics/plottools.py | 42 | import numpy as np
def rainbow(n):
    """
    Returns a list of colors sampled at equal intervals over the spectrum.

    Parameters
    ----------
    n : int
        The number of colors to return

    Returns
    -------
    R : (n,3) array
        An array of rows of RGB color values

    Notes
    -----
    Converts from HSV coordinates (0, 1, 1) to (1, 1, 1) to RGB. Based on
    the Sage function of the same name.
    """
    from matplotlib import colors
    # Hue sweeps over [0, 1); saturation and value stay fixed at 1.
    R = np.ones((1, n, 3))
    R[0, :, 0] = np.linspace(0, 1, n, endpoint=False)
    # Note: could iterate and use colorsys.hsv_to_rgb
    # Index row 0 instead of squeeze() so the result is always shape (n, 3);
    # squeeze() collapsed the n == 1 case to shape (3,), contradicting the
    # documented return.
    return colors.hsv_to_rgb(R)[0]
|
GenericStudent/home-assistant | refs/heads/dev | homeassistant/components/upcloud/const.py | 14 | """UpCloud constants."""
from datetime import timedelta
# Integration domain identifier used as a key throughout the component.
DOMAIN = "upcloud"
# Default polling interval for refreshing server state.
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
# Dispatcher signal template. The trailing plain-string "{}" is concatenated
# onto the f-string so the braces survive as a str.format placeholder
# (presumably filled with a config entry id — confirm against callers).
CONFIG_ENTRY_UPDATE_SIGNAL_TEMPLATE = f"{DOMAIN}_config_entry_update:" "{}"
|
gsmartway/odoo | refs/heads/8.0 | addons/web_tests_demo/__openerp__.py | 384 | {
'name': "Demonstration of web/javascript tests",
'category': 'Hidden',
'description': """
OpenERP Web demo of a test suite
================================
Test suite example, same code as that used in the testing documentation.
""",
'depends': ['web'],
'data' : [
'views/web_tests_demo.xml',
],
'qweb': ['static/src/xml/demo.xml'],
}
|
dilawar/moogli | refs/heads/geometry | main.py | 2 | import _moogli
"""
class Visualizer(MorphologyViewer):
def __init__(parent = None, callback = None):
self.callback = callback
self.morphology = moogli.Morphology("morph", 1)
desktop = QtGui.QApplication.desktop()
self.visualizer = MorphologyEditor( self.morphology
, desktop.screenGeometry().width()
, desktop.screenGeometry().height()
)
self._timer = QtCore.QTimer(self)
self._update()
def read_morphology_from_moose(path = ""):
import moose
compartments = moose.wildcardFind(path + "/##[ISA=CompartmentBase]")
for compartment in compartments:
try:
parent_compartment = compartment.neighbors["raxial"][0]
proximal_diameter = parent_compartment.diameter
except IndexError:
proximal_diameter = compartment.diameter
self.morphology.add_compartment( compartment.path
, compartment.parent.path
, compartment.x0 * 10000000
, compartment.y0 * 10000000
, compartment.z0 * 10000000
, proximal_diameter * 10000000
, compartment.x * 10000000
, compartment.y * 10000000
, compartment.z * 10000000
, compartment.diameter * 10000000
)
def _update(self):
if self.callback is not None:
self.callback(morphology, self)
self.frame()
self._timer.timeout.connect(self._update)
self._timer.start(0)
def main():
app = QtGui.QApplication(sys.argv)
filename = os.path.join( os.path.split(os.path.realpath(__file__))[0]
, "../../Demos/neuroml/CA1/CA1.morph.pop.xml")
# filename = os.path.join( os.path.split(os.path.realpath(__file__))[0]
# , "../neuroml/PurkinjeCellPassivePulseInput/PurkinjePassive.net.xml")
# filename = os.path.join( os.path.split(os.path.realpath(__file__))[0]
# , "../neuroml/OlfactoryBulbPassive/OBpassive_numgloms3_seed750.0.xml")
visualizer = Visualizer()
visualizer.read_morphology_from_moose()
visualizer.show()
return app.exec_()
# popdict, projdict = moose.neuroml.loadNeuroML_L123(filename)
# modelRoot = moose.Neutral("/" + os.path.splitext(os.path.basename(filename))[0])
# element = moose.Neutral(modelRoot.path + "/model")
# if(moose.exists("/cells")) : moose.move("/cells" , element.path)
# if(moose.exists("/elec")) : moose.move("/elec" , modelRoot.path)
# if(moose.exists("/library")): moose.move("/library", modelRoot.path)
if __name__ == "__main__":
main()
"""
# Strategies for choosing compartment end radii when building a morphology:
DISTAL = 0           # use the distal diameter at both ends
AVERAGED = 1         # use the mean of proximal and distal diameters at both ends
PROXIMAL_DISTAL = 2  # keep the proximal and distal diameters as read from MOOSE
def read_morphology_from_moose(name = "", path = "", radius = DISTAL):
    """Build a _moogli.Morphology from the MOOSE compartments under *path*.

    Args:
        name: name given to the Morphology object.
        path: MOOSE wildcard root under which CompartmentBase elements are
            searched.
        radius: one of DISTAL, AVERAGED or PROXIMAL_DISTAL, selecting how the
            two end diameters of each compartment are derived.

    Returns:
        The populated _moogli.Morphology.
    """
    import moose
    # Single scale factor applied to all MOOSE coordinates and diameters.
    # NOTE(review): assumed to convert MOOSE's metre units into moogli's
    # display units — confirm against moogli's expectations.
    scale = 10000000
    morphology = _moogli.Morphology(name, 1)
    compartments = moose.wildcardFind(path + "/##[ISA=CompartmentBase]")
    for compartment in compartments:
        distal_diameter = compartment.diameter
        try:
            parent_compartment = compartment.neighbors["raxial"][0]
            proximal_diameter = parent_compartment.diameter
        except IndexError:
            # Root compartment: no raxial parent, so reuse its own diameter.
            proximal_diameter = distal_diameter
        if radius == DISTAL:
            proximal_diameter = distal_diameter
        elif radius == AVERAGED:
            distal_diameter = proximal_diameter = ( distal_diameter
                                                  + proximal_diameter
                                                  ) / 2.0
        # PROXIMAL_DISTAL: keep both diameters unchanged.
        morphology.add_compartment( compartment.path
                                  , compartment.parent.path
                                  , compartment.x0 * scale
                                  , compartment.y0 * scale
                                  , compartment.z0 * scale
                                  , proximal_diameter * scale
                                  , compartment.x * scale
                                  , compartment.y * scale
                                  , compartment.z * scale
                                  , distal_diameter * scale
                                  )
    return morphology
|
plotly/plotly.py | refs/heads/master | packages/python/plotly/plotly/validators/icicle/marker/_colorscale.py | 1 | import _plotly_utils.basevalidators
class ColorscaleValidator(_plotly_utils.basevalidators.ColorscaleValidator):
    # Validator for the `icicle.marker.colorscale` attribute; defaults below
    # can be overridden through **kwargs (NOTE(review): file layout suggests
    # generated code — avoid hand-editing beyond comments).
    def __init__(self, plotly_name="colorscale", parent_name="icicle.marker", **kwargs):
        super(ColorscaleValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            # Setting an explicit colorscale implies autocolorscale=False.
            implied_edits=kwargs.pop("implied_edits", {"autocolorscale": False}),
            **kwargs
        )
|
ccpgames/eve-metrics | refs/heads/master | web2py/applications/welcome/languages/fr-ca.py | 1 | # coding: utf8
{
'!langcode!': 'fr-ca',
'!langname!': 'Français (Canadien)',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" est une expression optionnelle comme "champ1=\'nouvellevaleur\'". Vous ne pouvez mettre à jour ou supprimer les résultats d\'un JOIN',
'%s %%{row} deleted': '%s rangées supprimées',
'%s %%{row} updated': '%s rangées mises à jour',
'%s selected': '%s sélectionné',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'about': 'à propos',
'About': 'À propos',
'Access Control': "Contrôle d'accès",
'Administrative Interface': 'Administrative Interface',
'Administrative interface': "Interface d'administration",
'Ajax Recipes': 'Recettes Ajax',
'appadmin is disabled because insecure channel': "appadmin est désactivée parce que le canal n'est pas sécurisé",
'Are you sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?',
'Authentication': 'Authentification',
'Available Databases and Tables': 'Bases de données et tables disponibles',
'Buy this book': 'Acheter ce livre',
'cache': 'cache',
'Cache': 'Cache',
'Cache Keys': 'Cache Keys',
'Cannot be empty': 'Ne peut pas être vide',
'change password': 'changer le mot de passe',
'Check to delete': 'Cliquez pour supprimer',
'Check to delete:': 'Cliquez pour supprimer:',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': 'IP client',
'Community': 'Communauté',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Contrôleur',
'Copyright': "Droit d'auteur",
'Current request': 'Demande actuelle',
'Current response': 'Réponse actuelle',
'Current session': 'Session en cours',
'customize me!': 'personnalisez-moi!',
'data uploaded': 'données téléchargées',
'Database': 'base de données',
'Database %s select': 'base de données %s select',
'db': 'db',
'DB Model': 'Modèle DB',
'Delete:': 'Supprimer:',
'Demo': 'Démo',
'Deployment Recipes': 'Recettes de déploiement ',
'Description': 'Descriptif',
'design': 'design',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': 'fait!',
'Download': 'Téléchargement',
'E-mail': 'Courriel',
'Edit': 'Éditer',
'Edit current record': "Modifier l'enregistrement courant",
'edit profile': 'modifier le profil',
'Edit This App': 'Modifier cette application',
'Email and SMS': 'Email and SMS',
'enter an integer between %(min)g and %(max)g': 'entrer un entier compris entre %(min)g et %(max)g',
'Errors': 'Erreurs',
'export as csv file': 'exporter sous forme de fichier csv',
'FAQ': 'faq',
'First name': 'Prénom',
'Forms and Validators': 'Formulaires et Validateurs',
'Free Applications': 'Applications gratuites',
'Function disabled': 'Fonction désactivée',
'Group %(group_id)s created': '%(group_id)s groupe créé',
'Group ID': 'Groupe ID',
'Group uniquely assigned to user %(id)s': "Groupe unique attribué à l'utilisateur %(id)s",
'Groups': 'Groupes',
'Hello World': 'Bonjour le monde',
'Home': 'Accueil',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': 'Importer/Exporter',
'Index': 'Index',
'insert new': 'insérer un nouveau',
'insert new %s': 'insérer un nouveau %s',
'Internal State': 'État interne',
'Introduction': 'Présentation',
'Invalid email': 'Courriel invalide',
'Invalid Query': 'Requête Invalide',
'invalid request': 'requête invalide',
'Key': 'Key',
'Last name': 'Nom',
'Layout': 'Mise en page',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'layouts',
'Live chat': 'Clavardage en direct',
'Live Chat': 'Live Chat',
'Logged in': 'Connecté',
'login': 'connectez-vous',
'Login': 'Connectez-vous',
'logout': 'déconnectez-vous',
'lost password': 'mot de passe perdu',
'Lost Password': 'Mot de passe perdu',
'lost password?': 'mot de passe perdu?',
'Main Menu': 'Menu principal',
'Manage Cache': 'Manage Cache',
'Menu Model': 'Menu modèle',
'My Sites': 'My Sites',
'Name': 'Nom',
'New Record': 'Nouvel enregistrement',
'new record inserted': 'nouvel enregistrement inséré',
'next 100 rows': '100 prochaines lignes',
'No databases in this application': "Cette application n'a pas de bases de données",
'Online examples': 'Exemples en ligne',
'or import from csv file': "ou importer d'un fichier CSV",
'Origin': 'Origine',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Autres recettes',
'Overview': 'Présentation',
'password': 'mot de passe',
'Password': 'Mot de passe',
"Password fields don't match": 'Les mots de passe ne correspondent pas',
'please input your password again': "S'il vous plaît entrer votre mot de passe",
'Plugins': 'Plugiciels',
'Powered by': 'Alimenté par',
'Preface': 'Préface',
'previous 100 rows': '100 lignes précédentes',
'profile': 'profile',
'Python': 'Python',
'Query:': 'Requête:',
'Quick Examples': 'Examples Rapides',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'Readme': 'Lisez-moi',
'Recipes': 'Recettes',
'Record': 'enregistrement',
'Record %(id)s created': 'Record %(id)s created',
'Record %(id)s updated': 'Record %(id)s updated',
'Record Created': 'Record Created',
'record does not exist': "l'archive n'existe pas",
'Record ID': "ID d'enregistrement",
'Record id': "id d'enregistrement",
'Record Updated': 'Record Updated',
'Register': "S'inscrire",
'register': "s'inscrire",
'Registration key': "Clé d'enregistrement",
'Registration successful': 'Inscription réussie',
'Remember me (for 30 days)': 'Se souvenir de moi (pendant 30 jours)',
'Request reset password': 'Demande de réinitialiser le mot clé',
'Reset Password key': 'Réinitialiser le mot clé',
'Resources': 'Ressources',
'Role': 'Rôle',
'Rows in Table': 'Lignes du tableau',
'Rows selected': 'Lignes sélectionnées',
'Semantic': 'Sémantique',
'Services': 'Services',
'Size of cache:': 'Size of cache:',
'state': 'état',
'Statistics': 'Statistics',
'Stylesheet': 'Feuille de style',
'submit': 'submit',
'Submit': 'Soumettre',
'Support': 'Soutien',
'Sure you want to delete this object?': 'Êtes-vous sûr de vouloir supprimer cet objet?',
'Table': 'tableau',
'Table name': 'Nom du tableau',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "query" est une condition comme "db.table1.champ1==\'valeur\'". Quelque chose comme "db.table1.champ1==db.table2.champ2" résulte en un JOIN SQL.',
'The Core': 'Le noyau',
'The output of the file is a dictionary that was rendered by the view %s': 'La sortie de ce fichier est un dictionnaire qui été restitué par la vue %s',
'The Views': 'Les Vues',
'This App': 'Cette Appli',
'This is a copy of the scaffolding application': "Ceci est une copie de l'application échafaudage",
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': 'Horodatage',
'Twitter': 'Twitter',
'unable to parse csv file': "incapable d'analyser le fichier cvs",
'Update:': 'Mise à jour:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Employez (...)&(...) pour AND, (...)|(...) pour OR, and ~(...) pour NOT pour construire des requêtes plus complexes.',
'User %(id)s Logged-in': 'Utilisateur %(id)s connecté',
'User %(id)s Registered': 'Utilisateur %(id)s enregistré',
'User ID': 'ID utilisateur',
'User Voice': 'User Voice',
'value already in database or empty': 'valeur déjà dans la base ou vide',
'Verify Password': 'Vérifiez le mot de passe',
'Videos': 'Vidéos',
'View': 'Présentation',
'Web2py': 'Web2py',
'Welcome': 'Bienvenu',
'Welcome %s': 'Bienvenue %s',
'Welcome to web2py': 'Bienvenue à web2py',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Qui a appelé la fonction %s se trouvant dans le fichier %s',
'You are successfully running web2py': 'Vous roulez avec succès web2py',
'You can modify this application and adapt it to your needs': "Vous pouvez modifier cette application et l'adapter à vos besoins",
'You visited the url %s': "Vous avez visité l'URL %s",
}
|
jhauberg/cards.py | refs/heads/master | test/test_markdown.py | 1 | # coding=utf-8
import unittest
from cards.markdown import markdown
class MarkdownTest(unittest.TestCase):
def test_markdown(self):
# strong
self.assertEqual(markdown('**strong**'), '<strong>strong</strong>')
self.assertEqual(markdown('**strong word**'), '<strong>strong word</strong>')
self.assertEqual(markdown(' **strong**'), ' <strong>strong</strong>')
#self.assertEqual(markdown('** strong**'), '** strong**') # on github this is the result
self.assertEqual(markdown('** strong**'), '<strong> strong</strong>')
self.assertEqual(markdown('** strong **'), '<strong> strong </strong>')
self.assertEqual(markdown('**strong** '), '<strong>strong</strong> ')
self.assertEqual(markdown(' **strong** '), ' <strong>strong</strong> ')
self.assertEqual(markdown('__strong__'), '<strong>strong</strong>')
self.assertEqual(markdown(' __strong__'), ' <strong>strong</strong>')
#self.assertEqual(markdown('__ strong__'), '__ strong__') # on github this is the result
self.assertEqual(markdown('__ strong__'), '<strong> strong</strong>')
self.assertEqual(markdown('__ strong __'), '<strong> strong </strong>')
self.assertEqual(markdown('__strong__ '), '<strong>strong</strong> ')
self.assertEqual(markdown(' __strong__ '), ' <strong>strong</strong> ')
self.assertEqual(markdown('(__strong__)'), '(<strong>strong</strong>)')
# todo: these are weird- what would we expect?
# self.assertEqual(markdown('****'), '<em>*</em>*') # this works out, but probably not what you'd want
# self.assertEqual(markdown('____'), '<em>_</em>_') # on github it becomes a horizontal ruler
# not sure if these should be as shown
# self.assertEqual(markdown('****'), '****')
# self.assertEqual(markdown('____'), '____')
# self.assertEqual(markdown('_____'), '_____')
self.assertEqual(markdown('** **'), '<strong> </strong>')
self.assertEqual(markdown('__ __'), '<strong> </strong>')
# emphasis
self.assertEqual(markdown('*emphasis*'), '<em>emphasis</em>')
self.assertEqual(markdown('*emphasized word*'), '<em>emphasized word</em>')
self.assertEqual(markdown(' *emphasis*'), ' <em>emphasis</em>')
#self.assertEqual(markdown('* emphasis*'), '* emphasis*') # on github this is the result
self.assertEqual(markdown('* emphasis*'), '<em> emphasis</em>')
self.assertEqual(markdown('*emphasis* '), '<em>emphasis</em> ')
self.assertEqual(markdown(' *emphasis* '), ' <em>emphasis</em> ')
self.assertEqual(markdown('_emphasis_'), '<em>emphasis</em>')
self.assertEqual(markdown(' _emphasis_'), ' <em>emphasis</em>')
#self.assertEqual(markdown('_ emphasis_'), '_ emphasis_') # on github this is the result
self.assertEqual(markdown('_ emphasis_'), '<em> emphasis</em>')
self.assertEqual(markdown('_ emphasis _'), '<em> emphasis </em>')
self.assertEqual(markdown('_emphasis_ '), '<em>emphasis</em> ')
self.assertEqual(markdown(' _emphasis_ '), ' <em>emphasis</em> ')
self.assertEqual(markdown('(_emphasis_)'), '(<em>emphasis</em>)')
self.assertEqual(markdown('no_emphasis_'), 'no_emphasis_')
self.assertEqual(markdown('\*emphasis*'), '*emphasis*')
# super
self.assertEqual(markdown('super^this'), 'super<sup>this</sup>')
self.assertEqual(markdown('super^this not^'), 'super<sup>this</sup> not^')
self.assertEqual(markdown('super^this^not_this'), 'super<sup>this^not_this</sup>')
self.assertEqual(markdown('^this'), '<sup>this</sup>')
self.assertEqual(markdown('not^'), 'not^')
# inserted
self.assertEqual(markdown('++inserted++'), '<ins>inserted</ins>')
# deleted
self.assertEqual(markdown('~~deleted~~'), '<del>deleted</del>')
# break line
self.assertEqual(markdown(' '), '<br />')
self.assertEqual(markdown(' '), '<br /><br />')
self.assertEqual(markdown('one break'), 'one<br />break')
self.assertEqual(markdown('two breaks'), 'two<br /><br />breaks')
self.assertEqual(markdown('two breaks'), 'two<br /><br />breaks')
self.assertEqual(markdown('three breaks'), 'three<br /><br /><br />breaks')
# escaping
self.assertEqual(markdown('\**strong**'), '*<em>strong</em>*')
self.assertEqual(markdown('\*\*strong**'), '**strong**')
self.assertEqual(markdown('\__strong__'), '_<em>strong</em>_')
self.assertEqual(markdown('\_\_strong__'), '__strong__')
|
bansallab/asnr | refs/heads/master | Python_files/community.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module implements community detection.
"""
__all__ = ["partition_at_level", "modularity", "best_partition", "generate_dendogram", "induced_graph"]
__author__ = """Thomas Aynaud (thomas.aynaud@lip6.fr)"""
# Copyright (C) 2009 by
# Thomas Aynaud <thomas.aynaud@lip6.fr>
# All rights reserved.
# BSD license.
__PASS_MAX = -1
__MIN = 0.0000001
import networkx as nx
import sys
import types
import array
#import find_community_structure
import csv
import random
#from PYTHON_CODE import general_tools
#import find_community_structure
def partition_at_level(dendogram, level) :
    """Return the partition of the nodes at the given level

    A dendogram is a tree and each level is a partition of the graph nodes.
    Level 0 is the first partition, which contains the smallest communities,
    and the best is len(dendogram) - 1.  The higher the level is, the bigger
    are the communities.

    Parameters
    ----------
    dendogram : list of dict
       a list of partitions, ie dictionnaries where keys of the i+1 are the values of the i.
    level : int
       the level which belongs to [0..len(dendogram)-1]

    Returns
    -------
    partition : dictionnary
       A dictionary where keys are the nodes and the values are the set it belongs to

    Raises
    ------
    KeyError
       If the dendogram is not well formed or the level is too high

    See Also
    --------
    best_partition which directly combines partition_at_level and
    generate_dendogram to obtain the partition of highest modularity
    """
    partition = dendogram[0].copy()
    for index in range(1, level + 1) :
        # .items() instead of the Python-2-only .iteritems() so the function
        # also runs under Python 3.  Only values are reassigned during the
        # iteration (no keys added/removed), which is safe.
        for node, community in partition.items() :
            partition[node] = dendogram[index][community]
    return partition
def modularity(partition, graph) :
    """Compute the modularity of a partition of a graph

    Parameters
    ----------
    partition : dict
       the partition of the nodes, i.e a dictionary where keys are their nodes and values the communities
    graph : networkx.Graph
       the networkx graph which is decomposed

    Returns
    -------
    modularity : float
       The modularity

    Raises
    ------
    KeyError
       If the partition is not a partition of all graph nodes
    ValueError
        If the graph has no link
    TypeError
        If graph is not a networkx.Graph

    References
    ----------
    .. 1. Newman, M.E.J. & Girvan, M. Finding and evaluating community structure in networks. Physical Review E 69, 26113(2004).
    """
    if type(graph) != nx.Graph :
        raise TypeError("Bad graph type, use only non directed graph")
    inc = dict([])   # per-community internal weight (self-loops count once)
    deg = dict([])   # per-community sum of weighted degrees
    links = graph.size(weight='weight')
    if links == 0 :
        raise ValueError("A graph without links has an undefined modularity")
    for node in graph :
        com = partition[node]
        deg[com] = deg.get(com, 0.) + graph.degree(node, weight = 'weight')
        # .items() instead of the Python-2-only .iteritems() so the function
        # also runs under Python 3.
        for neighbor, datas in graph[node].items() :
            weight = datas.get("weight", 1)
            if partition[neighbor] == com :
                if neighbor == node :
                    inc[com] = inc.get(com, 0.) + float(weight)
                else :
                    # Each undirected edge is visited from both endpoints,
                    # so count half its weight per visit.
                    inc[com] = inc.get(com, 0.) + float(weight) / 2.
    res = 0.
    for com in set(partition.values()) :
        res += (inc.get(com, 0.) / links) - (deg.get(com, 0.) / (2.*links))**2
    return res
def best_partition(graph, part_first=None, partition = None) :
    """Compute the partition of the graph nodes which maximises the modularity
    (or try..) using the Louvain heuristics

    This is the partition of highest modularity, i.e. the highest partition
    of the dendogram generated by the Louvain algorithm.

    Parameters
    ----------
    graph : networkx.Graph
       the networkx graph which is decomposed
    part_first : dict, optionnal
       forwarded to generate_dendogram.  NOTE(review): generate_dendogram
       currently ignores its part_first argument, so passing it here has no
       effect — confirm whether part_init was intended.
    partition : dict, optionnal
       NOTE(review): accepted but never used by this function.

    Returns
    -------
    partition : dictionnary
       The partition, with communities numbered from 0 to number of communities

    Raises
    ------
    TypeError
       (raised by generate_dendogram) if graph is not a networkx.Graph
    ValueError
       (raised downstream) if the graph has no link

    See Also
    --------
    generate_dendogram to obtain all the decompositions levels

    Notes
    -----
    Uses Louvain algorithm

    References
    ----------
    .. 1. Blondel, V.D. et al. Fast unfolding of communities in large networks. J. Stat. Mech 10008, 1-12(2008).
    """
    # The last level of the dendogram is the coarsest (highest-modularity)
    # partition produced by the Louvain passes.
    dendo = generate_dendogram(graph, part_first)
    return partition_at_level(dendo, len(dendo) - 1 )
def generate_dendogram(graph, part_first=None, part_init = None) :
    """Find communities in the graph and return the associated dendogram

    A dendogram is a tree and each level is a partition of the graph nodes.
    Level 0 is the first partition, which contains the smallest communities,
    and the best is len(dendogram) - 1.  The higher the level is, the bigger
    are the communities.

    Parameters
    ----------
    graph : networkx.Graph
        the networkx graph which will be decomposed
    part_first : dict, optionnal
        NOTE(review): accepted (and forwarded by best_partition) but never
        read in this function — confirm whether part_init was intended.
    part_init : dict, optionnal
        the algorithm will start using this partition of the nodes. It's a
        dictionary where keys are their nodes and values the communities

    Returns
    -------
    dendogram : list of dictionaries
        a list of partitions, ie dictionnaries where keys of the i+1 are the
        values of the i. and where keys of the first are the nodes of graph

    Raises
    ------
    TypeError
        If the graph is not a networkx.Graph
    """
    if type(graph) != nx.Graph :
        raise TypeError("Bad graph type, use only non directed graph")
    current_graph = graph.copy()
    status = Status()
    status.init(current_graph, part_init)
    mod = __modularity(status)
    status_list = list()
    # The first level is computed outside the loop: its partition is always
    # kept, even when the modularity gain is below __MIN.
    __one_level(current_graph, status)
    new_mod = __modularity(status)
    partition = __renumber(status.node2com)
    status_list.append(partition)
    mod = new_mod
    # Collapse each community into a single node and repeat on the smaller
    # induced graph until the modularity gain becomes insignificant.
    current_graph = induced_graph(partition, current_graph)
    status.init(current_graph)
    while True :
        __one_level(current_graph, status)
        new_mod = __modularity(status)
        if new_mod - mod < __MIN :
            break
        partition = __renumber(status.node2com)
        status_list.append(partition)
        mod = new_mod
        current_graph = induced_graph(partition, current_graph)
        status.init(current_graph)
    # Return a shallow copy of the list of per-level partitions.
    return status_list[:]
def induced_graph(partition, graph) :
    """Produce the graph whose nodes are the communities of *partition*.

    Two communities are linked with weight w when the weights of the edges
    between their members sum to w; intra-community edges accumulate on a
    self-loop.

    Parameters
    ----------
    partition : dict
       a dictionary where keys are graph nodes and values the part the node
       belongs to
    graph : networkx.Graph
       the initial graph

    Returns
    -------
    g : networkx.Graph
       a networkx graph where nodes are the parts
    """
    condensed = nx.Graph()
    condensed.add_nodes_from(partition.values())
    for src, dst, attrs in graph.edges(data = True) :
        edge_weight = attrs.get("weight", 1)
        com_src, com_dst = partition[src], partition[dst]
        # Accumulate onto any weight already recorded between the two parts.
        previous = condensed.get_edge_data(com_src, com_dst, {"weight":0}).get("weight", 1)
        condensed.add_edge(com_src, com_dst, weight = previous + edge_weight)
    return condensed
def __renumber(dictionary) :
    """Relabel the values of *dictionary* onto consecutive integers 0..n-1,
    in first-seen order of the original values; keys are unchanged."""
    relabel = {}
    renumbered = dictionary.copy()
    for key, value in dictionary.items() :
        if value not in relabel :
            relabel[value] = len(relabel)
        renumbered[key] = relabel[value]
    return renumbered
def __load_binary(data) :
    """Load binary graph as used by the cpp implementation of this algorithm

    *data* may be a path string or an already-open binary file object.
    """
    # isinstance(data, str) replaces the Python-2-only types.StringType
    # comparison so the function also works under Python 3 (and accepts
    # str subclasses, which type() equality rejected).
    if isinstance(data, str) :
        data = open(data, "rb")
    # Header: node count, then the cumulative degree of every node.
    reader = array.array("I")
    reader.fromfile(data, 1)
    num_nodes = reader.pop()
    reader = array.array("I")
    reader.fromfile(data, num_nodes)
    cum_deg = reader.tolist()
    # The final cumulative degree equals the total number of link entries.
    num_links = reader.pop()
    reader = array.array("I")
    reader.fromfile(data, num_links)
    links = reader.tolist()
    graph = nx.Graph()
    graph.add_nodes_from(range(num_nodes))
    prec_deg = 0
    for index in range(num_nodes) :
        last_deg = cum_deg[index]
        neighbors = links[prec_deg:last_deg]
        graph.add_edges_from([(index, int(neigh)) for neigh in neighbors])
        prec_deg = last_deg
    return graph
def __one_level(graph, status) :
    """Compute one level of communities

    Repeatedly sweeps over all nodes, moving each node into the neighboring
    community with the best modularity gain, until a full pass improves the
    modularity by less than __MIN (or __PASS_MAX passes have run).
    """
    random.seed()
    modif = True
    nb_pass_done = 0
    cur_mod = __modularity(status)
    new_mod = cur_mod
    # __PASS_MAX is -1, so the pass counter never terminates the loop here.
    while modif and nb_pass_done != __PASS_MAX :
        cur_mod = new_mod
        modif = False
        nb_pass_done += 1
        for node in graph.nodes() :
            com_node = status.node2com[node]
            # k_i / 2m term of the modularity-gain formula for this node.
            degc_totw = status.gdegrees.get(node, 0.) / (status.total_weight*2.)
            neigh_communities = __neighcom(node, graph, status)
            # Temporarily take the node out of its community before scoring
            # the candidate communities.
            __remove(node, com_node,
                    neigh_communities.get(com_node, 0.), status)
            best_com = [com_node] #SB changed from best_com = com_node
            best_increase = 0
            sc_best = 0
            for com, dnc in neigh_communities.iteritems() :
                incr = dnc - status.degrees.get(com, 0.) * degc_totw
                # NOTE(review): this new_mod is overwritten after the loop
                # without being read — looks like leftover debugging work.
                new_mod = __modularity(status)
                if incr > best_increase:
                    best_increase = incr
                    best_com = [com]
                # SB added sept 5 2012 (original was just picking first instance of incr > best_increase)
                elif incr == best_increase:
                    best_com.append(com)
            # Break ties between equally good candidate communities at random.
            best_com = random.choice(best_com) # sb added
            __insert(node, best_com,
                     neigh_communities.get(best_com, 0.), status)
            if best_com != com_node :
                modif = True
            # compute spatial cohesiveness after every move to see if spatial cohesiveness increasing
            #new_mod = __modularity(status)
            #partition = __renumber(status.node2com)
            #(sc,Gnn) = find_community_structure.measure_spatial_cohesiveness_shweta(graph, partition,"county")
            #print new_mod, sc
        new_mod = __modularity(status)
        if new_mod - cur_mod < __MIN :
            break
def __one_level_sb(graph, status, part_first) :
    """Compute one level of communities
    """
    # computes one level of communities with the additional constraint of it being different than original partition
    # COALA method of Bae and Bailey. Couldn't get the code to work...
    # NOTE(review): with the selection logic commented out below, best_com is
    # always com_node, so each node is re-inserted into its own community,
    # modif stays False, and the while loop exits after one effectively
    # no-op pass.
    random.seed()
    if part_first is None:
        # No reference partition given: fall back to the standard sweep.
        __one_level(graph,status)
        return
    else:
        modif = True
        nb_pass_done = 0
        cur_mod = __modularity(status)
        new_mod = cur_mod
        while modif and nb_pass_done != __PASS_MAX :
            cur_mod = new_mod
            modif = False
            nb_pass_done += 1
            for node in graph.nodes() :
                com_node = status.node2com[node]
                # k_i / 2m term of the modularity-gain formula.
                degc_totw = status.gdegrees.get(node, 0.) / (status.total_weight*2.)
                neigh_communities = __neighcom(node, graph, status)
                __remove(node, com_node,
                        neigh_communities.get(com_node, 0.), status)
                best_com = com_node
                best_increase = 0
                # Candidate gains are computed but never used (see NOTE above).
                # for com, dnc in neigh_communities.iteritems() :
                incr = [(dnc - status.degrees.get(com, 0.) * degc_totw) for com, dnc in neigh_communities.iteritems()]
                com = [c for c,d in neigh_communities.iteritems()]
                com_incr = dict(zip(com, incr))
                #if len(com_incr) > 0:
                    #print node, com_incr
                # max_com = [key for key,val in com_incr.iteritems() if val == max(com_incr.values())][0]# converts from list to num
                # max_incr = [val for key,val in com_incr.iteritems() if val == max(com_incr.values())][0]
                # if max_incr > best_increase:
                #     best_com = max_com
                # com_incr2 = general_tools.remove_from_dict_key(com_incr,max_com)
                # if len(com_incr2)>0:
                #     second_com = [key for key,val in com_incr2.iteritems() if val == max(com_incr2.values())][0]
                #     second_incr = [val for key,val in com_incr2.iteritems() if val == max(com_incr2.values())][0]
                    #com_incr2 = general_tools.remove_from_dict_key(com_incr2,second_com)
                #     best_com = second_com
                #print best_com, max_com, second_com
                #print max_incr/(1.0*second_incr)
                __insert(node, best_com, neigh_communities.get(best_com, 0.), status)
                if best_com != com_node :
                    modif = True
                # compute spatial cohesiveness after every move
                #new_mod = __modularity(status)
                #partition = __renumber(status.node2com)
                #(sc,Gnn) = find_community_structure.measure_spatial_cohesiveness_shweta(graph, partition,"county")
                #print new_mod, sc
            new_mod = __modularity(status)
            if new_mod - cur_mod < __MIN :
                break
class Status :
    """
    To handle several data in one struct.

    Could be replaced by named tuple, but don't want to depend on python 2.6

    Fields:
      node2com     -- community id of each node
      total_weight -- total edge weight of the graph
      internals    -- per-community sum of internal edge weights
      degrees      -- per-community sum of weighted node degrees
      gdegrees     -- per-node weighted degree
      loops        -- per-node self-loop weight
    """
    node2com = {}
    total_weight = 0
    internals = {}
    degrees = {}
    gdegrees = {}

    def __init__(self) :
        self.node2com = dict([])
        self.total_weight = 0
        self.degrees = dict([])
        self.gdegrees = dict([])
        self.internals = dict([])
        self.loops = dict([])

    def __str__(self) :
        return ("node2com : " + str(self.node2com) + " degrees : "
                + str(self.degrees) + " internals : " + str(self.internals)
                + " total_weight : " + str(self.total_weight))

    def copy(self) :
        """Perform a deep copy of status"""
        new_status = Status()
        new_status.node2com = self.node2com.copy()
        new_status.internals = self.internals.copy()
        new_status.degrees = self.degrees.copy()
        new_status.gdegrees = self.gdegrees.copy()
        # Bug fixes: 'loops' was never copied, and the copy was never
        # returned (the method implicitly returned None).
        new_status.loops = self.loops.copy()
        new_status.total_weight = self.total_weight
        return new_status

    def init(self, graph, part = None) :
        """Initialize the status of a graph with every node in one community"""
        count = 0
        self.node2com = dict([])
        self.total_weight = 0
        self.degrees = dict([])
        self.gdegrees = dict([])
        self.internals = dict([])
        self.total_weight = graph.size(weight = 'weight')
        if part is None :
            # Every node starts in its own singleton community.
            for node in graph.nodes() :
                self.node2com[node] = count
                deg = float(graph.degree(node, weight = 'weight'))
                self.degrees[count] = deg
                self.gdegrees[node] = deg
                self.loops[node] = float(graph.get_edge_data(node, node,
                                                {"weight":0}).get("weight", 1))
                self.internals[count] = self.loops[node]
                count = count + 1
        else :
            # Start from the supplied partition.
            for node in graph.nodes() :
                com = part[node]
                self.node2com[node] = com
                deg = float(graph.degree(node, weight = 'weight'))
                self.degrees[com] = self.degrees.get(com, 0) + deg
                self.gdegrees[node] = deg
                inc = 0.
                # .items() instead of py2-only .iteritems() (Python 3 compat).
                for neighbor, datas in graph[node].items() :
                    weight = datas.get("weight", 1)
                    if part[neighbor] == com :
                        if neighbor == node :
                            inc += float(weight)
                        else :
                            inc += float(weight) / 2.
                self.internals[com] = self.internals.get(com, 0) + inc
def __neighcom(node, graph, status) :
    """
    Compute the communities in the neighborood of node in the graph given
    with the decomposition node2com.

    Returns {community: total weight of edges from node into that community};
    self-loops are excluded.
    """
    weights = {}
    # .items() instead of the Python-2-only .iteritems() so the module also
    # runs under Python 3.
    for neighbor, datas in graph[node].items() :
        if neighbor != node :
            weight = datas.get("weight", 1)
            neighborcom = status.node2com[neighbor]
            weights[neighborcom] = weights.get(neighborcom, 0) + weight
    return weights
def __remove(node, com, weight, status) :
    """Take *node* out of community *com*: subtract its degree and internal
    weight from the community totals, and park the node in community -1."""
    node_degree = status.gdegrees.get(node, 0.)
    status.degrees[com] = status.degrees.get(com, 0.) - node_degree
    inner = status.internals.get(com, 0.) - weight - status.loops.get(node, 0.)
    status.internals[com] = float(inner)
    status.node2com[node] = -1
def __insert(node, com, weight, status) :
    """Put *node* into community *com*: add its degree and internal weight
    (edges into the community plus its own self-loop) to the totals."""
    status.node2com[node] = com
    node_degree = status.gdegrees.get(node, 0.)
    status.degrees[com] = status.degrees.get(com, 0.) + node_degree
    inner = status.internals.get(com, 0.) + weight + status.loops.get(node, 0.)
    status.internals[com] = float(inner)
def __modularity(status) :
    """
    Compute the modularity of the current partition quickly from the totals
    precomputed in *status* (returns 0. for a weightless graph).
    """
    links = float(status.total_weight)
    result = 0.
    if links > 0 :
        for community in set(status.node2com.values()) :
            within = status.internals.get(community, 0.)
            degree = status.degrees.get(community, 0.)
            result += within / links - (degree / (2. * links)) ** 2
    return result
def __main() :
    """Main function to mimic C++ version behavior

    Reads a binary graph file (argv[1]), runs best_partition, prints the
    modularity to stderr and one "node community" pair per line to stdout.

    NOTE(review): written for Python 2 (print statements, ``print >>``
    redirection); this function will not parse under Python 3.
    """
    try :
        filename = sys.argv[1]
        graphfile = __load_binary(filename)
        partition = best_partition(graphfile, {}, {})
        print >> sys.stderr, str(modularity(partition, graphfile))
        for elem, part in partition.iteritems() :
            print str(elem) + " " + str(part)
    except (IndexError, IOError):
        # Missing argument or unreadable file: show usage instead.
        print "Usage : ./community filename"
        print "find the communities in graph filename and display the dendogram"
        print "Parameters:"
        print "filename is a binary file as generated by the "
        print "convert utility distributed with the C implementation"
|
DocuSignDev/phantomjs | refs/heads/master | src/breakpad/src/tools/gyp/pylib/gyp/SCons.py | 137 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
SCons generator.
This contains class definitions and supporting functions for generating
pieces of SCons files for the different types of GYP targets.
"""
import os
def WriteList(fp, list, prefix='',
              separator=',\n    ',
              preamble=None,
              postamble=None):
  """Write each element of |list| to |fp|, prefixed with |prefix| and joined
  by |separator|, with optional |preamble|/|postamble| text around the whole
  sequence.  (The parameter name 'list' is kept for API compatibility.)"""
  fp.write(preamble or '')
  joined = (separator or ' ').join([prefix + element for element in list])
  fp.write(joined)
  fp.write(postamble or '')
class TargetBase(object):
  """
  Base class for a SCons representation of a GYP target.
  """
  is_ignored = False
  target_prefix = ''
  target_suffix = ''

  def __init__(self, spec):
    self.spec = spec

  def full_product_name(self):
    """
    Return the full name of the product being built:
      * 'product_name' when set, otherwise 'target_name';
      * wrapped in the SCons prefix/suffix variables for the target type;
      * prefixed with 'product_dir' when that is set.
    """
    base_name = self.spec.get('product_name') or self.spec['target_name']
    full_name = self.target_prefix + base_name + self.target_suffix
    product_dir = self.spec.get('product_dir')
    return os.path.join(product_dir, full_name) if product_dir else full_name

  def write_input_files(self, fp):
    """
    Write the definition of the input files (sources) for this target.
    """
    sources = self.spec.get('sources')
    if sources:
      WriteList(fp, map(repr, sources),
                preamble='\ninput_files = [\n    ',
                postamble=',\n]\n')
    else:
      fp.write('\ninput_files = []\n')

  def builder_call(self):
    """
    Return the actual SCons builder call that builds this target.
    """
    return 'env.%s(%r, input_files)' % (self.builder_name,
                                        self.full_product_name())

  def write_target(self, fp, src_dir='', pre=''):
    """
    Write the lines necessary to build this target.
    """
    fp.write('\n' + pre)
    fp.write('_outputs = %s\n' % self.builder_call())
    fp.write('target_files.extend(_outputs)\n')
class NoneTarget(TargetBase):
  """
  A GYP target type of 'none', implicitly or explicitly.
  """
  def write_target(self, fp, src_dir='', pre=''):
    """
    A 'none' target builds nothing: its outputs are simply its inputs.

    Consistency fix: accept src_dir (unused here) so the signature matches
    TargetBase.write_target and positional callers pass arguments to the
    intended parameters.
    """
    fp.write('\ntarget_files.extend(input_files)\n')
class SettingsTarget(TargetBase):
  """
  A GYP target type of 'settings'.

  A 'settings' target produces no build output of its own, so it is
  marked ignored and emits nothing into the generated SCons files.
  """
  is_ignored = True
compilable_sources_template = """
_result = []
for infile in input_files:
if env.compilable(infile):
if (type(infile) == type('')
and (infile.startswith(%(src_dir)r)
or not os.path.isabs(env.subst(infile)))):
# Force files below the build directory by replacing all '..'
# elements in the path with '__':
base, ext = os.path.splitext(os.path.normpath(infile))
base = [d == '..' and '__' or d for d in base.split('/')]
base = os.path.join(*base)
object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
if not infile.startswith(%(src_dir)r):
infile = %(src_dir)r + infile
infile = env.%(name)s(object, infile)[0]
else:
infile = env.%(name)s(infile)[0]
_result.append(infile)
input_files = _result
"""
class CompilableSourcesTargetBase(TargetBase):
  """
  An abstract base class for targets that compile their source files.

  We explicitly transform compilable files into object files,
  even though SCons could infer that for us, because we want
  to control where the object file ends up.  (The implicit rules
  in SCons always put the object file next to the source file.)
  """
  # Subclasses must set this to the SCons builder used for the
  # source -> object step (e.g. 'StaticObject' or 'SharedObject').
  intermediate_builder_name = None
  def write_target(self, fp, src_dir='', pre=''):
    """
    Write the compile-then-link lines: expand compilable_sources_template
    to turn input_files into object files, then delegate the final builder
    call to TargetBase.write_target.
    """
    if self.intermediate_builder_name is None:
      raise NotImplementedError
    if src_dir and not src_dir.endswith('/'):
      src_dir += '/'
    variables = {
        'src_dir': src_dir,
        'name': self.intermediate_builder_name,
    }
    fp.write(compilable_sources_template % variables)
    # NOTE(review): src_dir and pre are not forwarded to the base class's
    # write_target here (unlike ProgramTarget below) — confirm intent.
    super(CompilableSourcesTargetBase, self).write_target(fp)
class ProgramTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'executable': compiles with StaticObject and links
  with the GypProgram builder, using the SCons program prefix/suffix.
  """
  builder_name = 'GypProgram'
  intermediate_builder_name = 'StaticObject'
  target_prefix = '${PROGPREFIX}'
  target_suffix = '${PROGSUFFIX}'
  # TODO:  remove these subclass methods by moving the env.File()
  # into the base class.
  def write_target(self, fp, src_dir='', pre=''):
    # Bind the program node to a variable first so builder_call below can
    # refer to it as _program.
    fp.write('\n_program = env.File(%r)' % self.full_product_name())
    super(ProgramTarget, self).write_target(fp, src_dir, pre)
  def builder_call(self):
    return 'env.GypProgram(_program, input_files)'
class StaticLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'static_library': compiles with StaticObject and
  archives with the GypStaticLibrary builder.
  """
  builder_name = 'GypStaticLibrary'
  intermediate_builder_name = 'StaticObject'
  # TODO:  enable these
  #target_prefix = '${LIBPREFIX}'
  #target_suffix = '${LIBSUFFIX}'
class SharedLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'shared_library': compiles with SharedObject and
  links with the GypSharedLibrary builder.
  """
  builder_name = 'GypSharedLibrary'
  intermediate_builder_name = 'SharedObject'
  # TODO:  enable these
  #target_prefix = '${SHLIBPREFIX}'
  #target_suffix = '${SHLIBSUFFIX}'
class LoadableModuleTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'loadable_module': compiles with SharedObject and
  links with the GypLoadableModule builder.
  """
  builder_name = 'GypLoadableModule'
  intermediate_builder_name = 'SharedObject'
  # TODO:  enable these
  #target_prefix = '${SHLIBPREFIX}'
  #target_suffix = '${SHLIBSUFFIX}'
# Maps a GYP 'type' value to the class that writes SCons code for it.
# A missing type (None) is treated like an explicit 'none' target.
TargetMap = {
  None : NoneTarget,
  'none' : NoneTarget,
  'settings' : SettingsTarget,
  'executable' : ProgramTarget,
  'static_library' : StaticLibraryTarget,
  'shared_library' : SharedLibraryTarget,
  'loadable_module' : LoadableModuleTarget,
}
def Target(spec):
  """Factory: instantiate the TargetBase subclass matching spec's 'type'."""
  target_class = TargetMap[spec.get('type')]
  return target_class(spec)
|
Martiusweb/asyncio | refs/heads/master | asyncio/events.py | 2 | """Event loop and event loop policy."""
__all__ = ['AbstractEventLoopPolicy',
'AbstractEventLoop', 'AbstractServer',
'Handle', 'TimerHandle',
'get_event_loop_policy', 'set_event_loop_policy',
'get_event_loop', 'set_event_loop', 'new_event_loop',
'get_child_watcher', 'set_child_watcher',
]
import functools
import inspect
import reprlib
import socket
import subprocess
import sys
import threading
import traceback
from asyncio import compat
def _get_function_source(func):
    """Return (filename, first line number) for *func*, or None when unknown.

    Follows decorator wrappers and functools.partial chains down to the
    underlying function object.
    """
    if compat.PY34:
        func = inspect.unwrap(func)
    elif hasattr(func, '__wrapped__'):
        func = func.__wrapped__
    if inspect.isfunction(func):
        code_obj = func.__code__
        return (code_obj.co_filename, code_obj.co_firstlineno)
    # partial (and, on 3.4+, partialmethod) wrap the real callable in .func.
    if isinstance(func, functools.partial) or (
            compat.PY34 and isinstance(func, functools.partialmethod)):
        return _get_function_source(func.func)
    return None
def _format_args_and_kwargs(args, kwargs):
    """Format function arguments and keyword arguments as a call string.

    Special case for a single parameter: ('hello',) is formatted as ('hello').
    """
    # reprlib bounds the length of each rendered value.
    rendered = [reprlib.repr(arg) for arg in args] if args else []
    if kwargs:
        rendered.extend('{}={}'.format(key, reprlib.repr(value))
                        for key, value in kwargs.items())
    return '({})'.format(', '.join(rendered))
def _format_callback(func, args, kwargs, suffix=''):
    """Render *func* and its arguments as a human-readable call string."""
    if isinstance(func, functools.partial):
        # Unwrap the partial: the outer args/kwargs become a suffix that is
        # appended after the inner function's own argument list.
        suffix = _format_args_and_kwargs(args, kwargs) + suffix
        return _format_callback(func.func, func.args, func.keywords, suffix)

    if hasattr(func, '__qualname__'):
        rendered = func.__qualname__
    elif hasattr(func, '__name__'):
        rendered = func.__name__
    else:
        rendered = repr(func)
    rendered += _format_args_and_kwargs(args, kwargs)
    return rendered + suffix
def _format_callback_source(func, args):
    """Like _format_callback, with ' at <file>:<line>' appended when the
    callback's source location is known."""
    rendered = _format_callback(func, args, None)
    location = _get_function_source(func)
    if location:
        rendered = '%s at %s:%s' % (rendered, location[0], location[1])
    return rendered
class Handle:
    """Object returned by callback registration methods."""

    # A handle is created for every scheduled callback, so __slots__ keeps
    # the per-instance footprint small.
    __slots__ = ('_callback', '_args', '_cancelled', '_loop',
                 '_source_traceback', '_repr', '__weakref__')

    def __init__(self, callback, args, loop):
        # Wrapping a Handle in another Handle is always a caller bug.
        assert not isinstance(callback, Handle), 'A Handle is not a callback'
        self._loop = loop
        self._callback = callback
        self._args = args
        self._cancelled = False
        self._repr = None
        if self._loop.get_debug():
            # In debug mode, remember where the callback was scheduled from.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
        else:
            self._source_traceback = None

    def _repr_info(self):
        # Fragments joined by __repr__; subclasses extend this list.
        info = [self.__class__.__name__]
        if self._cancelled:
            info.append('cancelled')
        if self._callback is not None:
            info.append(_format_callback_source(self._callback, self._args))
        if self._source_traceback:
            frame = self._source_traceback[-1]
            info.append('created at %s:%s' % (frame[0], frame[1]))
        return info

    def __repr__(self):
        if self._repr is not None:
            # cancel() may have frozen the repr (debug mode); reuse it.
            return self._repr
        info = self._repr_info()
        return '<%s>' % ' '.join(info)

    def cancel(self):
        if not self._cancelled:
            self._cancelled = True
            if self._loop.get_debug():
                # Keep a representation in debug mode to keep callback and
                # parameters. For example, to log the warning
                # "Executing <Handle...> took 2.5 second"
                self._repr = repr(self)
            # Drop references so a cancelled handle does not keep its
            # callback (and the callback's arguments) alive.
            self._callback = None
            self._args = None

    def _run(self):
        # Invoke the callback; exceptions are routed to the loop's exception
        # handler instead of propagating to the event loop machinery.
        try:
            self._callback(*self._args)
        except Exception as exc:
            cb = _format_callback_source(self._callback, self._args)
            msg = 'Exception in callback {}'.format(cb)
            context = {
                'message': msg,
                'exception': exc,
                'handle': self,
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
        self = None  # Needed to break cycles when an exception occurs.
class TimerHandle(Handle):
    """Object returned by timed callback registration methods."""

    __slots__ = ['_scheduled', '_when']

    def __init__(self, when, callback, args, loop):
        assert when is not None
        super().__init__(callback, args, loop)
        if self._source_traceback:
            # Drop the frame for this __init__ so the stored traceback points
            # at the caller that scheduled the timer.
            del self._source_traceback[-1]
        self._when = when
        self._scheduled = False

    def _repr_info(self):
        info = super()._repr_info()
        # Insert 'when=...' after 'cancelled' (when present) to keep the
        # repr fragments in a stable order.
        pos = 2 if self._cancelled else 1
        info.insert(pos, 'when=%s' % self._when)
        return info

    def __hash__(self):
        # Hashes only on the deadline; handles with equal _when collide, and
        # __eq__ below still distinguishes them.
        return hash(self._when)

    # The ordering methods compare deadlines only; they are what the event
    # loop's scheduled-timer heap uses.
    def __lt__(self, other):
        return self._when < other._when

    def __le__(self, other):
        if self._when < other._when:
            return True
        return self.__eq__(other)

    def __gt__(self, other):
        return self._when > other._when

    def __ge__(self, other):
        if self._when > other._when:
            return True
        return self.__eq__(other)

    def __eq__(self, other):
        if isinstance(other, TimerHandle):
            return (self._when == other._when and
                    self._callback == other._callback and
                    self._args == other._args and
                    self._cancelled == other._cancelled)
        return NotImplemented

    def __ne__(self, other):
        equal = self.__eq__(other)
        return NotImplemented if equal is NotImplemented else not equal

    def cancel(self):
        if not self._cancelled:
            # Let the loop update its bookkeeping of cancelled timers before
            # the base class clears the callback.
            self._loop._timer_handle_cancelled(self)
        super().cancel()
class AbstractServer:
    """Abstract server returned by create_server()."""

    def close(self):
        """Stop serving.  This leaves existing connections open."""
        # Bug fix: this previously *returned* the NotImplemented constant,
        # which silently "succeeds" at call sites; an abstract method must
        # raise so subclasses are forced to override it (matches the later
        # upstream CPython change).
        raise NotImplementedError

    def wait_closed(self):
        """Coroutine to wait until service is closed."""
        raise NotImplementedError
class AbstractEventLoop:
"""Abstract event loop."""
# Running and stopping the event loop.
def run_forever(self):
"""Run the event loop until stop() is called."""
raise NotImplementedError
def run_until_complete(self, future):
"""Run the event loop until a Future is done.
Return the Future's result, or raise its exception.
"""
raise NotImplementedError
def stop(self):
"""Stop the event loop as soon as reasonable.
Exactly how soon that is may depend on the implementation, but
no more I/O callbacks should be scheduled.
"""
raise NotImplementedError
def is_running(self):
"""Return whether the event loop is currently running."""
raise NotImplementedError
def is_closed(self):
"""Returns True if the event loop was closed."""
raise NotImplementedError
def close(self):
"""Close the loop.
The loop should not be running.
This is idempotent and irreversible.
No other methods should be called after this one.
"""
raise NotImplementedError
def shutdown_asyncgens(self):
"""Shutdown all active asynchronous generators."""
raise NotImplementedError
# Methods scheduling callbacks. All these return Handles.
def _timer_handle_cancelled(self, handle):
"""Notification that a TimerHandle has been cancelled."""
raise NotImplementedError
def call_soon(self, callback, *args):
return self.call_later(0, callback, *args)
def call_later(self, delay, callback, *args):
raise NotImplementedError
def call_at(self, when, callback, *args):
raise NotImplementedError
def time(self):
raise NotImplementedError
def create_future(self):
raise NotImplementedError
# Method scheduling a coroutine object: create a task.
def create_task(self, coro):
raise NotImplementedError
# Methods for interacting with threads.
def call_soon_threadsafe(self, callback, *args):
raise NotImplementedError
def run_in_executor(self, executor, func, *args):
raise NotImplementedError
def set_default_executor(self, executor):
raise NotImplementedError
# Network I/O methods returning Futures.
def getaddrinfo(self, host, port, *, family=0, type=0, proto=0, flags=0):
raise NotImplementedError
def getnameinfo(self, sockaddr, flags=0):
raise NotImplementedError
def create_connection(self, protocol_factory, host=None, port=None, *,
ssl=None, family=0, proto=0, flags=0, sock=None,
local_addr=None, server_hostname=None):
raise NotImplementedError
def create_server(self, protocol_factory, host=None, port=None, *,
family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE,
sock=None, backlog=100, ssl=None, reuse_address=None,
reuse_port=None):
"""A coroutine which creates a TCP server bound to host and port.
The return value is a Server object which can be used to stop
the service.
If host is an empty string or None all interfaces are assumed
and a list of multiple sockets will be returned (most likely
one for IPv4 and another one for IPv6). The host parameter can also be a
sequence (e.g. list) of hosts to bind to.
family can be set to either AF_INET or AF_INET6 to force the
socket to use IPv4 or IPv6. If not set it will be determined
from host (defaults to AF_UNSPEC).
flags is a bitmask for getaddrinfo().
sock can optionally be specified in order to use a preexisting
socket object.
backlog is the maximum number of queued connections passed to
listen() (defaults to 100).
ssl can be set to an SSLContext to enable SSL over the
accepted connections.
reuse_address tells the kernel to reuse a local socket in
TIME_WAIT state, without waiting for its natural timeout to
expire. If not specified will automatically be set to True on
UNIX.
reuse_port tells the kernel to allow this endpoint to be bound to
the same port as other existing endpoints are bound to, so long as
they all set this flag when being created. This option is not
supported on Windows.
"""
raise NotImplementedError
def create_unix_connection(self, protocol_factory, path, *,
ssl=None, sock=None,
server_hostname=None):
raise NotImplementedError
def create_unix_server(self, protocol_factory, path, *,
sock=None, backlog=100, ssl=None):
"""A coroutine which creates a UNIX Domain Socket server.
The return value is a Server object, which can be used to stop
the service.
path is a str, representing a file systsem path to bind the
server socket to.
sock can optionally be specified in order to use a preexisting
socket object.
backlog is the maximum number of queued connections passed to
listen() (defaults to 100).
ssl can be set to an SSLContext to enable SSL over the
accepted connections.
"""
raise NotImplementedError
    def create_datagram_endpoint(self, protocol_factory,
                                 local_addr=None, remote_addr=None, *,
                                 family=0, proto=0, flags=0,
                                 reuse_address=None, reuse_port=None,
                                 allow_broadcast=None, sock=None):
        """A coroutine which creates a datagram endpoint.

        This method will try to establish the endpoint in the background.
        When successful, the coroutine returns a (transport, protocol) pair.

        protocol_factory must be a callable returning a protocol instance.

        socket family AF_INET or socket.AF_INET6 depending on host (or
        family if specified), socket type SOCK_DGRAM.

        local_addr and remote_addr, if given, select the local and remote
        endpoints for the socket (presumably (host, port) tuples -- confirm
        against the concrete implementation).

        reuse_address tells the kernel to reuse a local socket in
        TIME_WAIT state, without waiting for its natural timeout to
        expire. If not specified it will automatically be set to True on
        UNIX.

        reuse_port tells the kernel to allow this endpoint to be bound to
        the same port as other existing endpoints are bound to, so long as
        they all set this flag when being created. This option is not
        supported on Windows and some UNIX's. If the
        :py:data:`~socket.SO_REUSEPORT` constant is not defined then this
        capability is unsupported.

        allow_broadcast tells the kernel to allow this endpoint to send
        messages to the broadcast address.

        sock can optionally be specified in order to use a preexisting
        socket object.
        """
        raise NotImplementedError
    # Pipes and subprocesses.
    def connect_read_pipe(self, protocol_factory, pipe):
        """Register read pipe in event loop. Set the pipe to non-blocking mode.

        protocol_factory should instantiate object with Protocol interface.
        pipe is a file-like object.
        Return pair (transport, protocol), where transport supports the
        ReadTransport interface."""
        # The reason to accept a file-like object instead of a raw file
        # descriptor is ownership: the transport must own the pipe and close
        # it when it is finished with it.  Passing f.fileno() instead can
        # cause subtle errors, e.g. the fd being closed by the pipe transport
        # and then closed again through f (or vice versa).
        raise NotImplementedError
    def connect_write_pipe(self, protocol_factory, pipe):
        """Register write pipe in event loop.

        protocol_factory should instantiate object with BaseProtocol interface.
        Pipe is file-like object already switched to nonblocking.
        Return pair (transport, protocol), where transport support
        WriteTransport interface."""
        # The reason to accept a file-like object instead of a raw file
        # descriptor is ownership: the transport must own the pipe and close
        # it when it is finished with it.  Passing f.fileno() instead can
        # cause subtle errors, e.g. the fd being closed by the pipe transport
        # and then closed again through f (or vice versa).
        raise NotImplementedError
    def subprocess_shell(self, protocol_factory, cmd, *, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         **kwargs):
        """A coroutine which starts a subprocess from the shell command
        line *cmd*; stdin/stdout/stderr default to pipes, and extra
        keyword arguments are forwarded to the underlying implementation.

        Abstract method: concrete event loops must override it.
        """
        raise NotImplementedError
    def subprocess_exec(self, protocol_factory, *args, stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                        **kwargs):
        """A coroutine which starts a subprocess from a program and its
        argument list *args* (no shell); stdin/stdout/stderr default to
        pipes, and extra keyword arguments are forwarded.

        Abstract method: concrete event loops must override it.
        """
        raise NotImplementedError
    # Ready-based callback registration methods.
    # The add_*() methods return None.
    # The remove_*() methods return True if something was removed,
    # False if there was nothing to delete.
    def add_reader(self, fd, callback, *args):
        """Start watching fd for read availability; call callback(*args) when ready."""
        raise NotImplementedError
    def remove_reader(self, fd):
        """Stop watching fd for read availability."""
        raise NotImplementedError
    def add_writer(self, fd, callback, *args):
        """Start watching fd for write availability; call callback(*args) when ready."""
        raise NotImplementedError
    def remove_writer(self, fd):
        """Stop watching fd for write availability."""
        raise NotImplementedError
    # Completion based I/O methods returning Futures.
    def sock_recv(self, sock, nbytes):
        """Receive up to nbytes from sock; returns a Future."""
        raise NotImplementedError
    def sock_sendall(self, sock, data):
        """Send data to sock until everything is delivered; returns a Future."""
        raise NotImplementedError
    def sock_connect(self, sock, address):
        """Connect sock to address; returns a Future."""
        raise NotImplementedError
    def sock_accept(self, sock):
        """Accept a connection on listening socket sock; returns a Future."""
        raise NotImplementedError
    # Signal handling.
    def add_signal_handler(self, sig, callback, *args):
        """Register callback(*args) to be run when signal sig is received."""
        raise NotImplementedError
    def remove_signal_handler(self, sig):
        """Remove the handler registered for signal sig."""
        raise NotImplementedError
    # Task factory.
    def set_task_factory(self, factory):
        """Set the factory used by the loop to create new tasks."""
        raise NotImplementedError
    def get_task_factory(self):
        """Return the current task factory."""
        raise NotImplementedError
    # Error handlers.
    def get_exception_handler(self):
        """Return the current exception handler."""
        raise NotImplementedError
    def set_exception_handler(self, handler):
        """Set handler as the loop's exception handler."""
        raise NotImplementedError
    def default_exception_handler(self, context):
        """Handle an exception context with the loop's default behavior."""
        raise NotImplementedError
    def call_exception_handler(self, context):
        """Dispatch the exception context to the configured handler."""
        raise NotImplementedError
    # Debug flag management.
    def get_debug(self):
        """Return True if the loop is running in debug mode."""
        raise NotImplementedError
    def set_debug(self, enabled):
        """Enable or disable debug mode."""
        raise NotImplementedError
class AbstractEventLoopPolicy:
    """Abstract policy for accessing the event loop."""
    def get_event_loop(self):
        """Get the event loop for the current context.

        Returns an event loop object implementing the BaseEventLoop interface,
        or raises an exception in case no event loop has been set for the
        current context and the current policy does not specify to create one.
        It should never return None."""
        raise NotImplementedError
    def set_event_loop(self, loop):
        """Set the event loop for the current context to loop."""
        raise NotImplementedError
    def new_event_loop(self):
        """Create and return a new event loop object according to this
        policy's rules. If there's need to set this loop as the event loop for
        the current context, set_event_loop must be called explicitly."""
        raise NotImplementedError
    # Child processes handling (Unix only).
    def get_child_watcher(self):
        """Get the watcher for child processes."""
        raise NotImplementedError
    def set_child_watcher(self, watcher):
        """Set the watcher for child processes."""
        raise NotImplementedError
class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):
    """Default policy implementation for accessing the event loop.

    In this policy, each thread has its own event loop. However, we
    only automatically create an event loop by default for the main
    thread; other threads by default have no event loop.

    Other policies may have different rules (e.g. a single global
    event loop, or automatically creating an event loop per thread, or
    using some other notion of context to which an event loop is
    associated).
    """
    # Subclasses must set this to a callable producing a concrete loop.
    _loop_factory = None
    class _Local(threading.local):
        # Per-thread storage: each thread sees its own _loop/_set_called.
        _loop = None
        # Becomes True once set_event_loop() runs in this thread; this
        # suppresses the implicit loop creation in get_event_loop(), so an
        # explicit set_event_loop(None) is respected.
        _set_called = False
    def __init__(self):
        self._local = self._Local()
    def get_event_loop(self):
        """Get the event loop for the current thread.

        A loop is created lazily, but only in the main thread and only if
        the user never called set_event_loop() themselves; otherwise a
        RuntimeError is raised when no loop is set.
        """
        if (self._local._loop is None and
            not self._local._set_called and
            isinstance(threading.current_thread(), threading._MainThread)):
            self.set_event_loop(self.new_event_loop())
        if self._local._loop is None:
            raise RuntimeError('There is no current event loop in thread %r.'
                               % threading.current_thread().name)
        return self._local._loop
    def set_event_loop(self, loop):
        """Set the event loop."""
        # Record the explicit user choice, even when loop is None.
        self._local._set_called = True
        assert loop is None or isinstance(loop, AbstractEventLoop)
        self._local._loop = loop
    def new_event_loop(self):
        """Create a new event loop.

        You must call set_event_loop() to make this the current event
        loop.
        """
        return self._loop_factory()
# Event loop policy.  The policy itself is always global, even if the
# policy's rules say that there is an event loop per thread (or other
# notion of context).  The default policy is installed by the first
# call to get_event_loop_policy().
_event_loop_policy = None
# Lock for protecting the on-the-fly creation of the event loop policy.
_lock = threading.Lock()
def _init_event_loop_policy():
    global _event_loop_policy
    with _lock:
        if _event_loop_policy is None:  # pragma: no branch
            # Re-check under the lock: another thread may have installed the
            # policy between the caller's unlocked check and acquiring _lock.
            from . import DefaultEventLoopPolicy
            _event_loop_policy = DefaultEventLoopPolicy()
def get_event_loop_policy():
    """Get the current event loop policy, creating the default lazily."""
    if _event_loop_policy is None:
        _init_event_loop_policy()
    return _event_loop_policy
def set_event_loop_policy(policy):
    """Set the current event loop policy.

    If policy is None, the default policy is restored (re-created lazily
    on the next get_event_loop_policy() call)."""
    global _event_loop_policy
    assert policy is None or isinstance(policy, AbstractEventLoopPolicy)
    _event_loop_policy = policy
# Module-level convenience functions that delegate to the global policy.
def get_event_loop():
    """Equivalent to calling get_event_loop_policy().get_event_loop()."""
    return get_event_loop_policy().get_event_loop()
def set_event_loop(loop):
    """Equivalent to calling get_event_loop_policy().set_event_loop(loop)."""
    get_event_loop_policy().set_event_loop(loop)
def new_event_loop():
    """Equivalent to calling get_event_loop_policy().new_event_loop()."""
    return get_event_loop_policy().new_event_loop()
def get_child_watcher():
    """Equivalent to calling get_event_loop_policy().get_child_watcher()."""
    return get_event_loop_policy().get_child_watcher()
def set_child_watcher(watcher):
    """Equivalent to calling
    get_event_loop_policy().set_child_watcher(watcher)."""
    return get_event_loop_policy().set_child_watcher(watcher)
|
kashif/chainer | refs/heads/master | chainer/functions/normalization/local_response_normalization.py | 9 | import numpy
import six
from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _cu_conv_sum(y, x, n):
    """Write into *y* the width-*n* sliding-window sum of *x* along axis 1.

    For odd n, y[:, c] receives the sum of x over channels
    [c - n//2, c + n//2], clipped to the valid channel range.  Both
    arrays are GPU arrays of identical shape.
    """
    # Convolutional sum
    # TODO(beam2d): Use scan computation
    # rdim = flattened size of the dimensions after the channel axis.
    rdim = x.size // (x.shape[0] * x.shape[1])
    cuda.elementwise(
        'raw T x, int32 rdim, int32 N, int32 n_', 'raw T y',
        '''
        int half_n = n_ / 2;
        int offset = i / rdim * N * rdim + i % rdim;
        float sum_part = 0;
        for (int j = 0; j < N + half_n; ++j) {
          if (j < N) {
            sum_part += x[offset + j * rdim];
          }
          if (j >= n_) {
            sum_part -= x[offset + (j - n_) * rdim];
          }
          if (j >= half_n) {
            y[offset + (j - half_n) * rdim] = sum_part;
          }
        }
        ''', 'lrn_conv_sum')(x, rdim, x.shape[1], n, y,
                             size=x.shape[0] * rdim)
class LocalResponseNormalization(function.Function):
    """Cross-channel normalization function used in AlexNet."""
    def __init__(self, n=5, k=2, alpha=1e-4, beta=.75):
        # LRN hyper-parameters: window width n (in channels), smoothing
        # constant k, scale alpha and exponent beta.
        self.n = n
        self.k = k
        self.alpha = alpha
        self.beta = beta
    def check_type_forward(self, in_types):
        """Expect exactly one floating-point input with ndim >= 2."""
        type_check.expect(in_types.size() == 1)
        x_type, = in_types
        type_check.expect(
            x_type.dtype.kind == 'f',
            x_type.ndim >= 2,
        )
    def forward_cpu(self, x):
        """Compute y = x * (k + alpha * S(x^2)) ** -beta on the CPU.

        S(.) is a sliding-window sum of width n along axis 1 (the channel
        axis).  Intermediate results are cached on self for backward_cpu.
        """
        half_n = self.n // 2
        x2 = numpy.square(x[0])
        sum_part = x2.copy()
        # Add the squares of up to half_n neighbors on each side of every
        # channel via shifted in-place additions.
        for i in six.moves.range(1, half_n + 1):
            sum_part[:, i:] += x2[:, :-i]
            sum_part[:, :-i] += x2[:, i:]
        # Cached for backward_cpu.
        self.unit_scale = self.k + self.alpha * sum_part
        self.scale = self.unit_scale ** -self.beta
        self.y = x[0] * self.scale
        return self.y,
    def backward_cpu(self, x, gy):
        """Backward pass using the results cached by forward_cpu."""
        half_n = self.n // 2
        summand = self.y * gy[0] / self.unit_scale
        sum_part = summand.copy()
        for i in six.moves.range(1, half_n + 1):
            sum_part[:, i:] += summand[:, :-i]
            sum_part[:, :-i] += summand[:, i:]
        gx = gy[0] * self.scale - 2 * self.alpha * self.beta * x[0] * sum_part
        return gx,
    def forward_gpu(self, x):
        """GPU forward pass; mirrors forward_cpu using elementwise kernels."""
        self.y = cuda.cupy.square(x[0])  # temporary
        self.scale = cuda.cupy.empty_like(self.y)
        # self.scale first receives the window sum of x**2; the kernel below
        # then rescales it in place and overwrites self.y with the output.
        _cu_conv_sum(self.scale, self.y, self.n)
        cuda.elementwise(
            'T x, T k, T alpha, T beta',
            'T y, T scale',
            '''scale = k + alpha * scale;
               y = x * pow(scale, -beta);''',
            'lrn_fwd')(x[0], self.k, self.alpha, self.beta,
                       self.y, self.scale)
        return self.y,
    def backward_gpu(self, x, gy):
        """GPU backward pass; mirrors backward_cpu using elementwise kernels."""
        summand = cuda.elementwise(
            'T scale, T y, T gy', 'T summand',
            'summand = y * gy / scale',
            'lrn_bwd_summand')(self.scale, self.y, gy[0])
        gx = cuda.cupy.empty_like(x[0])
        # gx first holds the window sum of summand; the kernel below combines
        # it in place with the direct gradient term.
        _cu_conv_sum(gx, summand, self.n)
        cuda.elementwise(
            ' T x, T gy, T scale, T beta, T coeff', 'T gx',
            'gx = pow(scale, -beta) * gy - coeff * x * gx',
            'lrn_bwd')(x[0], gy[0], self.scale,
                       self.beta, 2 * self.alpha * self.beta, gx)
        return gx,
def local_response_normalization(x, n=5, k=2, alpha=1e-4, beta=.75):
    """Local response normalization across neighboring channels.

    This function implements normalization across channels. Let :math:`x` an
    input image with :math:`N` channels. Then, this function computes an output
    image :math:`y` by following formula:

    .. math::
       y_i = {x_i \\over \\left( k + \\
              \\alpha \\sum_{j=\\max{1, i - n/2}}^{\\min{N, i + n/2}} \\
              x_j^2 \\right)^\\beta}.

    The normalization window spans neighboring channels, i.e. axis 1 of x.

    Args:
        x (Variable): Input variable.
        n (int): Normalization window width.
        k (float): Smoothing parameter.
        alpha (float): Normalizer scaling parameter.
        beta (float): Normalizer power parameter.

    Returns:
        Variable: Output variable.

    See: Section 3.3 of `ImageNet Classification with Deep Convolutional \\
    Neural Networks <http://www.cs.toronto.edu/~fritz/absps/imagenet.pdf>`_
    """
    # Thin functional wrapper: construct the Function object and apply it.
    return LocalResponseNormalization(n, k, alpha, beta)(x)
|
GarrettArm/TheDjangoBook | refs/heads/master | mysite_project/site_core/views.py | 1 | import datetime
from django.views.generic import TemplateView
from django.contrib.auth.views import LoginView, LogoutView
from django.views import generic
from django.urls import reverse_lazy
from .forms import CustomUserCreationForm
class FrontView(TemplateView):
    """Render the front page with a list of hand-written description paragraphs."""
    template_name = "site_core/frontpage.html"
    def get_context_data(self, **kwargs):
        """Inject the description paragraphs into the context as 'description'."""
        context = super().get_context_data(**kwargs)
        # NOTE(review): these strings contain raw HTML and are rendered
        # unescaped in the template -- presumably via the |safe filter; confirm.
        description_text = [
            """This page is an example of a basic <a href=https://en.wikipedia.org/wiki/Web_template_system>template</a>. Templating lets you break the view up into things that change from page to page, and things that remain the same across the site. The sidebar, for example, never changes -- so it's hardcoded in the <a href=https://github.com/GarrettArm/TheDjangoBook/blob/master/mysite_project/templates/base.html>base template html</a>. But the title text, main block, etc. do change, and they are wrapped in blocks.""",
            """Google Domains manages the domain name, linking the gaularmstrong.com resource records to the Amazon EC2 elastic IP address.""",
            """Amazon EC2 hosts the ubuntu server. It allows ssh, etc. from only my IP address; it allows https from everyone else. Within that server, Uncomplicated Firewall (ufw) further limits permitted network traffic.""",
            """Let's Encrypt provides the https certification.""",
            """Nginx serves the static content (i.e., images, css, etc) and passes other requests to gunicorn. Gunicorn runs several copies of the Django appserver, like mod_php for apache. Django takes requests from gunicorn, processes them, and gives responses back to gunicorn. Django is connected to a sql database.""",
            """Because the box is small and has little traffic, there is little cost except the annual domain name registration & EC2 subscription.""",
        ]
        context["description"] = description_text
        return context
class CurrentDateView(TemplateView):
    """Render a page that shows the server's current date and time."""
    template_name = "dateapp/current_datetime.html"
    def get_context_data(self, **kwargs):
        """Add the formatted current timestamp and the description paragraphs."""
        context = super().get_context_data(**kwargs)
        # Server-local time, pre-formatted for direct display in the template.
        current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        context["current_datestuff"] = current_time
        description_text = [
            """This simple page shows how to insert a bit of information into the response.""",
            """When the Django app gets the request for this page, it runs <a href=https://github.com/GarrettArm/TheDjangoBook/blob/master/mysite_project/site_core/views.py>a little extra function "get context data"</a>: it looks up the current time and it adds that information into the context.""",
            """When the <a href=https://github.com/GarrettArm/TheDjangoBook/blob/master/mysite_project/templates/dateapp/current_datetime.html>template</a> gets handed the context and told to make some html, it puts the square peg into the square hole, etc, then sends the generated html to gunicorn as response html.""",
        ]
        context["description"] = description_text
        return context
class MyLogoutView(LogoutView):
    """Log the user out, then redirect to the site root."""
    next_page = "/"
class MyLoginView(LoginView):
    """Stock Django login view; no behavior is customized."""
    pass
class SignUpView(generic.CreateView):
    """User registration view backed by CustomUserCreationForm."""
    form_class = CustomUserCreationForm
    success_url = reverse_lazy("login")
    template_name = "site_core/signup.html"
|
USGSDenverPychron/pychron | refs/heads/develop | pychron/dashboard/__init__.py | 186 | __author__ = 'ross'
|
geekosphere/zgeist | refs/heads/master | zg/task/helpers.py | 1 | """
Helpers used for celery tasks.
"""
from __future__ import division, print_function, absolute_import, unicode_literals
from zg.util import config, to_bytes, format_bytes, mktemp
from urlparse import urlparse
from zg.app import celery
from mock import Mock
import re
import os
import os.path
import logging
logger = logging.getLogger('zg.task.helpers')
class TaskMapper(object):
    """Registry that maps URLs and mimetypes to handler tasks.

    Handlers are registered with the by_* decorators and looked up with
    the get_task_* methods; every lookup returns the first matching
    handler in registration order, or None when nothing matches.
    """

    def __init__(self):
        # Each registry is a list of (criterion, handler) pairs, kept in
        # registration order so the first match wins.
        self.patterns = []
        self.domains = []
        self.mimetypes = []

    def by_pattern(self, pattern):
        """Decorator: register the function for URLs matching *pattern*."""
        def register(func):
            self.patterns.append((pattern, func))
            return func
        return register

    def by_domain(self, domain):
        """Decorator: register the function for URLs on *domain*."""
        def register(func):
            self.domains.append((domain, func))
            return func
        return register

    def by_mimetype(self, mimetype):
        """Decorator: register the function for *mimetype* ('*' is a wildcard)."""
        def register(func):
            # Translate the glob-style mimetype into an anchored regex.
            regex = '^{}$'.format(mimetype.replace('*', '[^/]+'))
            self.mimetypes.append((regex, func))
            return func
        return register

    def get_task_by_pattern(self, url):
        """Return the first handler whose pattern matches *url*, else None."""
        for pattern, handler in self.patterns:
            if re.findall(pattern, url):
                return handler
        return None

    def get_task_by_domain(self, url):
        """Return the handler registered for *url*'s hostname, else None."""
        hostname = urlparse(url).hostname
        for domain, handler in self.domains:
            if domain == hostname:
                return handler
        return None

    def get_task_by_mimetype(self, mimetype):
        """Return the first handler whose mimetype regex matches, else None."""
        for regex, handler in self.mimetypes:
            if re.findall(regex, mimetype):
                return handler
        return None

    def get_task_for_url(self, url):
        """Look *url* up by pattern first, then fall back to its domain."""
        return self.get_task_by_pattern(url) or self.get_task_by_domain(url)

    def get_task_for_mimetype(self, mimetype):
        """Look up a handler for *mimetype*."""
        return self.get_task_by_mimetype(mimetype)
class MockTask(object):
    """Context manager that temporarily replaces a registered celery task
    with a Mock, restoring the original task on exit.

    Usage:
        with MockTask('some.task.name') as mock:
            ...  # code under test; assertions can inspect `mock`
    """
    def __init__(self, name):
        # Name under which the task is registered in celery.tasks.
        self.name = name
        self.mock = Mock()
    def __enter__(self):
        # Swap the real task out, keeping it for restoration in __exit__.
        self.old = celery.tasks[self.name]
        celery.tasks[self.name] = self.mock
        return self.mock
    def __exit__(self, type, value, traceback):
        celery.tasks[self.name] = self.old
|
darkleons/BE | refs/heads/master | addons/base_report_designer/wizard/base_report_designer_modify.py | 314 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import time
import urllib
from openerp import osv, tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
class base_report_sxw(osv.osv_memory):
    """Base Report sxw """
    _name = 'base.report.sxw'
    _columns = {
        'report_id': fields.many2one('ir.actions.report.xml', "Report", required=True,domain=[('report_sxw_content','<>',False)],),
    }
    def get_report(self, cr, uid, ids, context=None):
        """Open the next wizard step (base.report.file.sxw) for the report.

        @return: an ir.actions.act_window dictionary that pops the
                 'view_base_report_file_sxw' form up in a new window.
        """
        data = self.read(cr, uid, ids, context=context)[0]
        data_obj = self.pool['ir.model.data']
        id2 = data_obj._get_id(cr, uid, 'base_report_designer', 'view_base_report_file_sxw')
        # NOTE(review): 'report' is looked up but never used below -- confirm
        # whether this browse call can be dropped.
        report = self.pool['ir.actions.report.xml'].browse(cr, uid, data['report_id'], context=context)
        if id2:
            id2 = data_obj.browse(cr, uid, id2, context=context).res_id
        return {
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'base.report.file.sxw',
            'views': [(id2, 'form')],
            'view_id': False,
            'type': 'ir.actions.act_window',
            'target': 'new',
        }
class base_report_file_sxw(osv.osv_memory):
    """Base Report File sxw """
    _name = 'base.report.file.sxw'
    def default_get(self, cr, uid, fields, context=None):
        """
        To get default values for the object.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param fields: List of fields for which we want default values
        @param context: A standard dictionary
        @return: A dictionary of fields with values.
        """
        res = super(base_report_file_sxw, self).default_get(cr, uid, fields, context=context)
        report_id1 = self.pool['base.report.sxw'].search(cr,uid,[])
        # NOTE(review): read(...)[0] raises IndexError when no base.report.sxw
        # record exists -- confirm the wizard flow guarantees one.
        data = self.pool['base.report.sxw'].read(cr, uid, report_id1, context=context)[0]
        report = self.pool['ir.actions.report.xml'].browse(cr, uid, data['report_id'], context=context)
        # NOTE(review): this None-guard runs after context has already been
        # passed through above; it only protects any later use.
        if context is None:
            context={}
        if 'report_id' in fields:
            res['report_id'] = data['report_id']
            res['file_sxw'] = base64.encodestring(report.report_sxw_content)
        return res
    _columns = {
        'report_id': fields.many2one('ir.actions.report.xml', "Report", readonly=True),
        'file_sxw':fields.binary('Your .SXW file',readonly=True),
        'file_sxw_upload':fields.binary('Your .SXW file',required=True)
    }
    def upload_report(self, cr, uid, ids, context=None):
        """Store the uploaded .sxw on the report and convert it to RML.

        The uploaded OpenOffice document is translated to RML with the
        bundled sxw2rml XSL stylesheet, written back onto the selected
        ir.actions.report.xml record, and the next wizard step
        (base.report.rml.save) is opened.
        """
        from base_report_designer import openerp_sxw2rml
        import StringIO
        data=self.read(cr,uid,ids)[0]
        sxwval = StringIO.StringIO(base64.decodestring(data['file_sxw_upload']))
        fp = tools.file_open('normalized_oo2rml.xsl',subdir='addons/base_report_designer/openerp_sxw2rml')
        newrmlcontent = str(openerp_sxw2rml.sxw2rml(sxwval, xsl=fp.read()))
        report = self.pool['ir.actions.report.xml'].write(cr, uid, [data['report_id']], {
            'report_sxw_content': base64.decodestring(data['file_sxw_upload']),
            'report_rml_content': newrmlcontent
        })
        # NOTE(review): explicit commit inside a wizard method -- presumably so
        # the converted report survives even if the following UI action fails;
        # confirm this is intentional.
        cr.commit()
        data_obj = self.pool['ir.model.data']
        id2 = data_obj._get_id(cr, uid, 'base_report_designer', 'view_base_report_file_rml')
        report = self.pool['ir.actions.report.xml'].browse(cr, uid, data['report_id'], context=context)
        if id2:
            id2 = data_obj.browse(cr, uid, id2, context=context).res_id
        return {
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'base.report.rml.save',
            'views': [(id2, 'form')],
            'view_id': False,
            'type': 'ir.actions.act_window',
            'target': 'new',
        }
class base_report_rml_save(osv.osv_memory):
    """Base Report file Save"""
    _name = 'base.report.rml.save'
    def default_get(self, cr, uid, fields, context=None):
        """
        To get default values for the object.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param fields: List of fields for which we want default values
        @param context: A standard dictionary
        @return: A dictionary of fields with values.
        """
        res = super(base_report_rml_save, self).default_get(cr, uid, fields, context=context)
        report_ids = self.pool['base.report.sxw'].search(cr,uid,[], context=context)
        # NOTE(review): read(...)[0] raises IndexError when no wizard record
        # exists -- confirm the flow guarantees one.
        data = self.pool['base.report.file.sxw'].read(cr, uid, report_ids, context=context)[0]
        report = self.pool['ir.actions.report.xml'].browse(cr, uid, data['report_id'], context=context)
        if 'file_rml' in fields:
            # Offer the freshly generated RML for download.
            res['file_rml'] = base64.encodestring(report.report_rml_content)
        return res
    _columns = {
        'file_rml':fields.binary('Save As'),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
androidbftab1/bf-kernel | refs/heads/master | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
# futex(2) operation constants, mirroring <linux/futex.h>.
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)

NSECS_PER_SEC = 1000000000

def avg(total, n):
    """Return the mean of *total* over *n* samples.

    NOTE: under Python 2, '/' truncates for integer inputs; that historical
    behavior is preserved here.
    """
    return total / n

def nsecs(secs, nsecs):
    """Combine a (seconds, nanoseconds) pair into total nanoseconds."""
    return secs * NSECS_PER_SEC + nsecs

def nsecs_secs(nsecs):
    """Return the seconds part of a nanosecond timestamp."""
    return nsecs / NSECS_PER_SEC

def nsecs_nsecs(nsecs):
    """Return the sub-second (nanosecond) remainder of a nanosecond timestamp."""
    return nsecs % NSECS_PER_SEC

def nsecs_str(nsecs):
    """Format a nanosecond timestamp as 'seconds.nanoseconds'.

    Bug fix: the assignment used to end with a stray trailing comma, so the
    function returned a 1-tuple instead of the formatted string.  The local
    also no longer shadows the builtin 'str'.
    """
    return "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs))
def add_stats(dict, key, value):
    """Fold *value* into dict[key] = (min, max, avg, count).

    The parameter name 'dict' shadows the builtin but is kept for backward
    compatibility with keyword callers.
    """
    # Bug fix: dict.has_key() only exists on Python 2; the 'in' operator
    # works on both Python 2 and 3.
    if key not in dict:
        dict[key] = (value, value, value, 1)
    else:
        lo, hi, mean, count = dict[key]
        if value < lo:
            lo = value
        if value > hi:
            hi = value
        # NOTE(review): this is a pairwise smoothing, not a true arithmetic
        # mean over all samples; kept as-is to preserve reported numbers.
        mean = (mean + value) / 2
        dict[key] = (lo, hi, mean, count + 1)
def clear_term():
    """Clear the terminal via ANSI escape codes (cursor home + erase display)."""
    print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
    import audit
    # Map uname machine strings to audit machine-type constants so syscall
    # numbers can be resolved for the host architecture.
    machine_to_id = {
        'x86_64': audit.MACH_86_64,
        'alpha'	: audit.MACH_ALPHA,
        'ia64'	: audit.MACH_IA64,
        'ppc'	: audit.MACH_PPC,
        'ppc64'	: audit.MACH_PPC64,
        's390'	: audit.MACH_S390,
        's390x'	: audit.MACH_S390X,
        'i386'	: audit.MACH_X86,
        'i586'	: audit.MACH_X86,
        'i686'	: audit.MACH_X86,
    }
    try:
        # Older audit bindings may lack MACH_ARMEB; skip it in that case.
        machine_to_id['armeb'] = audit.MACH_ARMEB
    except:
        pass
    machine_id = machine_to_id[os.uname()[4]]
except:
    # audit is optional: without it, syscall_name() falls back to numbers.
    if not audit_package_warned:
        audit_package_warned = True
        print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
    """Return the symbolic name of syscall number *id* for this machine.

    Falls back to the plain number as a string when the optional audit
    module is unavailable or the lookup fails.
    """
    try:
        return audit.audit_syscall_to_name(id, machine_id)
    except:
        # Bare except is deliberate: if the audit import above failed,
        # 'audit' / 'machine_id' are undefined and this raises NameError.
        return str(id)
def strerror(nr):
    """Return the symbolic errno name (e.g. 'ENOENT') for *nr*.

    Either sign is accepted; unknown values yield a descriptive fallback
    string instead of raising.
    """
    try:
        return errno.errorcode[abs(nr)]
    except KeyError:
        # Narrowed from a bare 'except:': only a missing errno is expected,
        # and a bare except would also have hidden real programming errors.
        return "Unknown %d errno" % nr
|
glorizen/nupic | refs/heads/master | nupic/frameworks/opf/previousvaluemodel.py | 39 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Module containing the trivial predictor OPF model implementation. """
import itertools
from nupic.data import fieldmeta
from nupic.frameworks.opf import model
from nupic.frameworks.opf import opfutils
from opfutils import InferenceType
class PreviousValueModel(model.Model):
  """Previous value model.

  A trivial baseline model: the prediction for every configured step is
  simply the current value of the predicted field.
  """

  def __init__(self, inferenceType=InferenceType.TemporalNextStep,
               fieldNames=None,
               fieldTypes=None,
               predictedField=None,
               predictionSteps=None):
    """ PVM constructor.

    inferenceType: An opfutils.InferenceType value that specifies what type of
        inference (i.e. TemporalNextStep, TemporalMultiStep, etc.)
    fieldNames: a list of field names
    fieldTypes: a list of the types for the fields mentioned in fieldNames
    predictedField: the field from fieldNames which is to be predicted
    predictionSteps: a list of steps for which a prediction is made. This is
        only needed in the case of multi step predictions

    Bug fix: the list arguments used to default to mutable [] objects, which
    are shared across instances; they now default to None, interpreted as an
    empty list, which is backward compatible.
    """
    super(PreviousValueModel, self).__init__(inferenceType)

    self._logger = opfutils.initLogger(self)
    self._predictedField = predictedField
    self._fieldNames = [] if fieldNames is None else fieldNames
    self._fieldTypes = [] if fieldTypes is None else fieldTypes

    # Only next-step and multi-step inference are implemented.
    if inferenceType == InferenceType.TemporalNextStep:
      self._predictionSteps = [1]
    elif inferenceType == InferenceType.TemporalMultiStep:
      self._predictionSteps = [] if predictionSteps is None else predictionSteps
    else:
      assert False, "Previous Value Model only works for next step or multi-step."

  def run(self, inputRecord):
    """Run one iteration of this model.

    Args:
      inputRecord: A record object formatted according to
          nupic.data.FileSource.getNext() result format.

    Returns:
      A ModelResult named tuple (see opfutils.py). The contents of
      ModelResult.inferences depends on the specific inference type of this
      model, which can be queried by getInferenceType().
    """
    # Set the results; note that there is no translation to sensorInput.
    results = super(PreviousValueModel, self).run(inputRecord)
    results.sensorInput = opfutils.SensorInput(
        dataRow=[inputRecord[fn] for fn in self._fieldNames])

    # Predict the current value of the predicted field for every configured
    # step, with probability 1.
    currentValue = inputRecord[self._predictedField]
    results.inferences = {
        opfutils.InferenceElement.multiStepBestPredictions:
            dict((steps, currentValue) for steps in self._predictionSteps),
        opfutils.InferenceElement.multiStepPredictions:
            dict((steps, {currentValue: 1}) for steps in self._predictionSteps),
    }

    # Also expose the single next-step prediction when step 1 is configured.
    if 1 in self._predictionSteps:
      results.inferences[opfutils.InferenceElement.prediction] = currentValue
    return results

  def finishLearning(self):
    """Places the model in a permanent "finished learning" mode.

    The PVM does not learn, so this function has no effect.
    """
    pass

  def setFieldStatistics(self, fieldStats):
    """
    This method is used for the data source to communicate to the
    model any statistics that it knows about the fields
    Since the PVM has no use for this information, this is a no-op
    """
    pass

  def getFieldInfo(self):
    """Returns the metadata specifying the format of the model's output.

    The result may be different than the list of
    nupic.data.fieldmeta.FieldMetaInfo objects supplied at initialization due
    to the transcoding of some input fields into meta-fields, such as
    datetime -> dayOfWeek, timeOfDay, etc.
    """
    return tuple(fieldmeta.FieldMetaInfo(*args) for args in
                 itertools.izip(
                     self._fieldNames, self._fieldTypes,
                     itertools.repeat(fieldmeta.FieldMetaSpecial.none)))

  def getRuntimeStats(self):
    """Get the runtime statistics specific to the model.

    Returns:
      A dict mapping statistic names to values (none for the PVM).
    """
    return dict()

  def _getLogger(self):
    """Get the logger created by this subclass.

    Returns:
      A logging.Logger object. Should not be None.
    """
    return self._logger

  def resetSequenceStates(self):
    """Called to indicate the start of a new sequence.

    The next call to run should not perform learning.
    """
    self._reset = True

  def __getstate__(self):
    # The logger is not picklable; drop it and recreate it on restore.
    del self._logger
    return self.__dict__

  def __setstate__(self, state):
    # Bug fix: __setstate__ must accept the pickled state (the dict returned
    # by __getstate__) and restore it; the original signature took no
    # argument, so unpickling raised TypeError.
    self.__dict__.update(state)
    self._logger = opfutils.initLogger(self)
|
gsnbng/erpnext | refs/heads/develop | erpnext/accounts/doctype/cash_flow_mapping/test_cash_flow_mapping.py | 19 | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestCashFlowMapping(unittest.TestCase):
	"""Tests for the Cash Flow Mapping doctype validation rules."""

	def setUp(self):
		if frappe.db.exists("Cash Flow Mapping", "Test Mapping"):
			# Bug fix: the DocType name was misspelled ('Cash Flow Mappping'),
			# so the stale test document was never actually deleted here.
			frappe.delete_doc('Cash Flow Mapping', 'Test Mapping')

	def tearDown(self):
		frappe.delete_doc('Cash Flow Mapping', 'Test Mapping')

	def test_multiple_selections_not_allowed(self):
		"""Selecting more than one mapping flag must raise ValidationError."""
		doc = frappe.new_doc('Cash Flow Mapping')
		doc.mapping_name = 'Test Mapping'
		doc.label = 'Test label'
		doc.append(
			'accounts',
			{'account': 'Accounts Receivable - _TC'}
		)
		doc.is_working_capital = 1
		doc.is_finance_cost = 1

		self.assertRaises(frappe.ValidationError, doc.insert)

		# With a single flag set the document inserts cleanly.
		doc.is_finance_cost = 0
		doc.insert()
|
liyy7/scrapy | refs/heads/master | scrapy/utils/ossignal.py | 204 |
from __future__ import absolute_import
from twisted.internet import reactor
import signal
signal_names = {}
for signame in dir(signal):
if signame.startswith("SIG"):
signum = getattr(signal, signame)
if isinstance(signum, int):
signal_names[signum] = signame
def install_shutdown_handlers(function, override_sigint=True):
    """Install the given function as a signal handler for all common shutdown
    signals (such as SIGINT, SIGTERM, etc). If override_sigint is ``False`` the
    SIGINT handler won't be installed if there is already a handler in place
    (e.g. Pdb)
    """
    # NOTE(review): reactor._handleSignals is a private Twisted API; it
    # appears to install Twisted's own handlers before ours override them
    # below -- confirm against the pinned Twisted version.
    reactor._handleSignals()
    signal.signal(signal.SIGTERM, function)
    # Leave a non-default SIGINT handler (e.g. Pdb's) alone unless overriding.
    if signal.getsignal(signal.SIGINT) == signal.default_int_handler or \
            override_sigint:
        signal.signal(signal.SIGINT, function)
    # Catch Ctrl-Break in windows
    if hasattr(signal, "SIGBREAK"):
        signal.signal(signal.SIGBREAK, function)
|
peterbraden/tensorflow | refs/heads/master | tensorflow/python/tools/freeze_graph_test.py | 14 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests the graph freezing tool."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from tensorflow.python.framework import test_util
from tensorflow.python.tools import freeze_graph
class FreezeGraphTest(test_util.TensorFlowTestCase):
  """End-to-end test for the freeze_graph tool: build a graph with a
  variable, checkpoint it, freeze it, and verify the frozen graph."""

  def testFreezeGraph(self):
    """Freezing folds the variable into a constant and preserves output."""
    checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
    checkpoint_state_name = "checkpoint_state"
    input_graph_name = "input_graph.pb"
    output_graph_name = "output_graph.pb"

    # We'll create an input graph that has a single variable containing 1.0,
    # and that then multiplies it by 2.
    with tf.Graph().as_default():
      variable_node = tf.Variable(1.0, name="variable_node")
      output_node = tf.mul(variable_node, 2.0, name="output_node")
      sess = tf.Session()
      init = tf.initialize_all_variables()
      sess.run(init)
      output = sess.run(output_node)
      self.assertNear(2.0, output, 0.00001)
      saver = tf.train.Saver()
      saver.save(sess, checkpoint_prefix, global_step=0,
                 latest_filename=checkpoint_state_name)
      tf.train.write_graph(sess.graph.as_graph_def(), self.get_temp_dir(),
                           input_graph_name)

    # We save out the graph to disk, and then call the const conversion
    # routine.
    input_graph_path = os.path.join(self.get_temp_dir(), input_graph_name)
    input_saver_def_path = ""
    input_binary = False
    # "-0" suffix comes from global_step=0 passed to saver.save() above.
    input_checkpoint_path = checkpoint_prefix + "-0"
    output_node_names = "output_node"
    restore_op_name = "save/restore_all"
    filename_tensor_name = "save/Const:0"
    output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
    clear_devices = False

    freeze_graph.freeze_graph(input_graph_path, input_saver_def_path,
                              input_binary, input_checkpoint_path,
                              output_node_names, restore_op_name,
                              filename_tensor_name, output_graph_path,
                              clear_devices, "")

    # Now we make sure the variable is now a constant, and that the graph still
    # produces the expected result.
    with tf.Graph().as_default():
      output_graph_def = tf.GraphDef()
      with open(output_graph_path, "rb") as f:
        output_graph_def.ParseFromString(f.read())
        _ = tf.import_graph_def(output_graph_def, name="")

      # 4 nodes expected: the frozen constant, mul's second operand,
      # the mul op itself, and the import placeholder bookkeeping.
      self.assertEqual(4, len(output_graph_def.node))
      for node in output_graph_def.node:
        self.assertNotEqual("Variable", node.op)

      with tf.Session() as sess:
        output_node = sess.graph.get_tensor_by_name("output_node:0")
        output = sess.run(output_node)
        self.assertNear(2.0, output, 0.00001)
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  tf.test.main()
|
eepalms/gem5-newcache | refs/heads/master | src/arch/sparc/SparcInterrupts.py | 69 | # Copyright (c) 2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
from m5.SimObject import SimObject
class SparcInterrupts(SimObject):
    """SimObject parameter class exposing the SPARC interrupt controller
    to the m5 configuration system."""
    type = 'SparcInterrupts'
    # C++ class this SimObject instantiates, and the header declaring it.
    cxx_class = 'SparcISA::Interrupts'
    cxx_header = 'arch/sparc/interrupts.hh'
|
mcdonc/buildit | refs/heads/master | __init__.py | 35 | # this is a package
|
angrylogic/posixqueue | refs/heads/master | setup.py | 1 | #!/usr/bin/python
from distutils.core import setup
# Minimal packaging metadata for the posixqueue wrapper.
# NOTE(review): distutils is deprecated (removed in Python 3.12); consider
# migrating to setuptools when the supported Python range allows.
setup(name="posixqueue", version="1.0",
      description="Python wrapper for POSIX message queue interface",
      author="Greg Harris", author_email="gharris@angrylogic.net",
      packages=["posixqueue"])
|
ddboline/pylearn2 | refs/heads/master | pylearn2/testing/skip.py | 49 | """
Helper functions for determining which tests to skip.
"""
# Module authorship metadata.
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
# Fixed: the address was missing its ".com" TLD and was not a valid email.
__email__ = "pylearn-dev@googlegroups.com"
from nose.plugins.skip import SkipTest
import os
from theano.sandbox import cuda
# Feature-availability probes: attempt each optional import once at module
# load time and record whether it succeeded.  The skip_if_no_* helpers
# below consult these flags.
scipy_works = True
try:
    import scipy
except ImportError:
    # pyflakes gets mad if you set scipy to None here
    scipy_works = False

sklearn_works = True
try:
    import sklearn
except ImportError:
    sklearn_works = False

h5py_works = True
try:
    import h5py
except ImportError:
    h5py_works = False

matplotlib_works = True
try:
    from matplotlib import pyplot
except ImportError:
    matplotlib_works = False
def skip_if_no_data():
    """Raise SkipTest when the PYLEARN2_DATA_PATH environment variable is unset."""
    if os.environ.get('PYLEARN2_DATA_PATH') is None:
        raise SkipTest()
def skip_if_no_scipy():
    """Raise SkipTest unless the optional scipy import succeeded."""
    if scipy_works:
        return
    raise SkipTest()
def skip_if_no_sklearn():
    """Raise SkipTest unless the optional sklearn import succeeded."""
    if sklearn_works:
        return
    raise SkipTest()
def skip_if_no_gpu():
    """Raise SkipTest unless theano's CUDA backend is available.

    Idiom fix: truthiness test instead of ``== False`` (PEP 8 / E712).
    ``cuda.cuda_available`` is set to True/False by theano at import time.
    """
    if not cuda.cuda_available:
        raise SkipTest('Optional package cuda disabled.')
def skip_if_no_h5py():
    """Raise SkipTest unless the optional h5py import succeeded."""
    if h5py_works:
        return
    raise SkipTest()
def skip_if_no_matplotlib():
    """Raise SkipTest unless matplotlib's pyplot imported successfully."""
    if matplotlib_works:
        return
    raise SkipTest("matplotlib and pyplot are not available")
|
xiaq/jadepy | refs/heads/master | jade/runtime.py | 1 | def _jade_class(classes):
if isinstance(classes, list):
return u' '.join(classes)
return classes
|
micronicstraining/python | refs/heads/master | module_2/overview.py | 1 | #! /usr/bin/env python3
# Teaching script: a guided walk-through of Python's core built-in types.
# Bare literal expressions further below are illustrative no-ops.

# nonetype
none_object = None

# bools
bool_object_true = True
bool_object_false = False

# if False:
#     print('This is a basic if branch')
# elif True:
#     print('This is a basic else if branch')
# else:
#     print('this is a basic else branch')

bool_not_example = not True
print(bool_not_example)

bool_and_example = True and True
# a b | a and b
# F F | F
# F T | F
# T F | F
# T T | T

bool_or_example = False or False
# a b | a or b
# F F | F
# F T | T
# T F | T
# T T | T

# integers - integral types
int_5 = 5
int_neg_5 = -5

# strings
# collections
# sequences & non-sequence (ordered?)
# immutable vs mutable?
print('This is a sentence in unicode! ' + 'I ❤ Unicode')

# Indexing and slicing demos on a str.
x = 'This is a sentence. This is another sentence'
print(x[0])
print(x[1])
print(len(x))
print(x[17])
print(x[len(x)-1])
print(x[-2])
print(x[0:4])
print(x[0:3])
print('0123456789'[2:-2])
print(x.split(' '))
print(x.split('.')[0])

# list
my_list = [None, True, False, 'hello', 5, 13+4, [1,2,3, [1,2,3]]]
my_list.append('hello world')
print(id(my_list))
print(my_list)
my_list.append(2**10)
print(my_list)
# Note: id() is unchanged -- append mutates the same list object.
print(id(my_list))
print(type(my_list))

# for mutable types, this list will be copied
my_list2 = my_list[:]
print(id(my_list))
print(id(my_list2))
my_list.append('something else')
print(my_list2)

# merge lists
list1 = [1,2,3]
list2 = [3,4,5]
list3 = list1 + list2
print(list3)

# slices lists
print(list3[1:-1:2])
for e in list3:
    print(e, e**2)

# tuple
tuple1 = (1,2,3)
tuple2 = (3,4,5)
tuple3 = tuple1 + tuple2
print(tuple3)
print(type(tuple3))
lat_long = (34.1234, -74.2342)
print(lat_long[0])
print(lat_long[1])
# Tuple unpacking, including starred (extended) unpacking.
lat1, long1 = (32.234234, -70.23423)
print(lat1)
print(long1)
lat1, *_ = (32.23423, -70.234234, 12342)
print(lat1)
print(_)

# zip- built in function
l1 = [0,1,2]
l2 = ['a','b','c']
print(list(zip(l1, l2)))
for index, letter in enumerate(l2):
    print(index, letter)

# set
# unordered = not sequence
# collection
#
# lists - mutable
[]
# problems with mutability
a = [1,2,3]
b = a
a.append(4)
# now b is also 1,2,3,4!
# How can we copy the list a to b ?
# tuples - immutable
()
# sets - mutable - unordered
{1, 2, 3}

# dictionaries (keys, value) - mutable - unordered
dict1 = {
    'hello': 'a',
    'goodbye': 'b',
    'test': 'c'
}
print(dict1['hello'])
print(dict1['goodbye'])
print(dict1['test'])

# Four equivalent ways to construct the same dict.
a = dict(one=1, two=2, three=3)
b = {'one': 1, 'two': 2, 'three': 3}
c = dict(zip(['one', 'two', 'three'], [1, 2, 3]))
d = dict([('two', 2), ('one', 1), ('three', 3)])
# should be true
assert (a == b == c == d)
|
tommo/gii | refs/heads/master | support/waf/waflib/extras/package.py | 2 | #! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2011
"""
Obtain packages, unpack them in a location, and add associated uselib variables
(CFLAGS_pkgname, LIBPATH_pkgname, etc).
The default is use a Dependencies.txt file in the source directory.
This is a work in progress.
Usage:
def options(opt):
opt.load('package')
def configure(conf):
conf.load_packages()
"""
from waflib import Logs
from waflib.Configure import conf
try:
from urllib import request
except:
from urllib import urlopen
else:
urlopen = request.urlopen
CACHEVAR = 'WAFCACHE_PACKAGE'
@conf
def get_package_cache_dir(self):
	"""Return (creating it if needed) the node of the directory where
	downloaded packages are cached.

	Resolution order: the WAFCACHE_PACKAGE environment variable, then the
	configuration value of the same name, then ``.wafcache_package`` in
	the source tree.
	"""
	cache = None
	# Bug fix: the original read ``conf.environ`` -- but ``conf`` here is
	# the decorator function from waflib.Configure, which has no
	# ``environ`` attribute.  The process environment lives on the
	# configuration context, i.e. ``self.environ``.
	if CACHEVAR in self.environ:
		cache = self.environ[CACHEVAR]
		cache = self.root.make_node(cache)
	elif self.env[CACHEVAR]:
		cache = self.env[CACHEVAR]
		cache = self.root.make_node(cache)
	else:
		cache = self.srcnode.make_node('.wafcache_package')
	cache.mkdir()
	return cache
@conf
def download_archive(self, src, dst):
	"""Download *src* from the first configured package repository that
	serves it and write the payload to the node path *dst*.  Calls
	``self.fatal`` when no repository provides the file."""
	for x in self.env.PACKAGE_REPO:
		url = '/'.join((x, src))
		try:
			web = urlopen(url)
			try:
				if web.getcode() != 200:
					continue
			except AttributeError:
				pass
		except Exception:
			# on python3 urlopen throws an exception
			# python 2.3 does not have getcode and throws an exception to fail
			continue
		else:
			tmp = self.root.make_node(dst)
			tmp.write(web.read())
			Logs.warn('Downloaded %s from %s' % (tmp.abspath(), url))
			break
	else:
		# for/else: reached only when no repository satisfied the request
		self.fatal('Could not get the package %s' % src)
@conf
def load_packages(self):
	"""Entry point used as ``conf.load_packages()``; currently only
	ensures the package cache directory exists (work in progress)."""
	cache = self.get_package_cache_dir()
	# read the dependencies, get the archives, ..
|
kevalds51/sympy | refs/heads/master | sympy/unify/tests/test_rewrite.py | 59 | from sympy.unify.rewrite import rewriterule
from sympy import sin, Basic, Symbol, S
from sympy.abc import x, y
from sympy.strategies.rl import rebuild
from sympy.assumptions import Q
# Pattern-variable symbols reused by the rewrite-rule tests below.
p, q = Symbol('p'), Symbol('q')
def test_simple():
    """A rewrite rule built from Basic (and Pow) patterns substitutes the
    matched wildcard into the target expression."""
    rl = rewriterule(Basic(p, 1), Basic(p, 2), variables=(p,))
    assert list(rl(Basic(3, 1))) == [Basic(3, 2)]

    p1 = p**2
    p2 = p**3
    rl = rewriterule(p1, p2, variables=(p,))

    expr = x**2
    assert list(rl(expr)) == [x**3]
def test_simple_variables():
    """Ordinary symbols (here ``x``) can serve as the rule's wildcards."""
    rl = rewriterule(Basic(x, 1), Basic(x, 2), variables=(x,))
    assert list(rl(Basic(3, 1))) == [Basic(3, 2)]

    rl = rewriterule(x**2, x**3, variables=(x,))
    assert list(rl(y**2)) == [y**3]
def test_moderate():
    """Two wildcards are matched and substituted simultaneously."""
    p1 = p**2 + q**3
    p2 = (p*q)**4
    rl = rewriterule(p1, p2, (p, q))

    expr = x**2 + y**3
    assert list(rl(expr)) == [(x*y)**4]
def test_sincos():
    """A rule over trigonometric sub-expressions fires for any symbol
    bound to the wildcard."""
    p1 = sin(p)**2 + sin(p)**2
    p2 = 1
    rl = rewriterule(p1, p2, (p, q))

    assert list(rl(sin(x)**2 + sin(x)**2)) == [1]
    assert list(rl(sin(y)**2 + sin(y)**2)) == [1]
def test_Exprs_ok():
    """Rewrite results are usable Expr objects: assumption attributes work
    and they can be printed without error."""
    rl = rewriterule(p+q, q+p, (p, q))
    next(rl(x+y)).is_commutative
    str(next(rl(x+y)))
def test_condition_simple():
    """A rule with a condition only fires when the predicate holds."""
    rl = rewriterule(x, x+1, [x], lambda x: x < 10)

    assert not list(rl(S(15)))
    assert rebuild(next(rl(S(5)))) == 6
def test_condition_multiple():
    """A predicate over several wildcards restricts which bindings fire;
    when both orderings satisfy it, both results are produced."""
    rl = rewriterule(x + y, x**y, [x,y], lambda x, y: x.is_integer)

    a = Symbol('a')
    b = Symbol('b', integer=True)
    expr = a + b
    assert list(rl(expr)) == [b**a]

    c = Symbol('c', integer=True)
    d = Symbol('d', integer=True)
    assert set(rl(c + d)) == set([c**d, d**c])
def test_assumptions():
    """Assumptions supplied at call time guide which wildcard binding fires."""
    rl = rewriterule(x + y, x**y, [x, y], assume=Q.integer(x))
    a, b = map(Symbol, 'ab')
    expr = a + b
    assert list(rl(expr, Q.integer(b))) == [b**a]
|
bradparks/csvkit__query_join_filter_CSV_cli | refs/heads/master | csvkit/headers.py | 21 | #!/usr/bin/env python
def make_default_headers(n):
    """
    Make a set of simple, default headers for files that are missing them.
    """
    headers = []
    for column_number in range(1, n + 1):
        headers.append('column%i' % column_number)
    return headers
|
alexmorozov/django | refs/heads/master | django/conf/urls/static.py | 336 | import re
from django.conf import settings
from django.conf.urls import url
from django.core.exceptions import ImproperlyConfigured
from django.views.static import serve
def static(prefix, view=serve, **kwargs):
    """
    Helper function to return a URL pattern for serving files in debug mode.

    from django.conf import settings
    from django.conf.urls.static import static

    urlpatterns = [
        # ... the rest of your URLconf goes here ...
    ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
    """
    # No-op if not in debug mode or a non-local prefix (a fully-qualified
    # "scheme://..." URL is served by another host, not by this view).
    if not settings.DEBUG or (prefix and '://' in prefix):
        return []
    elif not prefix:
        raise ImproperlyConfigured("Empty static prefix not permitted")
    return [
        url(r'^%s(?P<path>.*)$' % re.escape(prefix.lstrip('/')), view, kwargs=kwargs),
    ]
|
sckott/pyobis | refs/heads/master | pyobis/obisissues.py | 1 | import re
def occ_issues_lookup(issue=None, code=None):
    '''
    Lookup occurrence issue definitions and short codes

    :param issue: Full name of issue, e.g, CONTINENT_COUNTRY_MISMATCH
    :param code: an issue short code, e.g. ccm

    Usage
    pygbif.occ_issues_lookup(issue = 'CONTINENT_COUNTRY_MISMATCH')
    pygbif.occ_issues_lookup(issue = 'MULTIMEDIA_DATE_INVALID')
    pygbif.occ_issues_lookup(issue = 'ZERO_COORDINATE')
    pygbif.occ_issues_lookup(code = 'cdiv')
    '''
    # Match the query (as a regex prefix) against every known full issue
    # name, unless a short code was given, in which case match codes.
    # NOTE(review): on Python 3 ``filter`` returns a lazy iterator rather
    # than a list -- confirm callers expect that.  The docstring examples
    # mention pygbif; presumably copied from that project -- verify.
    if code is None:
        bb = [trymatch(issue, x) for x in gbifissues['issue'] ]
        tmp = filter(None, bb)
    else:
        bb = [trymatch(code, x) for x in gbifissues['code'] ]
        tmp = filter(None, bb)
    return tmp
def trymatch(pattern, string):
    """Return *string* when *pattern* matches at its beginning
    (``re.match`` semantics), otherwise ``None``."""
    match = re.match(pattern, string)
    return None if match is None else match.string
# Parallel lookup table of GBIF occurrence issues: for every index i,
# gbifissues['code'][i], gbifissues['issue'][i] and
# gbifissues['description'][i] describe the same issue.
gbifissues = {
    'code': ["bri", "ccm", "cdc", "conti", "cdiv",
        "cdout", "cdrep", "cdrepf", "cdreps", "cdround", "cucdmis", "cudc",
        "cuiv", "cum", "depmms", "depnn", "depnmet", "depunl", "elmms",
        "elnn", "elnmet", "elunl", "gass84", "gdativ", "iddativ", "iddatunl",
        "mdativ", "mdatunl", "muldativ", "muluriiv", "preneglat", "preneglon",
        "preswcd", "rdativ", "rdatm", "rdatunl", "refuriiv", "txmatfuz",
        "txmathi", "txmatnon", "typstativ", "zerocd"],
    'issue': ["BASIS_OF_RECORD_INVALID",
        "CONTINENT_COUNTRY_MISMATCH", "CONTINENT_DERIVED_FROM_COORDINATES",
        "CONTINENT_INVALID", "COORDINATE_INVALID", "COORDINATE_OUT_OF_RANGE",
        "COORDINATE_REPROJECTED", "COORDINATE_REPROJECTION_FAILED", "COORDINATE_REPROJECTION_SUSPICIOUS",
        "COORDINATE_ROUNDED", "COUNTRY_COORDINATE_MISMATCH", "COUNTRY_DERIVED_FROM_COORDINATES",
        "COUNTRY_INVALID", "COUNTRY_MISMATCH", "DEPTH_MIN_MAX_SWAPPED",
        "DEPTH_NON_NUMERIC", "DEPTH_NOT_METRIC", "DEPTH_UNLIKELY", "ELEVATION_MIN_MAX_SWAPPED",
        "ELEVATION_NON_NUMERIC", "ELEVATION_NOT_METRIC", "ELEVATION_UNLIKELY",
        "GEODETIC_DATUM_ASSUMED_WGS84", "GEODETIC_DATUM_INVALID", "IDENTIFIED_DATE_INVALID",
        "IDENTIFIED_DATE_UNLIKELY", "MODIFIED_DATE_INVALID", "MODIFIED_DATE_UNLIKELY",
        "MULTIMEDIA_DATE_INVALID", "MULTIMEDIA_URI_INVALID", "PRESUMED_NEGATED_LATITUDE",
        "PRESUMED_NEGATED_LONGITUDE", "PRESUMED_SWAPPED_COORDINATE",
        "RECORDED_DATE_INVALID", "RECORDED_DATE_MISMATCH", "RECORDED_DATE_UNLIKELY",
        "REFERENCES_URI_INVALID", "TAXON_MATCH_FUZZY", "TAXON_MATCH_HIGHERRANK",
        "TAXON_MATCH_NONE", "TYPE_STATUS_INVALID", "ZERO_COORDINATE"],
    'description': ["The given basis of record is impossible to interpret or seriously different from the recommended vocabulary.",
        "The interpreted continent and country do not match up.",
        "The interpreted continent is based on the coordinates, not the verbatim string information.",
        "Uninterpretable continent values found.", "Coordinate value given in some form but GBIF is unable to interpret it.",
        "Coordinate has invalid lat/lon values out of their decimal max range.",
        "The original coordinate was successfully reprojected from a different geodetic datum to WGS84.",
        "The given decimal latitude and longitude could not be reprojected to WGS84 based on the provided datum.",
        "Indicates successful coordinate reprojection according to provided datum, but which results in a datum shift larger than 0.1 decimal degrees.",
        "Original coordinate modified by rounding to 5 decimals.",
        "The interpreted occurrence coordinates fall outside of the indicated country.",
        "The interpreted country is based on the coordinates, not the verbatim string information.",
        "Uninterpretable country values found.", "Interpreted country for dwc:country and dwc:countryCode contradict each other.",
        "Set if supplied min>max", "Set if depth is a non numeric value",
        "Set if supplied depth is not given in the metric system, for example using feet instead of meters",
        "Set if depth is larger than 11.000m or negative.", "Set if supplied min > max elevation",
        "Set if elevation is a non numeric value", "Set if supplied elevation is not given in the metric system, for example using feet instead of meters",
        "Set if elevation is above the troposphere (17km) or below 11km (Mariana Trench).",
        "Indicating that the interpreted coordinates assume they are based on WGS84 datum as the datum was either not indicated or interpretable.",
        "The geodetic datum given could not be interpreted.", "The date given for dwc:dateIdentified is invalid and cant be interpreted at all.",
        "The date given for dwc:dateIdentified is in the future or before Linnean times (1700).",
        "A (partial) invalid date is given for dc:modified, such as a non existing date, invalid zero month, etc.",
        "The date given for dc:modified is in the future or predates unix time (1970).",
        "An invalid date is given for dc:created of a multimedia object.",
        "An invalid uri is given for a multimedia object.", "Latitude appears to be negated, e.g. 32.3 instead of -32.3",
        "Longitude appears to be negated, e.g. 32.3 instead of -32.3",
        "Latitude and longitude appear to be swapped.", "A (partial) invalid date is given, such as a non existing date, invalid zero month, etc.",
        "The recording date specified as the eventDate string and the individual year, month, day are contradicting.",
        "The recording date is highly unlikely, falling either into the future or represents a very old date before 1600 that predates modern taxonomy.",
        "An invalid uri is given for dc:references.", "Matching to the taxonomic backbone can only be done using a fuzzy, non exact match.",
        "Matching to the taxonomic backbone can only be done on a higher rank and not the scientific name.",
        "Matching to the taxonomic backbone cannot be done cause there was no match at all or several matches with too little information to keep them apart (homonyms).",
        "The given type status is impossible to interpret or seriously different from the recommended vocabulary.",
        "Coordinate is the exact 0/0 coordinate, often indicating a bad null coordinate."]
}
|
yantrabuddhi/blocos | refs/heads/master | tabs/ConsoleTab.py | 1 | # -*- coding: utf-8 -*-
# Este arquivo é parte do programa Monitor
# Monitor é um software livre; você pode redistribui-lo e/ou
# modifica-lo dentro dos termos da Licença Pública Geral GNU como
# publicada pela Fundação do Software Livre (FSF); na versão 3 da
# Licença, ou (na sua opinião) qualquer versão.
#
# Este programa é distribuido na esperança que possa ser util,
# mas SEM NENHUMA GARANTIA; sem uma garantia implicita de ADEQUAÇÂO a qualquer
# MERCADO ou APLICAÇÃO EM PARTICULAR. Veja a
# Licença Pública Geral GNU para maiores detalhes.
#
# Você deve ter recebido uma cópia da Licença Pública Geral GNU
# junto com este programa, se não, escreva para a Fundação do Software
# Livre(FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# Centro de Tecnologia da Informação Renato Archer, Campinas-SP, Brasil
# Projeto realizado com fundos do Conselho Nacional de Desenvolvimento Científico e Tecnológico (CNPQ)
# Esse código faz parte do projeto BR-Gogo, disponível em http://sourceforge.net/projects/br-gogo/
try:
from kiwi import tasklet
except ImportError:
print 'Kiwi precisa ser instalado:'
print "http://ftp.gnome.org/pub/GNOME/binaries/win32/kiwi/kiwi-1.9.21.win32.exe"
raw_input()
from Tab import Tab
class ConsoleTab(Tab):
    """GUI tab giving direct console control of the GoGo board: motor
    commands, PWM duty configuration and live sensor read-out.

    All board I/O goes through ``self.Comunic``; widget callbacks are wired
    by name from the Glade definition.  Status messages shown to the user
    are in Portuguese and must stay as-is.
    """

    def __init__(self,gui,Comunic,statusbar,liststoreSensorsTypes,sensorTypes):
        """Look up all widgets from ``gui`` and initialise motor/sensor state."""
        ### Board Console
        self.Comunic=Comunic
        self.statusbar=statusbar
        self.gui=gui
        self.sensorTypes=sensorTypes
        self.motorA=gui.get_widget("checkbuttonMotorA")
        self.motorB=gui.get_widget("checkbuttonMotorB")
        self.motorC=gui.get_widget("checkbuttonMotorC")
        self.motorD=gui.get_widget("checkbuttonMotorD")
        self.motorPowerWidget=gui.get_widget("spinbuttonMotorPower")
        self.entryMinPwmDuty=gui.get_widget("entryMinPwmDuty")
        self.entryMaxPwmDuty=gui.get_widget("entryMaxPwmDuty")
        self.hscalePwmDuty=gui.get_widget("hscalePwmDuty")

        # Good limits for servos
        self.minDuty = 20
        self.maxDuty = 45
        self.hscalePwmDuty.set_range(self.minDuty,self.maxDuty)
        self.motorsActivated=""
        self.buttonSetPwmDuty=gui.get_widget("buttonSetPwmDuty")
        self.radiobuttonBurstFast=gui.get_widget("radiobuttonBurstFast")
        self.radiobuttonBurstSlow=gui.get_widget("radiobuttonBurstSlow")
        self.entryRefreshRate=gui.get_widget("entryRefreshRate")
        # Per-sensor widget tuples, index 0..7 == sensor 1..8.
        self.sensorBars=(
            gui.get_widget("progressbar1"),
            gui.get_widget("progressbar2"),
            gui.get_widget("progressbar3"),
            gui.get_widget("progressbar4"),
            gui.get_widget("progressbar5"),
            gui.get_widget("progressbar6"),
            gui.get_widget("progressbar7"),
            gui.get_widget("progressbar8"),
        )
        self.entrySensors=(
            gui.get_widget("entrySensor1"),
            gui.get_widget("entrySensor2"),
            gui.get_widget("entrySensor3"),
            gui.get_widget("entrySensor4"),
            gui.get_widget("entrySensor5"),
            gui.get_widget("entrySensor6"),
            gui.get_widget("entrySensor7"),
            gui.get_widget("entrySensor8"),
        )
        self.checkbuttonSensors=(
            gui.get_widget("checkbuttonSensor1"),
            gui.get_widget("checkbuttonSensor2"),
            gui.get_widget("checkbuttonSensor3"),
            gui.get_widget("checkbuttonSensor4"),
            gui.get_widget("checkbuttonSensor5"),
            gui.get_widget("checkbuttonSensor6"),
            gui.get_widget("checkbuttonSensor7"),
            gui.get_widget("checkbuttonSensor8")
        )
        self.comboboxSensors=(
            gui.get_widget("comboboxSensor1"),
            gui.get_widget("comboboxSensor2"),
            gui.get_widget("comboboxSensor3"),
            gui.get_widget("comboboxSensor4"),
            gui.get_widget("comboboxSensor5"),
            gui.get_widget("comboboxSensor6"),
            gui.get_widget("comboboxSensor7"),
            gui.get_widget("comboboxSensor8")
        )
        for i in self.comboboxSensors:
            i.set_model(liststoreSensorsTypes)
        self.sensorValues=[0]*8
        self.burstModeOnOff=False
        self.refreshMode=False
        self.refreshRate=1000
        ###/Board Console

    def taskleted(func):
        """Decorator: run the wrapped generator as a kiwi Tasklet."""
        def new(*args,**kwargs):
            tasklet.Tasklet(func(*args,**kwargs))
        return new

    @taskleted
    def showStatusMsg(self,context,msg):
        """Push *msg* on the statusbar and remove it again after one second."""
        context_id=self.statusbar.get_context_id(context)
        msg_id=self.statusbar.push(context_id,msg)
        timeout=tasklet.WaitForTimeout(1000)
        yield timeout
        self.statusbar.remove(context_id,msg_id)
        tasklet.get_event()

    def setDisconnected(self):
        """Report loss of communication with the board and close the port."""
        self.showWarning("Gogo desconectada")
        self.statusbar.push(0,"Gogo desconectada")
        self.Comunic.closePort()

    def buttonBeep_clicked_cb(self, widget):
        try:
            self.Comunic.beep()
            self.showStatusMsg("Misc","Beep")
        except:
            self.setDisconnected()

    def buttonLedOn_clicked_cb(self, widget):
        try:
            self.Comunic.ledOn()
            self.showStatusMsg("Misc","Led On")
        except:
            self.setDisconnected()

    def buttonLedOff_clicked_cb(self, widget):
        try:
            self.Comunic.ledOff()
            self.showStatusMsg("Misc","Led Off")
        except:
            self.setDisconnected()

    def buttonRun_clicked_cb(self, widget):
        try:
            self.Comunic.run()
            self.showStatusMsg("Cmd","Run")
        except:
            self.setDisconnected()

    def checkbuttonMotor_toggled_cb(self,widget):
        # Build the set of selected motors ("a".."d") and tell the board
        # which ones subsequent motor commands should address.
        m=""
        if self.motorA.get_active():
            m=m+'a'
        if self.motorB.get_active():
            m=m+'b'
        if self.motorC.get_active():
            m=m+'c'
        if self.motorD.get_active():
            m=m+'d'
        try:
            self.Comunic.talkToMotor(m)
            self.motorsActivated=m
            self.showStatusMsg("Motor","Controlar motores: "+m)
        except:
            # NOTE(review): the failure branch shows the same success
            # message instead of calling setDisconnected() -- confirm
            # whether that is intentional.
            self.showStatusMsg("Motor","Controlar motores: "+m)

    def buttonMotorControlOn_clicked_cb(self, widget):
        try:
            self.Comunic.motorOn()
            self.showStatusMsg("Motor","Motores "+self.motorsActivated+" Ligados")
        except:
            self.setDisconnected()

    def buttonMotorControlOff_clicked_cb(self, widget):
        try:
            self.Comunic.motorOff()
            self.showStatusMsg("Motor","Motores "+self.motorsActivated+" Desligados")
        except:
            self.setDisconnected()

    def buttonMotorControlBreak_clicked_cb(self, widget):
        try:
            self.Comunic.motorBreak()
            self.showStatusMsg("Motor","Motores "+self.motorsActivated+" Brecados")
        except:
            self.setDisconnected()

    def buttonMotorControlCoast_clicked_cb(self, widget):
        try:
            self.Comunic.motorCoast()
            self.showStatusMsg("Motor","Motores "+self.motorsActivated+" Parados")
        except:
            self.setDisconnected()

    def buttonPowerSet_clicked_cb(self, widget):
        try:
            power=self.motorPowerWidget.get_value_as_int()
            self.Comunic.setMotorPower(power)
            self.showStatusMsg("Motor","Pontência dos Motores "+self.motorsActivated+" definida para "+str(power))
        except:
            self.setDisconnected()

    def buttonMotorControlThisway_clicked_cb(self, widget):
        try:
            self.Comunic.motorThisway()
            self.showStatusMsg("Motor","Motores "+self.motorsActivated+" para lá")
        except:
            self.setDisconnected()

    def buttonMotorControlThatway_clicked_cb(self, widget):
        try:
            self.Comunic.motorThatway()
            self.showStatusMsg("Motor","Motores "+self.motorsActivated+" para cá")
        except:
            self.setDisconnected()

    def buttonMotorControlReverse_clicked_cb(self, widget):
        try:
            self.Comunic.motorReverse()
            self.showStatusMsg("Motor","Motores "+self.motorsActivated+" revertidos")
        except:
            self.setDisconnected()

    def entryMinPwmDuty_changed_cb(self,widget):
        # Clamp the new minimum into [0, maxDuty) before updating the scale.
        try:
            self.minDuty=int(widget.get_text())
        except:
            return
        else:
            if self.minDuty>self.maxDuty:
                self.minDuty=self.maxDuty-1
            if self.minDuty<0:
                self.minDuty=0
            try:
                self.hscalePwmDuty.set_range(self.minDuty,self.maxDuty)
            except:
                print self.minDuty,self.maxDuty

    def entryMaxPwmDuty_changed_cb(self,widget):
        # Clamp the new maximum into (minDuty, 255] before updating the scale.
        try:
            self.maxDuty=int(widget.get_text())
        except:
            return
        else:
            if self.maxDuty<self.minDuty:
                self.maxDuty=self.minDuty+1
            if self.maxDuty>255:
                self.maxDuty=255
            try:
                self.hscalePwmDuty.set_range(self.minDuty,self.maxDuty)
            except:
                print self.minDuty,self.maxDuty

    def buttonSetPwmDuty_clicked_cb(self,widget):
        try:
            duty=int(self.hscalePwmDuty.get_value())
            self.Comunic.setPwmDuty(duty)
            self.showStatusMsg("Motor","Motores "+self.motorsActivated+" de passo: "+str(duty))
        except:
            self.setDisconnected()

    def get_sensor_value(self,sensorNumber):
        """Read one sensor from the board; on failure stop the refresh loops.

        NOTE(review): returns None on failure (the except path falls
        through), while callers test ``value>-1`` -- works in Python 2
        only, where None compares less than numbers.
        """
        print 'get_sensor_value'
        try:
            return self.Comunic.readSensor(sensorNumber)
        except:
            self.burstModeOnOff = False
            self.refreshMode = False
            #self.showWarning("Gogo desconectada##")
            #self.statusbar.push(0,"Gogo desconectada")
            #return sensorNumber*100

    def get_sensor_text(self,sensorNumber,value):
        """Format *value* using the sensor-type selected in the combobox,
        falling back to the raw number when no type is selected."""
        stype=self.comboboxSensors[sensorNumber].get_active()
        if stype>=0:
            return self.sensorTypes[stype].get_text(value)
        else:
            return str(value)

    def buttonRefreshAll_clicked_cb(self,widget):
        # One-shot refresh of all eight sensors (value 0..1023 -> bar fraction).
        self.refreshMode = True
        for i in range(8):
            if self.refreshMode:
                value=self.get_sensor_value(i)
                if value>-1:
                    self.sensorValues[i]=value
                    self.entrySensors[i].set_text(self.get_sensor_text(i,value))
                    self.sensorBars[i].set_fraction(self.sensorValues[i]/1023.0)
        self.showStatusMsg("Sensor","Valor dos sensores atualizado")
        #print self.Comunic.readSensor(0)

    def refreshSensors(self):
        """Tasklet generator: poll the checked sensors every
        ``refreshRate`` ms while burst mode stays enabled."""
        while self.burstModeOnOff:
            timeout = tasklet.WaitForTimeout(self.refreshRate)
            for i in range(8):
                if self.checkbuttonSensors[i].get_active() and self.burstModeOnOff:
                    value=self.get_sensor_value(i)
                    if value>-1:
                        self.sensorValues[i]=value
                        self.entrySensors[i].set_text(self.get_sensor_text(i,value))
                        self.sensorBars[i].set_fraction(self.sensorValues[i]/1023.0)
            yield timeout
            tasklet.get_event()

    def burstMode(self):
        """Apply the fast (20 Hz) or slow (5 Hz) polling rate chosen by the
        radio buttons."""
        if self.radiobuttonBurstFast.get_active():
            self.entryRefreshRate.set_text("20")
            self.refreshRate=50
            self.showStatusMsg("Sensor","Leitura de sensores a 20hz")
        if self.radiobuttonBurstSlow.get_active():
            self.entryRefreshRate.set_text("5")
            self.refreshRate=200
            self.showStatusMsg("Sensor","Leitura de sensores a 5hz")

    def buttonSensorBurstOn_clicked_cb(self,widget):
        self.burstMode()
        self.burstModeOnOff=True
        tasklet.run(self.refreshSensors())

    def radiobuttonBurstFast_toggled_cb(self,widget):
        self.burstMode()

    def radiobuttonBurstSlow_toggled_cb(self,widget):
        self.burstMode()

    def buttonSensorBurstOff_clicked_cb(self,widget):
        self.entryRefreshRate.set_text("0")
        self.burstModeOnOff=False
        self.showStatusMsg("Sensor","Leitura de sensores desligada")
|
llvm-mirror/lldb | refs/heads/master | packages/Python/lldbsuite/test/functionalities/tsan/thread_numbers/TestTsanThreadNumbers.py | 5 | """
Tests that TSan and LLDB have correct thread numbers.
"""
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
import lldbsuite.test.lldbutil as lldbutil
import json
class TsanThreadNumbersTestCase(TestBase):
    """Checks that the thread IDs in TSan's data-race report match LLDB's
    own thread numbering for the same process."""

    mydir = TestBase.compute_mydir(__file__)

    @expectedFailureAll(
        oslist=["linux"],
        bugnumber="non-core functionality, need to reenable and fix later (DES 2014.11.07)")
    @expectedFailureNetBSD
    @skipIfFreeBSD # llvm.org/pr21136 runtimes not yet available by default
    @skipIfRemote
    @skipUnlessThreadSanitizer
    def test(self):
        self.build()
        self.tsan_tests()

    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)

    def tsan_tests(self):
        """Run the TSan-instrumented binary and cross-check report threads."""
        exe = self.getBuildArtifact("a.out")
        self.expect(
            "file " + exe,
            patterns=["Current executable set to .*a.out"])

        self.runCmd("run")

        stop_reason = self.dbg.GetSelectedTarget().process.GetSelectedThread().GetStopReason()
        if stop_reason == lldb.eStopReasonExec:
            # On OS X 10.10 and older, we need to re-exec to enable
            # interceptors.
            self.runCmd("continue")

        # the stop reason of the thread should be breakpoint.
        self.expect("thread list", "A data race should be detected",
                    substrs=['stopped', 'stop reason = Data race detected'])

        self.assertEqual(
            self.dbg.GetSelectedTarget().process.GetSelectedThread().GetStopReason(),
            lldb.eStopReasonInstrumentation)

        report_thread_id = self.dbg.GetSelectedTarget(
        ).process.GetSelectedThread().GetIndexID()

        self.expect(
            "thread info -s",
            "The extended stop info should contain the TSan provided fields",
            substrs=[
                "instrumentation_class",
                "description",
                "mops"])

        # "thread info -s" prints two header lines, then the JSON payload.
        output_lines = self.res.GetOutput().split('\n')
        json_line = '\n'.join(output_lines[2:])
        data = json.loads(json_line)
        self.assertEqual(data["instrumentation_class"], "ThreadSanitizer")
        self.assertEqual(data["issue_type"], "data-race")
        self.assertEqual(len(data["mops"]), 2)

        # One memory operation belongs to the reporting thread, the other
        # to a different, still-valid thread.
        self.assertEqual(data["mops"][0]["thread_id"], report_thread_id)
        other_thread_id = data["mops"][1]["thread_id"]
        self.assertTrue(other_thread_id != report_thread_id)

        other_thread = self.dbg.GetSelectedTarget(
        ).process.GetThreadByIndexID(other_thread_id)
        self.assertTrue(other_thread.IsValid())

        self.runCmd("thread select %d" % other_thread_id)

        self.expect(
            "thread backtrace",
            "The other thread should be stopped in f1 or f2",
            substrs=[
                "a.out",
                "main.c"])
|
furrtek/portapack-havoc | refs/heads/master | firmware/tools/extract_cpld_data.py | 2 | #!/usr/bin/env python
#
# Copyright (C) 2014 Jared Boone, ShareBrained Technology, Inc.
#
# This file is part of PortaPack.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# Very fragile code to extract data from Altera MAX V CPLD SVF
import sys
# Parse the programming section of an Altera MAX V SVF file into two
# lists of 16-bit words (block_0 and block_1) for embedding in firmware.
if len(sys.argv) != 3:
    print('Usage: <command> <Altera MAX V CPLD SVF file path> <revision name>')
    sys.exit(-1)
revision_name = sys.argv[2]
calculate_crc = False
# Shape of the section being parsed:
# !PROGRAM
# SIR 10 TDI (203);
# RUNTEST 93 TCK;
# SDR 13 TDI (0000);
# SIR 10 TDI (2F4);
# RUNTEST 93 TCK;
# while:
# SDR 16 TDI (7FFF);
# RUNTEST 1800 TCK;
# SIR 10 TDI (203);
# RUNTEST 93 TCK;
# SDR 13 TDI (0001);
# SIR 10 TDI (2F4);
# RUNTEST 93 TCK;
phase = None
block_0 = []
block_1 = []
current_block = None
# Use a context manager so the SVF file is always closed (previously the
# handle stayed open for the life of the process).
with open(sys.argv[1], 'r') as f:
    for line in f:
        line = line.strip().upper()
        if line == '!PROGRAM':
            phase = 'block_0'
            current_block = block_0
        elif line == '!VERIFY':
            # Verification section: stop collecting data words.
            phase = 'verify'
            current_block = None
        if phase == 'block_0':
            # The second address record switches collection to block_1.
            if line == 'SDR 13 TDI (0001);':
                phase = 'block_1'
                current_block = block_1
        if phase == 'block_0' or phase == 'block_1':
            if line.startswith('SDR 16 TDI ('):
                # Each data record carries one word as four hex digits.
                sdr_value = int(line.split('(', 1)[1][:4], 16)
                current_block.append(sdr_value)
def print_block(block):
    """Print *block* as rows of C-style hex literals, eight words per row."""
    for start in range(0, len(block), 8):
        row = block[start:start + 8]
        print('\t%s' % ' '.join('0x%04x,' % word for word in row))
def crc32(blocks):
    """Return the CRC-32 over every word in *blocks*, little-endian bytes."""
    import zlib
    payload = bytearray()
    for words in blocks:
        for word in words:
            payload.append(word & 0xff)         # low byte first
            payload.append((word >> 8) & 0xff)  # then high byte
    return zlib.crc32(payload) & 0xffffffff
# Emit the parsed blocks as a C++ translation unit for the firmware build.
print("""#include "portapack_cpld_data.hpp"
#include <cstdint>
#include <array>
namespace portapack {
namespace cpld {
namespace %s {
""" % revision_name)
print('const std::array<uint16_t, %d> block_0 { {' % len(block_0))
print_block(block_0)
print("""} };
""")
print('const std::array<uint16_t, %d> block_1 { {' % len(block_1))
print_block(block_1)
print("""} };
} /* namespace %s */
} /* namespace cpld */
} /* namespace portapack */
""" % revision_name)
if calculate_crc:
    # Apply post-programming modification to make post-programming CRC correct:
    # (clears bit 10 of the first word — presumably flipped by programming;
    # TODO confirm against the device behaviour)
    programmed_block_0 = block_0[:]
    programmed_block_0[0] &= 0xfbff
    crc = crc32((programmed_block_0, block_1))
    print('%08x' % crc)
|
PetePriority/home-assistant | refs/heads/dev | homeassistant/components/homeworks/__init__.py | 2 | """Component for interfacing to Lutron Homeworks Series 4 and 8 systems.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/homeworks/
"""
import logging
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.const import (
CONF_HOST, CONF_ID, CONF_NAME, CONF_PORT, EVENT_HOMEASSISTANT_STOP)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.helpers.dispatcher import (
dispatcher_send, async_dispatcher_connect)
from homeassistant.util import slugify
# Distribution requirement installed by Home Assistant at setup time.
REQUIREMENTS = ['pyhomeworks==0.0.6']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'homeworks'
HOMEWORKS_CONTROLLER = 'homeworks'
# Per-device dispatcher signal; formatted with the Homeworks address.
ENTITY_SIGNAL = 'homeworks_entity_{}'
EVENT_BUTTON_PRESS = 'homeworks_button_press'
EVENT_BUTTON_RELEASE = 'homeworks_button_release'
CONF_DIMMERS = 'dimmers'
CONF_KEYPADS = 'keypads'
CONF_ADDR = 'addr'
CONF_RATE = 'rate'
# Default dimmer fade rate; user-provided values are clamped to [0, 20].
FADE_RATE = 1.
CV_FADE_RATE = vol.All(vol.Coerce(float), vol.Range(min=0, max=20))
DIMMER_SCHEMA = vol.Schema({
    vol.Required(CONF_ADDR): cv.string,
    vol.Required(CONF_NAME): cv.string,
    vol.Optional(CONF_RATE, default=FADE_RATE): CV_FADE_RATE
})
KEYPAD_SCHEMA = vol.Schema({
    vol.Required(CONF_ADDR): cv.string,
    vol.Required(CONF_NAME): cv.string,
})
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_HOST): cv.string,
        vol.Required(CONF_PORT): cv.port,
        vol.Required(CONF_DIMMERS): vol.All(cv.ensure_list, [DIMMER_SCHEMA]),
        vol.Optional(CONF_KEYPADS, default=[]): vol.All(cv.ensure_list,
                                                        [KEYPAD_SCHEMA]),
    }),
}, extra=vol.ALLOW_EXTRA)
def setup(hass, base_config):
    """Start Homeworks controller."""
    from pyhomeworks.pyhomeworks import Homeworks
    def hw_callback(msg_type, values):
        """Dispatch state changes."""
        _LOGGER.debug('callback: %s, %s', msg_type, values)
        # The first element of every Homeworks message is the device address.
        addr = values[0]
        signal = ENTITY_SIGNAL.format(addr)
        dispatcher_send(hass, signal, msg_type, values)
    config = base_config.get(DOMAIN)
    # The controller owns the connection and invokes hw_callback for every
    # incoming message.
    controller = Homeworks(config[CONF_HOST], config[CONF_PORT], hw_callback)
    hass.data[HOMEWORKS_CONTROLLER] = controller
    def cleanup(event):
        """Close the controller connection when Home Assistant stops."""
        controller.close()
    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup)
    dimmers = config[CONF_DIMMERS]
    load_platform(hass, 'light', DOMAIN, {CONF_DIMMERS: dimmers}, base_config)
    # Keypads are surfaced as bus events rather than entities.
    for key_config in config[CONF_KEYPADS]:
        addr = key_config[CONF_ADDR]
        name = key_config[CONF_NAME]
        HomeworksKeypadEvent(hass, addr, name)
    return True
class HomeworksDevice():
    """Common behaviour shared by Homeworks entities."""

    def __init__(self, controller, addr, name):
        """Remember the controller plus this device's address and name."""
        self._controller = controller
        self._addr = addr
        self._name = name

    @property
    def should_poll(self):
        """No polling required."""
        return False

    @property
    def name(self):
        """Return the configured device name."""
        return self._name

    @property
    def unique_id(self):
        """Return a unique identifier derived from the device address."""
        return 'homeworks.{}'.format(self._addr)
class HomeworksKeypadEvent:
    """When you want signals instead of entities.

    Stateless sensors such as keypads are expected to generate an event
    instead of a sensor entity in hass.
    """

    def __init__(self, hass, addr, name):
        """Register callback that will be used for signals."""
        self._hass = hass
        self._addr = addr
        self._name = name
        self._id = slugify(name)
        async_dispatcher_connect(
            self._hass, ENTITY_SIGNAL.format(self._addr),
            self._update_callback)

    @callback
    def _update_callback(self, msg_type, values):
        """Fire events if button is pressed or released."""
        from pyhomeworks.pyhomeworks import (
            HW_BUTTON_PRESSED, HW_BUTTON_RELEASED)
        # Map controller messages to HA bus events; anything else is ignored.
        event = {HW_BUTTON_PRESSED: EVENT_BUTTON_PRESS,
                 HW_BUTTON_RELEASED: EVENT_BUTTON_RELEASE}.get(msg_type)
        if event is None:
            return
        self._hass.bus.async_fire(
            event,
            {CONF_ID: self._id, CONF_NAME: self._name, 'button': values[1]})
|
jonroberts/nasaMining | refs/heads/master | states_data.py | 1 | from __future__ import unicode_literals
import requests
import json
# One entry per US state; each name is slugified below to probe the state's
# open-data portal for a data.json catalog.
states = [
    'Alabama', 'Alaska', 'Arizona', 'Arkansas', 'California', 'Colorado', 'Connecticut', 'Delaware', 'Florida',
    'Georgia', 'Hawaii', 'Idaho', 'Illinois', 'Indiana', 'Iowa', 'Kansas', 'Kentucky', 'Louisiana', 'Maine', 'Maryland',
    'Massachusetts', 'Michigan', 'Minnesota', 'Mississippi', 'Missouri', 'Montana', 'Nebraska', 'Nevada', 'New Hampshire',
    'New Jersey', 'New Mexico', 'New York', 'North Carolina', 'North Dakota', 'Ohio', 'Oklahoma', 'Oregon', 'Pennsylvania',
    'Rhode Island', 'South Carolina', 'South Dakota', 'Tennessee', 'Texas', 'Utah', 'Vermont', 'Virginia', 'Washington',
    'West Virginia', 'Wisconsin', 'Wyoming'
]
# NOTE(review): this list is never read by the code below — possibly leftover.
abbr = ['ca', 'wv']
def _fetch(slug, url):
    """Download *url* and, on success, save the JSON to data/<slug>.json.

    Prints the URL first, then either a "'slug': 'url'," line on success
    (handy for pasting into a dict literal) or an indented error marker.
    """
    print(url)
    try:
        res = requests.get(url)
        data = res.json()
        print("'%s': '%s'," % (slug, url))
        with open('data/%s.json' % slug, 'w') as f:
            json.dump(data, f)
    except Exception:
        # Best effort: any failure (connection, bad JSON, filesystem) is
        # reported and the scan simply moves on to the next URL.
        print('\tError')


if __name__ == '__main__':
    for state in states:
        slug = state.lower().replace(' ', '')
        # Probe both the data.<state>.gov and www.<state>.gov portals.
        _fetch(slug, 'http://data.%s.gov/data.json' % slug)
        _fetch(slug, 'http://www.%s.gov/data.json' % slug)
        if ' ' in state:
            # Multi-word states also get their initials, e.g. 'nh'.
            initials = ''.join(word[0] for word in state.split()).lower()
            _fetch(initials, 'http://data.%s.gov/data.json' % initials)
nino-c/plerp.org | refs/heads/master | src/mainsite/wsgi.py | 1 | """
WSGI config for mainsite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os

# DJANGO_SETTINGS_MODULE must be a valid dotted module path; the previous
# value contained a leading space (" mainsite.settings.production") which
# broke settings import whenever the variable was not already set.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mainsite.settings.production")

from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Wrap werkzeug debugger if DEBUG is on
from django.conf import settings
if settings.DEBUG:
    try:
        import django.views.debug
        import six
        from werkzeug.debug import DebuggedApplication

        def null_technical_500_response(request, exc_type, exc_value, tb):
            """Re-raise the original exception so werkzeug can render it."""
            six.reraise(exc_type, exc_value, tb)

        django.views.debug.technical_500_response = null_technical_500_response
        application = DebuggedApplication(application, evalex=True)
    except ImportError:
        # werkzeug/six not installed: fall back to the plain application.
        pass
|
imrandomizer/Algorithm-Implementations | refs/heads/master | Bloom_Filter/Python/argvk/BloomFilter.py | 20 | """
Bloomfilters are a memory efficient way of checking if an element is part of a set or not
here is a pretty decent tutorial http://billmill.org/bloomfilter-tutorial/
"""
from bitarray import bitarray
import mmh3
class BloomFilter:
    """Memory-efficient probabilistic set-membership filter.

    `check` may return false positives but never false negatives.
    """

    def __init__(self, hash_size=1000, num_hashes=None):
        """Create a filter with *hash_size* bits and *num_hashes* hashes.

        Defaults preserve the historical behaviour (one hash per bit).
        NOTE(review): hashing every word `hash_size` times saturates the
        bit array almost immediately; callers wanting a useful filter
        should pass a small *num_hashes* (e.g. 5-10).
        """
        self.HASH_SIZE = hash_size
        self.num_hashes = hash_size if num_hashes is None else num_hashes
        self.bits = bitarray(self.HASH_SIZE)
        self.bits.setall(0)

    def add(self, word):
        """Set the bit selected by each seeded hash of *word*."""
        for seed in range(self.num_hashes):
            bit_value = mmh3.hash(word, seed) % self.HASH_SIZE
            self.bits[bit_value] = 1

    def check(self, word):
        """Return False if *word* is definitely absent, True if probably present."""
        for seed in range(self.num_hashes):
            bit_value = mmh3.hash(word, seed) % self.HASH_SIZE
            if self.bits[bit_value] == 0:
                return False
        return True
|
tellesnobrega/sahara | refs/heads/master | sahara/service/coordinator.py | 2 | # Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bisect
import hashlib
from oslo_config import cfg
from oslo_log import log
from oslo_utils import uuidutils
from tooz import coordination
from sahara.i18n import _LE
from sahara.i18n import _LI
LOG = log.getLogger(__name__)
# Tunables for the coordination layer; registered on the global CONF below.
coordinator_opts = [
    cfg.IntOpt('coordinator_heartbeat_interval',
               default=1,
               help='Interval size between heartbeat execution in seconds. '
                    'Heartbeats are executed to make sure that connection to '
                    'the coordination server is active.'),
    cfg.IntOpt('hash_ring_replicas_count',
               default=40,
               help='Number of points that belongs to each member on a hash '
                    'ring. The larger number leads to a better distribution.')
]
CONF = cfg.CONF
CONF.register_opts(coordinator_opts)
class Coordinator(object):
    """Thin wrapper over a tooz coordination backend.

    When no backend URL is configured every method degrades to a no-op,
    so callers need not special-case single-process deployments.
    """
    def __init__(self, backend_url):
        """Connect to and start the coordination backend, if configured."""
        self.coordinator = None
        self.member_id = uuidutils.generate_uuid()
        if backend_url:
            try:
                self.coordinator = coordination.get_coordinator(
                    backend_url, self.member_id)
                self.coordinator.start()
                LOG.info(_LI('Coordination backend loaded successfully.'))
            except coordination.ToozError:
                LOG.error(_LE('Error connecting to coordination backend.'))
                raise
    def is_started(self):
        """Return True when a backend is configured and running."""
        if self.coordinator:
            return self.coordinator.is_started
        return False
    def heartbeat(self):
        """Tell the backend this member is still alive."""
        if self.coordinator:
            self.coordinator.heartbeat()
    def join_group(self, group_id):
        """Join *group_id*, creating the group first when it does not exist."""
        if self.coordinator:
            try:
                self.coordinator.join_group(group_id).get()
            except coordination.GroupNotCreated:
                try:
                    self.coordinator.create_group(group_id).get()
                except coordination.GroupAlreadyExist:
                    # Another member created it concurrently; just join.
                    pass
                self.coordinator.join_group(group_id).get()
    def get_members(self, group_id):
        """Return the group's members, re-joining the group when needed.

        NOTE(review): with no backend configured this implicitly returns
        None rather than a list — confirm callers handle that.
        """
        if self.coordinator:
            # Two passes: the first may find the group (or this member)
            # missing and repair it; the second re-reads the membership.
            for i in range(2):
                try:
                    members = self.coordinator.get_members(group_id).get()
                    if self.member_id in members:
                        return members
                    self.join_group(group_id)
                except coordination.GroupNotCreated:
                    self.join_group(group_id)
                except coordination.ToozError as e:
                    LOG.error(_LE("Couldn't get members of {group} group. "
                                  "Reason: {ex}").format(
                        group=group_id, ex=str(e)))
            return []
class HashRing(Coordinator):
    """Consistent-hash ring distributing objects across group members.

    Each member contributes ``replicas`` virtual points on the ring so
    objects spread evenly and only a small share moves when membership
    changes.
    """

    def __init__(self, backend_url, group_id):
        """Join *group_id* on the coordination backend."""
        self.group_id = group_id
        self.replicas = CONF.hash_ring_replicas_count
        super(HashRing, self).__init__(backend_url)
        self.join_group(group_id)

    @staticmethod
    def _hash(key):
        """Map *key* onto the ring as a large integer.

        md5 is used for its distribution, not for security (hence nosec).
        The explicit UTF-8 encode is required on Python 3, where hashlib
        only accepts bytes; on Python 2 it is a no-op for ASCII keys.
        """
        return int(
            hashlib.md5(str(key).encode('utf-8')).hexdigest(), 16)  # nosec

    def _build_ring(self):
        """Return (ring dict, sorted hash keys) for the current members."""
        ring = {}
        members = self.get_members(self.group_id)
        for member in members:
            # Multiple virtual points per member smooth the distribution.
            for r in range(self.replicas):
                hashed_key = self._hash('%s:%s' % (member, r))
                ring[hashed_key] = member
        return ring, sorted(ring.keys())

    def _check_object(self, obj, ring, sorted_keys):
        """Checks if this object belongs to this member or not"""
        hashed_key = self._hash(obj.id)
        position = bisect.bisect(sorted_keys, hashed_key)
        # Wrap around: keys past the last point belong to the first one.
        position = position if position < len(sorted_keys) else 0
        return ring[sorted_keys[position]] == self.member_id

    def get_subset(self, objects):
        """Returns subset that belongs to this member"""
        if self.coordinator:
            ring, keys = self._build_ring()
            if ring:
                return [obj for obj in objects if self._check_object(
                    obj, ring, keys)]
            return []
        # Without a coordination backend every member handles everything.
        return objects
|
Darknet-Crypto/Darknet | refs/heads/master | test/functional/wallet_hd.py | 1 | #!/usr/bin/env python3
# Copyright (c) 2016-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
import os
import shutil
from test_framework.test_framework import PivxTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
assert_raises_rpc_error
)
class WalletHDTest(PivxTestFramework):
    """Exercise HD derivation paths, backup/restore, rescans and sethdseed."""
    def set_test_params(self):
        """Two nodes on a clean chain; node 1 runs with an empty keypool."""
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.extra_args = [[], ['-keypool=0']]
        self.supports_cli = False
    def skip_test_if_missing_module(self):
        """Require wallet support to be compiled in."""
        self.skip_if_no_wallet()
    def run_test(self):
        """Run the HD wallet scenario end to end."""
        # Make sure we use hd
        if '-legacywallet' in self.nodes[0].extra_args:
            self.log.info("Exiting HD test for non-HD wallets")
            return
        # keep masterkeyid
        masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
        assert_equal(len(masterkeyid), 40)
        # create an internal key
        change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV= self.nodes[1].getaddressinfo(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/44'/119'/0'/1'/0'") #first internal child key
        # Import a non-HD private key in the HD wallet
        non_hd_add = self.nodes[0].getnewaddress()
        self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
        # This should be enough to keep the master key and the non-HD key
        self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak"))
        #self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump"))
        # Derive some HD addresses and remember the last
        # Also send funds to each add
        self.nodes[0].generate(101)
        hd_add = None
        NUM_HD_ADDS = 10
        for i in range(NUM_HD_ADDS):
            hd_add = self.nodes[1].getnewaddress()
            hd_info = self.nodes[1].getaddressinfo(hd_add)
            assert_equal(hd_info["hdkeypath"], "m/44'/119'/0'/0'/"+str(i)+"'")
            assert_equal(hd_info["hdseedid"], masterkeyid)
            self.nodes[0].sendtoaddress(hd_add, 1)
            self.nodes[0].generate(1)
        self.nodes[0].sendtoaddress(non_hd_add, 1)
        self.nodes[0].generate(1)
        # create an internal key (again)
        change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV= self.nodes[1].getaddressinfo(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/44'/119'/0'/1'/1'") #second internal child key
        self.sync_all()
        assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
        self.log.info("Restore backup ...")
        self.stop_node(1)
        # we need to delete the complete regtest directory
        # otherwise node1 would auto-recover all funds and flag the keypool keys as used
        shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks"))
        shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate"))
        shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallet.dat"))
        self.start_node(1)
        # Assert that derivation is deterministic
        hd_add_2 = None
        for i in range(NUM_HD_ADDS):
            hd_add_2 = self.nodes[1].getnewaddress()
            hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
            assert_equal(hd_info_2["hdkeypath"], "m/44'/119'/0'/0'/"+str(i)+"'")
            assert_equal(hd_info_2["hdseedid"], masterkeyid)
        assert_equal(hd_add, hd_add_2)
        connect_nodes(self.nodes[0], 1)
        self.sync_all()
        # Needs rescan
        self.stop_node(1)
        self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
        assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
        # Try a RPC based rescan
        self.stop_node(1)
        shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks"))
        shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate"))
        shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallet.dat"))
        self.start_node(1, extra_args=self.extra_args[1])
        connect_nodes(self.nodes[0], 1)
        self.sync_all()
        # Wallet automatically scans blocks older than key on startup
        assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
        """ todo: Implement rescanblockchain
        out = self.nodes[1].rescanblockchain(0, 1)
        assert_equal(out['start_height'], 0)
        assert_equal(out['stop_height'], 1)
        out = self.nodes[1].rescanblockchain()
        assert_equal(out['start_height'], 0)
        assert_equal(out['stop_height'], self.nodes[1].getblockcount())
        assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
        """
        # send a tx and make sure its using the internal chain for the changeoutput
        txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
        keypath = ""
        for out in outs:
            if out['value'] != 1:
                keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['addresses'][0])['hdkeypath']
        assert_equal(keypath[0:16], "m/44'/119'/0'/1'")
        # Generate a new HD seed on node 1 and make sure it is set
        orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
        self.nodes[1].sethdseed()
        new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
        assert orig_masterkeyid != new_masterkeyid
        addr = self.nodes[1].getnewaddress()
        assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/44\'/119\'/0\'/0\'/0\'') # Make sure the new address is the first from the keypool
        self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
        # Set a new HD seed on node 1 without flushing the keypool
        new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
        orig_masterkeyid = new_masterkeyid
        self.nodes[1].sethdseed(False, new_seed)
        new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
        assert orig_masterkeyid != new_masterkeyid
        addr = self.nodes[1].getnewaddress()
        assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
        assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/44\'/119\'/0\'/0\'/1\'') # Make sure the new address continues previous keypool
        # Check that the next address is from the new seed
        self.nodes[1].keypoolrefill(1)
        next_addr = self.nodes[1].getnewaddress()
        assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
        assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/44\'/119\'/0\'/0\'/0\'') # Make sure the new address is not from previous keypool
        assert next_addr != addr
        # Sethdseed parameter validity
        assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
        assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
        assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool")
        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True)
        assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
        assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
if __name__ == '__main__':
    # PEP 8 (E211): no space between the call and its parentheses.
    WalletHDTest().main()
|
Jannes123/inasafe | refs/heads/develop | safe/impact_functions/generic/classified_polygon_population/parameter_definitions.py | 6 | # coding=utf-8
"""InaSAFE Disaster risk tool by Australian Aid - Parameter definition for
Volcano Polygon on Population Impact Function.
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from safe_extras.parameters.boolean_parameter import BooleanParameter
from safe_extras.parameters.string_parameter import StringParameter
def target_field():
    """Generator for the flooded target field parameter."""
    param = StringParameter()
    param.name = 'Target Field'
    param.is_required = True
    param.help_text = (
        'This field of impact layer marks inundated roads by \'1\' value')
    param.description = (
        'This field of impact layer marks inundated roads by \'1\' value. '
        'This is the longer description of this parameter.')
    # NOTE(review): the help text says "roads" but this module targets
    # population — presumably copied from the flood-roads IF; confirm.
    param.value = 'INUNDATED'  # default value
    return param
def affected_field():
    """Generator for selection of affected field parameter."""
    field = StringParameter()
    field.name = 'Affected Field'
    field.is_required = True
    field.help_text = (
        'This field of the hazard layer contains information about inundated '
        'areas')
    field.description = (
        'This field of the hazard layer contains information about inundated '
        'areas. This is the longer description of this parameter.')
    field.value = 'affected'  # default value
    return field
def affected_value():
    """Generator for parameter stating what values constitute 'affected'."""
    param = StringParameter()
    param.name = 'Affected Value'
    param.is_required = True
    param.help_text = (
        'This value in \'affected_field\' of the hazard layer marks the areas '
        'as inundated')
    param.description = (
        'This value in \'affected_field\' of the hazard layer marks the areas '
        'as inundated. This is the longer description of this parameter.')
    param.value = '1'  # default value
    return param
def building_type_field():
    """Generator for the building-type boolean parameter (defaults to True)."""
    field = BooleanParameter()
    field.name = 'Building Type Field'
    field.is_required = True
    field.value = True
    return field
|
fingeronthebutton/robotframework | refs/heads/master | atest/testdata/test_libraries/MyLibDir/SubPackage/ClassLib.py | 37 | class ClassLib(object):
def keyword_in_mylibdir_subpackage_classlib(self):
pass
|
NBor/SkyPython | refs/heads/master | src/touch/MapMover.py | 1 | '''
// Copyright 2010 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Original Author: John Taylor
//
// Notification of Change: The original java source code has been
// modified in that it has been rewritten in the python programming
// language and additionally, may contain components and ideas that are
// not found in the original source code.
Copyright 2013 Neil Borle and Paul Lu
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Created on 2013-08-19
@author: Neil Borle
'''
from src.utils.Geometry import radians_to_degrees
class MapMover(object):
    """Updates the model in response to drag, rotate and zoom gestures."""

    def __init__(self, model, controller_group, shared_prefs, screen_height):
        """Store collaborators and precompute the pixel-scale factor."""
        self.model = model
        self.control_group = controller_group
        self.shared_prefs = shared_prefs
        self.size_times_rads_to_degs = radians_to_degrees(screen_height)
        self.allow_rotation = shared_prefs.ALLOW_ROTATION

    def on_drag(self, x_pixels, y_pixels):
        """Pan the view by the dragged pixel distances; always handled."""
        scale = self.model.field_of_view / self.size_times_rads_to_degs
        self.control_group.change_up_down(-y_pixels * scale)
        self.control_group.change_right_left(-x_pixels * scale)
        return True

    def on_rotate(self, degrees):
        """Rotate the view unless rotation is disabled in preferences."""
        if not self.allow_rotation:
            return False
        self.control_group.rotate(-degrees)
        return True

    def on_stretch(self, ratio):
        """Zoom by the inverse of the pinch ratio; always handled."""
        self.control_group.zoom_by(1.0 / ratio)
        return True

    def on_shared_preference_change(self, prefs):
        """Refresh the cached rotation preference."""
        self.allow_rotation = prefs.ALLOW_ROTATION
|
CARocha/ciat_plataforma | refs/heads/master | analisis/analisis/utils.py | 3 | # -*- coding: UTF-8 -*-
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.http import HttpResponse
import json as simplejson
import os
import re
# Matches runs of characters that are not alphanumeric, '.' or '_'.
p = re.compile(r'[^0-9a-zA-Z\._]+')


# Replaces special (accented) characters in a string: known accented
# letters are transliterated, anything else becomes an underscore.
def repl(match):
    """Return the transliteration of the matched run of characters."""
    translations = {u'á': u'a', u'Á': u'A', u'é': u'e', u'É': u'E',
                    u'í': u'i', u'Í': u'I', u'ó': u'o', u'Ó': 'O',
                    u'ú': u'u', u'Ú': 'U', u'ñ': u'n', u'ü': u'u'}
    return u''.join(translations.get(char, u'_') for char in match.group())
def get_file_path(instance, filename):
    """Build the upload path for *filename* under ``instance.fileDir``.

    The stem (the part just before the last dot) is sanitized through
    ``p``/``repl``; the final dot-separated part is kept as extension.
    NOTE(review): names with several dots keep only the second-to-last
    segment, and names with no dot raise IndexError — confirm intended.
    """
    parts = filename.split('.')
    cleaned = p.sub(repl, parts[-2])
    return os.path.join(instance.fileDir, '%s.%s' % (cleaned, parts[-1]))
def get_image_path(instance, filename):
    """Build the upload path for *filename* under ``instance.imgDir``.

    Same sanitation rules as get_file_path, but rooted at ``imgDir``.
    """
    parts = filename.split('.')
    cleaned = p.sub(repl, parts[-2])
    return os.path.join(instance.imgDir, '%s.%s' % (cleaned, parts[-1]))
def save_as_xls(request):
    """Return the POSTed 'tabla' HTML rendered as an Excel attachment.

    The response carries Content-Disposition/Content-Type headers so the
    browser downloads the rendered template as ``tabla.xls``.
    """
    tabla = request.POST['tabla']
    response = render_to_response('analisis/xls.html', {'tabla': tabla, })
    response['Content-Disposition'] = 'attachment; filename=tabla.xls'
    response['Content-Type'] = 'application/vnd.ms-excel'
    response['Charset'] ='UTF-8'
    return response
|
ovresko/erpnext | refs/heads/master | erpnext/patches/v11_0/add_index_on_nestedset_doctypes.py | 12 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
    """Reload NestedSet doctypes and re-run their on_doctype_update hooks."""
    frappe.reload_doc("assets", "doctype", "Location")
    nestedset_doctypes = ("Account", "Cost Center", "File", "Employee",
                          "Location", "Task", "Customer Group", "Sales Person",
                          "Territory")
    for doctype in nestedset_doctypes:
        frappe.reload_doctype(doctype)
        doc = frappe.get_doc("DocType", doctype)
        doc.run_module_method("on_doctype_update")
Hearen/OnceServer | refs/heads/master | pool_management/bn-xend-core/util/xsm/__init__.py | 45382 | |
JamesTFarrington/flask | refs/heads/master | scripts/test_import_migration.py | 141 | # Tester for the flaskext_migrate.py module located in flask/scripts/
#
# Author: Keyan Pishdadian
import pytest
from redbaron import RedBaron
import flaskext_migrate as migrate
def test_simple_from_import():
    # 'from flask.ext import X' becomes a plain aliased import.
    source = RedBaron("from flask.ext import foo")
    assert migrate.fix_tester(source) == "import flask_foo as foo"
def test_from_to_from_import():
    # Sub-imports from an extension keep the from-import form.
    source = RedBaron("from flask.ext.foo import bar")
    assert migrate.fix_tester(source) == "from flask_foo import bar as bar"
def test_multiple_import():
    # Several names imported at once survive the rewrite unchanged.
    source = RedBaron("from flask.ext.foo import bar, foobar, something")
    assert migrate.fix_tester(source) == "from flask_foo import bar, foobar, something"
def test_multiline_import():
    red = RedBaron("from flask.ext.foo import \
    bar,\
    foobar,\
    something")
    # NOTE: the trailing backslashes splice the following lines (including
    # their leading whitespace) into one source string handed to RedBaron.
    output = migrate.fix_tester(red)
    assert output == "from flask_foo import bar, foobar, something"
def test_module_import():
    # Bare module import is flattened to the underscore package name.
    source = RedBaron("import flask.ext.foo")
    assert migrate.fix_tester(source) == "import flask_foo"
def test_named_module_import():
    # An explicit alias is preserved across the rewrite.
    source = RedBaron("import flask.ext.foo as foobar")
    assert migrate.fix_tester(source) == "import flask_foo as foobar"
def test_named_from_import():
    # Aliased from-imports keep their alias.
    source = RedBaron("from flask.ext.foo import bar as baz")
    assert migrate.fix_tester(source) == "from flask_foo import bar as baz"
def test_parens_import():
    # Parenthesized import lists are preserved verbatim.
    source = RedBaron("from flask.ext.foo import (bar, foo, foobar)")
    assert migrate.fix_tester(source) == "from flask_foo import (bar, foo, foobar)"
def test_function_call_migration():
    # Call sites referencing flask.ext are rewritten too.
    source = RedBaron("flask.ext.foo(var)")
    assert migrate.fix_tester(source) == "flask_foo(var)"
def test_nested_function_call_migration():
    # Attribute access through the imported module is rewritten as well.
    source = RedBaron("import flask.ext.foo\n\n"
                      "flask.ext.foo.bar(var)")
    assert migrate.fix_tester(source) == ("import flask_foo\n\n"
                                          "flask_foo.bar(var)")
def test_no_change_to_import():
    # Plain flask imports must pass through untouched.
    source = RedBaron("from flask import Flask")
    assert migrate.fix_tester(source) == "from flask import Flask"
|
xpac1985/ansible | refs/heads/devel | test/units/errors/__init__.py | 267 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
|
chanceraine/nupic | refs/heads/master | examples/opf/experiments/anomaly/spatial/10field_many_balanced/description.py | 96 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'~/nupic/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment
#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
# Model Configuration Dictionary.
#
# Fields here may be overridden by a sub-experiment (via
# updateConfigFromSubConfig below) and may contain deferred value-getters
# which are resolved later by applyValueGettersToContainer.
config = {
    # Type of model that the rest of these parameters apply to.
    'model': "CLA",

    # Version that specifies the format of the config.
    'version': 1,

    # Intermediate variables used to compute fields in modelParams and also
    # referenced from the control section.
    'aggregationInfo': {
        'fields': [('numericFieldNameA', 'mean'),
                   ('numericFieldNameB', 'sum'),
                   ('categoryFieldNameC', 'first')],
        'hours': 0,
    },
    'predictAheadTime': None,

    # Model parameter dictionary.
    'modelParams': {
        # The type of inference that this model will perform.
        'inferenceType': 'NontemporalAnomaly',

        'sensorParams': {
            # Sensor region verbosity: 0 silent, higher values print more.
            'verbosity': 0,

            # Ten identical SDR-category encoders named f0..f9
            # (value generated from DS_ENCODER_SCHEMA).
            'encoders': dict(
                ('f%d' % i,
                 dict(fieldname='f%d' % i, n=100, name='f%d' % i,
                      type='SDRCategoryEncoder', w=21))
                for i in range(10)),

            # Period for automatically-generated resets from a RecordSensor;
            # None disables automatic resets.
            'sensorAutoReset': None,
        },

        'spEnable': True,
        'spParams': {
            # SP verbosity: 0 silent, higher values print more.
            'spVerbosity': 0,
            'globalInhibition': 1,
            # Number of cell columns in the cortical region (shared by SP
            # and TP; see also tpNCellsPerCol).
            'columnCount': 2048,
            'inputWidth': 0,
            # Maximum number of active columns in the SP output; weaker
            # columns beyond this count are suppressed.
            'numActiveColumnsPerInhArea': 40,
            'seed': 1956,
            # Fraction of the column's receptive field available for
            # potential synapses.
            'potentialPct': 0.5,
            # Default connected-synapse permanence threshold.
            'synPermConnected': 0.1,
            'synPermActiveInc': 0.1,
            'synPermInactiveDec': 0.01,
        },

        # TP is required for temporal predictions; without it the model can
        # only reconstruct missing sensor inputs via the SP.
        'tpEnable': True,
        'tpParams': {
            # TP verbosity: 0 silent, 1..6 increasing detail.
            'verbosity': 0,
            # Same column count as the SP (see spParams above).
            'columnCount': 2048,
            # Number of cells (states) allocated per column.
            'cellsPerColumn': 32,
            'inputWidth': 2048,
            'seed': 1960,
            # Temporal Pooler implementation selector (see _getTPClass).
            'temporalImp': 'cpp',
            # New synapse formation count (None = use spNumActivePerInhArea).
            'newSynapseCount': 20,
            # Max synapses per segment (> 0 fixed-size CLA, -1 otherwise).
            'maxSynapsesPerSegment': 32,
            # Max segments per cell (> 0 fixed-size CLA, -1 otherwise).
            'maxSegmentsPerCell': 128,
            'initialPerm': 0.21,
            'permanenceInc': 0.1,
            # If None, defaults to the permanenceInc value.
            'permanenceDec': 0.1,
            'globalDecay': 0.0,
            'maxAge': 0,
            # Minimum active synapses for a segment to be considered during
            # the best-matching-segment search (replaces tpMinThreshold).
            'minThreshold': 12,
            # Segment activation threshold (replaces tpActivationThreshold).
            'activationThreshold': 16,
            'outputType': 'normal',
            # "Pay Attention Mode" length: how many new elements the TP
            # appends to a learned sequence at a time.
            'pamLength': 1,
        },

        'clParams': {
            # Classifier implementation selection.
            'implementation': 'cpp',
            'regionName': 'CLAClassifierRegion',
            # Classifier verbosity: 0 silent, 1..6 increasing detail.
            'clVerbosity': 0,
            # Learning/forgetting rate; higher adapts and forgets faster.
            'alpha': 0.001,
            # Recomputed after updateConfigFromSubConfig from
            # aggregationInfo and predictAheadTime.
            'steps': '1',
        },

        'trainSPNetOnlyIfRequested': False,
    },
}
# end of config dictionary

# Adjust base config dictionary for any modifications if imported from a
# sub-experiment.
updateConfigFromSubConfig(config)

# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
    predictionSteps = int(round(aggregationDivide(
        config['predictAheadTime'], config['aggregationInfo'])))
    # The classifier needs at least one prediction step.
    assert (predictionSteps >= 1)
    config['modelParams']['clParams']['steps'] = str(predictionSteps)

# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any).
applyValueGettersToContainer(config)
# [optional] A sequence of one or more tasks that describe what to do with the
# model. Each task consists of a task label, an input spec., iteration count,
# and a task-control spec per opfTaskSchema.json
#
# NOTE: The tasks are intended for OPF clients that make use of OPFTaskDriver.
# Clients that interact with OPFExperiment directly do not make use of
# the tasks specification.
#
# FIX: `os` is used below (os.path.join / os.path.dirname) but was never
# imported anywhere in this file, so importing the module raised NameError.
import os

# [optional] A sequence of one or more tasks that describe what to do with the
# model. Each task consists of a task label, an input spec., iteration count,
# and a task-control spec per opfTaskSchema.json.
#
# NOTE: The tasks are intended for OPF clients that make use of OPFTaskDriver.
# Clients that interact with OPFExperiment directly do not make use of
# the tasks specification.
control = dict(
    environment='opfExperiment',
    tasks=[
        {
            # Task label; may be used for diagnostic logging and for
            # constructing task-specific file/directory names.
            'taskLabel': "Anomaly",

            # Input stream specification per
            # py/nupic/cluster/database/StreamDef.json.
            'dataset': {
                'info': 'test_NoProviders',
                'version': 1,
                'streams': [
                    {
                        'columns': ['*'],
                        'info': 'my simple dataset',
                        # data.csv is expected to live next to this file.
                        'source': 'file://' + os.path.join(
                            os.path.dirname(__file__), 'data.csv'),
                    }
                ],
                # TODO: Aggregation is not supported yet by
                # run_opf_experiment.py
                # 'aggregation' : config['aggregationInfo']
            },

            # Maximum number of iterations; each iteration corresponds to one
            # record from the (possibly aggregated) dataset. -1 = iterate
            # over the entire dataset.
            'iterationCount': -1,

            # Task Control parameters for OPFTaskDriver
            # (per opfTaskControlSchema.json).
            'taskControl': {
                # Iteration cycle list of opftaskdriver.IterationPhaseSpecXXX
                # instances.
                'iterationCycle': [
                    # IterationPhaseSpecLearnOnly(1000),
                    IterationPhaseSpecLearnAndInfer(1000, inferenceArgs=None),
                    # IterationPhaseSpecInferOnly(10, inferenceArgs=None),
                ],
                'metrics': [
                ],
                # Regexes selecting which metric labels MUST be logged for
                # every prediction (cf. optimization metric in
                # permutations.py).
                'loggedMetrics': ['.*nupicScore.*'],
                # Callbacks for experimentation/research (optional). Each
                # callback has signature callback(<OPFExperiment>) -> None.
                'callbacks': {
                    # Called at the beginning of a task, before iterations.
                    'setup': [],
                    # Called after every learning/inference iteration.
                    'postIter': [],
                    # Called when the experiment task is finished.
                    'finish': []
                }
            }  # End of taskControl
        },  # End of task
    ]
)

descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
                                                control=control)
|
yeewang/pjsip-2.4 | refs/heads/master | pjsip-apps/src/pygui/endpoint.py | 28 | # $Id: endpoint.py 4704 2014-01-16 05:30:46Z ming $
#
# pjsua Python GUI Demo
#
# Copyright (C)2013 Teluu Inc. (http://www.teluu.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import sys
if sys.version_info[0] >= 3: # Python 3
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox as msgbox
else:
import Tkinter as tk
import tkMessageBox as msgbox
import ttk
import pjsua2 as pj
import application
class Endpoint(pj.Endpoint):
    """
    High-level Python object inherited from pj.Endpoint.

    Keeps a module-wide singleton reference in ``Endpoint.instance`` so the
    URI-validation helpers below can reach the live endpoint.
    """
    # Singleton handle, set by __init__.
    instance = None

    def __init__(self):
        pj.Endpoint.__init__(self)
        Endpoint.instance = self

    # BUG FIX: these helpers take no ``self`` parameter, so without the
    # @staticmethod decorator they are broken unbound methods under
    # Python 2 (the uri string would be bound as ``self``). Marking them
    # static keeps Python 3 behavior unchanged and fixes Python 2.
    @staticmethod
    def validateUri(uri):
        """Return True if *uri* is a syntactically valid URI."""
        return Endpoint.instance.utilVerifyUri(uri) == pj.PJ_SUCCESS

    @staticmethod
    def validateSipUri(uri):
        """Return True if *uri* is a syntactically valid SIP URI."""
        return Endpoint.instance.utilVerifySipUri(uri) == pj.PJ_SUCCESS
# Allow running this module directly to launch the GUI application.
if __name__ == '__main__':
    application.main()
|
rshk/pygpgme | refs/heads/master | tests/test_genkey.py | 2 | # pygpgme - a Python wrapper for the gpgme library
# Copyright (C) 2006 James Henstridge
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import unittest
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
import gpgme
from tests.util import GpgHomeTestCase
# See /usr/share/doc/gnupg/DETAILS.gz
# XXX we are using a passwordless key because the passphrase_cb
# backend seems to be currently broken.
# GnuPG "unattended key generation" parameter block: a 1024-bit
# signing-only RSA key, no expiry, no passphrase.
signing_only_param = """
<GnupgKeyParms format="internal">
Key-Type: RSA
Key-Usage: sign
Key-Length: 1024
Name-Real: Testing
Name-Comment: comment
Name-Email: someone@example.com
Expire-Date: 0
</GnupgKeyParms>
"""
class GenerateKeyTestCase(GpgHomeTestCase):
    """Tests for gpgme key generation (Context.genkey) in a temp GPG home."""

    def assertCanSign(self, key):
        """Check that the given key can be used to create signatures."""
        ctx = gpgme.Context()
        ctx.signers = [key]
        plaintext = BytesIO(b'Hello World\n')
        signature = BytesIO()
        ctx.armor = True
        new_sigs = ctx.sign(
            plaintext, signature, gpgme.SIG_MODE_DETACH)
        # Rewind both streams so verify() reads them from the start.
        signature.seek(0)
        plaintext.seek(0)
        sigs = ctx.verify(signature, plaintext, None)
        self.assertEqual(len(sigs), 1)
        self.assertEqual(sigs[0].fpr, key.subkeys[0].fpr)

    # NOTE(review): the leading underscore means this test is deliberately
    # disabled (unittest won't collect it) — presumably because real key
    # generation is slow/flaky; confirm before re-enabling.
    def _test_generate_signing_only_keys(self):
        """Generate a signing-only key and verify all reported properties."""
        ctx = gpgme.Context()
        result = ctx.genkey(signing_only_param)
        self.assertEqual(result.primary, True)
        self.assertEqual(result.sub, False)
        # An OpenPGP fingerprint is 40 hex characters.
        self.assertEqual(len(result.fpr), 40)

        # The generated key is part of the current keyring.
        key = ctx.get_key(result.fpr, True)
        self.assertEqual(key.revoked, False)
        self.assertEqual(key.expired, False)
        self.assertEqual(key.secret, True)
        self.assertEqual(key.protocol, gpgme.PROTOCOL_OpenPGP)

        # Single signing-only RSA key.
        self.assertEqual(len(key.subkeys), 1)
        subkey = key.subkeys[0]
        self.assertEqual(subkey.secret, True)
        self.assertEqual(subkey.pubkey_algo, gpgme.PK_RSA)
        self.assertEqual(subkey.length, 1024)
        self.assertEqual(key.can_sign, True)
        self.assertEqual(key.can_encrypt, False)

        # The only UID available matches the given parameters.
        [uid] = key.uids
        self.assertEqual(uid.name, 'Testing')
        self.assertEqual(uid.comment, 'comment')
        self.assertEqual(uid.email, 'someone@example.com')

        # Finally check if the generated key can perform signatures.
        self.assertCanSign(key)

    def test_invalid_parameters(self):
        """genkey() with garbage input raises GpgmeError carrying a result."""
        ctx = gpgme.Context()
        try:
            ctx.genkey('garbage parameters')
        except gpgme.GpgmeError as exc:
            # The error object exposes a partial genkey result.
            self.assertTrue(hasattr(exc, "result"))
            result = exc.result
            self.assertEqual(result.primary, False)
            self.assertEqual(result.sub, False)
            self.assertEqual(result.fpr, None)
        else:
            self.fail("GpgmeError not raised")
def test_suite():
    """Collect all tests in this module into a single unittest suite."""
    return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main()
|
imsparsh/python-for-android | refs/heads/master | python3-alpha/python3-src/Tools/msi/sequence.py | 291 | AdminExecuteSequence = [
(u'InstallInitialize', None, 1500),
(u'InstallFinalize', None, 6600),
(u'InstallFiles', None, 4000),
(u'InstallAdminPackage', None, 3900),
(u'FileCost', None, 900),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
(u'InstallValidate', None, 1400),
]
# Each entry is (action name, condition or None, sequence number); negative
# sequence numbers are MSI terminal dialogs.
AdminUISequence = [
    (u'FileCost', None, 900),
    (u'CostInitialize', None, 800),
    (u'CostFinalize', None, 1000),
    (u'ExecuteAction', None, 1300),
    (u'ExitDialog', None, -1),
    (u'FatalError', None, -3),
    (u'UserExit', None, -2),
]

# Advertisement (advertised-install) execute sequence.
AdvtExecuteSequence = [
    (u'InstallInitialize', None, 1500),
    (u'InstallFinalize', None, 6600),
    (u'CostInitialize', None, 800),
    (u'CostFinalize', None, 1000),
    (u'InstallValidate', None, 1400),
    (u'CreateShortcuts', None, 4500),
    (u'MsiPublishAssemblies', None, 6250),
    (u'PublishComponents', None, 6200),
    (u'PublishFeatures', None, 6300),
    (u'PublishProduct', None, 6400),
    (u'RegisterClassInfo', None, 4600),
    (u'RegisterExtensionInfo', None, 4700),
    (u'RegisterMIMEInfo', None, 4900),
    (u'RegisterProgIdInfo', None, 4800),
]

# Full install-time execute sequence.
InstallExecuteSequence = [
    (u'InstallInitialize', None, 1500),
    (u'InstallFinalize', None, 6600),
    (u'InstallFiles', None, 4000),
    (u'FileCost', None, 900),
    (u'CostInitialize', None, 800),
    (u'CostFinalize', None, 1000),
    (u'InstallValidate', None, 1400),
    (u'CreateShortcuts', None, 4500),
    (u'MsiPublishAssemblies', None, 6250),
    (u'PublishComponents', None, 6200),
    (u'PublishFeatures', None, 6300),
    (u'PublishProduct', None, 6400),
    (u'RegisterClassInfo', None, 4600),
    (u'RegisterExtensionInfo', None, 4700),
    (u'RegisterMIMEInfo', None, 4900),
    (u'RegisterProgIdInfo', None, 4800),
    (u'AllocateRegistrySpace', u'NOT Installed', 1550),
    (u'AppSearch', None, 400),
    (u'BindImage', None, 4300),
    (u'CCPSearch', u'NOT Installed', 500),
    (u'CreateFolders', None, 3700),
    (u'DeleteServices', u'VersionNT', 2000),
    (u'DuplicateFiles', None, 4210),
    (u'FindRelatedProducts', None, 200),
    (u'InstallODBC', None, 5400),
    (u'InstallServices', u'VersionNT', 5800),
    (u'IsolateComponents', None, 950),
    (u'LaunchConditions', None, 100),
    (u'MigrateFeatureStates', None, 1200),
    (u'MoveFiles', None, 3800),
    (u'PatchFiles', None, 4090),
    (u'ProcessComponents', None, 1600),
    (u'RegisterComPlus', None, 5700),
    (u'RegisterFonts', None, 5300),
    (u'RegisterProduct', None, 6100),
    (u'RegisterTypeLibraries', None, 5500),
    (u'RegisterUser', None, 6000),
    (u'RemoveDuplicateFiles', None, 3400),
    (u'RemoveEnvironmentStrings', None, 3300),
    (u'RemoveExistingProducts', None, 6700),
    (u'RemoveFiles', None, 3500),
    (u'RemoveFolders', None, 3600),
    (u'RemoveIniValues', None, 3100),
    (u'RemoveODBC', None, 2400),
    (u'RemoveRegistryValues', None, 2600),
    (u'RemoveShortcuts', None, 3200),
    (u'RMCCPSearch', u'NOT Installed', 600),
    (u'SelfRegModules', None, 5600),
    (u'SelfUnregModules', None, 2200),
    (u'SetODBCFolders', None, 1100),
    (u'StartServices', u'VersionNT', 5900),
    (u'StopServices', u'VersionNT', 1900),
    (u'MsiUnpublishAssemblies', None, 1750),
    (u'UnpublishComponents', None, 1700),
    (u'UnpublishFeatures', None, 1800),
    (u'UnregisterClassInfo', None, 2700),
    (u'UnregisterComPlus', None, 2100),
    (u'UnregisterExtensionInfo', None, 2800),
    (u'UnregisterFonts', None, 2500),
    (u'UnregisterMIMEInfo', None, 3000),
    (u'UnregisterProgIdInfo', None, 2900),
    (u'UnregisterTypeLibraries', None, 2300),
    (u'ValidateProductID', None, 700),
    (u'WriteEnvironmentStrings', None, 5200),
    (u'WriteIniValues', None, 5100),
    (u'WriteRegistryValues', None, 5000),
]

# UI-mode install sequence.
InstallUISequence = [
    (u'FileCost', None, 900),
    (u'CostInitialize', None, 800),
    (u'CostFinalize', None, 1000),
    (u'ExecuteAction', None, 1300),
    (u'ExitDialog', None, -1),
    (u'FatalError', None, -3),
    (u'UserExit', None, -2),
    (u'AppSearch', None, 400),
    (u'CCPSearch', u'NOT Installed', 500),
    (u'FindRelatedProducts', None, 200),
    (u'IsolateComponents', None, 950),
    (u'LaunchConditions', None, 100),
    (u'MigrateFeatureStates', None, 1200),
    (u'RMCCPSearch', u'NOT Installed', 600),
    (u'ValidateProductID', None, 700),
]

# Names of the sequence tables defined in this module.
tables=['AdminExecuteSequence', 'AdminUISequence', 'AdvtExecuteSequence', 'InstallExecuteSequence', 'InstallUISequence']
|
JackKelly/neuralnilm_prototype | refs/heads/master | scripts/disag_545d.py | 2 | from __future__ import print_function, division
#import matplotlib
import logging
from sys import stdout
# matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
import matplotlib.pyplot as plt
from neuralnilm import (Net, RealApplianceSource)
from neuralnilm.source import (standardise, discretize, fdiff, power_and_fdiff,
RandomSegments, RandomSegmentsInMemory,
SameLocation)
from neuralnilm.experiment import run_experiment, init_experiment
from neuralnilm.net import TrainingError
from neuralnilm.layers import (MixtureDensityLayer, DeConv1DLayer,
SharedWeightsDenseLayer)
from neuralnilm.objectives import (scaled_cost, mdn_nll,
scaled_cost_ignore_inactive, ignore_inactive,
scaled_cost3)
from neuralnilm.plot import (
StartEndMeanPlotter, plot_disaggregate_start_stop_end)
from neuralnilm.disaggregate import (
disaggregate_start_stop_end, rectangles_to_matrix,
rectangles_matrix_to_vector, save_rectangles)
from neuralnilm.rectangulariser import rectangularise
from lasagne.nonlinearities import sigmoid, rectify, tanh, identity, softmax
from lasagne.objectives import squared_error, binary_crossentropy
from lasagne.init import Uniform, Normal
from lasagne.layers import (DenseLayer, Conv1DLayer,
ReshapeLayer, FeaturePoolLayer,
DimshuffleLayer, DropoutLayer, ConcatLayer, PadLayer)
from lasagne.updates import nesterov_momentum, momentum
from functools import partial
import os
import __main__
from copy import deepcopy
from math import sqrt
import numpy as np
import theano.tensor as T
import gc
# Experiment identifiers and output locations.
NAME = 'e545'
PATH = "/data/dk3810/figures"
SAVE_PLOT_INTERVAL = 25000
N_SEQ_PER_BATCH = 64
# Maximum appliance power in watts; used to unstandardise network outputs.
MAX_TARGET_POWER = 2500

full_exp_name = NAME + 'd'
path = os.path.join(PATH, full_exp_name)
print("Changing directory to", path)
os.chdir(path)

# Set up a logger writing both to <experiment>.log and to stdout.
logger = logging.getLogger(full_exp_name)
if not logger.handlers:
    fh = logging.FileHandler(full_exp_name + '.log')
    formatter = logging.Formatter('%(asctime)s %(message)s')
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    logger.addHandler(logging.StreamHandler(stream=stdout))
    logger.setLevel(logging.DEBUG)

logger.info("***********************************")
logger.info("Preparing " + full_exp_name + "...")

# Load input stats: standardisation statistics saved by the training run,
# so disaggregation input is scaled the same way as during training.
input_stats = {
    'mean': np.load("input_stats_mean.npy"),
    'std': np.load("input_stats_std.npy")
}
# Base keyword arguments for RealApplianceSource (UK-DALE data).
source_dict = dict(
    filename='/data/dk3810/ukdale.h5',
    appliances=[
        ['fridge freezer', 'fridge', 'freezer'],
        ['washer dryer', 'washing machine'],
        'kettle',
        'HTPC',
        'dish washer'
    ],
    # Per-appliance limits/durations, in the same order as `appliances`.
    max_appliance_powers=[300, 2400, 2600, 200, 2500],
    on_power_thresholds=[5] * 5,
    min_on_durations=[60, 1800, 30, 60, 1800],
    min_off_durations=[12, 600, 1, 12, 1800],
    # Just load a tiny bit of data. Won't be used.
    window=("2013-04-12", "2013-04-27"),
    seq_length=2048,
    output_one_appliance=True,
    train_buildings=[1],
    validation_buildings=[1],
    n_seq_per_batch=N_SEQ_PER_BATCH,
    standardise_input=True,
    independently_center_inputs=False,
    skip_probability=0.75,
    target_is_start_and_end_and_mean=True,
    one_target_per_seq=False,
    input_stats=input_stats
)

# Base keyword arguments for the neural network wrapper.
net_dict = dict(
    save_plot_interval=SAVE_PLOT_INTERVAL,
    # Mean squared error loss.
    loss_function=lambda x, t: squared_error(x, t).mean(),
    updates_func=nesterov_momentum,
    learning_rate=1e-3,
    do_save_activations=True,
    auto_reshape=False,
    plotter=StartEndMeanPlotter(
        n_seq_to_plot=32, max_target_power=MAX_TARGET_POWER)
)
def exp_a(name):
    """Build experiment 'a': a conv + dense network over mains power.

    Creates the RealApplianceSource (stored in the module-level `source`),
    assembles the layer configuration, and loads pre-trained parameters
    from iteration 300000.
    """
    global source
    source_dict_copy = deepcopy(source_dict)
    source_dict_copy.update(dict(
        logger=logging.getLogger(name)
    ))
    source = RealApplianceSource(**source_dict_copy)
    net_dict_copy = deepcopy(net_dict)
    net_dict_copy.update(dict(
        experiment_name=name,
        source=source
    ))
    # Two 1-D convolutions over time, then a stack of dense layers that
    # narrows down to 3 outputs (start, end, mean of the appliance
    # activation).
    net_dict_copy['layers_config'] = [
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1)  # (batch, features, time)
        },
        {
            # Pad the time axis so the two valid-mode convs below don't
            # shrink the sequence.
            'type': PadLayer,
            'width': 4
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': 16,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': Conv1DLayer,  # convolve over the time axis
            'num_filters': 16,
            'filter_size': 4,
            'stride': 1,
            'nonlinearity': None,
            'border_mode': 'valid'
        },
        {
            'type': DimshuffleLayer,
            'pattern': (0, 2, 1),  # back to (batch, time, features)
            'label': 'dimshuffle3'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 16,
            'nonlinearity': rectify,
            'label': 'dense0'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 8,
            'nonlinearity': rectify,
            'label': 'dense1'
        },
        {
            'type': DenseLayer,
            'num_units': 512 * 4,
            'nonlinearity': rectify,
            'label': 'dense2'
        },
        {
            'type': DenseLayer,
            'num_units': 512,
            'nonlinearity': rectify
        },
        {
            # Final outputs: start, end, mean power.
            'type': DenseLayer,
            'num_units': 3,
            'nonlinearity': None
        }
    ]
    net = Net(**net_dict_copy)
    # Restore parameters saved at training iteration 300000.
    net.load_params(300000)
    return net
# Load neural net
net = exp_a(full_exp_name)
net.print_net()
net.compile()

# Generate mains data:
# create new source, based on net's source,
# but with 5 outputs (so each seq includes entire appliance activation,
# and to make it easier to plot every appliance),
# and long seq length,
# then make one long mains by concatenating each seq.
source_dict_copy = deepcopy(source_dict)
source_dict_copy.update(dict(
    logger=logger,
    seq_length=2048,
    border=100,
    output_one_appliance=False,
    input_stats=input_stats,
    target_is_start_and_end_and_mean=False,
    window=("2014-12-10", None)
))
mains_source = RealApplianceSource(**source_dict_copy)
mains_source.start()

N_BATCHES = 1
logger.info("Preparing synthetic mains data for {} batches.".format(N_BATCHES))
mains = None
targets = None
# Index of the appliance whose targets we keep (4 = 'dish washer' in
# source_dict's appliance list).
TARGET_I = 4
for batch_i in range(N_BATCHES):
    batch = mains_source.queue.get(timeout=30)
    mains_batch, targets_batch = batch.data
    if mains is None:
        mains = mains_batch
        targets = targets_batch[:, :, TARGET_I]
    else:
        # Append subsequent batches along the sequence axis.
        mains = np.concatenate((mains, mains_batch))
        targets = np.concatenate((targets, targets_batch[:, :, TARGET_I]))
mains_source.stop()

# Post-process data
seq_length = net.input_shape[1]
def pad(data):
    """Pad *data* with one seq_length of its minimum value at each end.

    Gives the sliding network windows valid context at the array
    boundaries.
    """
    fill_value = float(data.min())
    pad_width = (seq_length, seq_length)
    return np.pad(data, pad_width, mode='constant',
                  constant_values=(fill_value,))
# Flatten the batched sequences into one long vector and pad the ends.
mains = pad(mains.flatten())
targets = pad(targets.flatten())
logger.info("Done preparing synthetic mains data!")

# Unstandardise for plotting
targets *= MAX_TARGET_POWER
mains_unstandardised = (mains * input_stats['std']) + input_stats['mean']
mains_unstandardised *= mains_source.max_input_power

# disag: slide the network over mains with the given stride and collect
# candidate (start, stop, power) rectangles.
STRIDE = 16
logger.info("Starting disag...")
rectangles = disaggregate_start_stop_end(
    mains, net, stride=STRIDE, max_target_power=MAX_TARGET_POWER)
rectangles_matrix = rectangles_to_matrix(rectangles[0], MAX_TARGET_POWER)
disag_vector = rectangles_matrix_to_vector(
    rectangles_matrix, min_on_power=500, overlap_threshold=0.30)

# save data to disk
logger.info("Saving data to disk...")
np.save('mains', mains_unstandardised)
np.save('targets', targets)
np.save('disag_vector', disag_vector)
save_rectangles(rectangles)

# plot: network output, disaggregated vector, ground truth, and input.
logger.info("Plotting...")
fig, axes = plt.subplots(4, 1, sharex=True)
alpha = STRIDE / seq_length
plot_disaggregate_start_stop_end(rectangles, ax=axes[0], alpha=alpha)
axes[0].set_title('Network output')
axes[1].plot(disag_vector)
axes[1].set_title("Disaggregated vector")
axes[2].plot(targets)
axes[2].set_title("Target")
axes[3].plot(mains_unstandardised)
axes[3].set_title('Network input')
axes[3].set_xlim((0, len(mains)))
plt.show()
logger.info("DONE!")

"""
Emacs variables
Local Variables:
compile-command: "cp /home/jack/workspace/python/neuralnilm/scripts/disag_545d.py /mnt/sshfs/imperial/workspace/python/neuralnilm/scripts/"
End:
"""
|
chaen/Phronesis | refs/heads/master | src/analyzerCompiler/profile.py | 1 | #!/usr/bin/python
import sys
import pstats
import os
import subprocess

# Profile phronesisCompiler.py (forwarding an optional single CLI argument)
# and print the collected statistics.

if len(sys.argv) > 1:
    arg = sys.argv[1]
else:
    arg = ""

# SECURITY FIX: the original interpolated argv[1] into an os.system() shell
# string, allowing shell injection. Build an argument list instead and run
# without a shell; also use sys.executable so the same interpreter is used.
cmd = [sys.executable, '-m', 'cProfile', '-o', 'profile.stat',
       'phronesisCompiler.py']
if arg:
    cmd.append(arg)
subprocess.call(cmd)

# Load the stats file written above and dump it, sorted by the standard
# (file/line/name) order.
p = pstats.Stats('profile.stat')
p.strip_dirs().sort_stats(-1).print_stats()
|
shybovycha/buck | refs/heads/master | third-party/py/unittest2/unittest2/__init__.py | 155 | """
unittest2
unittest2 is a backport of the new features added to the unittest testing
framework in Python 2.7. It is tested to run on Python 2.4 - 2.6.
To use unittest2 instead of unittest simply replace ``import unittest`` with
``import unittest2``.
Copyright (c) 1999-2003 Steve Purcell
Copyright (c) 2003-2010 Python Software Foundation
This module is free software, and you may redistribute it and/or modify
it under the same terms as Python itself, so long as this copyright message
and disclaimer are retained in their original form.
IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""
__all__ = ['TestResult', 'TestCase', 'TestSuite',
'TextTestRunner', 'TestLoader', 'FunctionTestCase', 'main',
'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
'expectedFailure', 'TextTestResult', '__version__', 'collector']
__version__ = '0.5.1'
# Expose obsolete functions for backwards compatibility
__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
from unittest2.collector import collector
from unittest2.result import TestResult
from unittest2.case import (
TestCase, FunctionTestCase, SkipTest, skip, skipIf,
skipUnless, expectedFailure
)
from unittest2.suite import BaseTestSuite, TestSuite
from unittest2.loader import (
TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases
)
from unittest2.main import TestProgram, main, main_
from unittest2.runner import TextTestRunner, TextTestResult
try:
from unittest2.signals import (
installHandler, registerResult, removeResult, removeHandler
)
except ImportError:
# Compatibility with platforms that don't have the signal module
pass
else:
__all__.extend(['installHandler', 'registerResult', 'removeResult',
'removeHandler'])
# deprecated
_TextTestResult = TextTestResult
__unittest = True |
PriviPK/privipk-sync-engine | refs/heads/privipk | kls/dht_node.py | 1 | from twisted.internet import reactor
import twisted.internet.threads
from entangled.node import EntangledNode
from entangled.kademlia.datastore import DictDataStore
#from entangled.kademlia.datastore import SQLiteDataStore
import sys
import hashlib
import threading
class DhtNode:
    # entry_nodes: array of tuples of form (gethostbyname(addr), int(port))
    def __init__(self, entry_nodes, udp_port):
        """Remember bootstrap nodes and port; the node starts in start()."""
        self.udp_port = udp_port
        self.entry_nodes = entry_nodes
        # Released once the Twisted reactor is actually running; stop()
        # blocks on it so we never stop a reactor that hasn't started.
        self.started = threading.Semaphore(0)
    def start(self, sameThread=False):
        """Create the Kademlia node and run the Twisted reactor.

        If sameThread is False (default) the reactor runs in a new daemon-less
        thread; otherwise it blocks the calling thread.
        """
        def thread(sameThread):
            # create a local data store
            self.dataStore = DictDataStore()
            #self.dataStore = SQLiteDataStore(
            #    dbFile='/tmp/dbFile%s.db' % self.udp_port)
            self.node = EntangledNode(udpPort=self.udp_port,
                                      dataStore=self.dataStore)
            if len(self.entry_nodes) > 0:
                print "Joining network using entry nodes:",\
                    self.entry_nodes, type(self.entry_nodes[0][0]),\
                    type(self.entry_nodes[0][1])
            else:
                print "Starting network by myself!"
            self.node.joinNetwork(self.entry_nodes)
            print "Running Twisted reactor loop..."
            #print "( sameThread =", sameThread, ")"
            # signal a semaphore when the reactor starts
            reactor.callWhenRunning(
                lambda: self.started.release())
            # Twisted can't install signal handlers when the reactor is
            # started in a different thread
            # TODO: not sure how to install those handlers then, and
            # what's the side-effect of not having them installed
            reactor.run(installSignalHandlers=sameThread)
        if not sameThread:
            import threading
            t = threading.Thread(target=thread, args=(False, ))
            t.start()
        else:
            thread(True)
def stop(self):
print "Destroying DHT node..."
# Wait for the reactor to start before trying to stop it
self.started.acquire()
# NOTE: We need to use callFromThread here to stop the reactor
# because we started it in a different thread
reactor.callFromThread(
reactor.stop)
sys.stdout.flush()
def OpAsync(self, op, key, value, execFn, errFn=None):
self._log(op, key, value, "Executing...")
hkey = hashlib.sha1(key).digest()
#print "Hashed", key, "to", hkey.encode('hex')
deferredResult = execFn(op, hkey, value)
if op == 'Get':
def successCallback(result):
if type(result) == dict and hkey in result:
self.log_success("Get", key, result[hkey], "OK")
return result[hkey]
else:
self.log_fail("Get", key, '', "Key '" + key + "' not found")
return None
successFn = successCallback
else:
successFn = lambda *args, **kwargs: self.log_success(
op, key, value, "OK")
deferredResult.addCallback(successFn)
if errFn is None:
errFn = lambda fail: self.log_fail(op, key, value,
str(fail))
deferredResult.addErrback(errFn)
return deferredResult
def PutAsync(self, key, value):
return self.OpAsync("Put", key, value, lambda o, k, v:
self.node.iterativeStore(k, v))
def GetAsync(self, key):
def errCallback(fail):
self.log_fail("Get", key, '', str(fail))
return None
return self.OpAsync("Get", key, "", lambda o, k, v:
self.node.iterativeFindValue(k),
errCallback)
def DeleteAsync(self, key):
return self.OpAsync("Delete", key, "", lambda o, k, v:
self.node.iterativeDelete(k))
def Put(self, key, value):
result = twisted.internet.threads.blockingCallFromThread(
reactor,
lambda: self.PutAsync(key, value))
return result
def Get(self, key):
result = twisted.internet.threads.blockingCallFromThread(
reactor,
lambda: self.GetAsync(key))
return result
def Delete(self, key):
result = twisted.internet.threads.blockingCallFromThread(
reactor,
lambda: self.DeleteAsync(key))
return result
def _log(self, op, key, value, message):
if op == "Put":
#print op + "(" + key + ", " + value + "): " + message
pass
elif op == "Get" or op == "Delete":
#print op + "(" + key + "): " + value + " (" + message + ")"
pass
else:
#print "Unknown op: " + op
pass
def log_success(self, op, key, value, message):
self._log(op, key, value, message)
return True
def log_fail(self, op, key, value, message):
self._log(op, key, value, "FAIL: " + message)
return False
# destructor, gets called when object is destroyed
#def __del__(self):
# self.stop()
#def parse_entry_nodes(entry_nodes):
# new_entry_nodes = []
# for node in entry_nodes:
# host = node['host']
# port = int(node['port'])
# print host, port, type(host), type(port)
# new_entry_nodes.append((host, port))
# return new_entry_nodes
|
zanderle/django | refs/heads/master | django/db/backends/base/schema.py | 339 | import hashlib
import logging
from django.db.backends.utils import truncate_name
from django.db.transaction import atomic
from django.utils import six
from django.utils.encoding import force_bytes
logger = logging.getLogger('django.db.backends.schema')
def _related_non_m2m_objects(old_field, new_field):
    # Pair up the non-many-to-many reverse relations of the old and new
    # fields, yielding (old_relation, new_relation) tuples.
    old_rels = (
        rel for rel in old_field.model._meta.related_objects
        if not rel.field.many_to_many
    )
    new_rels = (
        rel for rel in new_field.model._meta.related_objects
        if not rel.field.many_to_many
    )
    return zip(old_rels, new_rels)
class BaseDatabaseSchemaEditor(object):
"""
This class (and its subclasses) are responsible for emitting schema-changing
statements to the databases - model creation/removal/alteration, field
renaming, index fiddling, and so on.
It is intended to eventually completely replace DatabaseCreation.
This class should be used by creating an instance for each set of schema
changes (e.g. a migration file), and by first calling start(),
then the relevant actions, and then commit(). This is necessary to allow
things like circular foreign key references - FKs will only be created once
commit() is called.
"""
# Overrideable SQL templates
sql_create_table = "CREATE TABLE %(table)s (%(definition)s)"
sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s"
sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s"
sql_delete_table = "DROP TABLE %(table)s CASCADE"
sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s"
sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL"
sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL"
sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s"
sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE"
sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s"
sql_update_with_default = "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)"
sql_delete_check = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)"
sql_delete_unique = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
)
sql_create_inline_fk = None
sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
sql_delete_index = "DROP INDEX %(name)s"
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
sql_delete_pk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
def __init__(self, connection, collect_sql=False):
self.connection = connection
self.collect_sql = collect_sql
if self.collect_sql:
self.collected_sql = []
# State-managing methods
def __enter__(self):
self.deferred_sql = []
if self.connection.features.can_rollback_ddl:
self.atomic = atomic(self.connection.alias)
self.atomic.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
for sql in self.deferred_sql:
self.execute(sql)
if self.connection.features.can_rollback_ddl:
self.atomic.__exit__(exc_type, exc_value, traceback)
# Core utility functions
def execute(self, sql, params=[]):
"""
Executes the given SQL statement, with optional parameters.
"""
# Log the command we're running, then run it
logger.debug("%s; (params %r)" % (sql, params))
if self.collect_sql:
ending = "" if sql.endswith(";") else ";"
if params is not None:
self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending)
else:
self.collected_sql.append(sql + ending)
else:
with self.connection.cursor() as cursor:
cursor.execute(sql, params)
def quote_name(self, name):
return self.connection.ops.quote_name(name)
@classmethod
def _digest(cls, *args):
"""
Generates a 32-bit digest of a set of arguments that can be used to
shorten identifying names.
"""
h = hashlib.md5()
for arg in args:
h.update(force_bytes(arg))
return h.hexdigest()[:8]
# Field <-> database mapping functions
    def column_sql(self, model, field, include_default=False):
        """
        Takes a field and returns its column definition.
        The field must already have had set_attributes_from_name called.

        Returns a (sql, params) pair, or (None, None) when the field has no
        column of its own (e.g. many-to-many fields).
        """
        # Get the column's type and use that as the basis of the SQL
        db_params = field.db_parameters(connection=self.connection)
        sql = db_params['type']
        params = []
        # Check for fields that aren't actually columns (e.g. M2M)
        if sql is None:
            return None, None
        # Work out nullability
        null = field.null
        # If we were told to include a default value, do so
        include_default = include_default and not self.skip_default(field)
        if include_default:
            default_value = self.effective_default(field)
            if default_value is not None:
                if self.connection.features.requires_literal_defaults:
                    # Some databases can't take defaults as a parameter (oracle)
                    # If this is the case, the individual schema backend should
                    # implement prepare_default
                    sql += " DEFAULT %s" % self.prepare_default(default_value)
                else:
                    # Otherwise hand the default over as a query parameter.
                    sql += " DEFAULT %s"
                    params += [default_value]
        # Oracle treats the empty string ('') as null, so coerce the null
        # option whenever '' is a possible value.
        if (field.empty_strings_allowed and not field.primary_key and
                self.connection.features.interprets_empty_strings_as_nulls):
            null = True
        if null and not self.connection.features.implied_column_null:
            sql += " NULL"
        elif not null:
            sql += " NOT NULL"
        # Primary key/unique outputs
        if field.primary_key:
            sql += " PRIMARY KEY"
        elif field.unique:
            sql += " UNIQUE"
        # Optionally add the tablespace if it's an implicitly indexed column
        tablespace = field.db_tablespace or model._meta.db_tablespace
        if tablespace and self.connection.features.supports_tablespaces and field.unique:
            sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True)
        # Return the sql
        return sql, params
    def skip_default(self, field):
        """
        Some backends don't accept default values for certain columns types
        (i.e. MySQL longtext and longblob).

        The base implementation never skips; backends override as needed.
        """
        return False
    def prepare_default(self, value):
        """
        Only used for backends which have requires_literal_defaults feature

        Must return `value` rendered as a SQL literal suitable for direct
        inclusion in a DEFAULT clause.
        """
        raise NotImplementedError(
            'subclasses of BaseDatabaseSchemaEditor for backends which have '
            'requires_literal_defaults must provide a prepare_default() method'
        )
def effective_default(self, field):
"""
Returns a field's effective database default value
"""
if field.has_default():
default = field.get_default()
elif not field.null and field.blank and field.empty_strings_allowed:
if field.get_internal_type() == "BinaryField":
default = six.binary_type()
else:
default = six.text_type()
else:
default = None
# If it's a callable, call it
if six.callable(default):
default = default()
# Run it through the field's get_db_prep_save method so we can send it
# to the database.
default = field.get_db_prep_save(default, self.connection)
return default
def quote_value(self, value):
"""
Returns a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
"""
raise NotImplementedError()
# Actions
    def create_model(self, model):
        """
        Takes a model and creates a table for it in the database.
        Will also create any accompanying indexes or unique constraints.

        FK, unique and index statements are appended to self.deferred_sql
        and run when the editor exits.
        """
        # Create column SQL, add FK deferreds if needed
        column_sqls = []
        params = []
        for field in model._meta.local_fields:
            # SQL
            definition, extra_params = self.column_sql(model, field)
            if definition is None:
                # Field has no column of its own (e.g. M2M)
                continue
            # Check constraints can go on the column SQL here
            db_params = field.db_parameters(connection=self.connection)
            if db_params['check']:
                definition += " CHECK (%s)" % db_params['check']
            # Autoincrement SQL (for backends with inline variant)
            col_type_suffix = field.db_type_suffix(connection=self.connection)
            if col_type_suffix:
                definition += " %s" % col_type_suffix
            params.extend(extra_params)
            # FK
            if field.remote_field and field.db_constraint:
                to_table = field.remote_field.model._meta.db_table
                to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column
                if self.connection.features.supports_foreign_keys:
                    self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
                elif self.sql_create_inline_fk:
                    definition += " " + self.sql_create_inline_fk % {
                        "to_table": self.quote_name(to_table),
                        "to_column": self.quote_name(to_column),
                    }
            # Add the SQL to our big list
            column_sqls.append("%s %s" % (
                self.quote_name(field.column),
                definition,
            ))
            # Autoincrement SQL (for backends with post table definition variant)
            if field.get_internal_type() == "AutoField":
                autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column)
                if autoinc_sql:
                    self.deferred_sql.extend(autoinc_sql)
        # Add any unique_togethers (always deferred, as some fields might be
        # created afterwards, like geometry fields with some backends)
        for fields in model._meta.unique_together:
            columns = [model._meta.get_field(field).column for field in fields]
            self.deferred_sql.append(self._create_unique_sql(model, columns))
        # Make the table
        sql = self.sql_create_table % {
            "table": self.quote_name(model._meta.db_table),
            "definition": ", ".join(column_sqls)
        }
        if model._meta.db_tablespace:
            tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
            if tablespace_sql:
                sql += ' ' + tablespace_sql
        # Prevent using [] as params, in the case a literal '%' is used in the definition
        self.execute(sql, params or None)
        # Add any field index and index_together's (deferred as SQLite3 _remake_table needs it)
        self.deferred_sql.extend(self._model_indexes_sql(model))
        # Make M2M tables
        for field in model._meta.local_many_to_many:
            if field.remote_field.through._meta.auto_created:
                self.create_model(field.remote_field.through)
def delete_model(self, model):
"""
Deletes a model from the database.
"""
# Handle auto-created intermediary models
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# Delete the table
self.execute(self.sql_delete_table % {
"table": self.quote_name(model._meta.db_table),
})
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deals with a model changing its unique_together.
Note: The input unique_togethers must be doubly-nested, not the single-
nested ["foo", "bar"] format.
"""
olds = set(tuple(fields) for fields in old_unique_together)
news = set(tuple(fields) for fields in new_unique_together)
# Deleted uniques
for fields in olds.difference(news):
self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_unique)
# Created uniques
for fields in news.difference(olds):
columns = [model._meta.get_field(field).column for field in fields]
self.execute(self._create_unique_sql(model, columns))
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deals with a model changing its index_together.
Note: The input index_togethers must be doubly-nested, not the single-
nested ["foo", "bar"] format.
"""
olds = set(tuple(fields) for fields in old_index_together)
news = set(tuple(fields) for fields in new_index_together)
# Deleted indexes
for fields in olds.difference(news):
self._delete_composed_index(model, fields, {'index': True}, self.sql_delete_index)
# Created indexes
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_index_sql(model, fields, suffix="_idx"))
def _delete_composed_index(self, model, fields, constraint_kwargs, sql):
columns = [model._meta.get_field(field).column for field in fields]
constraint_names = self._constraint_names(model, columns, **constraint_kwargs)
if len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % (
len(constraint_names),
model._meta.db_table,
", ".join(columns),
))
self.execute(self._delete_constraint_sql(sql, model, constraint_names[0]))
def alter_db_table(self, model, old_db_table, new_db_table):
"""
Renames the table a model points to.
"""
if old_db_table == new_db_table:
return
self.execute(self.sql_rename_table % {
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
})
def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
"""
Moves a model's table between tablespaces
"""
self.execute(self.sql_retablespace_table % {
"table": self.quote_name(model._meta.db_table),
"old_tablespace": self.quote_name(old_db_tablespace),
"new_tablespace": self.quote_name(new_db_tablespace),
})
    def add_field(self, model, field):
        """
        Creates a field on a model.
        Usually involves adding a column, but may involve adding a
        table instead (for M2M fields)
        """
        # Special-case implicit M2M tables
        if field.many_to_many and field.remote_field.through._meta.auto_created:
            return self.create_model(field.remote_field.through)
        # Get the column's definition
        definition, params = self.column_sql(model, field, include_default=True)
        # It might not actually have a column behind it
        if definition is None:
            return
        # Check constraints can go on the column SQL here
        db_params = field.db_parameters(connection=self.connection)
        if db_params['check']:
            definition += " CHECK (%s)" % db_params['check']
        # Build the SQL and run it
        sql = self.sql_create_column % {
            "table": self.quote_name(model._meta.db_table),
            "column": self.quote_name(field.column),
            "definition": definition,
        }
        self.execute(sql, params)
        # Drop the default if we need to
        # (Django usually does not use in-database defaults)
        # NOTE(review): fields with no declared default typically carry a
        # NOT_PROVIDED sentinel rather than None — confirm this check does
        # what's intended for default-less fields.
        if not self.skip_default(field) and field.default is not None:
            sql = self.sql_alter_column % {
                "table": self.quote_name(model._meta.db_table),
                "changes": self.sql_alter_column_no_default % {
                    "column": self.quote_name(field.column),
                }
            }
            self.execute(sql)
        # Add an index, if required
        if field.db_index and not field.unique:
            self.deferred_sql.append(self._create_index_sql(model, [field]))
        # Add any FK constraints later
        if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint:
            self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()
def remove_field(self, model, field):
"""
Removes a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.delete_model(field.remote_field.through)
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)['type'] is None:
return
# Drop any FK constraints, MySQL requires explicit deletion
if field.remote_field:
fk_names = self._constraint_names(model, [field.column], foreign_key=True)
for fk_name in fk_names:
self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
# Delete the column
sql = self.sql_delete_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
    def alter_field(self, model, old_field, new_field, strict=False):
        """
        Allows a field's type, uniqueness, nullability, default, column,
        constraints etc. to be modified.
        Requires a copy of the old field as well so we can only perform
        changes that are required.
        If strict is true, raises errors if the old column does not match old_field precisely.
        """
        # Ensure this field is even column-based
        old_db_params = old_field.db_parameters(connection=self.connection)
        old_type = old_db_params['type']
        new_db_params = new_field.db_parameters(connection=self.connection)
        new_type = new_db_params['type']
        # A None type with no remote_field is an ill-defined custom field.
        if ((old_type is None and old_field.remote_field is None) or
                (new_type is None and new_field.remote_field is None)):
            raise ValueError(
                "Cannot alter field %s into %s - they do not properly define "
                "db_type (are you using a badly-written custom field?)" %
                (old_field, new_field),
            )
        # Both sides are auto-created M2Ms: repoint the through table.
        elif old_type is None and new_type is None and (
                old_field.remote_field.through and new_field.remote_field.through and
                old_field.remote_field.through._meta.auto_created and
                new_field.remote_field.through._meta.auto_created):
            return self._alter_many_to_many(model, old_field, new_field, strict)
        elif old_type is None and new_type is None and (
                old_field.remote_field.through and new_field.remote_field.through and
                not old_field.remote_field.through._meta.auto_created and
                not new_field.remote_field.through._meta.auto_created):
            # Both sides have through models; this is a no-op.
            return
        # Exactly one side columnless: unsupported M2M <-> column change.
        elif old_type is None or new_type is None:
            raise ValueError(
                "Cannot alter field %s into %s - they are not compatible types "
                "(you cannot alter to or from M2M fields, or add or remove "
                "through= on M2M fields)" % (old_field, new_field)
            )
        self._alter_field(model, old_field, new_field, old_type, new_type,
                          old_db_params, new_db_params, strict)
    def _alter_field(self, model, old_field, new_field, old_type, new_type,
                     old_db_params, new_db_params, strict=False):
        """Actually perform a "physical" (non-ManyToMany) field update.

        Runs in phases: (1) drop constraints invalidated by the change
        (FKs, unique, incoming FKs on a PK type change, indexes, checks),
        (2) rename the column if needed, (3) apply the accumulated
        type/default/null ALTERs, (4) re-create constraints and indexes
        for the new definition. Statement order is significant.
        """
        # Drop any FK constraints, we'll remake them later
        fks_dropped = set()
        if old_field.remote_field and old_field.db_constraint:
            fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
            if strict and len(fk_names) != 1:
                raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
                    len(fk_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for fk_name in fk_names:
                fks_dropped.add((old_field.column,))
                self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
        # Has unique been removed?
        if old_field.unique and (not new_field.unique or (not old_field.primary_key and new_field.primary_key)):
            # Find the unique constraint for this field
            constraint_names = self._constraint_names(model, [old_field.column], unique=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
        # Drop incoming FK constraints if we're a primary key and things are going
        # to change.
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            # '_meta.related_field' also contains M2M reverse fields, these
            # will be filtered out
            for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
                rel_fk_names = self._constraint_names(
                    new_rel.related_model, [new_rel.field.column], foreign_key=True
                )
                for fk_name in rel_fk_names:
                    self.execute(self._delete_constraint_sql(self.sql_delete_fk, new_rel.related_model, fk_name))
        # Removed an index? (no strict check, as multiple indexes are possible)
        if (old_field.db_index and not new_field.db_index and
                not old_field.unique and not
                (not new_field.unique and old_field.unique)):
            # Find the index for this field
            index_names = self._constraint_names(model, [old_field.column], index=True)
            for index_name in index_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
        # Change check constraints?
        if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
            constraint_names = self._constraint_names(model, [old_field.column], check=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
                    len(constraint_names),
                    model._meta.db_table,
                    old_field.column,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
        # Have they renamed the column?
        if old_field.column != new_field.column:
            self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
        # Next, start accumulating actions to do
        actions = []
        null_actions = []
        post_actions = []
        # Type change?
        if old_type != new_type:
            fragment, other_actions = self._alter_column_type_sql(
                model._meta.db_table, old_field, new_field, new_type
            )
            actions.append(fragment)
            post_actions.extend(other_actions)
        # When changing a column NULL constraint to NOT NULL with a given
        # default value, we need to perform 4 steps:
        # 1. Add a default for new incoming writes
        # 2. Update existing NULL rows with new default
        # 3. Replace NULL constraint with NOT NULL
        # 4. Drop the default again.
        # Default change?
        old_default = self.effective_default(old_field)
        new_default = self.effective_default(new_field)
        needs_database_default = (
            old_default != new_default and
            new_default is not None and
            not self.skip_default(new_field)
        )
        if needs_database_default:
            if self.connection.features.requires_literal_defaults:
                # Some databases can't take defaults as a parameter (oracle)
                # If this is the case, the individual schema backend should
                # implement prepare_default
                actions.append((
                    self.sql_alter_column_default % {
                        "column": self.quote_name(new_field.column),
                        "default": self.prepare_default(new_default),
                    },
                    [],
                ))
            else:
                actions.append((
                    self.sql_alter_column_default % {
                        "column": self.quote_name(new_field.column),
                        "default": "%s",
                    },
                    [new_default],
                ))
        # Nullability change?
        if old_field.null != new_field.null:
            if (self.connection.features.interprets_empty_strings_as_nulls and
                    new_field.get_internal_type() in ("CharField", "TextField")):
                # The field is nullable in the database anyway, leave it alone
                pass
            elif new_field.null:
                null_actions.append((
                    self.sql_alter_column_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
            else:
                null_actions.append((
                    self.sql_alter_column_not_null % {
                        "column": self.quote_name(new_field.column),
                        "type": new_type,
                    },
                    [],
                ))
        # Only if we have a default and there is a change from NULL to NOT NULL
        four_way_default_alteration = (
            new_field.has_default() and
            (old_field.null and not new_field.null)
        )
        if actions or null_actions:
            if not four_way_default_alteration:
                # If we don't have to do a 4-way default alteration we can
                # directly run a (NOT) NULL alteration
                actions = actions + null_actions
            # Combine actions together if we can (e.g. postgres)
            if self.connection.features.supports_combined_alters and actions:
                sql, params = tuple(zip(*actions))
                actions = [(", ".join(sql), sum(params, []))]
            # Apply those actions
            for sql, params in actions:
                self.execute(
                    self.sql_alter_column % {
                        "table": self.quote_name(model._meta.db_table),
                        "changes": sql,
                    },
                    params,
                )
            if four_way_default_alteration:
                # Update existing rows with default value
                self.execute(
                    self.sql_update_with_default % {
                        "table": self.quote_name(model._meta.db_table),
                        "column": self.quote_name(new_field.column),
                        "default": "%s",
                    },
                    [new_default],
                )
                # Since we didn't run a NOT NULL change before we need to do it
                # now
                for sql, params in null_actions:
                    self.execute(
                        self.sql_alter_column % {
                            "table": self.quote_name(model._meta.db_table),
                            "changes": sql,
                        },
                        params,
                    )
        if post_actions:
            for sql, params in post_actions:
                self.execute(sql, params)
        # Added a unique?
        if (not old_field.unique and new_field.unique) or (
            old_field.primary_key and not new_field.primary_key and new_field.unique
        ):
            self.execute(self._create_unique_sql(model, [new_field.column]))
        # Added an index?
        if (not old_field.db_index and new_field.db_index and
                not new_field.unique and not
                (not old_field.unique and new_field.unique)):
            # NOTE(review): the "_uniq" suffix here looks like a copy-paste
            # from the unique path ("_idx" is used for indexes elsewhere);
            # confirm before changing — index names persist in the database.
            self.execute(self._create_index_sql(model, [new_field], suffix="_uniq"))
        # Type alteration on primary key? Then we need to alter the column
        # referring to us.
        rels_to_update = []
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
        # Changed to become primary key?
        # Note that we don't detect unsetting of a PK, as we assume another field
        # will always come along and replace it.
        if not old_field.primary_key and new_field.primary_key:
            # First, drop the old PK
            constraint_names = self._constraint_names(model, primary_key=True)
            if strict and len(constraint_names) != 1:
                raise ValueError("Found wrong number (%s) of PK constraints for %s" % (
                    len(constraint_names),
                    model._meta.db_table,
                ))
            for constraint_name in constraint_names:
                self.execute(self._delete_constraint_sql(self.sql_delete_pk, model, constraint_name))
            # Make the new one
            self.execute(
                self.sql_create_pk % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self.quote_name(self._create_index_name(model, [new_field.column], suffix="_pk")),
                    "columns": self.quote_name(new_field.column),
                }
            )
            # Update all referencing columns
            rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
        # Handle our type alters on the other end of rels from the PK stuff above
        for old_rel, new_rel in rels_to_update:
            rel_db_params = new_rel.field.db_parameters(connection=self.connection)
            rel_type = rel_db_params['type']
            fragment, other_actions = self._alter_column_type_sql(
                new_rel.related_model._meta.db_table, old_rel.field, new_rel.field, rel_type
            )
            self.execute(
                self.sql_alter_column % {
                    "table": self.quote_name(new_rel.related_model._meta.db_table),
                    "changes": fragment[0],
                },
                fragment[1],
            )
            for sql, params in other_actions:
                self.execute(sql, params)
        # Does it have a foreign key?
        if (new_field.remote_field and
                (fks_dropped or not old_field.remote_field or not old_field.db_constraint) and
                new_field.db_constraint):
            self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s"))
        # Rebuild FKs that pointed to us if we previously had to drop them
        if old_field.primary_key and new_field.primary_key and old_type != new_type:
            for rel in new_field.model._meta.related_objects:
                if not rel.many_to_many:
                    self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk"))
        # Does it have check constraints we need to add?
        if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
            self.execute(
                self.sql_create_check % {
                    "table": self.quote_name(model._meta.db_table),
                    "name": self.quote_name(self._create_index_name(model, [new_field.column], suffix="_check")),
                    "column": self.quote_name(new_field.column),
                    "check": new_db_params['check'],
                }
            )
        # Drop the default if we need to
        # (Django usually does not use in-database defaults)
        if needs_database_default:
            sql = self.sql_alter_column % {
                "table": self.quote_name(model._meta.db_table),
                "changes": self.sql_alter_column_no_default % {
                    "column": self.quote_name(new_field.column),
                }
            }
            self.execute(sql)
        # Reset connection if required
        if self.connection.features.connection_persists_old_columns:
            self.connection.close()
def _alter_column_type_sql(self, table, old_field, new_field, new_type):
"""
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields).
Should return two things; an SQL fragment of (sql, params) to insert
into an ALTER TABLE statement, and a list of extra (sql, params) tuples
to run once the field is altered.
"""
return (
(
self.sql_alter_column_type % {
"column": self.quote_name(new_field.column),
"type": new_type,
},
[],
),
[],
)
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""
Alters M2Ms to repoint their to= endpoints.
"""
# Rename the through table
if old_field.remote_field.through._meta.db_table != new_field.remote_field.through._meta.db_table:
self.alter_db_table(old_field.remote_field.through, old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table)
# Repoint the FK to the other side
self.alter_field(
new_field.remote_field.through,
# We need the field that points to the target model, so we can tell alter_field to change it -
# this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model)
old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()),
)
self.alter_field(
new_field.remote_field.through,
# for self-referential models we need to alter field from the other end too
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
)
def _create_index_name(self, model, column_names, suffix=""):
"""
Generates a unique name for an index/unique constraint.
"""
# If there is just one column in the index, use a default algorithm from Django
if len(column_names) == 1 and not suffix:
return truncate_name(
'%s_%s' % (model._meta.db_table, self._digest(column_names[0])),
self.connection.ops.max_name_length()
)
# Else generate the name for the index using a different algorithm
table_name = model._meta.db_table.replace('"', '').replace('.', '_')
index_unique_name = '_%s' % self._digest(table_name, *column_names)
max_length = self.connection.ops.max_name_length() or 200
# If the index name is too long, truncate it
index_name = ('%s_%s%s%s' % (
table_name, column_names[0], index_unique_name, suffix,
)).replace('"', '').replace('.', '_')
if len(index_name) > max_length:
part = ('_%s%s%s' % (column_names[0], index_unique_name, suffix))
index_name = '%s%s' % (table_name[:(max_length - len(part))], part)
# It shouldn't start with an underscore (Oracle hates this)
if index_name[0] == "_":
index_name = index_name[1:]
# If it's STILL too long, just hash it down
if len(index_name) > max_length:
index_name = hashlib.md5(force_bytes(index_name)).hexdigest()[:max_length]
# It can't start with a number on Oracle, so prepend D if we need to
if index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name
def _create_index_sql(self, model, fields, suffix="", sql=None):
"""
Return the SQL statement to create the index for one or several fields.
`sql` can be specified if the syntax differs from the standard (GIS
indexes, ...).
"""
if len(fields) == 1 and fields[0].db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(fields[0].db_tablespace)
elif model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
else:
tablespace_sql = ""
if tablespace_sql:
tablespace_sql = " " + tablespace_sql
columns = [field.column for field in fields]
sql_create_index = sql or self.sql_create_index
return sql_create_index % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(self._create_index_name(model, columns, suffix=suffix)),
"columns": ", ".join(self.quote_name(column) for column in columns),
"extra": tablespace_sql,
}
def _model_indexes_sql(self, model):
"""
Return all index SQL statements (field indexes, index_together) for the
specified model, as a list.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for field in model._meta.local_fields:
if field.db_index and not field.unique:
output.append(self._create_index_sql(model, [field], suffix=""))
for field_names in model._meta.index_together:
fields = [model._meta.get_field(field) for field in field_names]
output.append(self._create_index_sql(model, fields, suffix="_idx"))
return output
def _rename_field_sql(self, table, old_field, new_field, new_type):
return self.sql_rename_column % {
"table": self.quote_name(table),
"old_column": self.quote_name(old_field.column),
"new_column": self.quote_name(new_field.column),
"type": new_type,
}
def _create_fk_sql(self, model, field, suffix):
from_table = model._meta.db_table
from_column = field.column
to_table = field.target_field.model._meta.db_table
to_column = field.target_field.column
suffix = suffix % {
"to_table": to_table,
"to_column": to_column,
}
return self.sql_create_fk % {
"table": self.quote_name(from_table),
"name": self.quote_name(self._create_index_name(model, [from_column], suffix=suffix)),
"column": self.quote_name(from_column),
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
}
def _create_unique_sql(self, model, columns):
return self.sql_create_unique % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(self._create_index_name(model, columns, suffix="_uniq")),
"columns": ", ".join(self.quote_name(column) for column in columns),
}
def _delete_constraint_sql(self, template, model, name):
return template % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(name),
}
def _constraint_names(self, model, column_names=None, unique=None,
primary_key=None, index=None, foreign_key=None,
check=None):
"""
Returns all constraint names matching the columns and conditions
"""
column_names = list(column_names) if column_names else None
with self.connection.cursor() as cursor:
constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table)
result = []
for name, infodict in constraints.items():
if column_names is None or column_names == infodict['columns']:
if unique is not None and infodict['unique'] != unique:
continue
if primary_key is not None and infodict['primary_key'] != primary_key:
continue
if index is not None and infodict['index'] != index:
continue
if check is not None and infodict['check'] != check:
continue
if foreign_key is not None and not infodict['foreign_key']:
continue
result.append(name)
return result
|
createwindow/pjsip-blf | refs/heads/master | tests/pjsua/scripts-pesq/200_codec_l16_16000_stereo.py | 59 | # $Id: 200_codec_l16_16000_stereo.py 2075 2008-06-27 16:18:13Z nanang $
#
from inc_cfg import *
# Extra pjsua arguments: disable the sound device when none is present.
ADD_PARAM = "--null-audio" if HAS_SND_DEV == 0 else ""
# Call with L16/16000/2 codec
test_param = TestParam(
    "PESQ defaults pjsua settings",
    [
        InstanceParam("UA1", ADD_PARAM + " --stereo --max-calls=1 --clock-rate 16000 --add-codec L16/16000/2 --play-file wavs/input.2.16.wav"),
        InstanceParam("UA2", "--null-audio --stereo --max-calls=1 --clock-rate 16000 --add-codec L16/16000/2 --rec-file wavs/tmp.2.16.wav --auto-answer 200")
    ]
)
# No PESQ threshold set: accept whatever score the run produces.
pesq_threshold = None
|
illume/numpy3k | refs/heads/master | numpy/distutils/mingw32ccompiler.py | 4 | """
Support code for building Python extensions on Windows.
# NT stuff
# 1. Make sure libpython<version>.a exists for gcc. If not, build it.
# 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
# 3. Force windows to use g77
"""
import os
import subprocess
import sys
import log
import subprocess
import re
# Overwrite certain distutils.ccompiler functions:
import numpy.distutils.ccompiler
# NT stuff
# 1. Make sure libpython<version>.a exists for gcc. If not, build it.
# 2. Force windows to use gcc (we're struggling with MSVC and g77 support)
# --> this is done in numpy/distutils/ccompiler.py
# 3. Force windows to use g77
import distutils.cygwinccompiler
from distutils.version import StrictVersion
from numpy.distutils.ccompiler import gen_preprocess_options, gen_lib_options
from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
from distutils.unixccompiler import UnixCCompiler
from distutils.msvccompiler import get_build_version as get_build_msvc_version
from numpy.distutils.misc_util import msvc_runtime_library, get_build_architecture
# Useful to generate table of symbols from a dll
# Marks the start of the export table in `objdump -p` output.
_START = re.compile(r'\[Ordinal/Name Pointer\] Table')
# Matches one export-table row: "[ordinal] symbol_name".
# NOTE(review): the first group's character class is `[\s*[0-9]*` -- it
# matches whitespace, '*', '[' and digits. Looks odd, but it is
# long-standing behavior, so it is left untouched.
_TABLE = re.compile(r'^\s+\[([\s*[0-9]*)\] ([a-zA-Z0-9_]*)')
# the same as cygwin plus some additional parameters
class Mingw32CCompiler(distutils.cygwinccompiler.CygwinCCompiler):
    """ A modified MingW32 compiler compatible with an MSVC built Python.
    """
    # distutils looks compilers up by this key.
    compiler_type = 'mingw32'
    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):
        distutils.cygwinccompiler.CygwinCCompiler.__init__ (self,
                                                            verbose,dry_run, force)
        # we need to support 3.2 which doesn't match the standard
        # get_versions methods regex
        if self.gcc_version is None:
            import re
            # Ask gcc directly for its version when the base class failed
            # to parse it.
            p = subprocess.Popen(['gcc', '-dumpversion'], shell=True,
                                 stdout=subprocess.PIPE)
            out_string = p.stdout.read()
            p.stdout.close()
            result = re.search('(\d+\.\d+)',out_string)
            if result:
                self.gcc_version = StrictVersion(result.group(1))
        # A real mingw32 doesn't need to specify a different entry point,
        # but cygwin 2.91.57 in no-cygwin-mode needs it.
        if self.gcc_version <= "2.91.57":
            entry_point = '--entry _DllMain@12'
        else:
            entry_point = ''
        if self.linker_dll == 'dllwrap':
            # Commented out '--driver-name g++' part that fixes weird
            # g++.exe: g++: No such file or directory
            # error (mingw 1.0 in Enthon24 tree, gcc-3.4.5).
            # If the --driver-name part is required for some environment
            # then make the inclusion of this part specific to that environment.
            self.linker = 'dllwrap' #  --driver-name g++'
        elif self.linker_dll == 'gcc':
            self.linker = 'g++'
        # **changes: eric jones 4/11/01
        # 1. Check for import library on Windows.  Build if it doesn't exist.
        # (module-level helper; no-op on non-Windows platforms)
        build_import_library()
        # **changes: eric jones 4/11/01
        # 2. increased optimization and turned off all warnings
        # 3. also added --driver-name g++
        #self.set_executables(compiler='gcc -mno-cygwin -O2 -w',
        #                     compiler_so='gcc -mno-cygwin -mdll -O2 -w',
        #                     linker_exe='gcc -mno-cygwin',
        #                     linker_so='%s --driver-name g++ -mno-cygwin -mdll -static %s'
        #                     % (self.linker, entry_point))
        # MS_WIN64 should be defined when building for amd64 on windows, but
        # python headers define it only for MS compilers, which has all kind of
        # bad consequences, like using Py_ModuleInit4 instead of
        # Py_ModuleInit4_64, etc... So we add it here
        if get_build_architecture() == 'AMD64':
            # NOTE(review): -g -DDEBUG -O0 looks like leftover debug flags
            # for the amd64 branch -- confirm before changing.
            self.set_executables(
                compiler='gcc -g -DDEBUG -DMS_WIN64 -mno-cygwin -O0 -Wall',
                compiler_so='gcc -g -DDEBUG -DMS_WIN64 -mno-cygwin -O0 -Wall -Wstrict-prototypes',
                linker_exe='gcc -g -mno-cygwin',
                linker_so='gcc -g -mno-cygwin -shared')
        else:
            if self.gcc_version <= "3.0.0":
                self.set_executables(compiler='gcc -mno-cygwin -O2 -w',
                                     compiler_so='gcc -mno-cygwin -mdll -O2 -w -Wstrict-prototypes',
                                     linker_exe='g++ -mno-cygwin',
                                     linker_so='%s -mno-cygwin -mdll -static %s'
                                     % (self.linker, entry_point))
            else:
                self.set_executables(compiler='gcc -mno-cygwin -O2 -Wall',
                                     compiler_so='gcc -mno-cygwin -O2 -Wall -Wstrict-prototypes',
                                     linker_exe='g++ -mno-cygwin',
                                     linker_so='g++ -mno-cygwin -shared')
        # added for python2.3 support
        # we can't pass it through set_executables because pre 2.2 would fail
        self.compiler_cxx = ['g++']
        # Maybe we should also append -mthreads, but then the finished
        # dlls need another dll (mingwm10.dll see Mingw32 docs)
        # (-mthreads: Support thread-safe exception handling on `Mingw32')
        # no additional libraries needed
        #self.dll_libraries=[]
        return
    # __init__ ()
    def link(self,
             target_desc,
             objects,
             output_filename,
             output_dir,
             libraries,
             library_dirs,
             runtime_library_dirs,
             export_symbols = None,
             debug=0,
             extra_preargs=None,
             extra_postargs=None,
             build_temp=None,
             target_lang=None):
        """Link, adding the MSVC runtime library when Python was built
        with MSVC >= 7.0, then delegating to the appropriate base link()."""
        # Include the appropiate MSVC runtime library if Python was built
        # with MSVC >= 7.0 (MinGW standard is msvcrt)
        runtime_library = msvc_runtime_library()
        if runtime_library:
            if not libraries:
                libraries = []
            libraries.append(runtime_library)
        args = (self,
                target_desc,
                objects,
                output_filename,
                output_dir,
                libraries,
                library_dirs,
                runtime_library_dirs,
                None, #export_symbols, we do this in our def-file
                debug,
                extra_preargs,
                extra_postargs,
                build_temp,
                target_lang)
        # Old gcc needs the cygwin-flavored link; newer gcc can use the
        # plain Unix one.
        if self.gcc_version < "3.0.0":
            func = distutils.cygwinccompiler.CygwinCCompiler.link
        else:
            func = UnixCCompiler.link
        # Pass only as many positional args as the chosen base link()
        # accepts (Python 2 function introspection via im_func/func_code).
        func(*args[:func.im_func.func_code.co_argcount])
        return
    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        """Map source file names to object file names, handling Windows
        drive letters and .rc/.res resource files."""
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))
            # added these lines to strip off windows drive letters
            # without it, .o files are placed next to .c files
            # instead of the build directory
            drv,base = os.path.splitdrive(base)
            if drv:
                base = base[1:]
            if ext not in (self.src_extensions + ['.rc','.res']):
                raise UnknownFileError, \
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.res' or ext == '.rc':
                # these need to be compiled to object files
                obj_names.append (os.path.join (output_dir,
                                                base + ext + self.obj_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names
    # object_filenames ()
def find_python_dll():
    """Locate pythonXY.dll for the running interpreter and return its path."""
    maj, min, micro = [int(i) for i in sys.version_info[:3]]
    dllname = 'python%d%d.dll' % (maj, min)
    print("Looking for %s" % dllname)
    # Only two locations are searched: python's own lib directory and the
    # Windows system32 directory. SxS (side-by-side) installs are not
    # handled -- there is no easy way to find the dll there.
    lib_dirs = [os.path.join(sys.prefix, 'lib')]
    try:
        lib_dirs.append(os.path.join(os.environ['SYSTEMROOT'], 'system32'))
    except KeyError:
        pass
    for folder in lib_dirs:
        candidate = os.path.join(folder, dllname)
        if os.path.exists(candidate):
            return candidate
    raise ValueError("%s not found in %s" % (dllname, lib_dirs))
def dump_table(dll):
    """Return the raw `objdump -p` output lines for *dll*."""
    proc = subprocess.Popen(["objdump.exe", "-p", dll], stdout=subprocess.PIPE)
    return proc.stdout.readlines()
def generate_def(dll, dfile):
    """Given a dll file location, get all its exported symbols and dump them
    into the given def file.

    The .def file will be overwritten.
    Raises ValueError when the objdump output has no export table.
    """
    dump = dump_table(dll)
    # Locate the start of the export table in the objdump output.
    # BUG FIX: the original post-loop test `if i == len(dump)` could never
    # be true (range() stops at len(dump) - 1), so a missing symbol table
    # was silently ignored, and an empty dump raised NameError on `i`.
    # for/else detects the "not found" case reliably.
    for i in range(len(dump)):
        if _START.match(dump[i]):
            break
    else:
        raise ValueError("Symbol table not found")
    # Collect (ordinal, name) pairs until the first non-matching line.
    syms = []
    for j in range(i + 1, len(dump)):
        m = _TABLE.match(dump[j])
        if m:
            syms.append((int(m.group(1).strip()), m.group(2)))
        else:
            break
    if len(syms) == 0:
        log.warn('No symbols found in %s' % dll)
    d = open(dfile, 'w')
    try:
        d.write('LIBRARY %s\n' % os.path.basename(dll))
        d.write(';CODE          PRELOAD MOVEABLE DISCARDABLE\n')
        d.write(';DATA          PRELOAD SINGLE\n')
        d.write('\nEXPORTS\n')
        for s in syms:
            # Ordinals (s[0]) are intentionally not emitted.
            #d.write('@%d    %s\n' % (s[0], s[1]))
            d.write('%s\n' % s[1])
    finally:
        # Close even if a write fails, so the handle is not leaked.
        d.close()
def build_import_library():
    """Build the gcc import library for the current Python, if on Windows."""
    if os.name != 'nt':
        # Import libraries are a Windows-only concept.
        return
    arch = get_build_architecture()
    if arch == 'AMD64':
        return _build_import_library_amd64()
    if arch == 'Intel':
        return _build_import_library_x86()
    raise ValueError("Unhandled arch %s" % arch)
def _build_import_library_amd64():
    """Build libpythonXY.a from pythonXY.dll for mingw on win-amd64."""
    dll_file = find_python_dll()
    out_name = "libpython%d%d.a" % tuple(sys.version_info[:2])
    out_file = os.path.join(sys.prefix, 'libs', out_name)
    if os.path.isfile(out_file):
        log.debug('Skip building import library: "%s" exists' % (out_file))
        return
    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix, 'libs', def_name)
    log.info('Building import library (arch=AMD64): "%s" (from %s)' \
             % (out_file, dll_file))
    # Dump the dll's exports into a .def file, then turn it into an
    # import library with dlltool.
    generate_def(dll_file, def_file)
    cmd = ['dlltool', '-d', def_file, '-l', out_file]
    # BUG FIX: the original called subprocess.Popen(cmd) and returned
    # without waiting, so the build could race against the later link step
    # and failures were silently ignored. Wait for completion and warn on
    # failure, the same way _build_import_library_x86 does.
    status = subprocess.Popen(cmd).wait()
    if status:
        log.warn('Failed to build import library for gcc. Linking will fail.')
def _build_import_library_x86():
    """ Build the import libraries for Mingw32-gcc on Windows
    """
    # MSVC import library shipped with Python: the source of the symbol list.
    lib_name = "python%d%d.lib" % tuple(sys.version_info[:2])
    lib_file = os.path.join(sys.prefix,'libs',lib_name)
    # gcc-style import library we are about to create.
    out_name = "libpython%d%d.a" % tuple(sys.version_info[:2])
    out_file = os.path.join(sys.prefix,'libs',out_name)
    if not os.path.isfile(lib_file):
        log.warn('Cannot build import library: "%s" not found' % (lib_file))
        return
    if os.path.isfile(out_file):
        log.debug('Skip building import library: "%s" exists' % (out_file))
        return
    log.info('Building import library (ARCH=x86): "%s"' % (out_file))
    from numpy.distutils import lib2def
    # Extract the exported symbols with nm and write them to a .def file...
    def_name = "python%d%d.def" % tuple(sys.version_info[:2])
    def_file = os.path.join(sys.prefix,'libs',def_name)
    nm_cmd = '%s %s' % (lib2def.DEFAULT_NM, lib_file)
    nm_output = lib2def.getnm(nm_cmd)
    dlist, flist = lib2def.parse_nm(nm_output)
    lib2def.output_def(dlist, flist, lib2def.DEF_HEADER, open(def_file, 'w'))
    # ...then let dlltool turn the .def file into a gcc import library.
    dll_name = "python%d%d.dll" % tuple(sys.version_info[:2])
    args = (dll_name,def_file,out_file)
    cmd = 'dlltool --dllname %s --def %s --output-lib %s' % args
    status = os.system(cmd)
    # for now, fail silently
    if status:
        log.warn('Failed to build import library for gcc. Linking will fail.')
    #if not success:
    #    msg = "Couldn't find import library, and failed to build it."
    #    raise DistutilsPlatformError, msg
    return
#=====================================
# Dealing with Visual Studio MANIFESTS
#=====================================
# Functions to deal with visual studio manifests. Manifest are a mechanism to
# enforce strong DLL versioning on windows, and has nothing to do with
# distutils MANIFEST. manifests are XML files with version info, and used by
# the OS loader; they are necessary when linking against a DLL not in the
# system path; in particular, official python 2.6 binary is built against the
# MS runtime 9 (the one from VS 2008), which is not available on most windows
# systems; python 2.6 installer does install it in the Win SxS (Side by side)
# directory, but this requires the manifest for this to work. This is a big
# mess, thanks MS for a wonderful system.
# XXX: ideally, we should use exactly the same version as used by python. I
# submitted a patch to get this version, but it was only included for python
# 2.6.1 and above. So for versions below, we use a "best guess".
# Maps an MSVC runtime "major" key ("80", "90") to the full assembly
# version string embedded into manifests.
_MSVCRVER_TO_FULLVER = {}
if sys.platform == 'win32':
    try:
        import msvcrt
        # Preferred: ask the runtime itself (attribute exists on python
        # builds that include the version-export patch).
        if hasattr(msvcrt, "CRT_ASSEMBLY_VERSION"):
            _MSVCRVER_TO_FULLVER['90'] = msvcrt.CRT_ASSEMBLY_VERSION
        else:
            # Best guess for older pythons that lack the attribute.
            _MSVCRVER_TO_FULLVER['90'] = "9.0.21022.8"
        # I took one version in my SxS directory: no idea if it is the good
        # one, and we can't retrieve it from python
        _MSVCRVER_TO_FULLVER['80'] = "8.0.50727.42"
    except ImportError:
        # If we are here, means python was not built with MSVC. Not sure what to do
        # in that case: manifest building will fail, but it should not be used in
        # that case anyway
        log.warn('Cannot import msvcrt: using manifest will not be possible')
def msvc_manifest_xml(maj, min):
    """Given a major and minor version of the MSVCR, returns the
    corresponding XML file."""
    key = str(maj * 10 + min)
    if key not in _MSVCRVER_TO_FULLVER:
        raise ValueError("Version %d,%d of MSVCRT not supported yet" \
            % (maj, min))
    fullver = _MSVCRVER_TO_FULLVER[key]
    # Don't be fooled, it looks like an XML, but it is not. In particular, it
    # should not have any space before starting, and its size should be
    # divisible by 4, most likely for alignement constraints when the xml is
    # embedded in the binary...
    # This template was copied directly from the python 2.6 binary (using
    # strings.exe from mingw on python.exe).
    template = """\
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
  <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
    <security>
      <requestedPrivileges>
        <requestedExecutionLevel level="asInvoker" uiAccess="false"></requestedExecutionLevel>
      </requestedPrivileges>
    </security>
  </trustInfo>
  <dependency>
    <dependentAssembly>
      <assemblyIdentity type="win32" name="Microsoft.VC%(maj)d%(min)d.CRT" version="%(fullver)s" processorArchitecture="*" publicKeyToken="1fc8b3b9a1e18e3b"></assemblyIdentity>
    </dependentAssembly>
  </dependency>
</assembly>"""
    return template % {'fullver': fullver, 'maj': maj, 'min': min}
def manifest_rc(name, type='dll'):
    """Return the rc source which embeds the manifest file *name* into a
    binary of the given *type*.

    Parameters
    ----------
    name: str
        name of the manifest file to embed
    type: str ('dll', 'exe')
        type of the binary which will embed the manifest
    """
    # Resource-type codes understood by the resource compiler.
    rc_type_codes = {'dll': 2, 'exe': 1}
    if type not in rc_type_codes:
        raise ValueError("Type %s not supported" % type)
    return """\
#include "winuser.h"
%d RT_MANIFEST %s""" % (rc_type_codes[type], name)
def check_embedded_msvcr_match_linked(msver):
    """msver is the ms runtime version used for the MANIFEST.

    Raises ValueError if the runtime we link against differs from the one
    about to be embedded in the manifest.
    """
    # check msvcr major version are the same for linking and
    # embedding
    msvcv = msvc_runtime_library()
    if msvcv:
        # e.g. "msvcr90" -> major version 9.
        # NOTE(review): single-character slice; would misparse a
        # hypothetical three-digit runtime name -- left as-is.
        maj = int(msvcv[5:6])
        if not maj == int(msver):
            raise ValueError, \
                  "Discrepancy between linked msvcr " \
                  "(%d) and the one about to be embedded " \
                  "(%d)" % (int(msver), maj)
def configtest_name(config):
    """Return the base name (no directory, no extension) of the config
    test source file."""
    source_path = config._gen_temp_sourcefile("yo", [], "c")
    return os.path.splitext(os.path.basename(source_path))[0]
def manifest_name(config):
    """Return the manifest file name for the config test executable,
    e.g. "_configtest.exe.manifest"."""
    return configtest_name(config) + config.compiler.exe_extension + ".manifest"
def rc_name(config):
    """Return the .rc file name for the config test."""
    return configtest_name(config) + ".rc"
def generate_manifest(config):
    """Write a manifest file for the given config test and register it
    for cleanup. No-op when not building with MSVC."""
    msver = get_build_msvc_version()
    if msver is not None:
        if msver >= 8:
            # Guard against embedding a manifest for a different runtime
            # than the one we link against.
            check_embedded_msvcr_match_linked(msver)
        # Split e.g. 9.0 into major=9, minor=0.
        ma = int(msver)
        mi = int((msver - ma) * 10)
        # Write the manifest file
        manxml = msvc_manifest_xml(ma, mi)
        man = open(manifest_name(config), "w")
        # Registered before writing so a failed write still gets cleaned up.
        config.temp_files.append(manifest_name(config))
        man.write(manxml)
        man.close()
        # NOTE(review): the .rc generation below is intentionally disabled.
        #        # Write the rc file
        #        manrc = manifest_rc(manifest_name(self), "exe")
        #        rc = open(rc_name(self), "w")
        #        self.temp_files.append(manrc)
        #        rc.write(manrc)
        #        rc.close()
|
ning-yang/compute-image-windows | refs/heads/master | examples/windows_auth_python_sample.py | 4 | #!/usr/bin/env python
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import copy
import datetime
import json
import time
# PyCrypto library: https://pypi.python.org/pypi/pycrypto
from Crypto.Cipher import PKCS1_OAEP
from Crypto.PublicKey import RSA
from Crypto.Util.number import long_to_bytes
# Google API Client Library for Python:
# https://developers.google.com/api-client-library/python/start/get_started
from oauth2client.client import GoogleCredentials
from googleapiclient.discovery import build
def GetCompute():
    """Get a compute object for communicating with the Compute Engine API."""
    # Application-default credentials (gcloud auth / service account).
    return build('compute', 'v1',
                 credentials=GoogleCredentials.get_application_default())
def GetInstance(compute, instance, zone, project):
    """Fetch the resource data for a Google Compute Engine instance."""
    request = compute.instances().get(
        instance=instance, project=project, zone=zone)
    return request.execute()
def GetKey():
    """Generate a fresh 2048-bit RSA key pair for password encryption."""
    # Uses the PyCrypto library.
    return RSA.generate(2048)
def GetModulusExponentInBase64(key):
    """Return the key's public modulus and exponent, base64-encoded."""
    modulus = base64.b64encode(long_to_bytes(key.n))
    exponent = base64.b64encode(long_to_bytes(key.e))
    return modulus, exponent
def GetExpirationTimeString():
    """Return an RFC3339 UTC timestamp for 5 minutes from now."""
    # These metadata entries are one-time-use, so a short validity window
    # is enough; five minutes tolerates modest client/server clock skew.
    expire_time = datetime.datetime.utcnow() + datetime.timedelta(minutes=5)
    return expire_time.strftime('%Y-%m-%dT%H:%M:%SZ')
def GetJsonString(user, modulus, exponent, email):
    """Return the JSON string object that represents the windows-keys entry."""
    return json.dumps({
        'userName': user,
        'modulus': modulus,
        'exponent': exponent,
        'email': email,
        'expireOn': GetExpirationTimeString(),
    })
def UpdateWindowsKeys(old_metadata, metadata_entry):
    """Return a copy of *old_metadata* whose items hold only the new
    windows-keys entry.

    This sample simply overwrites the "windows-keys" metadata entry;
    production code may want to append new lines to the metadata value
    and remove any expired entries instead.
    """
    new_metadata = copy.deepcopy(old_metadata)
    new_metadata['items'] = [{'key': "windows-keys", 'value': metadata_entry}]
    return new_metadata
def UpdateInstanceMetadata(compute, instance, zone, project, new_metadata):
    """Push *new_metadata* to the instance; returns the API operation."""
    request = compute.instances().setMetadata(
        instance=instance, project=project, zone=zone, body=new_metadata)
    return request.execute()
def GetSerialPortFourOutput(compute, instance, zone, project):
    """Return the contents of serial port 4 for the instance.

    The Windows agent prints encrypted passwords to COM4.
    """
    request = compute.instances().getSerialPortOutput(
        instance=instance, project=project, zone=zone, port=4)
    return request.execute()['contents']
def GetEncryptedPasswordFromSerialPort(serial_port_output, modulus):
    """Find and return the newest encrypted password matching *modulus*.

    Scans the serial output newest-line-first and returns the
    'encryptedPassword' of the entry whose 'modulus' equals *modulus*;
    returns None when no entry matches. In production code, this may need
    to be run multiple times if the output does not yet contain the
    correct entry.
    """
    for line in reversed(serial_port_output.split('\n')):
        try:
            entry = json.loads(line)
            if modulus == entry['modulus']:
                return entry['encryptedPassword']
        # BUG FIX: serial output can contain lines that parse as JSON but
        # are not objects with the expected keys (boot noise like "42",
        # arrays, unrelated objects). The original only caught ValueError,
        # so such lines raised KeyError/TypeError; skip them instead.
        except (ValueError, KeyError, TypeError):
            pass
    return None
def DecryptPassword(encrypted_password, key):
    """Decrypt a base64-encoded encrypted password with the RSA *key*."""
    ciphertext = base64.b64decode(encrypted_password)
    return PKCS1_OAEP.new(key).decrypt(ciphertext)
def main(instance, zone, project, user, email):
    """Reset the Windows password for *user* on *instance* and print
    username, password and IP address."""
    # Setup
    compute = GetCompute()
    key = GetKey()
    modulus, exponent = GetModulusExponentInBase64(key)
    # Get existing metadata
    instance_ref = GetInstance(compute, instance, zone, project)
    old_metadata = instance_ref['metadata']
    # Create and set new metadata
    metadata_entry = GetJsonString(user, modulus,
                                   exponent, email)
    new_metadata = UpdateWindowsKeys(old_metadata, metadata_entry)
    # (operation handle intentionally unused in this sample)
    result = UpdateInstanceMetadata(compute, instance, zone, project,
                                    new_metadata)
    # For this sample code, just sleep for 30 seconds instead of checking for
    # responses. In production code, this should monitor the status of the
    # metadata update operation.
    time.sleep(30)
    # Get and decrypt password from serial port output
    serial_port_output = GetSerialPortFourOutput(compute, instance,
                                                 zone, project)
    enc_password = GetEncryptedPasswordFromSerialPort(serial_port_output,
                                                      modulus)
    password = DecryptPassword(enc_password, key)
    # Display the username, password and IP address for the instance
    print 'Username:   {0}'.format(user)
    print 'Password:   {0}'.format(password)
    ip = instance_ref['networkInterfaces'][0]['accessConfigs'][0]['natIP']
    print 'IP Address: {0}'.format(ip)
if __name__ == '__main__':
    # Sample placeholder values -- replace with your own before running.
    instance = 'my-instance'
    zone = 'us-central1-a'
    project = 'my-project'
    user = 'example-user'
    email = 'user@example.com'
    main(instance, zone, project, user, email)
|
psi4/psi4 | refs/heads/master | tests/pytests/test_zeta.py | 7 | import pytest
from .utils import *
import psi4
import numpy as np
# Skip the whole module by default: the full zeta sweep is extremely
# expensive (the largest basis sets take hours).
pytestmark = pytest.mark.skip
# Reference values for SCF "hf" on the hydrogen fluoride molecule, keyed as
# ref[scf_type][derivative][zeta]:
#   scf_type:   "conv" (conventional/direct SCF) or "df" (density fitting)
#   derivative: "ene" (energy), "grd" (gradient), "hss" (Hessian)
#   zeta:       basis cardinal label ("d", "t", "q", "5", "6", "6/5", "7/6", "7")
# Energies are floats in Hartree; gradients (2 atoms x 3) and Hessians
# (6 x 6) are NumPy arrays in atomic units. Entries still commented out
# have no reference value yet.
ref = {
    # Conventional (direct) SCF references.
    "conv": {
        "ene": {
            "d": -100.01941126909895,
            "t": -100.05801143109962,
            "q": -100.06768524807164,
            "5": -100.0704303543983,
            "6": -100.07073832974459,
            "7": -100.07079540040381,
        },
        "grd": {
            "d": np.array([[0, 0, -1.97897507e-02], [0, 0, 1.97897507e-02]]),
            "t": np.array([[0, 0, -2.44719827e-02], [0, 0, 2.44719827e-02]]),
            "q": np.array([[0, 0, -2.56632476e-02], [0, 0, 2.56632476e-02]]),
            "5": np.array([[0, 0, -2.55690860e-02], [0, 0, 2.55690860e-02]]),
            "6": np.array([[0, 0, -2.55202901e-02], [0, 0, 2.55202901e-02]]),
            "7": np.array([[0, 0, -2.54893228e-02], [0, 0, 2.54893228e-02]]),
        },
        "hss": {
            "d": np.array(
                [
                    [1.14201580e-02, 2.88636997e-16, 3.97687335e-16, -1.14201580e-02, -2.78228656e-16, -3.97687335e-16],
                    [2.88636997e-16, 1.14201580e-02, -5.22281113e-16, -3.80143660e-16, -1.14201580e-02, 5.22281113e-16],
                    [3.97687335e-16, -5.22281113e-16, 6.37133401e-01, -3.97687335e-16, 5.22281113e-16, -6.37133401e-01],
                    [-1.14201580e-02, -3.80143660e-16, -3.97687335e-16, 1.14201580e-02, 3.58459616e-16, 3.97687335e-16],
                    [-2.78228656e-16, -1.14201580e-02, 5.22281113e-16, 3.58459616e-16, 1.14201580e-02, -5.22281113e-16],
                    [-3.97687335e-16, 5.22281113e-16, -6.37133401e-01, 3.97687335e-16, -5.22281113e-16, 6.37133401e-01],
                ]
            ),
            "t": np.array(
                [
                    [1.41221543e-02, 3.43562420e-15, -5.56983751e-16, -1.41221543e-02, -8.82111243e-16, 5.56983751e-16],
                    [3.43562420e-15, 1.41221543e-02, -5.72652355e-16, -4.26135257e-15, -1.41221543e-02, 5.72652355e-16],
                    [-5.56983751e-16, -5.72652355e-16, 6.33543175e-01, 5.56983751e-16, 5.72652355e-16, -6.33543175e-01],
                    [-1.41221543e-02, -4.26135257e-15, 5.56983751e-16, 1.41221543e-02, 1.60375621e-15, -5.56983751e-16],
                    [-8.82111243e-16, -1.41221543e-02, 5.72652355e-16, 1.60375621e-15, 1.41221543e-02, -5.72652355e-16],
                    [5.56983751e-16, 5.72652355e-16, -6.33543175e-01, -5.56983751e-16, -5.72652355e-16, 6.33543175e-01],
                ]
            ),
            "q": np.array(
                [
                    [1.48096027e-02, -5.90539645e-15, 9.84881270e-16, -1.48096027e-02, 4.75007061e-15, -9.84881270e-16],
                    [-5.90539645e-15, 1.48096027e-02, 9.45937039e-17, 1.76222627e-15, -1.48096027e-02, -9.45937039e-17],
                    [9.84881270e-16, 9.45937039e-17, 6.27190954e-01, -9.84881270e-16, -9.45937039e-17, -6.27190954e-01],
                    [-1.48096027e-02, 1.76222627e-15, -9.84881270e-16, 1.48096027e-02, -7.22703890e-15, 9.84881270e-16],
                    [4.75007061e-15, -1.48096027e-02, -9.45937039e-17, -7.22703890e-15, 1.48096027e-02, 9.45937039e-17],
                    [-9.84881270e-16, -9.45937039e-17, -6.27190954e-01, 9.84881270e-16, 9.45937039e-17, 6.27190954e-01],
                ]
            ),
            "5": np.array(
                [
                    [1.47552645e-02, 2.85244420e-13, 4.57266232e-16, -1.47552645e-02, -4.56798164e-13, -4.57266234e-16],
                    [2.85244420e-13, 1.47552645e-02, 4.63692795e-16, -2.69798442e-13, -1.47552645e-02, -4.63692795e-16],
                    [4.57266232e-16, 4.63692795e-16, 6.26827336e-01, -4.57266233e-16, -4.63692795e-16, -6.26827336e-01],
                    [-1.47552645e-02, -2.69798442e-13, -4.57266233e-16, 1.47552645e-02, -1.26964042e-13, 4.57266233e-16],
                    [-4.56798164e-13, -1.47552645e-02, -4.63692795e-16, -1.26964042e-13, 1.47552645e-02, 4.63692795e-16],
                    [-4.57266234e-16, -4.63692795e-16, -6.26827336e-01, 4.57266233e-16, 4.63692795e-16, 6.26827336e-01],
                ]
            ),
            "6": np.array(
                [
                    [1.47271054e-02, -3.46663981e-13, -6.58652686e-17, -1.47271054e-02, -4.04281331e-13, 6.58652675e-17],
                    [-3.46663981e-13, 1.47271054e-02, 9.55744800e-16, 1.12365491e-14, -1.47271054e-02, -9.55744797e-16],
                    [-6.58652686e-17, 9.55744800e-16, 6.26683611e-01, 6.58652679e-17, -9.55744800e-16, -6.26683610e-01],
                    [-1.47271054e-02, 1.12365491e-14, 6.58652679e-17, 1.47271054e-02, 5.85830351e-14, -6.58652754e-17],
                    [-4.04281331e-13, -1.47271054e-02, -9.55744800e-16, 5.85830351e-14, 1.47271054e-02, 9.55744798e-16],
                    [6.58652675e-17, -9.55744797e-16, -6.26683610e-01, -6.58652754e-17, 9.55744798e-16, 6.26683610e-01],
                ]
            ),
            # "7": np.zeros(36).reshape((6, 6)),
        },
    },
    # Density-fitted SCF references ("6/5" and "7/6" use a smaller
    # auxiliary basis than the orbital basis).
    "df": {
        "ene": {
            "d": -100.01940060570712,
            "t": -100.05800433992121,
            "q": -100.06768368288756,
            "5": -100.0704283042375,
            "6/5": -100.07073665835942,
            "6": -100.07073587640389,
            "7/6": -100.07079295732615,
            # "7": 0.0,
        },
        "grd": {
            "d": np.array([[0, 0, -1.97887695e-02], [0, 0, 1.97887695e-02]]),
            "t": np.array([[0, 0, -2.44674251e-02], [0, 0, 2.44674251e-02]]),
            "q": np.array([[0, 0, -2.56623539e-02], [0, 0, 2.56623539e-02]]),
            "5": np.array([[0, 0, -2.55665605e-02], [0, 0, 2.55665605e-02]]),
            "6/5": np.array([[0, 0, -2.55170642e-02], [0, 0, 2.55170642e-02]]),
            "6": np.array([[0, 0, -2.55207994e-02], [0, 0, 2.55207994e-02]]),
            "7/6": np.array([[0, 0, -2.54898158e-02], [0, 0, 2.54898158e-02]]),
            # "7": np.zeros(6).reshape(2, 3),
        },
        "hss": {
            "d": np.array(
                [
                    [1.14195918e-02, -8.20455290e-15, 8.48376579e-16, -1.14195918e-02, 1.01425643e-14, -8.48376580e-16],
                    [-8.20455290e-15, 1.14195918e-02, 7.87137296e-16, 8.63378854e-15, -1.14195918e-02, -7.87137294e-16],
                    [8.48376579e-16, 7.87137296e-16, 6.37098987e-01, -8.48376579e-16, -7.87137296e-16, -6.37098987e-01],
                    [-1.14195918e-02, 8.63378854e-15, -8.48376579e-16, 1.14195918e-02, -8.52027257e-15, 8.48376579e-16],
                    [1.01425643e-14, -1.14195918e-02, -7.87137296e-16, -8.52027257e-15, 1.14195918e-02, 7.87137295e-16],
                    [-8.48376580e-16, -7.87137294e-16, -6.37098987e-01, 8.48376579e-16, 7.87137295e-16, 6.37098987e-01],
                ]
            ),
            "t": np.array(
                [
                    [1.41195242e-02, -1.44279825e-13, -2.62663252e-16, -1.41195242e-02, 2.52178758e-13, 2.62663246e-16],
                    [-1.44279825e-13, 1.41195242e-02, 7.25775059e-16, 1.88123226e-13, -1.41195242e-02, -7.25775063e-16],
                    [-2.62663252e-16, 7.25775059e-16, 6.33561555e-01, 2.62663253e-16, -7.25775060e-16, -6.33561555e-01],
                    [-1.41195242e-02, 1.88123226e-13, 2.62663253e-16, 1.41195242e-02, -3.52532511e-13, -2.62663248e-16],
                    [2.52178758e-13, -1.41195242e-02, -7.25775060e-16, -3.52532511e-13, 1.41195242e-02, 7.25775063e-16],
                    [2.62663246e-16, -7.25775063e-16, -6.33561555e-01, -2.62663248e-16, 7.25775063e-16, 6.33561555e-01],
                ]
            ),
            "q": np.array(
                [
                    [1.48090869e-02, 2.05969460e-14, -1.49618888e-15, -1.48090869e-02, 3.81831832e-14, 1.49618913e-15],
                    [2.05969460e-14, 1.48090870e-02, -9.64983006e-16, 1.26240566e-13, -1.48090869e-02, 9.64982950e-16],
                    [-1.49618888e-15, -9.64983006e-16, 6.27200845e-01, 1.49618893e-15, 9.64982952e-16, -6.27200845e-01],
                    [-1.48090869e-02, 1.26240566e-13, 1.49618893e-15, 1.48090869e-02, -8.38158432e-14, -1.49618905e-15],
                    [3.81831832e-14, -1.48090869e-02, 9.64982952e-16, -8.38158432e-14, 1.48090869e-02, -9.64982956e-16],
                    [1.49618913e-15, 9.64982950e-16, -6.27200845e-01, -1.49618905e-15, -9.64982956e-16, 6.27200845e-01],
                ]
            ),
            "5": np.array(
                [
                    [1.47538071e-02, 5.47383498e-13, -1.15209254e-15, -1.47538071e-02, -2.32833029e-13, 1.15209256e-15],
                    [5.47383498e-13, 1.47538071e-02, 4.09315798e-16, -5.13303988e-13, -1.47538071e-02, -4.09315765e-16],
                    [-1.15209254e-15, 4.09315798e-16, 6.26827395e-01, 1.15209262e-15, -4.09316413e-16, -6.26827395e-01],
                    [-1.47538071e-02, -5.13303988e-13, 1.15209262e-15, 1.47538071e-02, 4.11866032e-13, -1.15209258e-15],
                    [-2.32833029e-13, -1.47538071e-02, -4.09316413e-16, 4.11866032e-13, 1.47538071e-02, 4.09315929e-16],
                    [1.15209256e-15, -4.09315765e-16, -6.26827395e-01, -1.15209258e-15, 4.09315929e-16, 6.26827395e-01],
                ]
            ),
            "6/5": np.array(
                [
                    [1.47252438e-02, 5.92051339e-14, -1.14461969e-15, -1.47252438e-02, 5.68396132e-14, 1.14461967e-15],
                    [5.92051339e-14, 1.47252438e-02, 6.43834765e-16, -9.29323460e-14, -1.47252437e-02, -6.43834148e-16],
                    [-1.14461969e-15, 6.43834765e-16, 6.26683723e-01, 1.14461973e-15, -6.43834668e-16, -6.26683723e-01],
                    [-1.47252438e-02, -9.29323460e-14, 1.14461973e-15, 1.47252438e-02, 3.73113650e-14, -1.14461967e-15],
                    [5.68396132e-14, -1.47252437e-02, -6.43834668e-16, 3.73113650e-14, 1.47252438e-02, 6.43834161e-16],
                    [1.14461967e-15, -6.43834148e-16, -6.26683723e-01, -1.14461967e-15, 6.43834161e-16, 6.26683723e-01],
                ]
            ),
            # "6": np.zeros(36).reshape((6, 6)),
            # "7/6": np.zeros(36).reshape((6, 6)),
            # "7": np.zeros(36).reshape((6, 6)),
        },
    },
}
# hand-adjust zetas -- what to pass/xfail/fail is NOT read from Libint2 config
# * leaving at dtq5 so running pytest from source doesn't catch hours-long 7z
# * @pytest.mark.parametrize("zeta", ["d", "t", "q", "5", "6/5", "6", "7/6", "7"])
@pytest.mark.parametrize("zeta", ["d", "t", "q", "5"])
@pytest.mark.parametrize("scftype", ["conv", "df"])
@pytest.mark.parametrize("der", ["ene", "grd", "hss"])
def test_zeta(scftype, zeta, der, request):
    """Compare HF energy/gradient/Hessian results against stored references.

    Parametrized over basis-set cardinality (``zeta``), SCF algorithm
    (``scftype``: conventional direct vs. density fitting) and derivative
    level (``der``). ``request`` is the pytest fixture (currently unused).
    """
    # Combinations without a stored reference value are skipped, not failed.
    if zeta not in ref[scftype][der]:
        pytest.skip()
    # Hydrogen fluoride test molecule, C1 symmetry, coordinates in bohr.
    hf = psi4.geometry(
        """
        H 0. 0. -1.64558411
        F 0. 0. 0.08729475
        symmetry c1
        units bohr
        """
    )
    # Map the cardinality label to a primary basis set. "6/5" and "7/6"
    # denote mixed pairings (see the auxiliary-basis selection below).
    if zeta in "dtq56":
        basis = f"cc-pv{zeta}z"
    elif zeta == "6/5":
        basis = "cc-pv6z"
    elif zeta in ["7/6", "7"]:
        basis = "7zapa-nr"
    psi4.set_options(
        {
            # "e_convergence": 10,
            # "d_convergence": 9,
            "scf_type": {
                "df": "df",
                "conv": "direct",
            }[scftype],
            "basis": basis,
        }
    )
    # Choose an auxiliary (JK-fit / RI) basis explicitly where the default
    # pairing is unavailable or undesired for the requested primary basis.
    df_basis_scf = None
    if scftype == "df":
        if zeta in "dtq5":
            df_basis_scf = f"cc-pv{zeta}z-jkfit"
        elif zeta == "6/5":
            df_basis_scf = "cc-pv5z-jkfit"
        elif zeta in ["6", "7/6"]:
            df_basis_scf = "cc-pv6z-ri"
    elif scftype == "conv":
        # * only used for preiterations
        # * 7zapa gets aug-cc-pv6z-ri by default
        if zeta == "6":
            df_basis_scf = "cc-pv6z-ri"
    if df_basis_scf:
        psi4.set_options({"df_basis_scf": df_basis_scf})
    # For Hessians, hand the stored reference gradient to the driver
    # (presumably consumed by the finite-difference machinery -- confirm).
    if der == "hss":
        kwargs = {"ref_gradient": psi4.core.Matrix.from_array(ref[scftype]["grd"][zeta])}
    else:
        kwargs = {}
    # Dispatch to the driver function for the requested derivative level.
    ans, wfn = {"ene": psi4.energy, "grd": psi4.gradient, "hss": psi4.hessian,}[
        der
    ]("hf", return_wfn=True, **kwargs)
    if isinstance(ans, float):
        print(ans)
    else:
        print(ans.np)
    # 6 decimal digits of agreement against the stored reference.
    assert compare_values(ref[scftype][der][zeta], ans, 6, f"Hartree--Fock {scftype} {der} {zeta}-zeta")
|
SofiaReis/django-cms | refs/heads/develop | cms/test_utils/project/second_cms_urls_for_apphook_tests.py | 54 | from cms.utils.compat.dj import is_installed
from django.conf.urls import include, url
from cms.apphook_pool import apphook_pool
from cms.views import details
from django.conf import settings

# Page-detail pattern: when APPEND_SLASH is on, URLs must end with a slash.
if settings.APPEND_SLASH:
    reg = url(r'^(?P<slug>[0-9A-Za-z-_.//]+)/$', details, name='pages-details-by-slug')
else:
    reg = url(r'^(?P<slug>[0-9A-Za-z-_.//]+)$', details, name='pages-details-by-slug')

urlpatterns = [
    # Public pages
    url(r'^example/',
        include('cms.test_utils.project.sampleapp.urls_example', namespace="example1", app_name='example_app')),
    url(r'^example2/',
        include('cms.test_utils.project.sampleapp.urls_example', namespace="example2", app_name='example_app')),
    url(r'^$', details, {'slug': ''}, name='pages-root'),
    reg,
]

# Prepend apphook URL resolvers (when any apphooks are registered) so that
# standard reverse() works for application URLs attached to CMS pages.
if apphook_pool.get_apphooks():
    """If there are some application urls, add special resolver, so we will
    have standard reverse support.
    """
    from cms.appresolver import get_app_patterns
    urlpatterns = get_app_patterns() + urlpatterns

# Expose debug-toolbar URLs only in DEBUG mode and when the app is installed.
if settings.DEBUG and is_installed('debug_toolbar'):
    import debug_toolbar
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]
|
DavidPurcell/murano_temp | refs/heads/master | murano/db/migration/alembic_migrations/versions/015_adding_text_description.py | 1 | # Copyright 2016 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Increase time resolution for status reports
Revision ID: 015
Create Date: 2016-06-17
"""
# revision identifiers, used by Alembic.
revision = '015'
down_revision = '014'
from alembic import op
import sqlalchemy as sa
MYSQL_ENGINE = 'InnoDB'
MYSQL_CHARSET = 'utf8'
def upgrade():
    """Add a nullable ``description_text`` text column to both the
    ``environment`` and ``environment-template`` tables.
    """
    for table_name in ('environment', 'environment-template'):
        op.add_column(
            table_name,
            sa.Column('description_text', sa.Text(), nullable=True))
def downgrade():
    """Drop the ``description_text`` column added by the upgrade, using
    batch mode so the drop also works on SQLite.
    """
    for table_name in ('environment', 'environment-template'):
        with op.batch_alter_table(table_name) as batch:
            batch.drop_column('description_text')
|
jazzmes/ryu | refs/heads/master | ryu/tests/unit/ofproto/test_ofproto_parser.py | 22 | # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import binascii
import unittest
from nose.tools import *
import struct
from ryu import exception
from ryu.ofproto import ofproto_common, ofproto_parser
from ryu.ofproto import ofproto_v1_0, ofproto_v1_0_parser
import logging
LOG = logging.getLogger(__name__)
class TestOfproto_Parser(unittest.TestCase):
    """Tests for the version-independent helpers in ofproto_parser,
    exercised with canned OpenFlow 1.0 wire-format messages.
    """

    def setUp(self):
        LOG.debug('setUp')
        # OFPT_HELLO header: version 0x01, msg_type 0, length 8, xid 1.
        self.bufHello = binascii.unhexlify('0100000800000001')
        # An OFPT_FEATURES_REPLY carrying three physical ports
        # (port numbers 1, 2 and 65534 -- see testFeaturesReply).
        fr = '010600b0000000020000000000000abc' \
            + '00000100010000000000008700000fff' \
            + '0002aefa39d2b9177472656d61302d30' \
            + '00000000000000000000000000000000' \
            + '000000c0000000000000000000000000' \
            + 'fffe723f9a764cc87673775f30786162' \
            + '63000000000000000000000100000001' \
            + '00000082000000000000000000000000' \
            + '00012200d6c5a1947472656d61312d30' \
            + '00000000000000000000000000000000' \
            + '000000c0000000000000000000000000'
        self.bufFeaturesReply = binascii.unhexlify(fr)
        # An OFPT_PACKET_IN message with a small encapsulated frame.
        pi = '010a005200000000000001010040' \
            + '00020000000000000002000000000001' \
            + '080045000032000000004011f967c0a8' \
            + '0001c0a8000200010001001e00000000' \
            + '00000000000000000000000000000000' \
            + '00000000'
        self.bufPacketIn = binascii.unhexlify(pi)

    def tearDown(self):
        LOG.debug('tearDown')
        pass

    def testHello(self):
        # header() splits the common OpenFlow header into its four fields.
        (version,
         msg_type,
         msg_len,
         xid) = ofproto_parser.header(self.bufHello)
        eq_(version, 1)
        eq_(msg_type, 0)
        eq_(msg_len, 8)
        eq_(xid, 1)

    def testFeaturesReply(self):
        # msg() should parse the buffer into an OFPSwitchFeatures message
        # whose ports dict is keyed by port number.
        (version,
         msg_type,
         msg_len,
         xid) = ofproto_parser.header(self.bufFeaturesReply)
        msg = ofproto_parser.msg(self,
                                 version,
                                 msg_type,
                                 msg_len,
                                 xid,
                                 self.bufFeaturesReply)
        LOG.debug(msg)
        ok_(isinstance(msg, ofproto_v1_0_parser.OFPSwitchFeatures))
        LOG.debug(msg.ports[65534])
        ok_(isinstance(msg.ports[1], ofproto_v1_0_parser.OFPPhyPort))
        ok_(isinstance(msg.ports[2], ofproto_v1_0_parser.OFPPhyPort))
        ok_(isinstance(msg.ports[65534], ofproto_v1_0_parser.OFPPhyPort))

    def testPacketIn(self):
        # msg() should parse the buffer into an OFPPacketIn message.
        (version,
         msg_type,
         msg_len,
         xid) = ofproto_parser.header(self.bufPacketIn)
        msg = ofproto_parser.msg(self,
                                 version,
                                 msg_type,
                                 msg_len,
                                 xid,
                                 self.bufPacketIn)
        LOG.debug(msg)
        ok_(isinstance(msg, ofproto_v1_0_parser.OFPPacketIn))

    @raises(AssertionError)
    def test_check_msg_len(self):
        # A header length field larger than the actual buffer length must
        # trip msg()'s internal assertion.
        (version,
         msg_type,
         msg_len,
         xid) = ofproto_parser.header(self.bufPacketIn)
        msg_len = len(self.bufPacketIn) + 1
        ofproto_parser.msg(self,
                           version,
                           msg_type,
                           msg_len,
                           xid,
                           self.bufPacketIn)

    @raises(exception.OFPUnknownVersion)
    def test_check_msg_parser(self):
        # An unsupported OpenFlow version must raise OFPUnknownVersion.
        (version,
         msg_type,
         msg_len,
         xid) = ofproto_parser.header(self.bufPacketIn)
        version = 0xff
        ofproto_parser.msg(self,
                           version,
                           msg_type,
                           msg_len,
                           xid,
                           self.bufPacketIn)
class TestMsgBase(unittest.TestCase):
    """ Test case for ofproto_parser.MsgBase
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_init(self):
        pass

    def test_set_xid(self):
        # set_xid() stores the transaction id on the message object.
        xid = 3841413783
        c = ofproto_parser.MsgBase(object)
        c.set_xid(xid)
        eq_(xid, c.xid)

    @raises(AssertionError)
    def test_set_xid_check_xid(self):
        # Setting an xid when one is already present must assert.
        xid = 2160492514
        c = ofproto_parser.MsgBase(object)
        c.xid = xid
        c.set_xid(xid)

    def _test_parser(self, msg_type=ofproto_v1_0.OFPT_HELLO):
        # Build a raw OpenFlow 1.0 header (plus trailing payload bytes)
        # and run it through OFPHello.parser.
        version = ofproto_v1_0.OFP_VERSION
        msg_len = ofproto_v1_0.OFP_HEADER_SIZE
        xid = 2183948390
        data = '\x00\x01\x02\x03'
        fmt = ofproto_v1_0.OFP_HEADER_PACK_STR
        buf = struct.pack(fmt, version, msg_type, msg_len, xid) \
            + data

        res = ofproto_v1_0_parser.OFPHello.parser(
            object, version, msg_type, msg_len, xid, bytearray(buf))

        # The parsed message must echo the header fields and keep the
        # raw buffer. NOTE: buffer() is Python 2 only.
        eq_(version, res.version)
        eq_(msg_type, res.msg_type)
        eq_(msg_len, res.msg_len)
        eq_(xid, res.xid)
        eq_(buffer(buf), res.buf)

        # test __str__()
        # Scan the whitespace-split repr for the 'version:'/'msg_type'/'xid'
        # labels and check the value token that follows each one.
        list_ = ('version:', 'msg_type', 'xid')
        check = {}
        str_ = str(res)
        str_ = str_.rsplit()
        i = 0
        for s in str_:
            if s in list_:
                check[str_[i]] = str_[i + 1]
            i += 1

        eq_(hex(ofproto_v1_0.OFP_VERSION).find(check['version:']), 0)
        eq_(hex(ofproto_v1_0.OFPT_HELLO).find(check['msg_type']), 0)
        eq_(hex(xid).find(check['xid']), 0)

        return True

    def test_parser(self):
        ok_(self._test_parser())

    @raises(AssertionError)
    def test_parser_check_msg_type(self):
        # Parsing with a msg_type other than OFPT_HELLO must assert.
        self._test_parser(ofproto_v1_0.OFPT_ERROR)

    def _test_serialize(self):
        # A minimal stand-in for a Datapath: the serializer only needs the
        # ofproto constant and parser modules.
        class Datapath(object):
            ofproto = ofproto_v1_0
            ofproto_parser = ofproto_v1_0_parser

        c = ofproto_v1_0_parser.OFPHello(Datapath)
        c.serialize()
        eq_(ofproto_v1_0.OFP_VERSION, c.version)
        eq_(ofproto_v1_0.OFPT_HELLO, c.msg_type)
        eq_(0, c.xid)
        return True

    def test_serialize(self):
        ok_(self._test_serialize())
class TestMsgPackInto(unittest.TestCase):
    """ Test case for ofproto_parser.msg_pack_into
    """

    def _test_msg_pack_into(self, offset_type='e'):
        # Pack two shorts into a buffer at an offset that is exactly at
        # ('e'), one past ('l'), or one before ('g') the buffer end, then
        # verify the values land at the tail of the (possibly grown) buffer.
        # NOTE(review): 'l' enlarges and 'g' shrinks the offset -- the
        # 'less'/'greater' labels look swapped; verify intent.
        fmt = '!HH'
        len_ = struct.calcsize(fmt)
        buf = bytearray(len_)
        offset = len_
        arg1 = 1
        arg2 = 2

        if offset_type == 'l':
            offset += 1
        elif offset_type == 'g':
            offset -= 1

        ofproto_parser.msg_pack_into(fmt, buf, offset, arg1, arg2)

        check_offset = len(buf) - len_
        res = struct.unpack_from(fmt, buffer(buf), check_offset)
        eq_(arg1, res[0])
        eq_(arg2, res[1])

        return True

    def test_msg_pack_into(self):
        ok_(self._test_msg_pack_into())

    def test_msg_pack_into_less(self):
        ok_(self._test_msg_pack_into('l'))

    def test_msg_pack_into_greater(self):
        ok_(self._test_msg_pack_into('g'))
class TestMsgStrAttr(unittest.TestCase):
    """Test case for ofproto_parser.msg_str_attr."""

    def test_msg_str_attr(self):
        # A minimal object exposing the single attribute to be rendered.
        class Check(object):
            check = 'msg_str_attr_test'

        rendered = ofproto_parser.msg_str_attr(Check(), '', ('check',))
        # The rendered text should contain the attribute name followed
        # by its value.
        tokens = str(rendered).rsplit()
        eq_('check', tokens[0])
        eq_('msg_str_attr_test', tokens[1])
|
jas02/easybuild-easyblocks | refs/heads/master | easybuild/easyblocks/m/metis.py | 10 | ##
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for METIS, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import os
import shutil
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import mkdir
from easybuild.tools.run import run_cmd
class EB_METIS(ConfigureMake):
    """Support for building and installing METIS."""
    # NOTE: Python 2 code (old-style except clauses, 0755 octal literal).

    def __init__(self, *args, **kwargs):
        """Define custom class variables for METIS."""
        super(EB_METIS, self).__init__(*args, **kwargs)

        # Library extensions ('a' and/or 'so') to check in the sanity step;
        # filled in during configure_step for METIS >= 5.
        self.lib_exts = []

    def configure_step(self, *args, **kwargs):
        """Configure build using 'make config' (only for recent versions (>= v5))."""

        if LooseVersion(self.version) >= LooseVersion("5"):
            cmd = "make %s config prefix=%s" % (self.cfg['configopts'], self.installdir)
            run_cmd(cmd, log_all=True, simple=True)

            # Record which library type the chosen configure options produce.
            if 'shared=1' in self.cfg['configopts']:
                self.lib_exts.append('so')
            else:
                self.lib_exts.append('a')

    def build_step(self):
        """Add make options before building."""

        self.cfg.update('buildopts', 'LIBDIR=""')

        # Build position-independent code when the toolchain requests it.
        if self.toolchain.options['pic']:
            self.cfg.update('buildopts', 'CC="$CC -fPIC"')

        super(EB_METIS, self).build_step()

    def install_step(self):
        """
        Install by manually copying files to install dir, for old versions,
        or by running 'make install' for new versions.

        Create symlinks where expected by other applications
        (in Lib instead of lib)
        """

        if LooseVersion(self.version) < LooseVersion("5"):
            # Old METIS has no 'make install'; create the layout by hand.
            libdir = os.path.join(self.installdir, 'lib')
            mkdir(libdir)

            includedir = os.path.join(self.installdir, 'include')
            mkdir(includedir)

            # copy libraries
            try:
                src = os.path.join(self.cfg['start_dir'], 'libmetis.a')
                dst = os.path.join(libdir, 'libmetis.a')
                shutil.copy2(src, dst)
            except OSError, err:
                raise EasyBuildError("Copying file libmetis.a to lib dir failed: %s", err)

            # copy include files
            try:
                for f in ['defs.h', 'macros.h', 'metis.h', 'proto.h', 'rename.h', 'struct.h']:
                    src = os.path.join(self.cfg['start_dir'], 'Lib', f)
                    dst = os.path.join(includedir, f)
                    shutil.copy2(src, dst)
                    # ensure the headers are world-readable
                    os.chmod(dst, 0755)
            except OSError, err:
                raise EasyBuildError("Copying file metis.h to include dir failed: %s", err)

            # other applications depending on ParMETIS (SuiteSparse for one) look for both ParMETIS libraries
            # and header files in the Lib directory (capital L). The following symlinks are hence created.
            try:
                Libdir = os.path.join(self.installdir, 'Lib')
                os.symlink(libdir, Libdir)
                for f in ['defs.h', 'macros.h', 'metis.h', 'proto.h', 'rename.h', 'struct.h']:
                    os.symlink(os.path.join(includedir, f), os.path.join(libdir, f))
            except OSError, err:
                raise EasyBuildError("Something went wrong during symlink creation: %s", err)

        else:
            super(EB_METIS, self).install_step()

    def sanity_check_step(self):
        """Custom sanity check for METIS (more extensive for recent version (>= v5))"""

        binfiles = []
        if LooseVersion(self.version) > LooseVersion("5"):
            binfiles += ["cmpfillin", "gpmetis", "graphchk", "m2gmetis", "mpmetis", "ndmetis"]

        incfiles = ["metis.h"]
        if LooseVersion(self.version) < LooseVersion("5"):
            incfiles += ["defs.h", "macros.h", "proto.h", "rename.h", "struct.h"]

        dirs = []
        if LooseVersion(self.version) < LooseVersion("5"):
            dirs += ["Lib"]

        custom_paths = {
            'files': ['bin/%s' % x for x in binfiles] + ['include/%s' % x for x in incfiles] +
                     ['lib/libmetis.%s' % x for x in self.lib_exts],
            'dirs' : dirs,
        }

        super(EB_METIS, self).sanity_check_step(custom_paths=custom_paths)
|
terrycojones/dark-matter | refs/heads/master | dark/filter.py | 1 | import re
from math import ceil
from collections import OrderedDict
from dark.simplify import simplifyTitle
from dark.utils import parseRangeString
class TitleFilter(object):
    """
    Decide whether sequence titles should be accepted.

    @param whitelist: If not C{None}, a C{set} of exact titles that are
        always acceptable.
    @param blacklist: If not C{None}, a C{set} of exact titles that are
        never acceptable.
    @param whitelistFile: If not C{None}, a C{str} file name whose lines
        give exact titles that are always acceptable.
    @param blacklistFile: If not C{None}, a C{str} file name whose lines
        give exact titles that are never acceptable.
    @param positiveRegex: If not C{None}, a C{str} regex (matched with case
        ignored) that acceptable titles must match.
    @param negativeRegex: If not C{None}, a C{str} regex (matched with case
        ignored) that acceptable titles must not match.
    @param truncateAfter: A C{str} that titles will be truncated beyond. If
        a truncated title has already been seen, that title will no longer
        be acceptable.
    """
    REJECT = 0
    WHITELIST_ACCEPT = 1
    DEFAULT_ACCEPT = 2

    def __init__(self, whitelist=None, blacklist=None,
                 whitelistFile=None, blacklistFile=None,
                 positiveRegex=None, negativeRegex=None, truncateAfter=None):
        self._whitelist = whitelist or set()
        if whitelistFile:
            with open(whitelistFile) as fp:
                for line in fp:
                    self._whitelist.add(line[:-1])

        self._blacklist = blacklist or set()
        if blacklistFile:
            with open(blacklistFile) as fp:
                for line in fp:
                    self._blacklist.add(line[:-1])

        # _truncated maps a truncated title to the first full title that
        # produced it; it stays None when truncation is disabled.
        if truncateAfter is None:
            self._truncated = None
        else:
            self._truncateAfter = truncateAfter
            self._truncated = {}

        self._positiveRegex = (
            None if positiveRegex is None else re.compile(positiveRegex, re.I))
        self._negativeRegex = (
            None if negativeRegex is None else re.compile(negativeRegex, re.I))

    def accept(self, title):
        """
        Indicate whether a title is acceptable (and, if so, in what way).

        @param title: A C{str} sequence title.
        @return: C{self.WHITELIST_ACCEPT} if the title is whitelisted,
            C{self.DEFAULT_ACCEPT} if it is acceptable by default, else
            C{self.REJECT}. Three values are used (rather than a C{bool})
            so the caller can distinguish the two acceptance reasons.
        """
        # Whitelisting takes precedence over every other check.
        if self._whitelist and title in self._whitelist:
            return self.WHITELIST_ACCEPT

        if self._blacklist and title in self._blacklist:
            return self.REJECT

        # A positive regex, if given, must match.
        if self._positiveRegex and self._positiveRegex.search(title) is None:
            return self.REJECT

        # A negative regex, if given, must not match.
        if (self._negativeRegex and
                self._negativeRegex.search(title) is not None):
            return self.REJECT

        if self._truncated is None:
            return self.DEFAULT_ACCEPT

        truncated = simplifyTitle(title, self._truncateAfter)
        previous = self._truncated.get(truncated)
        if previous is None:
            # First sighting of this truncated form; remember its origin.
            self._truncated[truncated] = title
            return self.DEFAULT_ACCEPT
        # Only the exact title that established this truncated form
        # continues to be accepted.
        return self.DEFAULT_ACCEPT if previous == title else self.REJECT
class ReadSetFilter(object):
    """
    Provide an acceptance test based on sequence read sets.

    @param minNew: The C{float} fraction of its reads by which a new read
        set must differ from all previously seen read sets in order to be
        considered acceptably different.
    """

    def __init__(self, minNew):
        self._minNew = minNew
        # An OrderedDict makes every walk over the seen titles happen in
        # the same order, keeping runs deterministic / reproducible.
        self._titles = OrderedDict()

    def accept(self, title, titleAlignments):
        """
        Report whether the read id set in C{titleAlignments} is sufficiently
        different from all previously seen read sets.

        @param title: A C{str} sequence title.
        @param titleAlignments: An instance of L{TitleAlignment}.
        @return: A C{bool}: C{True} when the title's read set is acceptably
            novel, C{False} when it duplicates an earlier set.
        """
        # Sanity check: each title may only be offered once.
        assert title not in self._titles, (
            'Title %r seen multiple times.' % title)

        newReads = titleAlignments.readIds()
        threshold = ceil(self._minNew * len(newReads))

        for seenReads, invalidated in self._titles.values():
            if len(newReads - seenReads) < threshold:
                # Record that the earlier read set invalidated this title,
                # then reject the title.
                invalidated.append(title)
                return False

        # A novel read set: remember it along with an (initially empty)
        # list of the titles it goes on to invalidate.
        self._titles[title] = (newReads, [])
        return True

    def invalidates(self, title):
        """
        Report on which other titles were invalidated by a given title.

        @param title: A C{str} sequence title.
        @return: A C{list} of titles that the passed title invalidated
            (empty when the title is unknown).
        """
        entry = self._titles.get(title)
        return [] if entry is None else entry[1]
def addFASTAFilteringCommandLineOptions(parser):
    """
    Add standard FASTA filtering command-line options to an argparse parser.

    These are options that can be used to select or omit entire FASTA records,
    NOT options that change them (for that see
    addFASTAEditingCommandLineOptions).

    @param parser: An C{argparse.ArgumentParser} instance.
    """
    # Length-based selection.
    parser.add_argument(
        '--minLength', type=int,
        help='The minimum sequence length')

    parser.add_argument(
        '--maxLength', type=int,
        help='The maximum sequence length')

    # Title (id) based selection: exact matches, given directly or in files.
    parser.add_argument(
        '--whitelist', action='append',
        help='Sequence titles (ids) that should be whitelisted')

    parser.add_argument(
        '--blacklist', action='append',
        help='Sequence titles (ids) that should be blacklisted')

    parser.add_argument(
        '--whitelistFile',
        help=('The name of a file that contains sequence titles (ids) that '
              'should be whitelisted, one per line'))

    parser.add_argument(
        '--blacklistFile',
        help=('The name of a file that contains sequence titles (ids) that '
              'should be blacklisted, one per line'))

    # Title (id) based selection by regular expression.
    parser.add_argument(
        '--titleRegex', help='A regex that sequence titles (ids) must match.')

    parser.add_argument(
        '--negativeTitleRegex',
        help='A regex that sequence titles (ids) must not match.')

    # A mutually exclusive group for --keepSequences and --removeSequences.
    group = parser.add_mutually_exclusive_group()

    group.add_argument(
        '--keepSequences',
        help=('Specify (1-based) ranges of sequence numbers that should be '
              'kept. E.g., --keepSequences 1-3,5 will output just the 1st, '
              '2nd, 3rd, and 5th sequences. All others will be omitted.'))

    group.add_argument(
        '--removeSequences',
        help=('Specify (1-based) ranges of sequence numbers that should be '
              'removed. E.g., --removeSequences 1-3,5 will output all but the '
              '1st, 2nd, 3rd, and 5th sequences. All others will be ouput.'))

    parser.add_argument(
        '--head', type=int, metavar='N',
        help='Only the first N sequences will be printed.')

    # Duplicate removal, by sequence content or by read id.
    parser.add_argument(
        '--removeDuplicates', action='store_true', default=False,
        help=('Duplicate reads will be removed, based only on '
              'sequence identity. The first occurrence is kept.'))

    parser.add_argument(
        '--removeDuplicatesById', action='store_true', default=False,
        help=('Duplicate reads will be removed, based only on '
              'read id. The first occurrence is kept.'))

    # See the docstring for dark.reads.Reads.filter for more detail on
    # randomSubset.
    parser.add_argument(
        '--randomSubset', type=int,
        help=('An integer giving the number of sequences that should be kept. '
              'These will be selected at random.'))

    # See the docstring for dark.reads.Reads.filter for more detail on
    # trueLength.
    parser.add_argument(
        '--trueLength', type=int,
        help=('The number of reads in the FASTA input. Only to be used with '
              'randomSubset'))

    parser.add_argument(
        '--sampleFraction', type=float,
        help=('A [0.0, 1.0] C{float} indicating a fraction of the reads that '
              'should be allowed to pass through the filter. The sample size '
              'will only be approximately the product of the sample fraction '
              'and the number of reads. The sample is taken at random.'))

    parser.add_argument(
        '--sequenceNumbersFile',
        help=('A file of (1-based) sequence numbers to retain. Numbers must '
              'be one per line.'))
def parseFASTAFilteringCommandLineOptions(args, reads):
    """
    Examine parsed FASTA filtering command-line options and return filtered
    reads.

    @param args: An argparse namespace, as returned by the argparse
        C{parse_args} function.
    @param reads: A C{Reads} instance to filter.
    @return: The filtered C{Reads} instance.
    """
    def _zeroBasedRanges(spec):
        # Convert a 1-based range specification string into a set of
        # 0-based numbers, or None when no specification was given.
        return (parseRangeString(spec, convertToZeroBased=True)
                if spec else None)

    return reads.filter(
        minLength=args.minLength, maxLength=args.maxLength,
        whitelist=set(args.whitelist) if args.whitelist else None,
        blacklist=set(args.blacklist) if args.blacklist else None,
        whitelistFile=args.whitelistFile, blacklistFile=args.blacklistFile,
        titleRegex=args.titleRegex,
        negativeTitleRegex=args.negativeTitleRegex,
        keepSequences=_zeroBasedRanges(args.keepSequences),
        removeSequences=_zeroBasedRanges(args.removeSequences),
        head=args.head, removeDuplicates=args.removeDuplicates,
        removeDuplicatesById=args.removeDuplicatesById,
        randomSubset=args.randomSubset, trueLength=args.trueLength,
        sampleFraction=args.sampleFraction,
        sequenceNumbersFile=args.sequenceNumbersFile)
def addFASTAEditingCommandLineOptions(parser):
    """
    Add standard FASTA editing command-line options to an argparse parser.

    These are options that can be used to alter FASTA records, NOT options
    that simply select or reject those things (for those see
    addFASTAFilteringCommandLineOptions).

    @param parser: An C{argparse.ArgumentParser} instance.
    """
    # A mutually exclusive group for --keepSites, --keepSitesFile,
    # --removeSites, and --removeSitesFile.
    group = parser.add_mutually_exclusive_group()

    # In the 4 options below, the 'indices' alternate names are kept for
    # backwards compatibility.
    group.add_argument(
        '--keepSites', '--keepIndices',
        help=('Specify 1-based sequence sites to keep. All other sites will '
              'be removed. The sites must be given in the form e.g., '
              '24,100-200,260. Note that the requested sites will be taken '
              'from the input sequences in order, not in the order given by '
              '--keepSites. I.e., --keepSites 5,8-10 will get you the same '
              'result as --keepSites 8-10,5.'))

    group.add_argument(
        '--keepSitesFile', '--keepIndicesFile',
        help=('Specify a file containing 1-based sites to keep. All other '
              'sequence sites will be removed. Lines in the file must be '
              'given in the form e.g., 24,100-200,260. See --keepSites for '
              'more detail.'))

    group.add_argument(
        '--removeSites', '--removeIndices',
        help=('Specify 1-based sites to remove. All other sequence sites will '
              'be kept. The sites must be given in the form e.g., '
              '24,100-200,260. See --keepSites for more detail.'))

    group.add_argument(
        '--removeSitesFile', '--removeIndicesFile',
        help=('Specify a file containing 1-based sites to remove. All other '
              'sequence sites will be kept. Lines in the file must be given '
              'in the form e.g., 24,100-200,260. See --keepSites for more '
              'detail.'))

    # Whole-record edits.
    parser.add_argument(
        '--removeGaps', action='store_true', default=False,
        help="If True, gap ('-') characters in sequences will be removed.")

    parser.add_argument(
        '--truncateTitlesAfter',
        help=('A string that sequence titles (ids) will be truncated beyond. '
              'If the truncated version of a title has already been seen, '
              'that title will be skipped.'))

    parser.add_argument(
        '--removeDescriptions', action='store_true', default=False,
        help=('Read id descriptions will be removed. The '
              'description is the part of a sequence id after the '
              'first whitespace (if any).'))

    # User-supplied lambda transformations of ids / whole reads.
    parser.add_argument(
        '--idLambda', metavar='LAMBDA-FUNCTION',
        help=('A one-argument function taking and returning a read id. '
              'E.g., --idLambda "lambda id: id.split(\'_\')[0]" or '
              '--idLambda "lambda id: id[:10]". If the function returns None, '
              'the read will be filtered out.'))

    parser.add_argument(
        '--readLambda', metavar='LAMBDA-FUNCTION',
        help=('A one-argument function taking and returning a read. '
              'E.g., --readLambda "lambda r: Read(r.id.split(\'_\')[0], '
              'r.sequence.strip(\'-\')". Make sure to also modify the quality '
              'string if you change the length of a FASTQ sequence. If the '
              'function returns None, the read will be filtered out. The '
              'function will be passed to eval with the dark.reads classes '
              'Read, DNARead, AARead, etc. all in scope.'))

    parser.add_argument(
        '--reverse', action='store_true', default=False,
        help=('Reverse the sequences. Note that this is NOT reverse '
              'complementing.'))

    parser.add_argument(
        '--reverseComplement', action='store_true', default=False,
        help='Reverse complement the sequences.')
def parseFASTAEditingCommandLineOptions(args, reads):
    """
    Examine parsed FASTA editing command-line options and return the edited
    reads.

    @param args: An argparse namespace, as returned by the argparse
        C{parse_args} function.
    @param reads: A C{Reads} instance to filter.
    @return: The filtered C{Reads} instance.
    """
    def _sitesFromString(spec):
        # Convert a 1-based site specification string into a set of
        # 0-based site numbers, or None when no specification was given.
        return (parseRangeString(spec, convertToZeroBased=True)
                if spec else None)

    def _addSitesFromFile(filename, sites, template):
        # Merge 1-based site specifications read (one per line) from
        # filename into sites, reporting parse failures using the given
        # error message template.
        sites = sites or set()
        with open(filename) as fp:
            for lineNumber, line in enumerate(fp):
                try:
                    sites.update(
                        parseRangeString(line, convertToZeroBased=True))
                except ValueError as e:
                    raise ValueError(template % (filename, lineNumber, e))
        return sites

    keepSites = _sitesFromString(args.keepSites)
    if args.keepSitesFile:
        keepSites = _addSitesFromFile(
            args.keepSitesFile, keepSites,
            'Keep sites file %r line %d could not be parsed: %s')

    removeSites = _sitesFromString(args.removeSites)
    if args.removeSitesFile:
        removeSites = _addSitesFromFile(
            args.removeSitesFile, removeSites,
            'Remove sites file %r line %d parse error: %s')

    return reads.filter(
        removeGaps=args.removeGaps,
        truncateTitlesAfter=args.truncateTitlesAfter,
        removeDescriptions=args.removeDescriptions,
        idLambda=args.idLambda, readLambda=args.readLambda,
        keepSites=keepSites, removeSites=removeSites,
        reverse=args.reverse, reverseComplement=args.reverseComplement)
|
agry/NGECore2 | refs/heads/master | scripts/object/tangible/ship/attachment/weapon/kse_firespray_weapon1_s04.py | 85615 | import sys
def setup(core, object):
return |
jcoady9/youtube-dl | refs/heads/master | youtube_dl/extractor/ruleporn.py | 76 | from __future__ import unicode_literals
from .nuevo import NuevoBaseIE
class RulePornIE(NuevoBaseIE):
    """Extractor for ruleporn.com, whose media is hosted via the 'nuevo'
    player config on lovehomeporn.com.
    """
    _VALID_URL = r'https?://(?:www\.)?ruleporn\.com/(?:[^/?#&]+/)*(?P<id>[^/?#&]+)'
    _TEST = {
        'url': 'http://ruleporn.com/brunette-nympho-chick-takes-her-boyfriend-in-every-angle/',
        'md5': '86861ebc624a1097c7c10eaf06d7d505',
        'info_dict': {
            'id': '48212',
            'display_id': 'brunette-nympho-chick-takes-her-boyfriend-in-every-angle',
            'ext': 'mp4',
            'title': 'Brunette Nympho Chick Takes Her Boyfriend In Every Angle',
            'description': 'md5:6d28be231b981fff1981deaaa03a04d5',
            'age_limit': 18,
            'duration': 635.1,
        }
    }

    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)

        # The numeric id of the embedded lovehomeporn.com video.
        embed_id = self._search_regex(
            r'lovehomeporn\.com/embed/(\d+)', webpage, 'video id')
        page_title = self._search_regex(
            r'<h2[^>]+title=(["\'])(?P<url>.+?)\1',
            webpage, 'title', group='url')
        summary = self._html_search_meta('description', webpage)

        # Fetch the media details from the nuevo player config, then layer
        # the page-derived metadata on top.
        info = self._extract_nuevo(
            'http://lovehomeporn.com/media/nuevo/econfig.php?key=%s&rp=true'
            % embed_id,
            embed_id)
        info.update({
            'display_id': display_id,
            'title': page_title,
            'description': summary,
            'age_limit': 18
        })
        return info
|
Eddy0402/Environment | refs/heads/master | vim/ycmd/cpp/ycm/tests/gmock/scripts/upload.py | 2511 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for uploading diffs from a version control system to the codereview app.
Usage summary: upload.py [options] [-- diff_options]
Diff options are passed to the diff command of the underlying system.
Supported version control systems:
Git
Mercurial
Subversion
It is important for Git/Mercurial users to specify a tree/node/branch to diff
against by using the '--rev' option.
"""
# This code is derived from appcfg.py in the App Engine SDK (open source),
# and from ASPN recipe #146306.
import cookielib
import getpass
import logging
import md5
import mimetypes
import optparse
import os
import re
import socket
import subprocess
import sys
import urllib
import urllib2
import urlparse
try:
import readline
except ImportError:
pass
# The logging verbosity:
# 0: Errors only.
# 1: Status messages.
# 2: Info logs.
# 3: Debug logs.
# Default verbosity (see the scale above): 1 prints status messages.
verbosity = 1
# Max size (in bytes) of a patch or base file; larger files are skipped.
MAX_UPLOAD_SIZE = 900 * 1024
def GetEmail(prompt):
  """Prompts the user for their email address and returns it.
  The last used email address is saved to a file and offered up as a suggestion
  to the user. If the user presses enter without typing in anything the last
  used email address is used. If the user enters a new address, it is saved
  for next time we prompt.
  """
  last_email_file_name = os.path.expanduser("~/.last_codereview_email_address")
  last_email = ""
  if os.path.exists(last_email_file_name):
    try:
      last_email_file = open(last_email_file_name, "r")
      last_email = last_email_file.readline().strip("\n")
      last_email_file.close()
      prompt += " [%s]" % last_email
    # A bare "except IOError" (instead of the Python-2-only
    # "except IOError, e" that bound an unused variable) parses everywhere.
    except IOError:
      # Best-effort: a missing/unreadable cache file just means no default.
      pass
  email = raw_input(prompt + ": ").strip()
  if email:
    try:
      last_email_file = open(last_email_file_name, "w")
      last_email_file.write(email)
      last_email_file.close()
    except IOError:
      # Failing to persist the address is non-fatal.
      pass
  else:
    email = last_email
  return email
def StatusUpdate(msg):
  """Print a status message to stdout.
  If 'verbosity' is greater than 0, print the message.
  Args:
    msg: The string to print.
  """
  if verbosity > 0:
    # Parenthesized form behaves identically for a single argument under
    # Python 2 and also parses under Python 3 (the bare statement does not).
    print(msg)
def ErrorExit(msg):
  """Print an error message to stderr and exit with status 1."""
  # sys.stderr.write works under both Python 2 and 3, unlike the original
  # "print >>sys.stderr" chevron syntax, which is a Python 3 syntax error.
  sys.stderr.write(msg + "\n")
  sys.exit(1)
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""
  def __init__(self, url, code, msg, headers, args):
    # Pass None as the fp so HTTPError does not try to read a body.
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    # 'args' is the parsed key/value response body from ClientLogin; its
    # "Error" entry holds the failure code (e.g. "BadAuthentication"),
    # which _Authenticate dispatches on via self.reason.
    self.args = args
    self.reason = args["Error"]
class AbstractRpcServer(object):
  """Provides a common interface for a simple RPC server."""
  def __init__(self, host, auth_function, host_override=None, extra_headers={},
               save_cookies=False):
    """Creates a new HttpRpcServer.
    Args:
      host: The host to send requests to.
      auth_function: A function that takes no arguments and returns an
        (email, password) tuple when called. Will be called if authentication
        is required.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request.
      save_cookies: If True, save the authentication cookies to local disk.
        If False, use an in-memory cookiejar instead. Subclasses must
        implement this functionality. Defaults to False.
    """
    self.host = host
    self.host_override = host_override
    self.auth_function = auth_function
    self.authenticated = False
    self.extra_headers = extra_headers
    self.save_cookies = save_cookies
    # _GetOpener is abstract here; concrete subclasses supply the opener.
    self.opener = self._GetOpener()
    if self.host_override:
      logging.info("Server: %s; Host: %s", self.host, self.host_override)
    else:
      logging.info("Server: %s", self.host)
  def _GetOpener(self):
    """Returns an OpenerDirector for making HTTP requests.
    Returns:
      A urllib2.OpenerDirector object.
    """
    raise NotImplementedError()
  def _CreateRequest(self, url, data=None):
    """Creates a new urllib request."""
    logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
    req = urllib2.Request(url, data=data)
    if self.host_override:
      req.add_header("Host", self.host_override)
    for key, value in self.extra_headers.iteritems():
      req.add_header(key, value)
    return req
  def _GetAuthToken(self, email, password):
    """Uses ClientLogin to authenticate the user, returning an auth token.
    Args:
      email: The user's email address
      password: The user's password
    Raises:
      ClientLoginError: If there was an error authenticating with ClientLogin.
      HTTPError: If there was some other form of HTTP error.
    Returns:
      The authentication token returned by ClientLogin.
    """
    account_type = "GOOGLE"
    if self.host.endswith(".google.com"):
      # Needed for use inside Google.
      account_type = "HOSTED"
    req = self._CreateRequest(
        url="https://www.google.com/accounts/ClientLogin",
        data=urllib.urlencode({
            "Email": email,
            "Passwd": password,
            "service": "ah",
            "source": "rietveld-codereview-upload",
            "accountType": account_type,
        }),
    )
    try:
      response = self.opener.open(req)
      response_body = response.read()
      # ClientLogin answers with newline-separated key=value pairs.
      response_dict = dict(x.split("=")
                           for x in response_body.split("\n") if x)
      return response_dict["Auth"]
    except urllib2.HTTPError, e:
      if e.code == 403:
        body = e.read()
        response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
        raise ClientLoginError(req.get_full_url(), e.code, e.msg,
                               e.headers, response_dict)
      else:
        raise
  def _GetAuthCookie(self, auth_token):
    """Fetches authentication cookies for an authentication token.
    Args:
      auth_token: The authentication token returned by ClientLogin.
    Raises:
      HTTPError: If there was an error fetching the authentication cookies.
    """
    # This is a dummy value to allow us to identify when we're successful.
    continue_location = "http://localhost/"
    args = {"continue": continue_location, "auth": auth_token}
    req = self._CreateRequest("http://%s/_ah/login?%s" %
                              (self.host, urllib.urlencode(args)))
    try:
      response = self.opener.open(req)
    except urllib2.HTTPError, e:
      # The opener ignores redirects, so a successful login surfaces here
      # as an HTTPError carrying the 302 response.
      response = e
    if (response.code != 302 or
        response.info()["location"] != continue_location):
      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                              response.headers, response.fp)
    self.authenticated = True
  def _Authenticate(self):
    """Authenticates the user.
    The authentication process works as follows:
     1) We get a username and password from the user
     2) We use ClientLogin to obtain an AUTH token for the user
        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
     3) We pass the auth token to /_ah/login on the server to obtain an
        authentication cookie. If login was successful, it tries to redirect
        us to the URL we provided.
    If we attempt to access the upload API without first obtaining an
    authentication cookie, it returns a 401 response and directs us to
    authenticate ourselves with ClientLogin.
    """
    # Give the user up to three attempts at a correct password; every other
    # ClientLogin failure code is terminal for this run.
    for i in range(3):
      credentials = self.auth_function()
      try:
        auth_token = self._GetAuthToken(credentials[0], credentials[1])
      except ClientLoginError, e:
        if e.reason == "BadAuthentication":
          print >>sys.stderr, "Invalid username or password."
          continue
        if e.reason == "CaptchaRequired":
          print >>sys.stderr, (
              "Please go to\n"
              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
              "and verify you are a human. Then try again.")
          break
        if e.reason == "NotVerified":
          print >>sys.stderr, "Account not verified."
          break
        if e.reason == "TermsNotAgreed":
          print >>sys.stderr, "User has not agreed to TOS."
          break
        if e.reason == "AccountDeleted":
          print >>sys.stderr, "The user account has been deleted."
          break
        if e.reason == "AccountDisabled":
          print >>sys.stderr, "The user account has been disabled."
          break
        if e.reason == "ServiceDisabled":
          print >>sys.stderr, ("The user's access to the service has been "
                               "disabled.")
          break
        if e.reason == "ServiceUnavailable":
          print >>sys.stderr, "The service is not available; try again later."
          break
        raise
      self._GetAuthCookie(auth_token)
      return
  def Send(self, request_path, payload=None,
           content_type="application/octet-stream",
           timeout=None,
           **kwargs):
    """Sends an RPC and returns the response.
    Args:
      request_path: The path to send the request to, eg /api/appversion/create.
      payload: The body of the request, or None to send an empty request.
      content_type: The Content-Type header to use.
      timeout: timeout in seconds; default None i.e. no timeout.
        (Note: for large requests on OS X, the timeout doesn't work right.)
      kwargs: Any keyword arguments are converted into query string parameters.
    Returns:
      The response body, as a string.
    """
    # TODO: Don't require authentication.  Let the server say
    # whether it is necessary.
    if not self.authenticated:
      self._Authenticate()
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
      # Retry loop: a 401 triggers re-authentication; give up after 3 tries.
      tries = 0
      while True:
        tries += 1
        args = dict(kwargs)
        url = "http://%s%s" % (self.host, request_path)
        if args:
          url += "?" + urllib.urlencode(args)
        req = self._CreateRequest(url=url, data=payload)
        req.add_header("Content-Type", content_type)
        try:
          f = self.opener.open(req)
          response = f.read()
          f.close()
          return response
        except urllib2.HTTPError, e:
          if tries > 3:
            raise
          elif e.code == 401:
            self._Authenticate()
##          elif e.code >= 500 and e.code < 600:
##            # Server Error - try again.
##            continue
          else:
            raise
    finally:
      # Always restore the process-wide socket timeout.
      socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""
  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()
  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.
    Returns:
      A urllib2.OpenerDirector object.
    """
    # Hand-assembled opener: deliberately omits the redirect handler so that
    # _GetAuthCookie can observe the 302 from /_ah/login directly.
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of upload.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
# Command-line interface definition.  Option order below determines the
# order shown by --help, so it must not be rearranged.
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default="codereview.appspot.com",
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to 'codereview.appspot.com'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
                 metavar="DESCRIPTION", default=None,
                 help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
                 dest="description_file", metavar="DESCRIPTION_FILE",
                 default=None,
                 help="Optional path of a file that contains "
                      "the description when creating an issue.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
                 metavar="MESSAGE", default=None,
                 help="A message to identify the patch. "
                      "Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("--download_base", action="store_true",
                 dest="download_base", default=False,
                 help="Base files will be downloaded by the server "
                      "(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
                 metavar="REV", default=None,
                 help="Branch/tree/revision to diff against (used by DVCS).")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=False,
                 help="Send notification email to reviewers.")
def GetRpcServer(options):
  """Build the RPC server used to talk to the code review application.
  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """
  server_class = HttpRpcServer
  # If this is the dev_appserver, use fake authentication.
  check_host = (options.host or options.server).lower()
  if check_host == "localhost" or check_host.startswith("localhost:"):
    debug_email = options.email
    if debug_email is None:
      debug_email = "test@example.com"
      logging.info("Using debug user %s. Override with --email" % debug_email)
    server = server_class(
        options.server,
        lambda: (debug_email, "password"),
        host_override=options.host,
        extra_headers={"Cookie":
                       'dev_appserver_login="%s:False"' % debug_email},
        save_cookies=options.save_cookies)
    # Don't try to talk to ClientLogin.
    server.authenticated = True
    return server
  def GetUserCredentials():
    """Prompts the user for a username and password."""
    email = options.email
    if email is None:
      email = GetEmail("Email (login for uploading to %s)" % options.server)
    password = getpass.getpass("Password for %s: " % email)
    return (email, password)
  return server_class(options.server, GetUserCredentials,
                      host_override=options.host,
                      save_cookies=options.save_cookies)
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.
  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
      uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.
  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  parts = []
  for (key, value) in fields:
    parts.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"' % key,
        '',
        value,
    ])
  for (key, filename, value) in files:
    parts.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"; filename="%s"' %
        (key, filename),
        'Content-Type: %s' % GetContentType(filename),
        '',
        value,
    ])
  # Closing boundary, plus a trailing CRLF produced by the final empty part.
  parts.append('--' + BOUNDARY + '--')
  parts.append('')
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, CRLF.join(parts)
def GetContentType(filename):
  """Guess the Content-Type from *filename*, defaulting to octet-stream."""
  guessed, _ = mimetypes.guess_type(filename)
  return guessed if guessed else 'application/octet-stream'
# Use a shell for subcommands on Windows to get a PATH search, so bare
# command names like "svn" resolve the same way they do from a prompt.
use_shell = sys.platform.startswith("win")
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True):
  """Executes a command and returns the output from stdout and the return code.
  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
      If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).
  Returns:
    Tuple (output, return code)
  """
  logging.info("Running %s", command)
  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                       shell=use_shell, universal_newlines=universal_newlines)
  if print_output:
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      # Echo each line as it arrives so long-running commands show progress.
      # The parenthesized print form behaves identically on Python 2 and
      # also parses under Python 3, unlike the original bare statement.
      print(line.strip("\n"))
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  errout = p.stderr.read()
  if print_output and errout:
    # Replaces the Python-2-only "print >>sys.stderr" chevron syntax with a
    # write() that emits the same bytes (message plus newline) on 2 and 3.
    sys.stderr.write(errout + "\n")
  p.stdout.close()
  p.stderr.close()
  return output, p.returncode
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False):
  """Run *command*, returning its stdout; abort on failure or empty output."""
  output, exit_code = RunShellWithReturnCode(command, print_output,
                                             universal_newlines)
  if exit_code:
    ErrorExit("Got error status from %s:\n%s" % (command, output))
  if not (silent_ok or output):
    ErrorExit("No output from %s" % command)
  return output
class VersionControlSystem(object):
  """Abstract base class providing an interface to the VCS."""
  def __init__(self, options):
    """Constructor.
    Args:
      options: Command line options.
    """
    self.options = options
  def GenerateDiff(self, args):
    """Return the current diff as a string.
    Args:
      args: Extra arguments to pass to the diff command.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def CheckForUnknownFiles(self):
    """Show an "are you sure?" prompt if there are unknown files."""
    unknown_files = self.GetUnknownFiles()
    if unknown_files:
      print "The following files are not added to version control:"
      for line in unknown_files:
        print line
      prompt = "Are you sure to continue?(y/N) "
      answer = raw_input(prompt).strip()
      if answer != "y":
        ErrorExit("User aborted")
  def GetBaseFile(self, filename):
    """Get the content of the upstream version of a file.
    Returns:
      A tuple (base_content, new_content, is_binary, status)
        base_content: The contents of the base file.
        new_content: For text files, this is empty.  For binary files, this is
          the contents of the new file, since the diff output won't contain
          information to reconstruct the current file.
        is_binary: True iff the file is binary.
        status: The status of the file.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
  def GetBaseFiles(self, diff):
    """Helper that calls GetBase file for each file in the patch.
    Returns:
      A dictionary that maps from filename to GetBaseFile's tuple.  Filenames
      are retrieved based on lines that start with "Index:" or
      "Property changes on:".
    """
    files = {}
    for line in diff.splitlines(True):
      if line.startswith('Index:') or line.startswith('Property changes on:'):
        unused, filename = line.split(':', 1)
        # On Windows if a file has property changes its filename uses '\'
        # instead of '/'.
        filename = filename.strip().replace('\\', '/')
        files[filename] = self.GetBaseFile(filename)
    return files
  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,
                      files):
    """Uploads the base files (and if necessary, the current ones as well)."""
    def UploadFile(filename, file_id, content, is_binary, status, is_base):
      """Uploads a file to the server."""
      # NOTE: 'type' shadows the builtin; kept as-is to avoid behavior drift.
      file_too_large = False
      if is_base:
        type = "base"
      else:
        type = "current"
      # Oversized files are flagged rather than uploaded (see MAX_UPLOAD_SIZE).
      if len(content) > MAX_UPLOAD_SIZE:
        print ("Not uploading the %s file for %s because it's too large." %
               (type, filename))
        file_too_large = True
        content = ""
      checksum = md5.new(content).hexdigest()
      if options.verbose > 0 and not file_too_large:
        print "Uploading %s file for %s" % (type, filename)
      url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id)
      form_fields = [("filename", filename),
                     ("status", status),
                     ("checksum", checksum),
                     ("is_binary", str(is_binary)),
                     ("is_current", str(not is_base)),
                    ]
      if file_too_large:
        form_fields.append(("file_too_large", "1"))
      if options.email:
        form_fields.append(("user", options.email))
      ctype, body = EncodeMultipartFormData(form_fields,
                                            [("data", filename, content)])
      response_body = rpc_server.Send(url, body,
                                      content_type=ctype)
      if not response_body.startswith("OK"):
        StatusUpdate("  --> %s" % response_body)
        sys.exit(1)
    # Build a filename -> patch-id map (setdefault keeps the first id seen);
    # the list comprehension is used only for its side effect.
    patches = dict()
    [patches.setdefault(v, k) for k, v in patch_list]
    for filename in patches.keys():
      base_content, new_content, is_binary, status = files[filename]
      file_id_str = patches.get(filename)
      # A "nobase" marker means the server does not want the base file.
      if file_id_str.find("nobase") != -1:
        base_content = None
        file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
      file_id = int(file_id_str)
      if base_content != None:
        UploadFile(filename, file_id, base_content, is_binary, status, True)
      if new_content != None:
        UploadFile(filename, file_id, new_content, is_binary, status, False)
  def IsImage(self, filename):
    """Returns true if the filename has an image extension."""
    mimetype = mimetypes.guess_type(filename)[0]
    if not mimetype:
      return False
    return mimetype.startswith("image/")
class SubversionVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Subversion."""
  def __init__(self, options):
    super(SubversionVCS, self).__init__(options)
    if self.options.revision:
      # Accept "REV" or "REV:REV" (start and optional end revision).
      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
      if not match:
        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
    self.svnls_cache = {}
    # SVN base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)
  def GuessBase(self, required):
    """Wrapper for _GuessBase."""
    return self.svn_base
  def _GuessBase(self, required):
    """Returns the SVN base URL.
    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
      words = line.split()
      if len(words) == 2 and words[0] == "URL:":
        url = words[1]
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        username, netloc = urllib.splituser(netloc)
        if username:
          logging.info("Removed username from base URL")
        # Known hosting setups get hand-crafted *checkout* URLs; anything
        # else falls through to the generic reassembled URL.
        if netloc.endswith("svn.python.org"):
          if netloc == "svn.python.org":
            if path.startswith("/projects/"):
              path = path[9:]
          elif netloc != "pythondev@svn.python.org":
            ErrorExit("Unrecognized Python URL: %s" % url)
          base = "http://svn.python.org/view/*checkout*%s/" % path
          logging.info("Guessed Python base = %s", base)
        elif netloc.endswith("svn.collab.net"):
          if path.startswith("/repos/"):
            path = path[6:]
          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
          logging.info("Guessed CollabNet base = %s", base)
        elif netloc.endswith(".googlecode.com"):
          path = path + "/"
          base = urlparse.urlunparse(("http", netloc, path, params,
                                      query, fragment))
          logging.info("Guessed Google Code base = %s", base)
        else:
          path = path + "/"
          base = urlparse.urlunparse((scheme, netloc, path, params,
                                      query, fragment))
          logging.info("Guessed base = %s", base)
        return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None
  def GenerateDiff(self, args):
    """Return "svn diff" output, aborting if it contains no patches."""
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data
  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
      # Standard keywords
      'Date':                ['Date', 'LastChangedDate'],
      'Revision':            ['Revision', 'LastChangedRevision', 'Rev'],
      'Author':              ['Author', 'LastChangedBy'],
      'HeadURL':             ['HeadURL', 'URL'],
      'Id':                  ['Id'],
      # Aliases
      'LastChangedDate':     ['LastChangedDate', 'Date'],
      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
      'LastChangedBy':       ['LastChangedBy', 'Author'],
      'URL':                 ['URL', 'HeadURL'],
    }
    def repl(m):
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)
    # Only collapse the keywords actually enabled on this file.
    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)
  def GetUnknownFiles(self):
    """Return lines for files svn marks with "?" (unversioned)."""
    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
    unknown_files = []
    for line in status.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files
  def ReadFile(self, filename):
    """Returns the contents of a file."""
    file = open(filename, 'rb')
    result = ""
    try:
      result = file.read()
    finally:
      file.close()
    return result
  def GetStatus(self, filename):
    """Returns the status of a file."""
    if not self.options.revision:
      status = RunShell(["svn", "status", "--ignore-externals", filename])
      if not status:
        ErrorExit("svn status returned no output for %s" % filename)
      status_lines = status.splitlines()
      # If file is in a cl, the output will begin with
      # "\n--- Changelist 'cl_name':\n".  See
      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
      if (len(status_lines) == 3 and
          not status_lines[0] and
          status_lines[1].startswith("--- Changelist")):
        status = status_lines[2]
      else:
        status = status_lines[0]
    # If we have a revision to diff against we need to run "svn list"
    # for the old and the new revision and compare the results to get
    # the correct status for a file.
    else:
      dirname, relfilename = os.path.split(filename)
      if dirname not in self.svnls_cache:
        cmd = ["svn", "list", "-r", self.rev_start, dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to get status for %s." % filename)
        old_files = out.splitlines()
        args = ["svn", "list"]
        if self.rev_end:
          args += ["-r", self.rev_end]
        cmd = args + [dirname or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to run command %s" % cmd)
        self.svnls_cache[dirname] = (old_files, out.splitlines())
      old_files, new_files = self.svnls_cache[dirname]
      # Synthesize svn-status-style strings; the trailing spaces matter
      # because GetBaseFile indexes into columns 1 and 3.
      if relfilename in old_files and relfilename not in new_files:
        status = "D   "
      elif relfilename in old_files and relfilename in new_files:
        status = "M   "
      else:
        status = "A   "
    return status
  def GetBaseFile(self, filename):
    """Return (base_content, new_content, is_binary, status) for *filename*."""
    status = self.GetStatus(filename)
    base_content = None
    new_content = None
    # If a file is copied its status will be "A  +", which signifies
    # "addition-with-history".  See "svn st" for more information.  We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if status[0] == "A" and status[3] != "+":
      # We'll need to upload the new content if we're adding a binary file
      # since diff's output won't contain it.
      mimetype = RunShell(["svn", "propget", "svn:mime-type", filename],
                          silent_ok=True)
      base_content = ""
      is_binary = mimetype and not mimetype.startswith("text/")
      if is_binary and self.IsImage(filename):
        new_content = self.ReadFile(filename)
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+") or  # Copied file.
          (status[0] == " " and status[1] == "M")):  # Property change.
      args = []
      if self.options.revision:
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
      else:
        # Don't change filename, it's needed later.
        url = filename
        args += ["-r", "BASE"]
      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
      mimetype, returncode = RunShellWithReturnCode(cmd)
      if returncode:
        # File does not exist in the requested revision.
        # Reset mimetype, it contains an error message.
        mimetype = ""
      get_base = False
      is_binary = mimetype and not mimetype.startswith("text/")
      if status[0] == " ":
        # Empty base content just to force an upload.
        base_content = ""
      elif is_binary:
        if self.IsImage(filename):
          get_base = True
          if status[0] == "M":
            if not self.rev_end:
              new_content = self.ReadFile(filename)
            else:
              url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
              new_content = RunShell(["svn", "cat", url],
                                     universal_newlines=True, silent_ok=True)
        else:
          base_content = ""
      else:
        get_base = True
      if get_base:
        # Binary files must be read without newline translation.
        if is_binary:
          universal_newlines = False
        else:
          universal_newlines = True
        if self.rev_start:
          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
          # the full URL with "@REV" appended instead of using "-r" option.
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        else:
          base_content = RunShell(["svn", "cat", filename],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        if not is_binary:
          args = []
          if self.rev_start:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          else:
            url = filename
            args += ["-r", "BASE"]
          cmd = ["svn"] + args + ["propget", "svn:keywords", url]
          keywords, returncode = RunShellWithReturnCode(cmd)
          if keywords and not returncode:
            base_content = self._CollapseKeywords(base_content, keywords)
    else:
      StatusUpdate("svn status returned unexpected output: %s" % status)
      sys.exit(1)
    return base_content, new_content, is_binary, status[0:5]
class GitVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Git."""
  def __init__(self, options):
    super(GitVCS, self).__init__(options)
    # Map of filename -> hash of the base file, filled in by GenerateDiff.
    self.base_hashes = {}
  def GenerateDiff(self, extra_args):
    """Return "git diff" output rewritten into svn-style "Index:" format.
    Also records the left-hand ("base") blob hash of every file in the
    diff, so GetBaseFile can retrieve base contents later.
    """
    if self.options.revision:
      extra_args = [self.options.revision] + extra_args
    raw_diff = RunShell(["git", "diff", "--full-index"] + extra_args)
    header_re = re.compile(r"diff --git a/(.*) b/.*$")
    # The "index" line in a git diff looks like this (long hashes elided):
    #   index 82c0d44..b2cee3f 100755
    # The left hash identifies the base file.
    index_re = re.compile(r"index (\w+)\.\.")
    converted = []
    seen_files = 0
    current_file = None
    for raw_line in raw_diff.splitlines():
      header = header_re.match(raw_line)
      if header:
        seen_files += 1
        current_file = header.group(1)
        converted.append("Index: %s\n" % current_file)
        continue
      index_match = index_re.match(raw_line)
      if index_match:
        self.base_hashes[current_file] = index_match.group(1)
      converted.append(raw_line + "\n")
    if seen_files == 0:
      ErrorExit("No valid patches found in output from git diff")
    return "".join(converted)
  def GetUnknownFiles(self):
    """Return files in the working tree that git does not track."""
    listing = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                       silent_ok=True)
    return listing.splitlines()
  def GetBaseFile(self, filename):
    """Fetch the base revision of *filename* recorded by GenerateDiff."""
    base_hash = self.base_hashes[filename]
    new_content = None
    is_binary = False
    if base_hash == 40 * "0":
      # An all-zero hash means the file did not exist in the base revision.
      return ("", new_content, is_binary, "A")
    base_content, exit_code = RunShellWithReturnCode(["git", "show", base_hash])
    if exit_code:
      ErrorExit("Got error status from 'git show %s'" % base_hash)
    return (base_content, new_content, is_binary, "M")
class MercurialVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Mercurial."""

  def __init__(self, options, repo_dir):
    """Args:
      options: parsed command-line options.
      repo_dir: path to the repository root (we can be in a subdir of it).
    """
    super(MercurialVCS, self).__init__(options)
    # Absolute path to repository (we can be in a subdir)
    self.repo_dir = os.path.normpath(repo_dir)
    # Compute the subdir
    cwd = os.path.normpath(os.getcwd())
    assert cwd.startswith(self.repo_dir)
    self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
    if self.options.revision:
      self.base_rev = self.options.revision
    else:
      # "hg parent -q" prints "<rev>:<hash>"; keep the part after the colon.
      self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()

  def _GetRelPath(self, filename):
    """Get relative path of a file according to the current directory,
    given its logical path in the repo."""
    assert filename.startswith(self.subdir), filename
    return filename[len(self.subdir):].lstrip(r"\/")

  def GenerateDiff(self, extra_args):
    """Return a diff against self.base_rev, rewritten to resemble svn diff."""
    # If no file specified, restrict to the current subdir
    extra_args = extra_args or ["."]
    cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
    data = RunShell(cmd, silent_ok=True)
    svndiff = []
    filecount = 0
    for line in data.splitlines():
      m = re.match("diff --git a/(\S+) b/(\S+)", line)
      if m:
        # Modify line to make it look like as it comes from svn diff.
        # With this modification no changes on the server side are required
        # to make upload.py work with Mercurial repos.
        # NOTE: for proper handling of moved/copied files, we have to use
        # the second filename.
        filename = m.group(2)
        svndiff.append("Index: %s" % filename)
        svndiff.append("=" * 67)
        filecount += 1
        logging.info(line)
      else:
        svndiff.append(line)
    if not filecount:
      ErrorExit("No valid patches found in output from hg diff")
    return "\n".join(svndiff) + "\n"

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    # (Removed an unused local 'args' that was never read.)
    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                      silent_ok=True)
    unknown_files = []
    for line in status.splitlines():
      st, fn = line.split(" ", 1)
      if st == "?":
        unknown_files.append(fn)
    return unknown_files

  def GetBaseFile(self, filename):
    """Fetch base and working-copy contents for |filename|.

    Returns:
      A (base_content, new_content, is_binary, status) tuple.
    """
    # "hg status" and "hg cat" both take a path relative to the current subdir
    # rather than to the repo root, but "hg diff" has given us the full path
    # to the repo root.
    base_content = ""
    new_content = None
    is_binary = False
    oldrelpath = relpath = self._GetRelPath(filename)
    # "hg status -C" returns two lines for moved/copied files, one otherwise
    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
    out = out.splitlines()
    # HACK: strip error message about missing file/directory if it isn't in
    # the working copy
    if out[0].startswith('%s: ' % relpath):
      out = out[1:]
    if len(out) > 1:
      # Moved/copied => considered as modified, use old filename to
      # retrieve base contents
      oldrelpath = out[1].strip()
      status = "M"
    else:
      status, _ = out[0].split(' ', 1)
    if status != "A":
      base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
                              silent_ok=True)
      is_binary = "\0" in base_content  # Mercurial's heuristic
    if status != "R":
      # Use a context manager so the handle is closed promptly; the original
      # leaked the open file until garbage collection.
      with open(relpath, "rb") as fp:
        new_content = fp.read()
      is_binary = is_binary or "\0" in new_content
    if is_binary and base_content:
      # Fetch again without converting newlines
      base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
                              silent_ok=True, universal_newlines=False)
    if not is_binary or not self.IsImage(relpath):
      new_content = None
    return base_content, new_content, is_binary, status
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
    pertaining to filename.
  """
  patches = []
  current_name = None
  current_lines = []
  for line in data.splitlines(True):
    started = None
    if line.startswith('Index:'):
      started = line.split(':', 1)[1].strip()
    elif line.startswith('Property changes on:'):
      # When a file is modified, paths use '/' between directories, however
      # when a property is modified '\' is used on Windows. Make them the same
      # otherwise the file shows up twice.
      prop_path = line.split(':', 1)[1].strip().replace('\\', '/')
      if prop_path != current_name:
        # File has property changes but no modifications, create a new diff.
        started = prop_path
    if started:
      # A new per-file section begins: flush the previous one (if any).
      if current_name and current_lines:
        patches.append((current_name, ''.join(current_lines)))
      current_name = started
      current_lines = [line]
    else:
      current_lines.append(line)
  if current_name and current_lines:
    patches.append((current_name, ''.join(current_lines)))
  return patches
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
  """Uploads a separate patch for each file in the diff output.

  Used when the combined diff is too large to send in one request.

  Returns a list of [patch_key, filename] for each file.
  """
  patches = SplitPatch(data)
  rv = []
  for patch in patches:
    # Per-file patches over the size cap are skipped with a warning; the rest
    # of the upload continues.
    if len(patch[1]) > MAX_UPLOAD_SIZE:
      print ("Not uploading the patch for " + patch[0] +
             " because the file is too large.")
      continue
    form_fields = [("filename", patch[0])]
    if not options.download_base:
      # Tell the server that base-file contents will be uploaded separately.
      form_fields.append(("content_upload", "1"))
    files = [("data", "data.diff", patch[1])]
    ctype, body = EncodeMultipartFormData(form_fields, files)
    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
    print "Uploading patch for " + patch[0]
    response_body = rpc_server.Send(url, body, content_type=ctype)
    lines = response_body.splitlines()
    # Expected reply: first line "OK", second line the server's patch key.
    if not lines or lines[0] != "OK":
      StatusUpdate(" --> %s" % response_body)
      sys.exit(1)
    rv.append([lines[1], patch[0]])
  return rv
def GuessVCS(options):
  """Helper to guess the version control system.

  This examines the current directory, guesses which VersionControlSystem
  we're using, and returns an instance of the appropriate class. Exit with an
  error if we can't figure it out.

  Returns:
    A VersionControlSystem instance. Exits if the VCS can't be guessed.
  """
  # Mercurial has a command to get the base directory of a repository
  # Try running it, but don't die if we don't have hg installed.
  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
  try:
    out, returncode = RunShellWithReturnCode(["hg", "root"])
    if returncode == 0:
      return MercurialVCS(options, out.strip())
  except OSError, (errno, message):
    if errno != 2:  # ENOENT -- they don't have hg installed.
      raise
  # Subversion has a .svn in all working directories.
  if os.path.isdir('.svn'):
    logging.info("Guessed VCS = Subversion")
    return SubversionVCS(options)
  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  try:
    out, returncode = RunShellWithReturnCode(["git", "rev-parse",
                                              "--is-inside-work-tree"])
    if returncode == 0:
      return GitVCS(options)
  except OSError, (errno, message):
    if errno != 2:  # ENOENT -- they don't have git installed.
      raise
  # Nothing matched: bail out with an actionable message.
  ErrorExit(("Could not guess version control system. "
             "Are you in a working copy directory?"))
def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by
      the VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).
    The patchset id is None if the base files are not uploaded by this
    script (applies only to SVN checkouts).
  """
  logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                              "%(lineno)s %(message)s "))
  # Force the C locale so the output of the VCS tools is parseable
  # regardless of the user's language settings.
  os.environ['LC_ALL'] = 'C'
  options, args = parser.parse_args(argv[1:])
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)
  vcs = GuessVCS(options)
  if isinstance(vcs, SubversionVCS):
    # base field is only allowed for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    base = vcs.GuessBase(options.download_base)
  else:
    base = None
  # NOTE(review): this assignment is a no-op -- the condition already
  # requires options.download_base to be truthy. Possibly intended to
  # *force* base upload when no base URL could be guessed; confirm intent.
  if not base and options.download_base:
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  if data is None:
    data = vcs.GenerateDiff(args)
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  if options.issue:
    prompt = "Message describing this patch set: "
  else:
    prompt = "New issue subject: "
  message = options.message or raw_input(prompt).strip()
  if not message:
    ErrorExit("A non-empty message is required")
  rpc_server = GetRpcServer(options)
  form_fields = [("subject", message)]
  if base:
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    # Crude address check: require exactly one dot in the host part.
    for reviewer in options.reviewers.split(','):
      if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    for cc in options.cc.split(','):
      if "@" in cc and not cc.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % cc)
    form_fields.append(("cc", options.cc))
  description = options.description
  if options.description_file:
    # --description and --description_file are mutually exclusive.
    if options.description:
      ErrorExit("Can't specify description and description_file")
    file = open(options.description_file, 'r')
    description = file.read()
    file.close()
  if description:
    form_fields.append(("description", description))
  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5.new(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  # If we're uploading base files, don't send the email before the uploads, so
  # that it contains the file status.
  if options.send_mail and options.download_base:
    form_fields.append(("send_mail", "1"))
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  if len(data) > MAX_UPLOAD_SIZE:
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)
  patchset = None
  if not options.download_base or not uploaded_diff_file:
    # Expected reply: message line, patchset id line, then one
    # "<patch key> <filename>" line per file.
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # The issue id is the trailing path component of the URL in the message.
  issue = msg[msg.rfind("/")+1:]
  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result
  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
  if options.send_mail:
    rpc_server.Send("/" + issue + "/mail", payload="")
  return issue, patchset
def main():
  """Command-line entry point; turns Ctrl-C into a clean exit."""
  try:
    RealMain(sys.argv)
  except KeyboardInterrupt:
    # Bare (Python 2) print emits the newline the interrupted prompt lacked.
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)


if __name__ == "__main__":
  main()
|
s40523125/2016fallcp_hw | refs/heads/gh-pages | plugin/render_math/__init__.py | 371 | from .math import *
|
alexhenrie/poedit | refs/heads/master | deps/boost/tools/build/test/explicit.py | 7 | #!/usr/bin/python
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
t = BoostBuild.Tester(use_test_config=False)
t.write("jamroot.jam", """\
exe hello : hello.cpp ;
exe hello2 : hello.cpp ;
explicit hello2 ;
""")
t.write("hello.cpp", "int main() {}\n")
t.run_build_system()
t.ignore("*.tds")
t.expect_addition(BoostBuild.List("bin/$toolset/debug*/hello") * \
[".exe", ".obj"])
t.expect_nothing_more()
t.run_build_system(["hello2"])
t.expect_addition("bin/$toolset/debug*/hello2.exe")
t.rm(".")
# Test that 'explicit' used in a helper rule applies to the current project, and
# not to the Jamfile where the helper rule is defined.
t.write("jamroot.jam", """\
rule myinstall ( name : target )
{
install $(name)-bin : $(target) ;
explicit $(name)-bin ;
alias $(name) : $(name)-bin ;
}
""")
t.write("sub/a.cpp", "\n")
t.write("sub/jamfile.jam", "myinstall dist : a.cpp ;")
t.run_build_system(subdir="sub")
t.expect_addition("sub/dist-bin/a.cpp")
t.rm("sub/dist-bin")
t.write("sub/jamfile.jam", """\
myinstall dist : a.cpp ;
explicit dist ;
""")
t.run_build_system(subdir="sub")
t.expect_nothing_more()
t.cleanup()
|
pannkotsky/groupmate | refs/heads/master | backend/apps/forum/models.py | 1 | from django.db import models
from model_utils.models import TimeStampedModel
from users.models import EmailUser
class Topic(TimeStampedModel):
    """A discussion thread; timestamps come from TimeStampedModel."""

    name = models.CharField(max_length=100)
    # SET_NULL keeps the topic alive when its author's account is deleted.
    author = models.ForeignKey(
        EmailUser,
        related_name='created_topics',
        null=True,
        on_delete=models.SET_NULL
    )

    def latest_post(self):
        """Return the most recently created post, or None if there are none.

        Uses QuerySet.first() so only a single row is fetched; the previous
        implementation evaluated the whole queryset just to test truthiness
        and then indexed it, issuing two queries.
        """
        return self.posts.order_by('-created').first()

    def posts_count(self):
        """Number of posts attached to this topic."""
        return self.posts.count()

    def __str__(self):
        return self.name
class Post(TimeStampedModel):
    """A single message inside a Topic."""

    text = models.TextField()
    # Deleting the topic removes its posts.
    topic = models.ForeignKey(
        Topic,
        related_name='posts',
        on_delete=models.CASCADE
    )
    # Posts survive author deletion; the author link is just nulled.
    author = models.ForeignKey(
        EmailUser,
        related_name='posts',
        null=True,
        on_delete=models.SET_NULL
    )

    def __str__(self):
        """Short summary: date, author, and text truncated to 20 chars."""
        if len(self.text) < 20:
            preview = self.text
        else:
            preview = self.text[:17] + '...'
        return "{} {}: {}".format(
            self.created.strftime('%d.%m.%Y'),
            self.author,
            preview
        )
class TeacherPost(Post):
    """Proxy model over Post: same DB table, separate Python identity.

    Presumably used to expose teacher-authored posts separately (e.g. in the
    admin) -- confirm against the admin registrations/callers.
    """
    class Meta:
        proxy = True
|
haad/ansible | refs/heads/devel | lib/ansible/plugins/action/debug.py | 17 | # Copyright 2012, Dag Wieers <dag@wieers.com>
# Copyright 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleUndefinedVariable
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_text
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
    ''' Print statements during execution '''

    # debug never copies files to the managed host.
    TRANSFERS_FILES = False
    # The only task arguments this action accepts.
    VALID_ARGS = frozenset(('msg', 'var', 'verbosity'))

    def run(self, tmp=None, task_vars=None):
        """Render 'msg' (or template 'var') into the task result.

        The whole task is skipped when its 'verbosity' argument exceeds the
        current display verbosity.
        """
        if task_vars is None:
            task_vars = dict()
        # Reject unknown options up front with a task failure.
        for arg in self._task.args:
            if arg not in self.VALID_ARGS:
                return {"failed": True, "msg": "'%s' is not a valid option in debug" % arg}
        if 'msg' in self._task.args and 'var' in self._task.args:
            return {"failed": True, "msg": "'msg' and 'var' are incompatible options"}
        result = super(ActionModule, self).run(tmp, task_vars)
        # get task verbosity
        verbosity = int(self._task.args.get('verbosity', 0))
        if verbosity <= self._display.verbosity:
            if 'msg' in self._task.args:
                result['msg'] = self._task.args['msg']
            elif 'var' in self._task.args:
                try:
                    results = self._templar.template(self._task.args['var'], convert_bare=True, fail_on_undefined=True, bare_deprecated=False)
                    if results == self._task.args['var']:
                        # if results is not str/unicode type, raise an exception
                        if not isinstance(results, string_types):
                            raise AnsibleUndefinedVariable
                        # If var name is same as result, try to template it
                        results = self._templar.template("{{" + results + "}}", convert_bare=True, fail_on_undefined=True)
                except AnsibleUndefinedVariable as e:
                    # Undefined vars are reported, not fatal; detail only at -v+.
                    results = u"VARIABLE IS NOT DEFINED!"
                    if self._display.verbosity > 0:
                        results += u": %s" % to_text(e)
                if isinstance(self._task.args['var'], (list, dict)):
                    # If var is a list or dict, use the type as key to display
                    result[to_text(type(self._task.args['var']))] = results
                else:
                    result[self._task.args['var']] = results
            else:
                # Neither 'msg' nor 'var' given: classic default output.
                result['msg'] = 'Hello world!'
            # force flag to make debug output module always verbose
            result['_ansible_verbose_always'] = True
        else:
            result['skipped_reason'] = "Verbosity threshold not met."
            result['skipped'] = True
        return result
|
veger/ansible | refs/heads/devel | lib/ansible/modules/cloud/docker/docker_prune.py | 7 | #!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_prune
short_description: Allows to prune various docker objects
description:
- Allows to run C(docker container prune), C(docker image prune), C(docker network prune)
and C(docker volume prune) via the Docker API.
version_added: "2.8"
options:
containers:
description:
- Whether to prune containers.
type: bool
default: no
containers_filters:
description:
- A dictionary of filter values used for selecting containers to delete.
- "For example, C(until: 24h)."
- See L(the docker documentation,https://docs.docker.com/engine/reference/commandline/container_prune/#filtering)
for more information on possible filters.
type: dict
images:
description:
- Whether to prune images.
type: bool
default: no
images_filters:
description:
- A dictionary of filter values used for selecting images to delete.
- "For example, C(dangling: true)."
- See L(the docker documentation,https://docs.docker.com/engine/reference/commandline/image_prune/#filtering)
for more information on possible filters.
type: dict
networks:
description:
- Whether to prune networks.
type: bool
default: no
networks_filters:
description:
- A dictionary of filter values used for selecting networks to delete.
- See L(the docker documentation,https://docs.docker.com/engine/reference/commandline/network_prune/#filtering)
for more information on possible filters.
type: dict
volumes:
description:
- Whether to prune volumes.
type: bool
default: no
volumes_filters:
description:
- A dictionary of filter values used for selecting volumes to delete.
- See L(the docker documentation,https://docs.docker.com/engine/reference/commandline/volume_prune/#filtering)
for more information on possible filters.
type: dict
builder_cache:
description:
- Whether to prune the builder cache.
- Requires version 3.3.0 of the Python Docker SDK or newer.
type: bool
default: no
extends_documentation_fragment:
- docker
author:
- "Felix Fontein (@felixfontein)"
requirements:
- "python >= 2.6"
- "docker >= 2.1.0"
- "Please note that the L(docker-py,https://pypi.org/project/docker-py/) Python
module has been superseded by L(docker,https://pypi.org/project/docker/)
(see L(here,https://github.com/docker/docker-py/issues/1310) for details).
Version 2.1.0 or newer is only available with the C(docker) module."
- "Docker API >= 1.25"
'''
EXAMPLES = '''
- name: Prune containers older than 24h
docker_prune:
containers: yes
containers_filters:
# only consider containers created more than 24 hours ago
until: 24h
- name: Prune everything
docker_prune:
containers: yes
images: yes
networks: yes
volumes: yes
builder_cache: yes
'''
RETURN = '''
# containers
containers:
description:
- List of IDs of deleted containers.
returned: C(containers) is C(true)
type: list
sample: '[]'
containers_space_reclaimed:
description:
- Amount of reclaimed disk space from container pruning in bytes.
returned: C(containers) is C(true)
type: int
sample: '0'
# images
images:
description:
- List of IDs of deleted images.
returned: C(images) is C(true)
type: list
sample: '[]'
images_space_reclaimed:
description:
- Amount of reclaimed disk space from image pruning in bytes.
returned: C(images) is C(true)
type: int
sample: '0'
# networks
networks:
description:
- List of IDs of deleted networks.
returned: C(networks) is C(true)
type: list
sample: '[]'
# volumes
volumes:
description:
- List of IDs of deleted volumes.
returned: C(volumes) is C(true)
type: list
sample: '[]'
volumes_space_reclaimed:
description:
- Amount of reclaimed disk space from volumes pruning in bytes.
returned: C(volumes) is C(true)
type: int
sample: '0'
# builder_cache
builder_cache_space_reclaimed:
description:
- Amount of reclaimed disk space from builder cache pruning in bytes.
returned: C(builder_cache) is C(true)
type: int
sample: '0'
'''
from distutils.version import LooseVersion
from ansible.module_utils.docker_common import AnsibleDockerClient
try:
from ansible.module_utils.docker_common import docker_version, clean_dict_booleans_for_docker_api
except Exception as dummy:
# missing docker-py handled in ansible.module_utils.docker
pass
def main():
    """Entry point: prune each requested docker object type, report results."""
    argument_spec = dict(
        containers=dict(type='bool', default=False),
        containers_filters=dict(type='dict'),
        images=dict(type='bool', default=False),
        images_filters=dict(type='dict'),
        networks=dict(type='bool', default=False),
        networks_filters=dict(type='dict'),
        volumes=dict(type='bool', default=False),
        volumes_filters=dict(type='dict'),
        builder_cache=dict(type='bool', default=False),
    )

    client = AnsibleDockerClient(
        argument_spec=argument_spec,
        # supports_check_mode=True,
        min_docker_api_version='1.25',
        min_docker_version='2.1.0',
    )

    # Version checks
    # prune_builds() only exists in docker SDK >= 3.3.0; fail early otherwise.
    cache_min_version = '3.3.0'
    if client.module.params['builder_cache'] and client.docker_py_version < LooseVersion(cache_min_version):
        msg = "Error: docker version is %s. Minimum version required for builds option is %s. Use `pip install --upgrade docker` to upgrade."
        client.module.fail(msg=(msg % (docker_version, cache_min_version)))

    result = dict()

    if client.module.params['containers']:
        filters = clean_dict_booleans_for_docker_api(client.module.params.get('containers_filters'))
        res = client.prune_containers(filters=filters)
        # The API returns null instead of [] when nothing was deleted.
        result['containers'] = res.get('ContainersDeleted') or []
        result['containers_space_reclaimed'] = res['SpaceReclaimed']

    if client.module.params['images']:
        filters = clean_dict_booleans_for_docker_api(client.module.params.get('images_filters'))
        res = client.prune_images(filters=filters)
        result['images'] = res.get('ImagesDeleted') or []
        result['images_space_reclaimed'] = res['SpaceReclaimed']

    if client.module.params['networks']:
        filters = clean_dict_booleans_for_docker_api(client.module.params.get('networks_filters'))
        res = client.prune_networks(filters=filters)
        # Network pruning reports no SpaceReclaimed value.
        result['networks'] = res.get('NetworksDeleted') or []

    if client.module.params['volumes']:
        filters = clean_dict_booleans_for_docker_api(client.module.params.get('volumes_filters'))
        res = client.prune_volumes(filters=filters)
        result['volumes'] = res.get('VolumesDeleted') or []
        result['volumes_space_reclaimed'] = res['SpaceReclaimed']

    if client.module.params['builder_cache']:
        res = client.prune_builds()
        result['builder_cache_space_reclaimed'] = res['SpaceReclaimed']

    client.module.exit_json(**result)


if __name__ == '__main__':
    main()
|
ppries/tensorflow | refs/heads/master | tensorflow/contrib/distributions/python/ops/distribution.py | 1 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base classes for probability distributions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import contextlib
import inspect
import types
import warnings
import numpy as np
import six
from tensorflow.contrib import framework as contrib_framework
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
_DISTRIBUTION_PUBLIC_METHOD_WRAPPERS = [
"batch_shape", "get_batch_shape", "event_shape", "get_event_shape",
"sample_n", "log_prob", "prob", "log_cdf", "cdf", "log_survival_function",
"survival_function", "entropy", "mean", "variance", "std", "mode"]
# six.add_metaclass keeps the metaclass declaration Python 2/3 compatible.
@six.add_metaclass(abc.ABCMeta)
class _BaseDistribution(object):
  """Abstract base class needed for resolving subclass hierarchy."""
  pass
def _copy_fn(fn):
"""Create a deep copy of fn.
Args:
fn: a callable
Returns:
A `FunctionType`: a deep copy of fn.
Raises:
TypeError: if `fn` is not a callable.
"""
if not callable(fn):
raise TypeError("fn is not callable: %s" % fn)
# The blessed way to copy a function. copy.deepcopy fails to create
# a non-reference copy. Since:
# types.FunctionType == type(lambda: None),
# and the docstring for the function type states:
#
# function(code, globals[, name[, argdefs[, closure]]])
#
# Create a function object from a code object and a dictionary.
# ...
#
# Here we can use this to create a new function with the old function's
# code, globals, closure, etc.
return types.FunctionType(
code=fn.__code__, globals=fn.__globals__,
name=fn.__name__, argdefs=fn.__defaults__,
closure=fn.__closure__)
def _update_docstring(old_str, append_str):
"""Update old_str by inserting append_str just before the "Args:" section."""
old_str_lines = old_str.split("\n")
# Step 0: Prepend spaces to all lines of append_str. This is
# necessary for correct markdown generation.
append_str = "\n".join(" %s" % line for line in append_str.split("\n"))
# Step 1: Find mention of "Args":
has_args_ix = [
ix for ix, line in enumerate(old_str_lines)
if line.strip().lower() == "args:"]
if has_args_ix:
final_args_ix = has_args_ix[-1]
return ("\n".join(old_str_lines[:final_args_ix])
+ "\n\n" + append_str + "\n\n"
+ "\n".join(old_str_lines[final_args_ix:]))
else:
return old_str + "\n\n" + append_str
class _DistributionMeta(abc.ABCMeta):
  """Metaclass that merges `_foo` specialization docstrings into the public
  `foo` wrappers a subclass inherits from `Distribution`."""

  def __new__(mcs, classname, baseclasses, attrs):
    """Control the creation of subclasses of the Distribution class.

    The main purpose of this method is to properly propagate docstrings
    from private Distribution methods, like `_log_prob`, into their
    public wrappers as inherited by the Distribution base class
    (e.g. `log_prob`).

    Args:
      classname: The name of the subclass being created.
      baseclasses: A tuple of parent classes.
      attrs: A dict mapping new attributes to their values.

    Returns:
      The class object.

    Raises:
      TypeError: If `Distribution` is not a subclass of `BaseDistribution`, or
        the new class is derived via multiple inheritance and the first
        parent class is not a subclass of `BaseDistribution`.
      AttributeError: If `Distribution` does not implement e.g. `log_prob`.
      ValueError: If a `Distribution` public method lacks a docstring.
    """
    if not baseclasses:  # Nothing to be done for Distribution
      raise TypeError("Expected non-empty baseclass. Does Distribution "
                      "not subclass _BaseDistribution?")
    which_base = [
        base for base in baseclasses
        if base == _BaseDistribution or issubclass(base, Distribution)]
    base = which_base[0]
    if base == _BaseDistribution:  # Nothing to be done for Distribution
      return abc.ABCMeta.__new__(mcs, classname, baseclasses, attrs)
    if not issubclass(base, Distribution):
      raise TypeError("First parent class declared for %s must be "
                      "Distribution, but saw '%s'" % (classname, base.__name__))
    for attr in _DISTRIBUTION_PUBLIC_METHOD_WRAPPERS:
      special_attr = "_%s" % attr
      class_attr_value = attrs.get(attr, None)
      if attr in attrs:
        # The method is being overridden, do not update its docstring
        continue
      base_attr_value = getattr(base, attr, None)
      if not base_attr_value:
        raise AttributeError(
            "Internal error: expected base class '%s' to implement method '%s'"
            % (base.__name__, attr))
      class_special_attr_value = attrs.get(special_attr, None)
      if class_special_attr_value is None:
        # No _special method available, no need to update the docstring.
        continue
      class_special_attr_docstring = inspect.getdoc(class_special_attr_value)
      if not class_special_attr_docstring:
        # No docstring to append.
        continue
      # Deep-copy the inherited wrapper so mutating __doc__ below does not
      # alter the base class's own method (functions are shared objects).
      class_attr_value = _copy_fn(base_attr_value)
      class_attr_docstring = inspect.getdoc(base_attr_value)
      if class_attr_docstring is None:
        raise ValueError(
            "Expected base class fn to contain a docstring: %s.%s"
            % (base.__name__, attr))
      class_attr_value.__doc__ = _update_docstring(
          class_attr_value.__doc__,
          ("Additional documentation from `%s`:\n\n%s"
           % (classname, class_special_attr_docstring)))
      attrs[attr] = class_attr_value
    return abc.ABCMeta.__new__(mcs, classname, baseclasses, attrs)
@six.add_metaclass(_DistributionMeta)
class Distribution(_BaseDistribution):
"""A generic probability distribution base class.
`Distribution` is a base class for constructing and organizing properties
(e.g., mean, variance) of random variables (e.g, Bernoulli, Gaussian).
### Subclassing
Subclasses are expected to implement a leading-underscore version of the
same-named function. The argument signature should be identical except for
the omission of `name="..."`. For example, to enable `log_prob(value,
name="log_prob")` a subclass should implement `_log_prob(value)`.
Subclasses can append to public-level docstrings by providing
docstrings for their method specializations. For example:
```python
@distribution_util.AppendDocstring("Some other details.")
def _log_prob(self, value):
...
```
would add the string "Some other details." to the `log_prob` function
docstring. This is implemented as a simple decorator to avoid python
linter complaining about missing Args/Returns/Raises sections in the
partial docstrings.
### Broadcasting, batching, and shapes
All distributions support batches of independent distributions of that type.
The batch shape is determined by broadcasting together the parameters.
The shape of arguments to `__init__`, `cdf`, `log_cdf`, `prob`, and
`log_prob` reflect this broadcasting, as does the return value of `sample` and
`sample_n`.
`sample_n_shape = (n,) + batch_shape + event_shape`, where `sample_n_shape` is
the shape of the `Tensor` returned from `sample_n`, `n` is the number of
samples, `batch_shape` defines how many independent distributions there are,
and `event_shape` defines the shape of samples from each of those independent
distributions. Samples are independent along the `batch_shape` dimensions, but
not necessarily so along the `event_shape` dimensions (depending on the
particulars of the underlying distribution).
Using the `Uniform` distribution as an example:
```python
minval = 3.0
maxval = [[4.0, 6.0],
[10.0, 12.0]]
# Broadcasting:
# This instance represents 4 Uniform distributions. Each has a lower bound at
# 3.0 as the `minval` parameter was broadcasted to match `maxval`'s shape.
u = Uniform(minval, maxval)
# `event_shape` is `TensorShape([])`.
event_shape = u.get_event_shape()
# `event_shape_t` is a `Tensor` which will evaluate to [].
event_shape_t = u.event_shape
# Sampling returns a sample per distribution. `samples` has shape
# (5, 2, 2), which is (n,) + batch_shape + event_shape, where n=5,
# batch_shape=(2, 2), and event_shape=().
samples = u.sample_n(5)
# The broadcasting holds across methods. Here we use `cdf` as an example. The
# same holds for `log_cdf` and the likelihood functions.
# `cum_prob` has shape (2, 2) as the `value` argument was broadcasted to the
# shape of the `Uniform` instance.
cum_prob_broadcast = u.cdf(4.0)
# `cum_prob`'s shape is (2, 2), one per distribution. No broadcasting
# occurred.
cum_prob_per_dist = u.cdf([[4.0, 5.0],
[6.0, 7.0]])
# INVALID as the `value` argument is not broadcastable to the distribution's
# shape.
cum_prob_invalid = u.cdf([4.0, 5.0, 6.0])
```
### Parameter values leading to undefined statistics or distributions.
Some distributions do not have well-defined statistics for all initialization
parameter values. For example, the beta distribution is parameterized by
positive real numbers `a` and `b`, and does not have well-defined mode if
`a < 1` or `b < 1`.
The user is given the option of raising an exception or returning `NaN`.
```python
a = tf.exp(tf.matmul(logits, weights_a))
b = tf.exp(tf.matmul(logits, weights_b))
# Will raise exception if ANY batch member has a < 1 or b < 1.
dist = distributions.beta(a, b, allow_nan_stats=False)
mode = dist.mode().eval()
# Will return NaN for batch members with either a < 1 or b < 1.
dist = distributions.beta(a, b, allow_nan_stats=True) # Default behavior
mode = dist.mode().eval()
```
In all cases, an exception is raised if *invalid* parameters are passed, e.g.
```python
# Will raise an exception if any Op is run.
negative_a = -1.0 * a # beta distribution by definition has a > 0.
dist = distributions.beta(negative_a, b, allow_nan_stats=True)
dist.mean().eval()
```
"""
def __init__(self,
dtype,
is_continuous,
is_reparameterized,
validate_args,
allow_nan_stats,
parameters=None,
graph_parents=None,
name=None):
"""Constructs the `Distribution`.
**This is a private method for subclass use.**
Args:
dtype: The type of the event samples. `None` implies no type-enforcement.
is_continuous: Python boolean. If `True` this
`Distribution` is continuous over its supported domain.
is_reparameterized: Python boolean. If `True` this
`Distribution` can be reparameterized in terms of some standard
distribution with a function whose Jacobian is constant for the support
of the standard distribution.
validate_args: Python boolean. Whether to validate input with asserts.
If `validate_args` is `False`, and the inputs are invalid,
correct behavior is not guaranteed.
allow_nan_stats: Python boolean. If `False`, raise an
exception if a statistic (e.g., mean, mode) is undefined for any batch
member. If True, batch members with valid parameters leading to
undefined statistics will return `NaN` for this statistic.
parameters: Python dictionary of parameters used to instantiate this
`Distribution`.
graph_parents: Python list of graph prerequisites of this `Distribution`.
name: A name for this distribution. Default: subclass name.
Raises:
ValueError: if any member of graph_parents is `None` or not a `Tensor`.
"""
graph_parents = [] if graph_parents is None else graph_parents
for i, t in enumerate(graph_parents):
if t is None or not contrib_framework.is_tensor(t):
raise ValueError("Graph parent item %d is not a Tensor; %s." % (i, t))
parameters = parameters or {}
self._dtype = dtype
self._is_continuous = is_continuous
self._is_reparameterized = is_reparameterized
self._allow_nan_stats = allow_nan_stats
self._validate_args = validate_args
self._parameters = parameters
self._graph_parents = graph_parents
self._name = name or type(self).__name__
@classmethod
def param_shapes(cls, sample_shape, name="DistributionParamShapes"):
"""Shapes of parameters given the desired shape of a call to `sample()`.
Subclasses should override static method `_param_shapes`.
Args:
sample_shape: `Tensor` or python list/tuple. Desired shape of a call to
`sample()`.
name: name to prepend ops with.
Returns:
`dict` of parameter name to `Tensor` shapes.
"""
with ops.name_scope(name, values=[sample_shape]):
return cls._param_shapes(sample_shape)
@classmethod
def param_static_shapes(cls, sample_shape):
"""param_shapes with static (i.e. TensorShape) shapes.
Args:
sample_shape: `TensorShape` or python list/tuple. Desired shape of a call
to `sample()`.
Returns:
`dict` of parameter name to `TensorShape`.
Raises:
ValueError: if `sample_shape` is a `TensorShape` and is not fully defined.
"""
if isinstance(sample_shape, tensor_shape.TensorShape):
if not sample_shape.is_fully_defined():
raise ValueError("TensorShape sample_shape must be fully defined")
sample_shape = sample_shape.as_list()
params = cls.param_shapes(sample_shape)
static_params = {}
for name, shape in params.items():
static_shape = tensor_util.constant_value(shape)
if static_shape is None:
raise ValueError(
"sample_shape must be a fully-defined TensorShape or list/tuple")
static_params[name] = tensor_shape.TensorShape(static_shape)
return static_params
  @staticmethod
  def _param_shapes(sample_shape):
    # Subclass hook backing `param_shapes`; must return a dict mapping
    # parameter names to `Tensor` shapes.
    raise NotImplementedError("_param_shapes not implemented")
  @property
  def name(self):
    """Name prepended to all ops created by this `Distribution`."""
    # Set at construction time; defaults to the subclass name.
    return self._name
  @property
  def dtype(self):
    """The `DType` of `Tensor`s handled by this `Distribution`."""
    # `None` means no dtype enforcement (see `__init__`).
    return self._dtype
  @property
  def parameters(self):
    """Dictionary of parameters used to instantiate this `Distribution`."""
    # Consumed by `copy()` to re-instantiate the distribution.
    return self._parameters
  @property
  def is_continuous(self):
    """Python boolean; `True` if this `Distribution` is continuous."""
    return self._is_continuous
  @property
  def is_reparameterized(self):
    """Python boolean; `True` if samples can be reparameterized."""
    return self._is_reparameterized
  @property
  def allow_nan_stats(self):
    """Python boolean describing behavior when a stat is undefined.

    Stats return +/- infinity when it makes sense. E.g., the variance
    of a Cauchy distribution is infinity. However, sometimes the
    statistic is undefined, e.g., if a distribution's pdf does not achieve a
    maximum within the support of the distribution, the mode is undefined.
    If the mean is undefined, then by definition the variance is undefined.
    E.g. the mean for Student's T for df = 1 is undefined (no clear way to say
    it is either + or - infinity), so the variance = E[(X - mean)^2] is also
    undefined.

    Returns:
      allow_nan_stats: Python boolean.
    """
    return self._allow_nan_stats
  @property
  def validate_args(self):
    """Python boolean indicating possibly expensive checks are enabled."""
    return self._validate_args
def copy(self, **override_parameters_kwargs):
"""Creates a deep copy of the distribution.
Note: the copy distribution may continue to depend on the original
intialization arguments.
Args:
**override_parameters_kwargs: String/value dictionary of initialization
arguments to override with new values.
Returns:
distribution: A new instance of `type(self)` intitialized from the union
of self.parameters and override_parameters_kwargs, i.e.,
`dict(self.parameters, **override_parameters_kwargs)`.
"""
parameters = dict(self.parameters, **override_parameters_kwargs)
# Python3 leaks "__class__" into `locals()` so we remove if present.
# TODO(b/32376812): Remove this pop.
parameters.pop("__class__", None)
return type(self)(**parameters)
  def _batch_shape(self):
    # Subclass hook backing `batch_shape`.
    raise NotImplementedError("batch_shape is not implemented")
def batch_shape(self, name="batch_shape"):
"""Shape of a single sample from a single event index as a 1-D `Tensor`.
The product of the dimensions of the `batch_shape` is the number of
independent distributions of this kind the instance represents.
Args:
name: name to give to the op
Returns:
batch_shape: `Tensor`.
"""
with self._name_scope(name):
return self._batch_shape()
  def _get_batch_shape(self):
    # Default: completely unknown static batch shape; subclasses override.
    return tensor_shape.TensorShape(None)
def get_batch_shape(self):
"""Shape of a single sample from a single event index as a `TensorShape`.
Same meaning as `batch_shape`. May be only partially defined.
Returns:
batch_shape: `TensorShape`, possibly unknown.
"""
return self._get_batch_shape()
  def _event_shape(self):
    # Subclass hook backing `event_shape`.
    raise NotImplementedError("event_shape is not implemented")
def event_shape(self, name="event_shape"):
"""Shape of a single sample from a single batch as a 1-D int32 `Tensor`.
Args:
name: name to give to the op
Returns:
event_shape: `Tensor`.
"""
with self._name_scope(name):
return self._event_shape()
  def _get_event_shape(self):
    # Default: completely unknown static event shape; subclasses override.
    return tensor_shape.TensorShape(None)
def get_event_shape(self):
"""Shape of a single sample from a single batch as a `TensorShape`.
Same meaning as `event_shape`. May be only partially defined.
Returns:
event_shape: `TensorShape`, possibly unknown.
"""
return self._get_event_shape()
  def _sample_n(self, n, seed=None):
    # Subclass hook backing `sample_n` / `sample`.
    raise NotImplementedError("sample_n is not implemented")
def sample(self, sample_shape=(), seed=None, name="sample",
**condition_kwargs):
"""Generate samples of the specified shape.
Note that a call to `sample()` without arguments will generate a single
sample.
Args:
sample_shape: 0D or 1D `int32` `Tensor`. Shape of the generated samples.
seed: Python integer seed for RNG
name: name to give to the op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
samples: a `Tensor` with prepended dimensions `sample_shape`.
"""
with self._name_scope(name, values=[sample_shape]):
sample_shape = ops.convert_to_tensor(
sample_shape, dtype=dtypes.int32, name="sample_shape")
if sample_shape.get_shape().ndims == 0:
return self.sample_n(sample_shape, seed, **condition_kwargs)
sample_shape, total = self._expand_sample_shape(sample_shape)
samples = self.sample_n(total, seed, **condition_kwargs)
output_shape = array_ops.concat_v2(
[sample_shape, array_ops.slice(array_ops.shape(samples), [1], [-1])],
0)
output = array_ops.reshape(samples, output_shape)
output.set_shape(tensor_util.constant_value_as_shape(
sample_shape).concatenate(samples.get_shape()[1:]))
return output
  def sample_n(self, n, seed=None, name="sample_n", **condition_kwargs):
    """Generate `n` samples.

    Args:
      n: `Scalar` `Tensor` of type `int32` or `int64`, the number of
        observations to sample.
      seed: Python integer seed for RNG
      name: name to give to the op.
      **condition_kwargs: Named arguments forwarded to subclass implementation.

    Returns:
      samples: a `Tensor` with a prepended dimension (n,).

    Raises:
      TypeError: if `n` is not an integer type.
    """
    # Deprecated entry point; `sample` is the supported API.
    warnings.warn("Please use `sample` instead of `sample_n`. `sample_n` "
                  "will be deprecated in December 2016.",
                  PendingDeprecationWarning)
    with self._name_scope(name, values=[n]):
      n = ops.convert_to_tensor(n, name="n")
      if not n.dtype.is_integer:
        raise TypeError("n.dtype=%s is not an integer type" % n.dtype)
      x = self._sample_n(n, seed, **condition_kwargs)
      # Set shape hints.
      sample_shape = tensor_shape.TensorShape(
          tensor_util.constant_value(n))
      batch_ndims = self.get_batch_shape().ndims
      event_ndims = self.get_event_shape().ndims
      if batch_ndims is not None and event_ndims is not None:
        # Full static rank information: sample + batch + event shape known.
        inferred_shape = sample_shape.concatenate(
            self.get_batch_shape().concatenate(
                self.get_event_shape()))
        x.set_shape(inferred_shape)
      elif x.get_shape().ndims is not None and x.get_shape().ndims > 0:
        # Partial information: merge whichever pieces are statically known.
        x.get_shape()[0].merge_with(sample_shape[0])
        if batch_ndims is not None and batch_ndims > 0:
          x.get_shape()[1:1+batch_ndims].merge_with(self.get_batch_shape())
        if event_ndims is not None and event_ndims > 0:
          x.get_shape()[-event_ndims:].merge_with(self.get_event_shape())
      return x
  def _log_prob(self, value):
    # Subclass hook backing `log_prob`.
    raise NotImplementedError("log_prob is not implemented")
def log_prob(self, value, name="log_prob", **condition_kwargs):
"""Log probability density/mass function (depending on `is_continuous`).
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
log_prob: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with
values of type `self.dtype`.
"""
with self._name_scope(name, values=[value]):
value = ops.convert_to_tensor(value, name="value")
try:
return self._log_prob(value, **condition_kwargs)
except NotImplementedError as original_exception:
try:
return math_ops.log(self._prob(value, **condition_kwargs))
except NotImplementedError:
raise original_exception
def prob(self, value, name="prob", **condition_kwargs):
"""Probability density/mass function (depending on `is_continuous`).
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
prob: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with
values of type `self.dtype`.
"""
with self._name_scope(name, values=[value]):
value = ops.convert_to_tensor(value, name="value")
try:
return self._prob(value, **condition_kwargs)
except NotImplementedError as original_exception:
try:
return math_ops.exp(self._log_prob(value, **condition_kwargs))
except NotImplementedError:
raise original_exception
  def _log_cdf(self, value):
    # Subclass hook backing `log_cdf`.
    raise NotImplementedError("log_cdf is not implemented")
def log_cdf(self, value, name="log_cdf", **condition_kwargs):
"""Log cumulative distribution function.
Given random variable `X`, the cumulative distribution function `cdf` is:
```
log_cdf(x) := Log[ P[X <= x] ]
```
Often, a numerical approximation can be used for `log_cdf(x)` that yields
a more accurate answer than simply taking the logarithm of the `cdf` when
`x << -1`.
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
logcdf: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with
values of type `self.dtype`.
"""
with self._name_scope(name, values=[value]):
value = ops.convert_to_tensor(value, name="value")
try:
return self._log_cdf(value, **condition_kwargs)
except NotImplementedError as original_exception:
try:
return math_ops.log(self._cdf(value, **condition_kwargs))
except NotImplementedError:
raise original_exception
  def _cdf(self, value):
    # Subclass hook backing `cdf`.
    raise NotImplementedError("cdf is not implemented")
def cdf(self, value, name="cdf", **condition_kwargs):
"""Cumulative distribution function.
Given random variable `X`, the cumulative distribution function `cdf` is:
```
cdf(x) := P[X <= x]
```
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
cdf: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with
values of type `self.dtype`.
"""
with self._name_scope(name, values=[value]):
value = ops.convert_to_tensor(value, name="value")
try:
return self._cdf(value, **condition_kwargs)
except NotImplementedError as original_exception:
try:
return math_ops.exp(self._log_cdf(value, **condition_kwargs))
except NotImplementedError:
raise original_exception
  def _log_survival_function(self, value):
    # Subclass hook backing `log_survival_function`.
    raise NotImplementedError("log_survival_function is not implemented")
def log_survival_function(self, value, name="log_survival_function",
**condition_kwargs):
"""Log survival function.
Given random variable `X`, the survival function is defined:
```
log_survival_function(x) = Log[ P[X > x] ]
= Log[ 1 - P[X <= x] ]
= Log[ 1 - cdf(x) ]
```
Typically, different numerical approximations can be used for the log
survival function, which are more accurate than `1 - cdf(x)` when `x >> 1`.
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
`Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type
`self.dtype`.
"""
with self._name_scope(name, values=[value]):
value = ops.convert_to_tensor(value, name="value")
try:
return self._log_survival_function(value, **condition_kwargs)
except NotImplementedError as original_exception:
try:
return math_ops.log(1. - self.cdf(value, **condition_kwargs))
except NotImplementedError:
raise original_exception
  def _survival_function(self, value):
    # Subclass hook backing `survival_function`.
    raise NotImplementedError("survival_function is not implemented")
def survival_function(self, value, name="survival_function",
**condition_kwargs):
"""Survival function.
Given random variable `X`, the survival function is defined:
```
survival_function(x) = P[X > x]
= 1 - P[X <= x]
= 1 - cdf(x).
```
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
Tensor` of shape `sample_shape(x) + self.batch_shape` with values of type
`self.dtype`.
"""
with self._name_scope(name, values=[value]):
value = ops.convert_to_tensor(value, name="value")
try:
return self._survival_function(value, **condition_kwargs)
except NotImplementedError as original_exception:
try:
return 1. - self.cdf(value, **condition_kwargs)
except NotImplementedError:
raise original_exception
  def _entropy(self):
    # Subclass hook backing `entropy`.
    raise NotImplementedError("entropy is not implemented")
  def entropy(self, name="entropy"):
    """Shannon entropy in nats."""
    # Delegates to the subclass `_entropy` under this distribution's scope.
    with self._name_scope(name):
      return self._entropy()
  def _mean(self):
    # Subclass hook backing `mean`.
    raise NotImplementedError("mean is not implemented")
  def mean(self, name="mean"):
    """Mean."""
    # Delegates to the subclass `_mean` under this distribution's scope.
    with self._name_scope(name):
      return self._mean()
  def _variance(self):
    # Subclass hook backing `variance`.
    raise NotImplementedError("variance is not implemented")
  def variance(self, name="variance"):
    """Variance."""
    # Delegates to the subclass `_variance` under this distribution's scope.
    with self._name_scope(name):
      return self._variance()
  def _std(self):
    # Subclass hook backing `std`.
    raise NotImplementedError("std is not implemented")
  def std(self, name="std"):
    """Standard deviation."""
    # Delegates to the subclass `_std` under this distribution's scope.
    with self._name_scope(name):
      return self._std()
  def _mode(self):
    # Subclass hook backing `mode`.
    raise NotImplementedError("mode is not implemented")
  def mode(self, name="mode"):
    """Mode."""
    # Delegates to the subclass `_mode` under this distribution's scope.
    with self._name_scope(name):
      return self._mode()
def log_pdf(self, value, name="log_pdf", **condition_kwargs):
"""Log probability density function.
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
log_prob: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with
values of type `self.dtype`.
Raises:
TypeError: if not `is_continuous`.
"""
warnings.warn("Please use `log_prob` instead of `log_pdf`. `log_pdf` "
"will be deprecated in December 2016.",
PendingDeprecationWarning)
if not self.is_continuous:
raise TypeError("log_pdf is undefined for non-continuous distributions.")
return self.log_prob(value, name=name, **condition_kwargs)
def pdf(self, value, name="pdf", **condition_kwargs):
"""Probability density function.
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
prob: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with
values of type `self.dtype`.
Raises:
TypeError: if not `is_continuous`.
"""
warnings.warn("Please use `prob` instead of `pdf`. `pdf` will be "
"deprecated in December 2016.",
PendingDeprecationWarning)
if not self.is_continuous:
raise TypeError("pdf is undefined for non-continuous distributions.")
return self.prob(value, name, **condition_kwargs)
def log_pmf(self, value, name="log_pmf", **condition_kwargs):
"""Log probability mass function.
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
log_pmf: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with
values of type `self.dtype`.
Raises:
TypeError: if `is_continuous`.
"""
warnings.warn("Please use `log_prob` instead of `log_pmf`. `log_pmf` will "
"be deprecated in December 2016.",
PendingDeprecationWarning)
if self.is_continuous:
raise TypeError("log_pmf is undefined for continuous distributions.")
return self.log_prob(value, name=name, **condition_kwargs)
def pmf(self, value, name="pmf", **condition_kwargs):
"""Probability mass function.
Args:
value: `float` or `double` `Tensor`.
name: The name to give this op.
**condition_kwargs: Named arguments forwarded to subclass implementation.
Returns:
pmf: a `Tensor` of shape `sample_shape(x) + self.batch_shape` with
values of type `self.dtype`.
Raises:
TypeError: if `is_continuous`.
"""
warnings.warn("Please use `prob` instead of `pmf`. `pmf` will be "
"deprecated in December 2016.",
PendingDeprecationWarning)
if self.is_continuous:
raise TypeError("pmf is undefined for continuous distributions.")
return self.prob(value, name=name, **condition_kwargs)
  @contextlib.contextmanager
  def _name_scope(self, name=None, values=None):
    """Helper function to standardize op scope."""
    # Nest the requested scope inside this distribution's own name scope,
    # and expose the graph parents to name inference.
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=(
          (values or []) + self._graph_parents)) as scope:
        yield scope
  def _expand_sample_shape(self, sample_shape):
    """Helper to `sample` which ensures sample_shape is 1D."""
    sample_shape_static_val = tensor_util.constant_value(sample_shape)
    ndims = sample_shape.get_shape().ndims
    if sample_shape_static_val is None:
      # Shape only known at graph run time: reshape dynamically.
      if ndims is None or not sample_shape.get_shape().is_fully_defined():
        ndims = array_ops.rank(sample_shape)
      # A scalar sample_shape is promoted to the 1-D shape [1].
      expanded_shape = distribution_util.pick_vector(
          math_ops.equal(ndims, 0),
          np.array((1,), dtype=dtypes.int32.as_numpy_dtype()),
          array_ops.shape(sample_shape))
      sample_shape = array_ops.reshape(sample_shape, expanded_shape)
      total = math_ops.reduce_prod(sample_shape)  # reduce_prod([]) == 1
    else:
      # Shape is statically known: do the bookkeeping in numpy.
      if ndims is None:
        raise ValueError(
            "Shouldn't be here; ndims cannot be none when we have a "
            "tf.constant shape.")
      if ndims == 0:
        sample_shape_static_val = np.reshape(sample_shape_static_val, [1])
        sample_shape = ops.convert_to_tensor(
            sample_shape_static_val,
            dtype=dtypes.int32,
            name="sample_shape")
      total = np.prod(sample_shape_static_val,
                      dtype=dtypes.int32.as_numpy_dtype())
    return sample_shape, total
|
lightbulb-framework/lightbulb-framework | refs/heads/master | lightbulb/data/regex/PHPIDS040/52.py | 544 | META = {
'author': 'George Argyros, Ioannis Stais',
'description': 'Automatic transformed ruleset',
'type':'Regex',
'comments': []
} |
ahb0327/intellij-community | refs/heads/master | python/lib/Lib/encodings/cp855.py | 593 | """ Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP855.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp855 codec backed by this module's charmap tables."""

    def encode(self, input, errors='strict'):
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental cp855 encoder; charmap codecs carry no state."""

    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental cp855 decoder; charmap codecs carry no state."""

    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer for cp855; behavior is inherited from `Codec`."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader for cp855; behavior is inherited from `Codec`."""
    pass
### encodings module API
def getregentry():
    """Return the `codecs.CodecInfo` registration entry for 'cp855'."""
    codec = Codec()
    return codecs.CodecInfo(
        name='cp855',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
# Generated by gencodec.py from CP855.TXT: only bytes 0x80-0xff differ from
# their identity mapping (Cyrillic letters, box drawing, punctuation).
decoding_map.update({
    0x0080: 0x0452,     #  CYRILLIC SMALL LETTER DJE
    0x0081: 0x0402,     #  CYRILLIC CAPITAL LETTER DJE
    0x0082: 0x0453,     #  CYRILLIC SMALL LETTER GJE
    0x0083: 0x0403,     #  CYRILLIC CAPITAL LETTER GJE
    0x0084: 0x0451,     #  CYRILLIC SMALL LETTER IO
    0x0085: 0x0401,     #  CYRILLIC CAPITAL LETTER IO
    0x0086: 0x0454,     #  CYRILLIC SMALL LETTER UKRAINIAN IE
    0x0087: 0x0404,     #  CYRILLIC CAPITAL LETTER UKRAINIAN IE
    0x0088: 0x0455,     #  CYRILLIC SMALL LETTER DZE
    0x0089: 0x0405,     #  CYRILLIC CAPITAL LETTER DZE
    0x008a: 0x0456,     #  CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
    0x008b: 0x0406,     #  CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
    0x008c: 0x0457,     #  CYRILLIC SMALL LETTER YI
    0x008d: 0x0407,     #  CYRILLIC CAPITAL LETTER YI
    0x008e: 0x0458,     #  CYRILLIC SMALL LETTER JE
    0x008f: 0x0408,     #  CYRILLIC CAPITAL LETTER JE
    0x0090: 0x0459,     #  CYRILLIC SMALL LETTER LJE
    0x0091: 0x0409,     #  CYRILLIC CAPITAL LETTER LJE
    0x0092: 0x045a,     #  CYRILLIC SMALL LETTER NJE
    0x0093: 0x040a,     #  CYRILLIC CAPITAL LETTER NJE
    0x0094: 0x045b,     #  CYRILLIC SMALL LETTER TSHE
    0x0095: 0x040b,     #  CYRILLIC CAPITAL LETTER TSHE
    0x0096: 0x045c,     #  CYRILLIC SMALL LETTER KJE
    0x0097: 0x040c,     #  CYRILLIC CAPITAL LETTER KJE
    0x0098: 0x045e,     #  CYRILLIC SMALL LETTER SHORT U
    0x0099: 0x040e,     #  CYRILLIC CAPITAL LETTER SHORT U
    0x009a: 0x045f,     #  CYRILLIC SMALL LETTER DZHE
    0x009b: 0x040f,     #  CYRILLIC CAPITAL LETTER DZHE
    0x009c: 0x044e,     #  CYRILLIC SMALL LETTER YU
    0x009d: 0x042e,     #  CYRILLIC CAPITAL LETTER YU
    0x009e: 0x044a,     #  CYRILLIC SMALL LETTER HARD SIGN
    0x009f: 0x042a,     #  CYRILLIC CAPITAL LETTER HARD SIGN
    0x00a0: 0x0430,     #  CYRILLIC SMALL LETTER A
    0x00a1: 0x0410,     #  CYRILLIC CAPITAL LETTER A
    0x00a2: 0x0431,     #  CYRILLIC SMALL LETTER BE
    0x00a3: 0x0411,     #  CYRILLIC CAPITAL LETTER BE
    0x00a4: 0x0446,     #  CYRILLIC SMALL LETTER TSE
    0x00a5: 0x0426,     #  CYRILLIC CAPITAL LETTER TSE
    0x00a6: 0x0434,     #  CYRILLIC SMALL LETTER DE
    0x00a7: 0x0414,     #  CYRILLIC CAPITAL LETTER DE
    0x00a8: 0x0435,     #  CYRILLIC SMALL LETTER IE
    0x00a9: 0x0415,     #  CYRILLIC CAPITAL LETTER IE
    0x00aa: 0x0444,     #  CYRILLIC SMALL LETTER EF
    0x00ab: 0x0424,     #  CYRILLIC CAPITAL LETTER EF
    0x00ac: 0x0433,     #  CYRILLIC SMALL LETTER GHE
    0x00ad: 0x0413,     #  CYRILLIC CAPITAL LETTER GHE
    0x00ae: 0x00ab,     #  LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00af: 0x00bb,     #  RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00b0: 0x2591,     #  LIGHT SHADE
    0x00b1: 0x2592,     #  MEDIUM SHADE
    0x00b2: 0x2593,     #  DARK SHADE
    0x00b3: 0x2502,     #  BOX DRAWINGS LIGHT VERTICAL
    0x00b4: 0x2524,     #  BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x00b5: 0x0445,     #  CYRILLIC SMALL LETTER HA
    0x00b6: 0x0425,     #  CYRILLIC CAPITAL LETTER HA
    0x00b7: 0x0438,     #  CYRILLIC SMALL LETTER I
    0x00b8: 0x0418,     #  CYRILLIC CAPITAL LETTER I
    0x00b9: 0x2563,     #  BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x00ba: 0x2551,     #  BOX DRAWINGS DOUBLE VERTICAL
    0x00bb: 0x2557,     #  BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x00bc: 0x255d,     #  BOX DRAWINGS DOUBLE UP AND LEFT
    0x00bd: 0x0439,     #  CYRILLIC SMALL LETTER SHORT I
    0x00be: 0x0419,     #  CYRILLIC CAPITAL LETTER SHORT I
    0x00bf: 0x2510,     #  BOX DRAWINGS LIGHT DOWN AND LEFT
    0x00c0: 0x2514,     #  BOX DRAWINGS LIGHT UP AND RIGHT
    0x00c1: 0x2534,     #  BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x00c2: 0x252c,     #  BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x00c3: 0x251c,     #  BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x00c4: 0x2500,     #  BOX DRAWINGS LIGHT HORIZONTAL
    0x00c5: 0x253c,     #  BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x00c6: 0x043a,     #  CYRILLIC SMALL LETTER KA
    0x00c7: 0x041a,     #  CYRILLIC CAPITAL LETTER KA
    0x00c8: 0x255a,     #  BOX DRAWINGS DOUBLE UP AND RIGHT
    0x00c9: 0x2554,     #  BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x00ca: 0x2569,     #  BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x00cb: 0x2566,     #  BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x00cc: 0x2560,     #  BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x00cd: 0x2550,     #  BOX DRAWINGS DOUBLE HORIZONTAL
    0x00ce: 0x256c,     #  BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x00cf: 0x00a4,     #  CURRENCY SIGN
    0x00d0: 0x043b,     #  CYRILLIC SMALL LETTER EL
    0x00d1: 0x041b,     #  CYRILLIC CAPITAL LETTER EL
    0x00d2: 0x043c,     #  CYRILLIC SMALL LETTER EM
    0x00d3: 0x041c,     #  CYRILLIC CAPITAL LETTER EM
    0x00d4: 0x043d,     #  CYRILLIC SMALL LETTER EN
    0x00d5: 0x041d,     #  CYRILLIC CAPITAL LETTER EN
    0x00d6: 0x043e,     #  CYRILLIC SMALL LETTER O
    0x00d7: 0x041e,     #  CYRILLIC CAPITAL LETTER O
    0x00d8: 0x043f,     #  CYRILLIC SMALL LETTER PE
    0x00d9: 0x2518,     #  BOX DRAWINGS LIGHT UP AND LEFT
    0x00da: 0x250c,     #  BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x00db: 0x2588,     #  FULL BLOCK
    0x00dc: 0x2584,     #  LOWER HALF BLOCK
    0x00dd: 0x041f,     #  CYRILLIC CAPITAL LETTER PE
    0x00de: 0x044f,     #  CYRILLIC SMALL LETTER YA
    0x00df: 0x2580,     #  UPPER HALF BLOCK
    0x00e0: 0x042f,     #  CYRILLIC CAPITAL LETTER YA
    0x00e1: 0x0440,     #  CYRILLIC SMALL LETTER ER
    0x00e2: 0x0420,     #  CYRILLIC CAPITAL LETTER ER
    0x00e3: 0x0441,     #  CYRILLIC SMALL LETTER ES
    0x00e4: 0x0421,     #  CYRILLIC CAPITAL LETTER ES
    0x00e5: 0x0442,     #  CYRILLIC SMALL LETTER TE
    0x00e6: 0x0422,     #  CYRILLIC CAPITAL LETTER TE
    0x00e7: 0x0443,     #  CYRILLIC SMALL LETTER U
    0x00e8: 0x0423,     #  CYRILLIC CAPITAL LETTER U
    0x00e9: 0x0436,     #  CYRILLIC SMALL LETTER ZHE
    0x00ea: 0x0416,     #  CYRILLIC CAPITAL LETTER ZHE
    0x00eb: 0x0432,     #  CYRILLIC SMALL LETTER VE
    0x00ec: 0x0412,     #  CYRILLIC CAPITAL LETTER VE
    0x00ed: 0x044c,     #  CYRILLIC SMALL LETTER SOFT SIGN
    0x00ee: 0x042c,     #  CYRILLIC CAPITAL LETTER SOFT SIGN
    0x00ef: 0x2116,     #  NUMERO SIGN
    0x00f0: 0x00ad,     #  SOFT HYPHEN
    0x00f1: 0x044b,     #  CYRILLIC SMALL LETTER YERU
    0x00f2: 0x042b,     #  CYRILLIC CAPITAL LETTER YERU
    0x00f3: 0x0437,     #  CYRILLIC SMALL LETTER ZE
    0x00f4: 0x0417,     #  CYRILLIC CAPITAL LETTER ZE
    0x00f5: 0x0448,     #  CYRILLIC SMALL LETTER SHA
    0x00f6: 0x0428,     #  CYRILLIC CAPITAL LETTER SHA
    0x00f7: 0x044d,     #  CYRILLIC SMALL LETTER E
    0x00f8: 0x042d,     #  CYRILLIC CAPITAL LETTER E
    0x00f9: 0x0449,     #  CYRILLIC SMALL LETTER SHCHA
    0x00fa: 0x0429,     #  CYRILLIC CAPITAL LETTER SHCHA
    0x00fb: 0x0447,     #  CYRILLIC SMALL LETTER CHE
    0x00fc: 0x0427,     #  CYRILLIC CAPITAL LETTER CHE
    0x00fd: 0x00a7,     #  SECTION SIGN
    0x00fe: 0x25a0,     #  BLACK SQUARE
    0x00ff: 0x00a0,     #  NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\u0452' # 0x0080 -> CYRILLIC SMALL LETTER DJE
u'\u0402' # 0x0081 -> CYRILLIC CAPITAL LETTER DJE
u'\u0453' # 0x0082 -> CYRILLIC SMALL LETTER GJE
u'\u0403' # 0x0083 -> CYRILLIC CAPITAL LETTER GJE
u'\u0451' # 0x0084 -> CYRILLIC SMALL LETTER IO
u'\u0401' # 0x0085 -> CYRILLIC CAPITAL LETTER IO
u'\u0454' # 0x0086 -> CYRILLIC SMALL LETTER UKRAINIAN IE
u'\u0404' # 0x0087 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
u'\u0455' # 0x0088 -> CYRILLIC SMALL LETTER DZE
u'\u0405' # 0x0089 -> CYRILLIC CAPITAL LETTER DZE
u'\u0456' # 0x008a -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0406' # 0x008b -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0457' # 0x008c -> CYRILLIC SMALL LETTER YI
u'\u0407' # 0x008d -> CYRILLIC CAPITAL LETTER YI
u'\u0458' # 0x008e -> CYRILLIC SMALL LETTER JE
u'\u0408' # 0x008f -> CYRILLIC CAPITAL LETTER JE
u'\u0459' # 0x0090 -> CYRILLIC SMALL LETTER LJE
u'\u0409' # 0x0091 -> CYRILLIC CAPITAL LETTER LJE
u'\u045a' # 0x0092 -> CYRILLIC SMALL LETTER NJE
u'\u040a' # 0x0093 -> CYRILLIC CAPITAL LETTER NJE
u'\u045b' # 0x0094 -> CYRILLIC SMALL LETTER TSHE
u'\u040b' # 0x0095 -> CYRILLIC CAPITAL LETTER TSHE
u'\u045c' # 0x0096 -> CYRILLIC SMALL LETTER KJE
u'\u040c' # 0x0097 -> CYRILLIC CAPITAL LETTER KJE
u'\u045e' # 0x0098 -> CYRILLIC SMALL LETTER SHORT U
u'\u040e' # 0x0099 -> CYRILLIC CAPITAL LETTER SHORT U
u'\u045f' # 0x009a -> CYRILLIC SMALL LETTER DZHE
u'\u040f' # 0x009b -> CYRILLIC CAPITAL LETTER DZHE
u'\u044e' # 0x009c -> CYRILLIC SMALL LETTER YU
u'\u042e' # 0x009d -> CYRILLIC CAPITAL LETTER YU
u'\u044a' # 0x009e -> CYRILLIC SMALL LETTER HARD SIGN
u'\u042a' # 0x009f -> CYRILLIC CAPITAL LETTER HARD SIGN
u'\u0430' # 0x00a0 -> CYRILLIC SMALL LETTER A
u'\u0410' # 0x00a1 -> CYRILLIC CAPITAL LETTER A
u'\u0431' # 0x00a2 -> CYRILLIC SMALL LETTER BE
u'\u0411' # 0x00a3 -> CYRILLIC CAPITAL LETTER BE
u'\u0446' # 0x00a4 -> CYRILLIC SMALL LETTER TSE
u'\u0426' # 0x00a5 -> CYRILLIC CAPITAL LETTER TSE
u'\u0434' # 0x00a6 -> CYRILLIC SMALL LETTER DE
u'\u0414' # 0x00a7 -> CYRILLIC CAPITAL LETTER DE
u'\u0435' # 0x00a8 -> CYRILLIC SMALL LETTER IE
u'\u0415' # 0x00a9 -> CYRILLIC CAPITAL LETTER IE
u'\u0444' # 0x00aa -> CYRILLIC SMALL LETTER EF
u'\u0424' # 0x00ab -> CYRILLIC CAPITAL LETTER EF
u'\u0433' # 0x00ac -> CYRILLIC SMALL LETTER GHE
u'\u0413' # 0x00ad -> CYRILLIC CAPITAL LETTER GHE
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\u0445' # 0x00b5 -> CYRILLIC SMALL LETTER HA
u'\u0425' # 0x00b6 -> CYRILLIC CAPITAL LETTER HA
u'\u0438' # 0x00b7 -> CYRILLIC SMALL LETTER I
u'\u0418' # 0x00b8 -> CYRILLIC CAPITAL LETTER I
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u0439' # 0x00bd -> CYRILLIC SMALL LETTER SHORT I
u'\u0419' # 0x00be -> CYRILLIC CAPITAL LETTER SHORT I
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u043a' # 0x00c6 -> CYRILLIC SMALL LETTER KA
u'\u041a' # 0x00c7 -> CYRILLIC CAPITAL LETTER KA
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa4' # 0x00cf -> CURRENCY SIGN
u'\u043b' # 0x00d0 -> CYRILLIC SMALL LETTER EL
u'\u041b' # 0x00d1 -> CYRILLIC CAPITAL LETTER EL
u'\u043c' # 0x00d2 -> CYRILLIC SMALL LETTER EM
u'\u041c' # 0x00d3 -> CYRILLIC CAPITAL LETTER EM
u'\u043d' # 0x00d4 -> CYRILLIC SMALL LETTER EN
u'\u041d' # 0x00d5 -> CYRILLIC CAPITAL LETTER EN
u'\u043e' # 0x00d6 -> CYRILLIC SMALL LETTER O
u'\u041e' # 0x00d7 -> CYRILLIC CAPITAL LETTER O
u'\u043f' # 0x00d8 -> CYRILLIC SMALL LETTER PE
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\u041f' # 0x00dd -> CYRILLIC CAPITAL LETTER PE
u'\u044f' # 0x00de -> CYRILLIC SMALL LETTER YA
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\u042f' # 0x00e0 -> CYRILLIC CAPITAL LETTER YA
u'\u0440' # 0x00e1 -> CYRILLIC SMALL LETTER ER
u'\u0420' # 0x00e2 -> CYRILLIC CAPITAL LETTER ER
u'\u0441' # 0x00e3 -> CYRILLIC SMALL LETTER ES
u'\u0421' # 0x00e4 -> CYRILLIC CAPITAL LETTER ES
u'\u0442' # 0x00e5 -> CYRILLIC SMALL LETTER TE
u'\u0422' # 0x00e6 -> CYRILLIC CAPITAL LETTER TE
u'\u0443' # 0x00e7 -> CYRILLIC SMALL LETTER U
u'\u0423' # 0x00e8 -> CYRILLIC CAPITAL LETTER U
u'\u0436' # 0x00e9 -> CYRILLIC SMALL LETTER ZHE
u'\u0416' # 0x00ea -> CYRILLIC CAPITAL LETTER ZHE
u'\u0432' # 0x00eb -> CYRILLIC SMALL LETTER VE
u'\u0412' # 0x00ec -> CYRILLIC CAPITAL LETTER VE
u'\u044c' # 0x00ed -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u042c' # 0x00ee -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u2116' # 0x00ef -> NUMERO SIGN
u'\xad' # 0x00f0 -> SOFT HYPHEN
u'\u044b' # 0x00f1 -> CYRILLIC SMALL LETTER YERU
u'\u042b' # 0x00f2 -> CYRILLIC CAPITAL LETTER YERU
u'\u0437' # 0x00f3 -> CYRILLIC SMALL LETTER ZE
u'\u0417' # 0x00f4 -> CYRILLIC CAPITAL LETTER ZE
u'\u0448' # 0x00f5 -> CYRILLIC SMALL LETTER SHA
u'\u0428' # 0x00f6 -> CYRILLIC CAPITAL LETTER SHA
u'\u044d' # 0x00f7 -> CYRILLIC SMALL LETTER E
u'\u042d' # 0x00f8 -> CYRILLIC CAPITAL LETTER E
u'\u0449' # 0x00f9 -> CYRILLIC SMALL LETTER SHCHA
u'\u0429' # 0x00fa -> CYRILLIC CAPITAL LETTER SHCHA
u'\u0447' # 0x00fb -> CYRILLIC SMALL LETTER CHE
u'\u0427' # 0x00fc -> CYRILLIC CAPITAL LETTER CHE
u'\xa7' # 0x00fd -> SECTION SIGN
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
# Inverse of the decoding table above: maps each supported Unicode code
# point back to its single-byte value in this code page.  The ASCII half
# (0x00-0x7f) is an identity mapping and is generated rather than written
# out entry by entry; the upper half covers Cyrillic letters, box-drawing
# glyphs, shading blocks and a few Latin-1 punctuation characters.
encoding_map = dict((code, code) for code in range(0x80))  # ASCII: identity
encoding_map.update({
    0x00a0: 0x00ff,     #  NO-BREAK SPACE
    0x00a4: 0x00cf,     #  CURRENCY SIGN
    0x00a7: 0x00fd,     #  SECTION SIGN
    0x00ab: 0x00ae,     #  LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00ad: 0x00f0,     #  SOFT HYPHEN
    0x00bb: 0x00af,     #  RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x0401: 0x0085,     #  CYRILLIC CAPITAL LETTER IO
    0x0402: 0x0081,     #  CYRILLIC CAPITAL LETTER DJE
    0x0403: 0x0083,     #  CYRILLIC CAPITAL LETTER GJE
    0x0404: 0x0087,     #  CYRILLIC CAPITAL LETTER UKRAINIAN IE
    0x0405: 0x0089,     #  CYRILLIC CAPITAL LETTER DZE
    0x0406: 0x008b,     #  CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
    0x0407: 0x008d,     #  CYRILLIC CAPITAL LETTER YI
    0x0408: 0x008f,     #  CYRILLIC CAPITAL LETTER JE
    0x0409: 0x0091,     #  CYRILLIC CAPITAL LETTER LJE
    0x040a: 0x0093,     #  CYRILLIC CAPITAL LETTER NJE
    0x040b: 0x0095,     #  CYRILLIC CAPITAL LETTER TSHE
    0x040c: 0x0097,     #  CYRILLIC CAPITAL LETTER KJE
    0x040e: 0x0099,     #  CYRILLIC CAPITAL LETTER SHORT U
    0x040f: 0x009b,     #  CYRILLIC CAPITAL LETTER DZHE
    0x0410: 0x00a1,     #  CYRILLIC CAPITAL LETTER A
    0x0411: 0x00a3,     #  CYRILLIC CAPITAL LETTER BE
    0x0412: 0x00ec,     #  CYRILLIC CAPITAL LETTER VE
    0x0413: 0x00ad,     #  CYRILLIC CAPITAL LETTER GHE
    0x0414: 0x00a7,     #  CYRILLIC CAPITAL LETTER DE
    0x0415: 0x00a9,     #  CYRILLIC CAPITAL LETTER IE
    0x0416: 0x00ea,     #  CYRILLIC CAPITAL LETTER ZHE
    0x0417: 0x00f4,     #  CYRILLIC CAPITAL LETTER ZE
    0x0418: 0x00b8,     #  CYRILLIC CAPITAL LETTER I
    0x0419: 0x00be,     #  CYRILLIC CAPITAL LETTER SHORT I
    0x041a: 0x00c7,     #  CYRILLIC CAPITAL LETTER KA
    0x041b: 0x00d1,     #  CYRILLIC CAPITAL LETTER EL
    0x041c: 0x00d3,     #  CYRILLIC CAPITAL LETTER EM
    0x041d: 0x00d5,     #  CYRILLIC CAPITAL LETTER EN
    0x041e: 0x00d7,     #  CYRILLIC CAPITAL LETTER O
    0x041f: 0x00dd,     #  CYRILLIC CAPITAL LETTER PE
    0x0420: 0x00e2,     #  CYRILLIC CAPITAL LETTER ER
    0x0421: 0x00e4,     #  CYRILLIC CAPITAL LETTER ES
    0x0422: 0x00e6,     #  CYRILLIC CAPITAL LETTER TE
    0x0423: 0x00e8,     #  CYRILLIC CAPITAL LETTER U
    0x0424: 0x00ab,     #  CYRILLIC CAPITAL LETTER EF
    0x0425: 0x00b6,     #  CYRILLIC CAPITAL LETTER HA
    0x0426: 0x00a5,     #  CYRILLIC CAPITAL LETTER TSE
    0x0427: 0x00fc,     #  CYRILLIC CAPITAL LETTER CHE
    0x0428: 0x00f6,     #  CYRILLIC CAPITAL LETTER SHA
    0x0429: 0x00fa,     #  CYRILLIC CAPITAL LETTER SHCHA
    0x042a: 0x009f,     #  CYRILLIC CAPITAL LETTER HARD SIGN
    0x042b: 0x00f2,     #  CYRILLIC CAPITAL LETTER YERU
    0x042c: 0x00ee,     #  CYRILLIC CAPITAL LETTER SOFT SIGN
    0x042d: 0x00f8,     #  CYRILLIC CAPITAL LETTER E
    0x042e: 0x009d,     #  CYRILLIC CAPITAL LETTER YU
    0x042f: 0x00e0,     #  CYRILLIC CAPITAL LETTER YA
    0x0430: 0x00a0,     #  CYRILLIC SMALL LETTER A
    0x0431: 0x00a2,     #  CYRILLIC SMALL LETTER BE
    0x0432: 0x00eb,     #  CYRILLIC SMALL LETTER VE
    0x0433: 0x00ac,     #  CYRILLIC SMALL LETTER GHE
    0x0434: 0x00a6,     #  CYRILLIC SMALL LETTER DE
    0x0435: 0x00a8,     #  CYRILLIC SMALL LETTER IE
    0x0436: 0x00e9,     #  CYRILLIC SMALL LETTER ZHE
    0x0437: 0x00f3,     #  CYRILLIC SMALL LETTER ZE
    0x0438: 0x00b7,     #  CYRILLIC SMALL LETTER I
    0x0439: 0x00bd,     #  CYRILLIC SMALL LETTER SHORT I
    0x043a: 0x00c6,     #  CYRILLIC SMALL LETTER KA
    0x043b: 0x00d0,     #  CYRILLIC SMALL LETTER EL
    0x043c: 0x00d2,     #  CYRILLIC SMALL LETTER EM
    0x043d: 0x00d4,     #  CYRILLIC SMALL LETTER EN
    0x043e: 0x00d6,     #  CYRILLIC SMALL LETTER O
    0x043f: 0x00d8,     #  CYRILLIC SMALL LETTER PE
    0x0440: 0x00e1,     #  CYRILLIC SMALL LETTER ER
    0x0441: 0x00e3,     #  CYRILLIC SMALL LETTER ES
    0x0442: 0x00e5,     #  CYRILLIC SMALL LETTER TE
    0x0443: 0x00e7,     #  CYRILLIC SMALL LETTER U
    0x0444: 0x00aa,     #  CYRILLIC SMALL LETTER EF
    0x0445: 0x00b5,     #  CYRILLIC SMALL LETTER HA
    0x0446: 0x00a4,     #  CYRILLIC SMALL LETTER TSE
    0x0447: 0x00fb,     #  CYRILLIC SMALL LETTER CHE
    0x0448: 0x00f5,     #  CYRILLIC SMALL LETTER SHA
    0x0449: 0x00f9,     #  CYRILLIC SMALL LETTER SHCHA
    0x044a: 0x009e,     #  CYRILLIC SMALL LETTER HARD SIGN
    0x044b: 0x00f1,     #  CYRILLIC SMALL LETTER YERU
    0x044c: 0x00ed,     #  CYRILLIC SMALL LETTER SOFT SIGN
    0x044d: 0x00f7,     #  CYRILLIC SMALL LETTER E
    0x044e: 0x009c,     #  CYRILLIC SMALL LETTER YU
    0x044f: 0x00de,     #  CYRILLIC SMALL LETTER YA
    0x0451: 0x0084,     #  CYRILLIC SMALL LETTER IO
    0x0452: 0x0080,     #  CYRILLIC SMALL LETTER DJE
    0x0453: 0x0082,     #  CYRILLIC SMALL LETTER GJE
    0x0454: 0x0086,     #  CYRILLIC SMALL LETTER UKRAINIAN IE
    0x0455: 0x0088,     #  CYRILLIC SMALL LETTER DZE
    0x0456: 0x008a,     #  CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
    0x0457: 0x008c,     #  CYRILLIC SMALL LETTER YI
    0x0458: 0x008e,     #  CYRILLIC SMALL LETTER JE
    0x0459: 0x0090,     #  CYRILLIC SMALL LETTER LJE
    0x045a: 0x0092,     #  CYRILLIC SMALL LETTER NJE
    0x045b: 0x0094,     #  CYRILLIC SMALL LETTER TSHE
    0x045c: 0x0096,     #  CYRILLIC SMALL LETTER KJE
    0x045e: 0x0098,     #  CYRILLIC SMALL LETTER SHORT U
    0x045f: 0x009a,     #  CYRILLIC SMALL LETTER DZHE
    0x2116: 0x00ef,     #  NUMERO SIGN
    0x2500: 0x00c4,     #  BOX DRAWINGS LIGHT HORIZONTAL
    0x2502: 0x00b3,     #  BOX DRAWINGS LIGHT VERTICAL
    0x250c: 0x00da,     #  BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x2510: 0x00bf,     #  BOX DRAWINGS LIGHT DOWN AND LEFT
    0x2514: 0x00c0,     #  BOX DRAWINGS LIGHT UP AND RIGHT
    0x2518: 0x00d9,     #  BOX DRAWINGS LIGHT UP AND LEFT
    0x251c: 0x00c3,     #  BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x2524: 0x00b4,     #  BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x252c: 0x00c2,     #  BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x2534: 0x00c1,     #  BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x253c: 0x00c5,     #  BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x2550: 0x00cd,     #  BOX DRAWINGS DOUBLE HORIZONTAL
    0x2551: 0x00ba,     #  BOX DRAWINGS DOUBLE VERTICAL
    0x2554: 0x00c9,     #  BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x2557: 0x00bb,     #  BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x255a: 0x00c8,     #  BOX DRAWINGS DOUBLE UP AND RIGHT
    0x255d: 0x00bc,     #  BOX DRAWINGS DOUBLE UP AND LEFT
    0x2560: 0x00cc,     #  BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x2563: 0x00b9,     #  BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x2566: 0x00cb,     #  BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x2569: 0x00ca,     #  BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x256c: 0x00ce,     #  BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x2580: 0x00df,     #  UPPER HALF BLOCK
    0x2584: 0x00dc,     #  LOWER HALF BLOCK
    0x2588: 0x00db,     #  FULL BLOCK
    0x2591: 0x00b0,     #  LIGHT SHADE
    0x2592: 0x00b1,     #  MEDIUM SHADE
    0x2593: 0x00b2,     #  DARK SHADE
    0x25a0: 0x00fe,     #  BLACK SQUARE
})
|
jigarkb/Twitter-Sentiment-Analysis | refs/heads/master | twitterstream.py | 1 | import oauth2 as oauth
import urllib2 as urllib
# Twitter API credentials.
# NOTE(review): real OAuth secrets are hard-coded and committed to the
# repository -- they should be revoked and loaded from the environment or a
# config file instead.
access_token_key = "105782391-kfbeApbulMptbrV9w6Bfh5MHrAiUUqqmd1xmD2az"
access_token_secret = "fuIwYDPiqTFvfI4jPJVbCWxZBZaL0ESiq4IMD30c1o"

consumer_key = "1ANfOmOIRa4iaJideGYAg"
consumer_secret = "MmrodYL5xcpjFrFcG8Y4CAR5PTYbRXkKWuQgA1bU"

# Set to 1 to turn on urllib2's debug output for each request.
_debug = 0

# OAuth primitives shared by every request made from this module.
oauth_token = oauth.Token(key=access_token_key, secret=access_token_secret)
oauth_consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)

signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()

# Module-level default HTTP method read by twitterreq().
http_method = "GET"

# Handlers used to build the urllib2 opener (debuglevel mirrors _debug).
http_handler = urllib.HTTPHandler(debuglevel=_debug)
https_handler = urllib.HTTPSHandler(debuglevel=_debug)
def twitterreq(url, method, parameters):
    '''
    Construct, sign, and open a twitter request
    using the hard-coded credentials above.

    url        -- fully qualified request URL
    method     -- HTTP method, "GET" or "POST"
    parameters -- (key, value) query parameters to include in the signature

    Returns the file-like response object from urllib2.
    '''
    req = oauth.Request.from_consumer_and_token(oauth_consumer,
                                                token=oauth_token,
                                                http_method=method,
                                                http_url=url,
                                                parameters=parameters)

    req.sign_request(signature_method_hmac_sha1, oauth_consumer, oauth_token)

    # Fix: the 'method' argument was previously ignored in favor of the
    # module-level 'http_method' global, so a caller asking for POST was
    # silently signed and sent as GET.  Honor the argument instead; existing
    # callers pass "GET", matching the old behavior.
    if method == "POST":
        encoded_post_data = req.to_postdata()
    else:
        encoded_post_data = None

    # The signed URL carries the oauth_* query parameters.
    url = req.to_url()

    opener = urllib.OpenerDirector()
    opener.add_handler(http_handler)
    opener.add_handler(https_handler)

    return opener.open(url, encoded_post_data)
def fetchsamples():
url = "https://stream.twitter.com/1/statuses/sample.json"
parameters = []
response = twitterreq(url, "GET", parameters)
for line in response:
print line.strip()
if __name__ == '__main__':
    # Script entry point: stream samples to stdout until interrupted.
    fetchsamples()
|
jazztpt/edx-platform | refs/heads/master | lms/djangoapps/mobile_api/social_facebook/courses/models.py | 1644 | """
A models.py is required to make this an app (until we move to Django 1.7)
"""
|
gdimitris/ChessPuzzlerBackend | refs/heads/master | Virtual_Environment/lib/python2.7/site-packages/setuptools/command/saveopts.py | 1052 | from setuptools.command.setopt import edit_config, option_base
class saveopts(option_base):
    """Save command-line options to a file"""

    description = "save supplied options to setup.cfg or other config file"

    def run(self):
        """Collect every option supplied on the command line and persist it."""
        dist = self.distribution
        collected = {}

        for cmd in dist.command_options:
            # Never persist this command's own options, or the act of
            # saving would itself be saved.
            if cmd == 'saveopts':
                continue

            cli_opts = dict(
                (opt, val)
                for opt, (src, val) in dist.get_option_dict(cmd).items()
                if src == "command line"
            )
            if cli_opts:
                collected[cmd] = cli_opts

        edit_config(self.filename, collected, self.dry_run)
|
ValkyrieSystems/six-library | refs/heads/master | externals/coda-oss/modules/python/math.poly/tests/test_math_poly.py | 4 | #!/usr/bin/env python
"""
* =========================================================================
* This file is part of math.poly-c++
* =========================================================================
*
* (C) Copyright 2004 - 2014, MDA Information Systems LLC
*
* math.linear-c++ is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; If not,
* see <http://www.gnu.org/licenses/>.
*
*
"""
import sys
from coda.math_linear import VectorDouble, MatrixDouble
from coda.math_poly import *
if __name__ == '__main__':
    #################
    # Basic 1D test #
    #################
    # Build a quadratic (order 2) and fill coefficients with 10, 20, 30.
    poly1D = Poly1D(2)
    for x in range(poly1D.order() + 1):
        poly1D[x] = (x + 1) * 10
    print '1D poly:'
    print poly1D

    # Try to index out of bounds by getting
    threw = False
    try:
        foo = poly1D[3]
    except ValueError:
        threw = True
    if threw:
        print 'Getting 1D OOB threw as expected'
    else:
        sys.exit('Getting 1D OOB did not throw!')

    # Try to index out of bounds by setting
    threw = False
    try:
        poly1D[3] = 5
    except ValueError:
        threw = True
    if threw:
        print 'Setting 1D OOB threw as expected'
    else:
        sys.exit('Setting 1D OOB did not throw!')

    #################
    # Basic 2D test #
    #################
    # Fill a (2, 3)-order polynomial with 100, 200, ... in row-major order.
    poly2D = Poly2D(2, 3)
    val = 100
    for x in range(poly2D.orderX() + 1):
        for y in range(poly2D.orderY() + 1):
            poly2D[(x, y)] = val
            val += 100
    print '\n2D poly:'
    print poly2D

    # Try to index out of bounds by getting
    threw = False
    try:
        foo = poly2D[(3, 3)]
    except ValueError:
        threw = True
    if threw:
        print 'Getting 2D OOB threw as expected'
    else:
        sys.exit('Getting 2D OOB did not throw!')

    # Try to index out of bounds by setting
    threw = False
    try:
        poly2D[(3, 3)] = 5
    except ValueError:
        threw = True
    if threw:
        print 'Setting 2D OOB threw as expected'
    else:
        sys.exit('Setting 2D OOB did not throw!')

    ############################
    # 1D Fit test (array args) #
    ############################
    # Observations supplied through raw SWIG double arrays.
    xObs = new_doubleArray(4)
    doubleArray_setitem(xObs, 0, 1)
    doubleArray_setitem(xObs, 1, -1)
    doubleArray_setitem(xObs, 2, 2)
    doubleArray_setitem(xObs, 3, -2)

    yObs = new_doubleArray(4)
    doubleArray_setitem(yObs, 0, 3)
    doubleArray_setitem(yObs, 1, 13)
    doubleArray_setitem(yObs, 2, 1)
    doubleArray_setitem(yObs, 3, 33)

    numObs = 4
    numCoeff = 3
    fit1D = fit(numObs, xObs, yObs, numCoeff);
    print "\n1D Fit from arrays:"
    print fit1D

    # SWIG-allocated arrays are not garbage collected -- free explicitly.
    delete_doubleArray(xObs)
    delete_doubleArray(yObs)

    #############################
    # 1D Fit test (Vector args) #
    #############################
    # Same observations as above, via math::linear Vector wrappers.
    xObs = VectorDouble(4)
    xObs[0] = 1
    xObs[1] = -1
    xObs[2] = 2
    xObs[3] = -2

    yObs = VectorDouble(4)
    yObs[0] = 3
    yObs[1] = 13
    yObs[2] = 1
    yObs[3] = 33

    numCoeff = 3
    fit1D = FitVectorDouble(xObs, yObs, numCoeff)
    print "\n1D Fit from Vectors:"
    print fit1D

    ############################################
    # PolyVector3 Fit Test (math::linear args) #
    ############################################
    # Each row of yObs holds one component of the vector-valued observations.
    xObs = VectorDouble(4)
    xObs[0] = 1
    xObs[1] = -1
    xObs[2] = 2
    xObs[3] = -2

    yObs = MatrixDouble(3,4)
    yObs[0,0] = 3
    yObs[0,1] = 13
    yObs[0,2] = 1
    yObs[0,3] = 33
    yObs[1,0] = 33
    yObs[1,1] = 1
    yObs[1,2] = 13
    yObs[1,3] = 3
    yObs[2,0] = 1
    yObs[2,1] = -1
    yObs[2,2] = 2
    yObs[2,3] = -2

    numCoeff = 3
    fitPolyVector3 = fit(xObs, yObs, numCoeff)
    print "\nPolyVector3 fit from math::linear args:"
    print fitPolyVector3

    ###########################################
    # PolyVector3 Fit Test (std::vector args) #
    ###########################################
    # Same data, but each component row passed as a separate std::vector.
    xObs = StdVectorDouble([1, -1, 2, -2])
    yObs0 = StdVectorDouble([3, 13, 1, 33])
    yObs1 = StdVectorDouble([33, 1, 13, 3])
    yObs2 = StdVectorDouble([1, -1, 2, -2])

    numCoeff = 3
    fitPolyVector3 = fit(xObs, yObs0, yObs1, yObs2, numCoeff)
    print "\nPolyVector3 fit from std::vector args:"
    print fitPolyVector3

    ############################
    # 2D Fit test (array args) #
    ############################
    # 3x3 grid of (x, y) sample positions with observed surface values z.
    numRows = 3
    numCols = 3
    xObs = new_doubleArray(9)
    doubleArray_setitem(xObs, 0, 1)
    doubleArray_setitem(xObs, 1, 0)
    doubleArray_setitem(xObs, 2, 1)
    doubleArray_setitem(xObs, 3, 1)
    doubleArray_setitem(xObs, 4, 1)
    doubleArray_setitem(xObs, 5, 0)
    doubleArray_setitem(xObs, 6, 0)
    doubleArray_setitem(xObs, 7, 1)
    doubleArray_setitem(xObs, 8, 1)

    yObs = new_doubleArray(9)
    doubleArray_setitem(yObs, 0, 1)
    doubleArray_setitem(yObs, 1, 1)
    doubleArray_setitem(yObs, 2, 1)
    doubleArray_setitem(yObs, 3, 0)
    doubleArray_setitem(yObs, 4, 1)
    doubleArray_setitem(yObs, 5, 1)
    doubleArray_setitem(yObs, 6, 0)
    doubleArray_setitem(yObs, 7, 0)
    doubleArray_setitem(yObs, 8, 1)

    zObs = new_doubleArray(9)
    doubleArray_setitem(zObs, 0, 1)
    doubleArray_setitem(zObs, 1, 0.3)
    doubleArray_setitem(zObs, 2, 0)
    doubleArray_setitem(zObs, 3, 0.16)
    doubleArray_setitem(zObs, 4, 1)
    doubleArray_setitem(zObs, 5, 0)
    doubleArray_setitem(zObs, 6, 0)
    doubleArray_setitem(zObs, 7, 0)
    doubleArray_setitem(zObs, 8, 0.85)

    # Fit a surface of order 1 in both x and y (bilinear).
    fit2D = fit(numRows, numCols, xObs, yObs, zObs, 1, 1)
    print "\n2D Fit from arrays:"
    print fit2D

    delete_doubleArray(xObs)
    delete_doubleArray(yObs)
    delete_doubleArray(zObs)

    #############################
    # 2D Fit test (Matrix args) #
    #############################
    # Same grid as above, via math::linear Matrix wrappers.
    xObs = MatrixDouble(3,3)
    xObs[0,0] = 1
    xObs[0,1] = 0
    xObs[0,2] = 1
    xObs[1,0] = 1
    xObs[1,1] = 1
    xObs[1,2] = 0
    xObs[2,0] = 0
    xObs[2,1] = 1
    xObs[2,2] = 1

    yObs = MatrixDouble(3,3)
    yObs[0,0] = 1
    yObs[0,1] = 1
    yObs[0,2] = 1
    yObs[1,0] = 0
    yObs[1,1] = 1
    yObs[1,2] = 1
    yObs[2,0] = 0
    yObs[2,1] = 0
    yObs[2,2] = 1

    zObs = MatrixDouble(3,3)
    zObs[0,0] = 1
    zObs[0,1] = 0.3
    zObs[0,2] = 0
    zObs[1,0] = 0.16
    zObs[1,1] = 1
    zObs[1,2] = 0
    zObs[2,0] = 0
    zObs[2,1] = 0
    zObs[2,2] = 0.85

    fit2D = fit(xObs, yObs, zObs, 1, 1)
    print "2D Fit from Matrices:"
    print fit2D
|
lochiiconnectivity/libcloud | refs/heads/trunk | libcloud/compute/deployment.py | 25 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides generic deployment steps for machines post boot.
"""
from __future__ import with_statement
import os
import binascii
from libcloud.utils.py3 import basestring, PY3
class Deployment(object):
    """
    Abstract parent class for all post-boot deployment steps.
    """

    def run(self, node, client):
        """
        Execute this deployment step against ``node`` over the provided
        SSH connection.

        :type node: :class:`Node`
        :keyword node: Node to operate on

        :type client: :class:`BaseSSHClient`
        :keyword client: Connected SSH client to use.

        :return: :class:`Node`
        """
        raise NotImplementedError(
            'run not implemented for this deployment')

    def _get_string_value(self, argument_name, argument_value):
        # Accept either a plain string or anything file-like (has .read()).
        file_like = hasattr(argument_value, 'read')

        if not file_like and not isinstance(argument_value, basestring):
            raise TypeError('%s argument must be a string or a file-like '
                            'object' % (argument_name))

        if file_like:
            return argument_value.read()
        return argument_value
class SSHKeyDeployment(Deployment):
    """
    Installs a public SSH key onto a server.
    """

    def __init__(self, key):
        """
        :type key: ``str`` or :class:`File` object
        :keyword key: Contents of the public key to write, or a file object
                      it can be read from.
        """
        # Normalize file-like input to its string contents up front.
        self.key = self._get_string_value(argument_value=key,
                                          argument_name='key')

    def run(self, node, client):
        """
        Installs SSH key into ``.ssh/authorized_keys``

        See also :class:`Deployment.run`
        """
        # Append mode ('a') preserves any keys already authorized.
        client.put(".ssh/authorized_keys", contents=self.key, mode='a')
        return node
class FileDeployment(Deployment):
    """
    Installs a single file on the server.
    """

    def __init__(self, source, target):
        """
        :type source: ``str``
        :keyword source: Local path of file to be installed

        :type target: ``str``
        :keyword target: Path to install file on node
        """
        self.source = source
        self.target = target

    def run(self, node, client):
        """
        Upload the file, retaining permissions.

        See also :class:`Deployment.run`
        """
        # Mirror the local file's permission bits on the remote side.
        mode = os.stat(self.source).st_mode
        perms = int(oct(mode)[4:], 8)

        with open(self.source, 'rb') as fp:
            payload = fp.read()

        client.put(path=self.target, chmod=perms, contents=payload)
        return node
class ScriptDeployment(Deployment):
    """
    Runs an arbitrary shell script on the server.

    The script content is first written to a \*.sh file on the remote
    server and that file is then executed.  For non-shell scripts, put the
    appropriate shebang at the top of the script; doing so is advisable
    even for plain shell scripts.
    """

    def __init__(self, script, args=None, name=None, delete=False):
        """
        :type script: ``str``
        :keyword script: Contents of the script to run.

        :type args: ``list``
        :keyword args: Optional command line arguments which get passed to
                       the deployment script file.

        :type name: ``str``
        :keyword name: Name of the script to upload it as; a random name is
                       chosen when omitted.

        :type delete: ``bool``
        :keyword delete: Whether to delete the script on completion.
        """
        self.script = self._get_string_value(argument_name='script',
                                             argument_value=script)
        self.args = args or []
        # Populated by run() once the script has executed.
        self.stdout = None
        self.stderr = None
        self.exit_status = None
        self.delete = delete
        self.name = name

        if self.name is None:
            # File is put under user's home directory
            # (~/libcloud_deployment_<random_string>.sh)
            suffix = binascii.hexlify(os.urandom(4)).decode('ascii')
            self.name = 'libcloud_deployment_%s.sh' % (suffix)

    def run(self, node, client):
        """
        Uploads the shell script and then executes it.

        See also :class:`Deployment.run`
        """
        file_path = client.put(path=self.name, chmod=int('755', 8),
                               contents=self.script)

        # Pre-pend cwd if user specified a relative path
        if self.name[0] != '/':
            cmd = os.path.join(os.path.dirname(file_path), self.name)
        else:
            cmd = self.name

        if self.args:
            # Append arguments to the command
            cmd = '%s %s' % (cmd, ' '.join(self.args))

        self.stdout, self.stderr, self.exit_status = client.run(cmd)

        if self.delete:
            client.delete(self.name)

        return node
class ScriptFileDeployment(ScriptDeployment):
    """
    Runs an arbitrary shell script loaded from a local file.

    Identical to :class:`ScriptDeployment`, except that a path to the
    script file is supplied instead of the script content itself.
    """

    def __init__(self, script_file, args=None, name=None, delete=False):
        """
        :type script_file: ``str``
        :keyword script_file: Path to a file containing the script to run.

        :type args: ``list``
        :keyword args: Optional command line arguments which get passed to
                       the deployment script file.

        :type name: ``str``
        :keyword name: Name of the script to upload it as; a random name is
                       chosen when omitted.

        :type delete: ``bool``
        :keyword delete: Whether to delete the script on completion.
        """
        # Read as bytes, then decode on Python 3 so the parent always gets
        # text content.
        with open(script_file, 'rb') as fp:
            script_content = fp.read()

        if PY3:
            script_content = script_content.decode('utf-8')

        super(ScriptFileDeployment, self).__init__(script=script_content,
                                                   args=args,
                                                   name=name,
                                                   delete=delete)
class MultiStepDeployment(Deployment):
    """
    Runs a chain of Deployment steps in order.
    """

    def __init__(self, add=None):
        """
        :type add: ``list``
        :keyword add: Deployment steps to add.
        """
        self.steps = []
        self.add(add)

    def add(self, add):
        """
        Add a deployment to this chain.

        :type add: Single :class:`Deployment` or a ``list`` of
                   :class:`Deployment`
        :keyword add: Adds this deployment to the others already in this
                      object.
        """
        if add is None:
            return
        if not isinstance(add, (list, tuple)):
            add = [add]
        self.steps.extend(add)

    def run(self, node, client):
        """
        Run each registered deployment in turn, threading the node through.

        See also :class:`Deployment.run`
        """
        for step in self.steps:
            node = step.run(node, client)
        return node
|
CentroGeo/geonode | refs/heads/master | geonode/geoserver/__init__.py | 4 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import logging
from django.conf import settings
from django.utils.translation import ugettext_noop as _
from geonode.notifications_helper import NotificationsAppConfigBase
logger = logging.getLogger(__name__)
def run_setup_hooks(*args, **kwargs):
    """Connect the GeoServer signal handlers to the GeoNode models.

    Called from ``GeoserverAppConfig.ready()``; imports are local so the
    models and signal handlers are only loaded once the Django app
    registry is ready.  ``*args``/``**kwargs`` are accepted and ignored
    so the function can also be used directly as a signal receiver.
    """
    from django.db.models import signals
    from geonode.base.models import ResourceBase
    from geonode.layers.models import Layer
    from geonode.maps.models import Map, MapLayer
    from geonode.geoserver.signals import geoserver_pre_save
    from geonode.geoserver.signals import geoserver_pre_delete
    from geonode.geoserver.signals import geoserver_post_save
    from geonode.geoserver.signals import geoserver_post_save_map
    from geonode.geoserver.signals import geoserver_pre_save_maplayer

    # Keep GeoServer in sync with saves/deletes of resources, layers,
    # maps and map layers.
    signals.post_save.connect(geoserver_post_save, sender=ResourceBase)
    signals.pre_save.connect(geoserver_pre_save, sender=Layer)
    signals.pre_delete.connect(geoserver_pre_delete, sender=Layer)
    signals.post_save.connect(geoserver_post_save, sender=Layer)
    signals.pre_save.connect(geoserver_pre_save_maplayer, sender=MapLayer)
    signals.post_save.connect(geoserver_post_save_map, sender=Map)
def set_resource_links(*args, **kwargs):
    """Regenerate default links and catalogue records for every Layer.

    Connected to the ``post_migrate`` signal in
    ``GeoserverAppConfig.ready()``; ``*args``/``**kwargs`` absorb the
    signal arguments.  Does nothing unless
    ``settings.UPDATE_RESOURCE_LINKS_AT_MIGRATE`` is truthy.
    """
    from geonode.utils import set_resource_default_links
    from geonode.catalogue.models import catalogue_post_save
    from geonode.layers.models import Layer

    if settings.UPDATE_RESOURCE_LINKS_AT_MIGRATE:
        _all_layers = Layer.objects.all()
        for index, layer in enumerate(_all_layers, start=1):
            _lyr_name = layer.name
            message = f"[{index} / {len(_all_layers)}] Updating Layer [{_lyr_name}] ..."
            logger.debug(message)
            try:
                set_resource_default_links(layer, layer)
                catalogue_post_save(instance=layer, sender=layer.__class__)
            except Exception:
                # A single broken layer must not abort the whole migration
                # pass; log the traceback and continue with the next one.
                logger.exception(
                    f"[ERROR] Layer [{_lyr_name}] couldn't be updated"
                )
class GeoserverAppConfig(NotificationsAppConfigBase):
    """AppConfig for the ``geonode.geoserver`` app.

    Registers the layer-related notification types and, once the app
    registry is ready, hooks up the GeoServer signal handlers.
    """
    name = 'geonode.geoserver'
    # Notification types consumed by NotificationsAppConfigBase:
    # tuples of (slug, display name, description).
    NOTIFICATIONS = (("layer_uploaded", _("Layer Uploaded"), _("A layer was uploaded"),),
                     ("layer_comment", _("Comment on Layer"), _("A layer was commented on"),),
                     ("layer_rated", _("Rating for Layer"), _("A rating was given to a layer"),),
                     )

    def ready(self):
        super(GeoserverAppConfig, self).ready()
        run_setup_hooks()
        # Connect the post_migrate signal with the _set_resource_links
        # method to update links for each resource
        from django.db.models import signals
        signals.post_migrate.connect(set_resource_links, sender=self)
default_app_config = 'geonode.geoserver.GeoserverAppConfig'
BACKEND_PACKAGE = 'geonode.geoserver'
|
shakamunyi/tensorflow | refs/heads/master | tensorflow/python/training/summary_io.py | 165 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Reads Summaries from and writes Summaries to event files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.summary.summary_iterator import summary_iterator
from tensorflow.python.summary.writer.writer import FileWriter as _FileWriter
from tensorflow.python.summary.writer.writer_cache import FileWriterCache as SummaryWriterCache
# pylint: enable=unused-import
from tensorflow.python.util.deprecation import deprecated
class SummaryWriter(_FileWriter):
  """Deprecated alias for `tf.summary.FileWriter`.

  Kept for backwards compatibility only: construction emits a deprecation
  warning, then all behavior is delegated to the `FileWriter` base class.
  """

  @deprecated("2016-11-30",
              "Please switch to tf.summary.FileWriter. The interface and "
              "behavior is the same; this is just a rename.")
  def __init__(self,
               logdir,
               graph=None,
               max_queue=10,
               flush_secs=120,
               graph_def=None):
    """Creates a `SummaryWriter` and an event file.

    This class is deprecated, and should be replaced with tf.summary.FileWriter.

    On construction the summary writer creates a new event file in `logdir`.
    This event file will contain `Event` protocol buffers constructed when you
    call one of the following functions: `add_summary()`, `add_session_log()`,
    `add_event()`, or `add_graph()`.

    If you pass a `Graph` to the constructor it is added to
    the event file. (This is equivalent to calling `add_graph()` later).

    TensorBoard will pick the graph from the file and display it graphically so
    you can interactively explore the graph you built. You will usually pass
    the graph from the session in which you launched it:

    ```python
    ...create a graph...
    # Launch the graph in a session.
    sess = tf.Session()
    # Create a summary writer, add the 'graph' to the event file.
    writer = tf.summary.FileWriter(<some-directory>, sess.graph)
    ```

    The other arguments to the constructor control the asynchronous writes to
    the event file:

    * `flush_secs`: How often, in seconds, to flush the added summaries
      and events to disk.
    * `max_queue`: Maximum number of summaries or events pending to be
      written to disk before one of the 'add' calls block.

    Args:
      logdir: A string. Directory where event file will be written.
      graph: A `Graph` object, such as `sess.graph`.
      max_queue: Integer. Size of the queue for pending events and summaries.
      flush_secs: Number. How often, in seconds, to flush the
        pending events and summaries to disk.
      graph_def: DEPRECATED: Use the `graph` argument instead.
    """
    # Pure passthrough; the renamed FileWriter implements everything.
    super(SummaryWriter, self).__init__(logdir, graph, max_queue, flush_secs,
                                        graph_def)
|
mdilai/jweegad | refs/heads/master | external/gtest/test/gtest_break_on_failure_unittest.py | 2140 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's break-on-failure mode.
A user can ask Google Test to seg-fault when an assertion fails, using
either the GTEST_BREAK_ON_FAILURE environment variable or the
--gtest_break_on_failure flag. This script tests such functionality
by invoking gtest_break_on_failure_unittest_ (a program written with
Google Test) with different environments and command line flags.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import gtest_test_utils
import os
import sys
# Constants.
IS_WINDOWS = os.name == 'nt'
# The environment variable for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE'
# The command line flag for enabling/disabling the break-on-failure mode.
BREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure'
# The environment variable for enabling/disabling the throw-on-failure mode.
THROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE'
# The environment variable for enabling/disabling the catch-exceptions mode.
CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'
# Path to the gtest_break_on_failure_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
'gtest_break_on_failure_unittest_')
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
def Run(command):
  """Runs a command; returns 1 if it was killed by a signal, or 0 otherwise."""

  # Launch the child with the shared test environment; report signal
  # termination as an integer flag.
  child = gtest_test_utils.Subprocess(command, env=environ)
  return 1 if child.terminated_by_signal else 0
# The tests.
class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):
  """Tests using the GTEST_BREAK_ON_FAILURE environment variable or
  the --gtest_break_on_failure flag to turn assertion failures into
  segmentation faults.
  """

  def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):
    """Runs gtest_break_on_failure_unittest_ and verifies that it does
    (or does not) have a seg-fault.

    Args:
      env_var_value:    value of the GTEST_BREAK_ON_FAILURE environment
                        variable; None if the variable should be unset.
      flag_value:       value of the --gtest_break_on_failure flag;
                        None if the flag should not be present.
      expect_seg_fault: 1 if the program is expected to generate a seg-fault;
                        0 otherwise.
    """

    SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)

    # Build a human-readable description of the environment for the
    # failure message below.
    if env_var_value is None:
      env_var_value_msg = ' is not set'
    else:
      env_var_value_msg = '=' + env_var_value

    # Translate flag_value into the actual command-line flag text.
    if flag_value is None:
      flag = ''
    elif flag_value == '0':
      flag = '--%s=0' % BREAK_ON_FAILURE_FLAG
    else:
      flag = '--%s' % BREAK_ON_FAILURE_FLAG

    command = [EXE_PATH]
    if flag:
      command.append(flag)

    if expect_seg_fault:
      should_or_not = 'should'
    else:
      should_or_not = 'should not'

    has_seg_fault = Run(command)

    # Restore the environment for subsequent tests before asserting.
    SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)

    msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' %
           (BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),
            should_or_not))
    self.assert_(has_seg_fault == expect_seg_fault, msg)

  def testDefaultBehavior(self):
    """Tests the behavior of the default mode."""

    self.RunAndVerify(env_var_value=None,
                      flag_value=None,
                      expect_seg_fault=0)

  def testEnvVar(self):
    """Tests using the GTEST_BREAK_ON_FAILURE environment variable."""

    self.RunAndVerify(env_var_value='0',
                      flag_value=None,
                      expect_seg_fault=0)
    self.RunAndVerify(env_var_value='1',
                      flag_value=None,
                      expect_seg_fault=1)

  def testFlag(self):
    """Tests using the --gtest_break_on_failure flag."""

    self.RunAndVerify(env_var_value=None,
                      flag_value='0',
                      expect_seg_fault=0)
    self.RunAndVerify(env_var_value=None,
                      flag_value='1',
                      expect_seg_fault=1)

  def testFlagOverridesEnvVar(self):
    """Tests that the flag overrides the environment variable."""

    self.RunAndVerify(env_var_value='0',
                      flag_value='0',
                      expect_seg_fault=0)
    self.RunAndVerify(env_var_value='0',
                      flag_value='1',
                      expect_seg_fault=1)
    self.RunAndVerify(env_var_value='1',
                      flag_value='0',
                      expect_seg_fault=0)
    self.RunAndVerify(env_var_value='1',
                      flag_value='1',
                      expect_seg_fault=1)

  def testBreakOnFailureOverridesThrowOnFailure(self):
    """Tests that gtest_break_on_failure overrides gtest_throw_on_failure."""

    SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')
    try:
      self.RunAndVerify(env_var_value=None,
                        flag_value='1',
                        expect_seg_fault=1)
    finally:
      SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)

  if IS_WINDOWS:
    # Windows-only: exception interception could swallow the crash, so
    # verify break-on-failure still wins when both modes are enabled.
    def testCatchExceptionsDoesNotInterfere(self):
      """Tests that gtest_catch_exceptions doesn't interfere."""

      SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')
      try:
        self.RunAndVerify(env_var_value='1',
                          flag_value='1',
                          expect_seg_fault=1)
      finally:
        SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)
if __name__ == '__main__':
gtest_test_utils.Main()
|
seagullbird/BLEXT | refs/heads/master | tests/test_basics.py | 1 | # -*- coding: utf-8 -*-
import unittest
from flask import current_app
from app import create_app, db
# 使用 unittest 编写的测试
# setUp() 和 tearDown() 分别在各测试前后运行
# 名字以 test_开头的函数都作为测试执行
# setUp()方法尝试创建一个测试环境,类似于运行中的程序。
# 首先,使用测试配置创建程序,然后激活上下文。
# 这一步的作用是确保能在测试中使用current_app,像普通请求一样。
# 然后创建一个全新的数据库,以备不时之需。
# 数据库和程序上下文在tearDown()方法中删除
# 其余测试脚本原理相同
class BasicsTestCase(unittest.TestCase):
    """Smoke tests for the application factory and its configuration."""

    def setUp(self):
        # Build an app with the 'testing' config and push an application
        # context so current_app works as it would during a real request,
        # then create a fresh database schema.
        self.app = create_app('testing')
        self.app_context = self.app.app_context()
        self.app_context.push()
        db.create_all()

    def tearDown(self):
        # Drop all data and pop the context pushed in setUp().
        db.session.remove()
        db.drop_all()
        self.app_context.pop()

    # Ensure the application instance exists
    def test_app_exists(self):
        self.assertFalse(current_app is None)

    # Ensure the application runs under the testing configuration
    def test_app_is_testing(self):
        self.assertTrue(current_app.config['TESTING'])
|
kostyll/micropython | refs/heads/master | tests/bytecode/mp-tests/string2.py | 22 | 'abc'
class f:
u"123"
pass
x = 'abc'
x = u"abc"
x = u"ab\\c"
x = r"ab\\c"
x = b"abc"
x = rb"abc"
x = b"ab\\c"
x = rb"ab\\c"
x = """abc"""
x = b"""abc"""
|
maisim/django-localflavor | refs/heads/master | localflavor/cl/forms.py | 4 | """
Chile specific form helpers.
"""
from __future__ import unicode_literals
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import RegexField, Select
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from .cl_regions import REGION_CHOICES
class CLRegionSelect(Select):
    """
    A Select widget that uses a list of Chilean Regions (Regiones)
    as its choices.
    """
    def __init__(self, attrs=None):
        # Choices are fixed to the static REGION_CHOICES list; only the
        # widget HTML attributes are configurable.
        super(CLRegionSelect, self).__init__(attrs, choices=REGION_CHOICES)
class CLRutField(RegexField):
    """
    Chilean "Rol Unico Tributario" (RUT) field. This is the Chilean national
    identification number.

    Samples for testing are available from
    https://palena.sii.cl/cvc/dte/ee_empresas_emisoras.html
    """
    default_error_messages = {
        'invalid': _('Enter a valid Chilean RUT.'),
        'strict': _('Enter a valid Chilean RUT. The format is XX.XXX.XXX-X.'),
        'checksum': _('The Chilean RUT is not valid.'),
    }

    def __init__(self, *args, **kwargs):
        # In strict mode the value must already be fully formatted
        # (dots and dash), e.g. "12.345.678-5".
        if 'strict' in kwargs:
            del kwargs['strict']
            super(CLRutField, self).__init__(r'^(\d{1,2}\.)?\d{3}\.\d{3}-[\dkK]$',
                                             error_messages={'invalid': self.default_error_messages['strict']},
                                             *args, **kwargs)
        else:
            # In non-strict mode, accept RUTs that validate but do not exist in
            # the real world.
            super(CLRutField, self).__init__(r'^[\d\.]{1,11}-?[\dkK]$', *args, **kwargs)

    def clean(self, value):
        """
        Check and clean the Chilean RUT.

        Returns the canonical "XX.XXX.XXX-V" representation, or '' for an
        empty value; raises ValidationError if the check digit is wrong.
        """
        super(CLRutField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        rut, verificador = self._canonify(value)
        if self._algorithm(rut) == verificador:
            return self._format(rut, verificador)
        else:
            raise ValidationError(self.error_messages['checksum'])

    def _algorithm(self, rut):
        """
        Takes RUT in pure canonical form, calculates the verifier digit.
        """
        # Modulo-11 scheme: weight the digits right-to-left with the
        # cycle 2,3,4,5,6,7, then map the remainder to the check char
        # ('K' stands for 10, '0' for 11).
        suma = 0
        multi = 2
        for r in rut[::-1]:
            suma += int(r) * multi
            multi += 1
            if multi == 8:
                multi = 2
        return '0123456789K0'[11 - suma % 11]

    def _canonify(self, rut):
        """
        Turns the RUT into one normalized format. Returns a (rut, verifier)
        tuple.
        """
        # Strip spaces, dots and dashes; the verifier is the last char.
        rut = force_text(rut).replace(' ', '').replace('.', '').replace('-', '')
        return rut[:-1], rut[-1].upper()

    def _format(self, code, verifier=None):
        """
        Formats the RUT from canonical form to the common string representation.
        If verifier=None, then the last digit in 'code' is the verifier.
        """
        if verifier is None:
            verifier = code[-1]
            code = code[:-1]
        # Insert thousands-style dots every three digits from the right.
        while len(code) > 3 and '.' not in code[:3]:
            pos = code.find('.')
            if pos == -1:
                new_dot = -3
            else:
                new_dot = pos - 3
            code = code[:new_dot] + '.' + code[new_dot:]
        return '%s-%s' % (code, verifier)
|
qeedquan/misc_utilities | refs/heads/master | snippets/python/sympy/calculus.py | 1 | from sympy import *
x, y = symbols('x y')

# Symbolic differentiation examples.
print(Derivative(x*x, x).doit())
print(Derivative(sin(x), x).doit())
print(Derivative(exp(x), x).doit())
print(Derivative(x*x - exp(x) + sin(x)*cos(x)/exp(2*x*x*x), x).doit())

# Symbolic integration examples (the Gaussian yields an erf expression).
print(Integral(x*3, x).doit())
print(Integral(exp(x), x).doit())
print(Integral(sin(x), x).doit())
print(Integral(exp(-x**2), x).doit())

# A plain symbolic expression.
print(x*x*x + 3*x)

# Symbolic matrix algebra: powers, inverse, transpose, exponential, det.
A = Matrix([[x*y, 0], [0, x+24*x-14*(y*x*x*x*y+510)]])
print(A)
print(A**5)
print(A**-1)
print("transpose: {}".format(A.T))
print("exponential: {}".format(exp(A)))
print("determinant: {}".format(det(A)))

# Hyperbolic 2x2 matrix and its matrix exponential.
R = Matrix([[sinh(x), cosh(x)], [-cosh(x), sinh(x)]])
print(R)
print(exp(R*20))
|
mollstam/UnrealPy | refs/heads/master | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/zope.interface-4.1.2/src/zope/interface/tests/odd.py | 79 | ##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Odd meta class that doesn't subclass type.
This is used for testing support for ExtensionClass in new interfaces.
>>> class A(object):
... __metaclass__ = MetaClass
... a = 1
...
>>> A.__name__
'A'
>>> A.__bases__ == (object,)
True
>>> class B(object):
... __metaclass__ = MetaClass
... b = 1
...
>>> class C(A, B): pass
...
>>> C.__name__
'C'
>>> int(C.__bases__ == (A, B))
1
>>> a = A()
>>> aa = A()
>>> a.a
1
>>> aa.a
1
>>> aa.a = 2
>>> a.a
1
>>> aa.a
2
>>> c = C()
>>> c.a
1
>>> c.b
1
>>> c.b = 2
>>> c.b
2
>>> C.c = 1
>>> c.c
1
>>> import sys
>>> if sys.version[0] == '2': # This test only makes sense under Python 2.x
... from types import ClassType
... assert not isinstance(C, (type, ClassType))
>>> int(C.__class__.__class__ is C.__class__)
1
"""
# class OddClass is an odd meta class
class MetaMetaClass(type):
    """Metaclass for MetaClass: makes an odd class report itself as its
    own __class__ (see the module doctest), deferring everything else
    to type."""
    def __getattribute__(self, name):
        # Lie about __class__ only; all other attributes go through type.
        if name == '__class__':
            return self
        return type.__getattribute__(self, name)
class MetaClass(object):
    """Odd classes

    Emulates a class object without subclassing type: it stores the
    name/bases/namespace itself and produces OddInstance objects when
    called.  (Python 2 only: __metaclass__ is ignored by Python 3.)
    """

    __metaclass__ = MetaMetaClass

    def __init__(self, name, bases, dict):
        # Mirror the (name, bases, namespace) triple a real metaclass gets.
        self.__name__ = name
        self.__bases__ = bases
        self.__dict__.update(dict)

    def __call__(self):
        # "Instantiating" an odd class yields an OddInstance wrapper.
        return OddInstance(self)

    def __getattr__(self, name):
        # Attribute missing locally: search the bases in order.  `self`
        # serves as a can't-possibly-match sentinel for getattr().
        for b in self.__bases__:
            v = getattr(b, name, self)
            if v is not self:
                return v
        raise AttributeError(name)

    def __repr__(self):
        return "<odd class %s at %s>" % (self.__name__, hex(id(self)))
class OddInstance(object):
    """Instance of an odd class: every attribute access is routed through
    the instance __dict__, falling back to the odd class on a miss."""

    def __init__(self, cls):
        # Write straight into __dict__ to bypass our own __setattr__.
        self.__dict__['__class__'] = cls

    def __getattribute__(self, name):
        dict = object.__getattribute__(self, '__dict__')
        if name == '__dict__':
            return dict
        # `self` is the sentinel so stored falsy/None values still work.
        v = dict.get(name, self)
        if v is not self:
            return v
        # Miss: delegate to the odd class (which searches its bases).
        return getattr(dict['__class__'], name)

    def __setattr__(self, name, v):
        self.__dict__[name] = v

    def __delattr__(self, name):
        del self.__dict__[name]

    def __repr__(self):
        return "<odd %s instance at %s>" % (
            self.__class__.__name__, hex(id(self)))
# DocTest:
if __name__ == "__main__":
    import doctest, __main__
    # NOTE(review): doctest's `isprivate` parameter belongs to the old
    # Python 2 API (deprecated, later removed) — this fixture targets
    # that era; confirm before running on modern Pythons.
    doctest.testmod(__main__, isprivate=lambda *a: False)
|
pkbullock/RaspberryPi | refs/heads/master | Projects/cam-pir/ncampir-v2.py | 2 | import RPi.GPIO as GPIO
import picamera
import time
# Motion-triggered camera: BCM pin 17 carries the PIR sensor output.
GPIO.setmode(GPIO.BCM)
GPIO.setup(17, GPIO.IN)
#GPIO.setup(18, GPIO.OUT)
camera = picamera.PiCamera()
camera.resolution = (1024, 768)
try:
    # Poll the PIR sensor forever; capture a timestamped still whenever
    # motion is reported.
    while True:
        #GPIO.output(18, True)
        input_state = GPIO.input(17)
        if input_state == True:
            print('Motion Detected')
            camera.start_preview()
            # Brief pause so the camera exposure settles before capture.
            time.sleep(2)
            filename = 'night-motion-'+time.strftime('%Y%m%d-%H%M%S')+'.jpg'
            camera.capture(filename)
            time.sleep(2)
            camera.stop_preview()
            print(filename)
finally:
    # Always release the GPIO pins (and, below, the camera) on exit.
    print('Cleaning Up')
    #GPIO.output(18, False)
    GPIO.cleanup()
camera.close() |
yungsters/rain-workload-toolkit | refs/heads/master | utils/fixedurl.py | 10 | import sys
import subprocess
import time
import re
import os
import simplejson as json
import getopt
from run_manager import RunManager, RainOutputParser
'''Example config
{
"profilesCreatorClass": "radlab.rain.workload.httptest.FixedUrlProfileCreator",
"profilesCreatorClassParams": {
"hostListFile" : "/home/rean/work/rain.git/hostlist.txt",
"popularHostFraction": 0.2,
"numHostTargets": 10,
"meanThinkTime": 5,
"usersPerPopularHost": 10,
"usersPerLessPopularHost":1,
"generatorParameters": {
"connectionTimeoutMsecs" : 1000,
"socketTimeoutMsecs" : 1000
}
},
"timing": {
"rampUp": 10,
"duration": 60,
"rampDown": 10
},
"pipePort": 7581
}
'''
class FixedUrlGeneratorParameters:
    '''
    Rain configuration object for generator parameters for
    the SpecificUrlGenerator
    '''

    def __init__(self):
        # Milliseconds to wait for an HTTP connection / server response.
        self.connectionTimeoutMsecs = 1000
        self.socketTimeoutMsecs = 1000

    def to_json(self):
        # Serialize to a plain dict suitable for json.dumps.
        return {
            'connectionTimeoutMsecs': self.connectionTimeoutMsecs,
            'socketTimeoutMsecs': self.socketTimeoutMsecs,
        }
class FixedUrlTestConfig:
    '''
    Rain configuration object for SpecificUrl tests
    '''

    def __init__(self):
        self.generatorParameters = FixedUrlGeneratorParameters()
        # Profile creator class to use
        self.profilesCreatorClass = \
            "radlab.rain.workload.httptest.FixedUrlProfileCreator"
        # Profile creator params
        self.hostListFile = ""
        self.popularHostFraction = 0.2  # Fraction of popular hosts
        self.usersPerPopularHost = 25
        self.usersPerLessPopularHost = 5
        self.meanThinkTime = 5  # seconds
        # Timing info
        self.rampUp = 10  # seconds
        self.duration = 60  # seconds
        self.rampDown = 10  # seconds
        self.pipePort = 7851  # comm port

    def to_json(self):
        # Serialize to the nested dict layout Rain expects; the creator
        # params embed the generator parameters as a sub-dict.
        creator_params = {
            'hostListFile': self.hostListFile,
            'popularHostFraction': self.popularHostFraction,
            'meanThinkTime': self.meanThinkTime,
            'usersPerPopularHost': self.usersPerPopularHost,
            'usersPerLessPopularHost': self.usersPerLessPopularHost,
            'generatorParameters': self.generatorParameters.to_json(),
        }
        return {
            'profilesCreatorClass': self.profilesCreatorClass,
            'profilesCreatorClassParams': creator_params,
            'timing': {
                'rampUp': self.rampUp,
                'duration': self.duration,
                'rampDown': self.rampDown,
            },
            'pipePort': self.pipePort,
        }
class FixedUrlTestRunner:
    """Drives one fixed-URL Rain run: writes the JSON config, launches
    Rain via RunManager, parses and validates the results, and persists
    the raw output plus a summary to results_dir.  (Python 2 module.)"""

    def create_dir( self, path ):
        """Create `path` if it does not already exist (single level)."""
        if not os.path.exists( path ):
            os.mkdir( path )

    def run( self, hostlist_fname, popular_host_fraction,\
             mean_think_time, users_per_popular_host,\
             users_per_less_popular_host,\
             connection_timeout_msecs, socket_timeout_msecs,\
             results_dir="./results", run_duration_secs=60, \
             config_dir="./config", pipe_port=7851 ):
        """Build the run config, execute Rain once and write out the
        log and parsed per-track results.  Parameter meanings mirror the
        command-line flags documented in usage()."""
        # Some pre-reqs:
        # 1) create the config_dir if it doesn't exist
        # 2) create the results_dir if it doesn't exist
        self.create_dir( config_dir )
        self.create_dir( results_dir )
        num_tests = 1
        for i in range(num_tests):
            # With a single Rain launch, load an entire block of ip's
            config = FixedUrlTestConfig()
            config.hostListFile = hostlist_fname
            config.duration = run_duration_secs
            config.popularHostFraction = popular_host_fraction
            config.usersPerPopularHost = users_per_popular_host
            config.usersPerLessPopularHost = users_per_less_popular_host
            config.meanThinkTime = mean_think_time
            config.pipePort = pipe_port
            # Add in the parameters for the workload generator
            # the operation mixes etc.
            generatorParams = FixedUrlGeneratorParameters()
            generatorParams.connectionTimeoutMsecs = connection_timeout_msecs
            generatorParams.socketTimeoutMsecs = socket_timeout_msecs
            config.generatorParameters = generatorParams
            # NOTE(review): sort_keys='True' is a truthy *string*, not the
            # boolean True — it works, but should read sort_keys=True.
            json_data = \
                json.dumps(config, sort_keys='True',\
                               default=FixedUrlTestConfig.to_json)
            # Write this data out to a file, then invoke the run mananger
            # passing in the path to this file
            print( "[FixedUrlTestRunner] json config: {0}"\
                       .format(json_data) )
            run_classpath=".:rain.jar:workloads/httptest.jar"
            run_config_filename = config_dir + "/" + \
                "run_fixed_url_config" + "_nodes.json"
            run_output_filename = results_dir + "/" + \
                "run_fixed_url_log" + "_nodes.txt"
            run_results_filename = results_dir + "/" + \
                "run_fixed_url_result" + "_nodes.txt"
            # write the json data out to the config file
            # invoke the run manager passing the location of the config file
            # collect the results and write them out to the results_dir
            print "[FixedUrlTestRunner] Writing config file: {0}"\
                .format( run_config_filename )
            config_file = open( run_config_filename, 'w' )
            config_file.write( json_data )
            config_file.flush()
            config_file.close()
            run_output = RunManager.run_rain( run_config_filename,\
                                                  run_classpath )
            #print run_output
            track_results = RainOutputParser.parse_output( run_output )
            # Validate each of the track_results instances
            for result in track_results:
                # Set some 90th and 99th pctile thresholds
                result.pct_overhead_ops_threshold=10.0
                result.pct_failed_ops_threshold=5.0
                # Set the desired 90th and 99th percentile thresholds for
                # the 50ms, 100ms, 200ms operations - set everything to
                # 500 ms = 0.5s. Threshold units = seconds
                result.op_response_time_thresholds['FixedUrl']=\
                    (0.5,0.5)
            # Write out the run output
            print "[FixedUrlTestRunner] Writing output: {0}"\
                .format( run_output_filename )
            run_output_file = open( run_output_filename, 'w' )
            run_output_file.write( run_output )
            run_output_file.flush()
            run_output_file.close()
            # Write out the run results
            print "[FixedUrlTestRunner] Writing results: {0}"\
                .format( run_results_filename )
            run_results_file = open( run_results_filename, 'w' )
            RainOutputParser.print_results( track_results, run_results_file )
            run_results_file.write( "\n" )
            # After writing out the table for all the tracks
            # Spit out the 90th and 99th percentiles
            for result in track_results:
                for k,v in result.op_response_times.items():
                    run_results_file.write( "{0},{1},{2},{3}\n"\
                        .format(result.name, k, v[0], v[1]) )
            run_results_file.flush()
            run_results_file.close()
def usage():
    """Print the command-line synopsis and the default flag values."""
    print( "Usage: {0} [--resultsdir <path>]"\
           " [--duration <seconds to run>] [--configdir <path>]"\
           " [--popularhosts <%popular hosts>]"\
           " [--popularhostusers <users-per-popular-host>]"\
           " [--lesspopularhostusers <users-per-less-popular-host]"\
           " [--connectiontimeout <msecs to wait for http connection>]"\
           " [--sockettimeout <msecs to wait for data/server response>]"\
           " [--hostlist <path to file>] [--pipeport <port>]"\
           .format(sys.argv[0]) )
    print "\n"
    print( "defaults: {0} --resultsdir ./results --duration 60"\
           " --configdir ./config --popularhosts 0.2"\
           " --popularhostusers 25 --lesspopularhostusers 5"\
           " --sockettimeout 1000 --connectiontimeout 1000"\
           " --thinktime 5 "\
           " --hostlist /home/rean/work/rain.git/hostlist.txt"\
           " --pipeport 7851"\
           .format(sys.argv[0]) )
def main(argv):
results_dir = "./results"
run_duration = 60
config_dir = "./config"
hostlist_fname = ""
popular_host_fraction = 0.2
mean_think_time = 5
users_per_popular_host = 25
users_per_less_popular_host = 5
connection_timeout_msecs = 1000
socket_timeout_msecs = 1000
pipe_port = 7851
# parse arguments and replace the defaults
try:
opts, args = getopt.getopt( argv, "h", ["resultsdir=",\
"duration=", "configdir=",\
"help", "hostlist=", \
"popularhosts=",\
"thinktime=", "popularhostusers=",\
"lesspopularhostusers=",\
"connectiontimeout=",\
"sockettimeout=", "pipeport="] )
except getopt.GetoptError:
print sys.exc_info()
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ( "-h", "--help" ):
usage()
sys.exit()
elif opt == "--resultsdir":
results_dir = arg
elif opt == "--duration":
run_duration = int(arg)
elif opt == "--configdir":
config_dir = arg
elif opt == "--popularhostfraction":
popular_host_fraction = float(arg)
elif opt == "--popularhostusers":
users_per_popular_host = int(arg)
elif opt == "--lesspopularhostusers":
users_per_less_popular_host = int(arg)
elif opt == "--thinktime":
mean_think_time = float(arg)
elif opt == "--connectiontimeout":
connection_timeout_msecs = int(arg)
elif opt == "--sockettimeout":
socket_timeout_msecs = int(arg)
elif opt == "--hostlist":
hostlist_fname = arg
elif opt == "--pipeport":
pipe_port = int(arg)
# launch run
test_runner = FixedUrlTestRunner()
test_runner.run( hostlist_fname, popular_host_fraction,\
mean_think_time, \
users_per_popular_host,\
users_per_less_popular_host,\
connection_timeout_msecs, socket_timeout_msecs,\
results_dir, run_duration, \
config_dir, pipe_port )
if __name__=='__main__':
# Pass all the arguments we received except the name of the script
# argv[0]
main( sys.argv[1:] )
|
p4datasystems/CarnotKE | refs/heads/master | jyhton/lib-python/2.7/encodings/iso2022_jp_3.py | 816 | #
# iso2022_jp_3.py: Python Unicode Codec for ISO2022_JP_3
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_jp_3')
# Boilerplate generated for every CJK codecs encoding: each class simply
# binds the C-level iso2022_jp_3 codec object from _codecs_iso2022.
class Codec(codecs.Codec):
    encode = codec.encode
    decode = codec.decode

class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    codec = codec

class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    codec = codec

class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    codec = codec

class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    codec = codec
def getregentry():
    """Return the CodecInfo entry the codecs registry uses for
    'iso2022_jp_3' (looked up via the encodings search function)."""
    return codecs.CodecInfo(
        name='iso2022_jp_3',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.