| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable) |
|---|---|---|---|---|
Beeblio/django
|
refs/heads/master
|
tests/migrations/migrations_test_apps/normal/__init__.py
|
12133432
| |
Jet-Streaming/gyp
|
refs/heads/master
|
test/generator-output/gyptest-depth.py
|
1
|
#!/usr/bin/env python
# Copyright 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a project hierarchy created when the --generator-output=
and --depth= options are used to put the build configuration files in a
separate directory tree.
"""
import TestGyp
import os
# This is a regression test for the make generator only.
test = TestGyp.TestGyp(formats=['make'])
test.writable(test.workpath('src'), False)
toplevel_dir = os.path.basename(test.workpath())
test.run_gyp(os.path.join(toplevel_dir, 'src', 'prog1.gyp'),
'-Dset_symroot=1',
'--generator-output=gypfiles',
depth=toplevel_dir,
chdir='..')
test.writable(test.workpath('src/build'), True)
test.writable(test.workpath('src/subdir2/build'), True)
test.writable(test.workpath('src/subdir3/build'), True)
test.build('prog1.gyp', test.ALL, chdir='gypfiles')
chdir = 'gypfiles'
expect = """\
Hello from %s
Hello from inc.h
Hello from inc1/include1.h
Hello from inc2/include2.h
Hello from inc3/include3.h
Hello from subdir2/deeper/deeper.h
"""
if test.format == 'xcode':
chdir = 'src'
test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
if test.format == 'xcode':
chdir = 'src/subdir2'
test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
if test.format == 'xcode':
chdir = 'src/subdir3'
test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
test.pass_test()
|
doismellburning/edx-platform
|
refs/heads/master
|
common/djangoapps/track/migrations/0002_auto__add_field_trackinglog_host__chg_field_trackinglog_event_type__ch.py
|
189
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'TrackingLog.host'
db.add_column('track_trackinglog', 'host',
self.gf('django.db.models.fields.CharField')(default='', max_length=64, blank=True),
keep_default=False)
# Changing field 'TrackingLog.event_type'
db.alter_column('track_trackinglog', 'event_type', self.gf('django.db.models.fields.CharField')(max_length=512))
# Changing field 'TrackingLog.page'
db.alter_column('track_trackinglog', 'page', self.gf('django.db.models.fields.CharField')(max_length=512, null=True))
def backwards(self, orm):
# Deleting field 'TrackingLog.host'
db.delete_column('track_trackinglog', 'host')
# Changing field 'TrackingLog.event_type'
db.alter_column('track_trackinglog', 'event_type', self.gf('django.db.models.fields.CharField')(max_length=32))
# Changing field 'TrackingLog.page'
db.alter_column('track_trackinglog', 'page', self.gf('django.db.models.fields.CharField')(max_length=32, null=True))
models = {
'track.trackinglog': {
'Meta': {'object_name': 'TrackingLog'},
'agent': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'dtcreated': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'event_source': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'event_type': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'host': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'page': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'})
}
}
complete_apps = ['track']
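# Usage note (illustrative, assuming a standard South setup): applying this
# migration with `./manage.py migrate track` widens event_type and page from
# 32 to 512 characters and adds the 64-character host column (blank=True,
# default ''); migrating backwards restores the old limits and drops host.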
|
JetBrains/intellij-community
|
refs/heads/master
|
python/testData/resolve/multiFile/relativeAndSameDirectoryImports/plainDirectoryImportResolveExcludedDirectoryModuleNotThrowsException/plainDirectory/script.py
|
27
|
from excluded import foo
# <ref>
|
KarlParkinson/practice
|
refs/heads/master
|
dataStructures/binTreeIdentical.py
|
1
|
import binTree
def equalTrees(t1, t2):
if (t1 == None or t2 == None):
return compNoneTrees(t1,t2)
else:
if (t1.getRootVal() != t2.getRootVal()):
return False
else:
return equalTrees(t1.getLeftChild(), t2.getLeftChild()) and equalTrees(t1.getRightChild(), t2.getRightChild())
def compNoneTrees(t1,t2):
return (t1 == None and t2 == None)
t1 = binTree.BinaryTree(1)
t1.insertLeft(2)
t1.insertRight(3)
t1.getLeftChild().insertLeft(4)
t1.getLeftChild().insertRight(5)
t2 = binTree.BinaryTree(1)
t2.insertLeft(2)
t2.getLeftChild().insertLeft(4)
t2.getLeftChild().insertRight(5)
print equalTrees(t1,t2)
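# A self-contained sketch of the same recursion, without the binTree
# dependency (illustrative; `Node` is a stand-in for binTree.BinaryTree):
class Node(object):
    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right

def identical(a, b):
    # Two trees are identical when both are empty, or when their roots match
    # and both pairs of subtrees are identical.
    if a is None or b is None:
        return a is b
    return (a.val == b.val and
            identical(a.left, b.left) and
            identical(a.right, b.right))

print(identical(Node(1, Node(2)), Node(1, Node(2))))  # True
print(identical(Node(1, Node(2)), Node(1)))           # False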
|
drongh/vnpy
|
refs/heads/master
|
vn.strategy/backtestingEngine.py
|
91
|
# encoding: UTF-8
import shelve
from eventEngine import *
from pymongo import Connection
from pymongo.errors import *
from strategyEngine import *
########################################################################
class LimitOrder(object):
"""限价单对象"""
#----------------------------------------------------------------------
def __init__(self, symbol):
"""Constructor"""
self.symbol = symbol
self.price = 0
self.volume = 0
self.direction = None
self.offset = None
########################################################################
class BacktestingEngine(object):
"""
    Backtesting engine. Responsibilities:
    1. Read data from the database and replay it
    2. Passed in as a parameter when the StrategyEngine is created
"""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
self.eventEngine = EventEngine()
        # Strategy engine
        self.strategyEngine = None
        # List of historical TICK data; replay is simulated with a for loop,
        # and iterating a plain list is faster than NumPy or pandas here
        self.listDataHistory = []
        # Dictionary of limit orders
        self.dictOrder = {}
        # Latest TICK data
        self.currentData = None
        # Trades recorded during the backtest
        self.listTrade = []
        # Order reference number
        self.orderRef = 0
        # Trade ID
        self.tradeID = 0
#----------------------------------------------------------------------
def setStrategyEngine(self, engine):
"""设置策略引擎"""
self.strategyEngine = engine
self.writeLog(u'策略引擎设置完成')
#----------------------------------------------------------------------
def connectMongo(self):
"""连接MongoDB数据库"""
try:
self.__mongoConnection = Connection()
self.__mongoConnected = True
self.__mongoTickDB = self.__mongoConnection['TickDB']
self.writeLog(u'回测引擎连接MongoDB成功')
except ConnectionFailure:
self.writeLog(u'回测引擎连接MongoDB失败')
#----------------------------------------------------------------------
def loadDataHistory(self, symbol, startDate, endDate):
"""载入历史TICK数据"""
if self.__mongoConnected:
collection = self.__mongoTickDB[symbol]
# 如果输入了读取TICK的最后日期
if endDate:
cx = collection.find({'date':{'$gte':startDate, '$lte':endDate}})
elif startDate:
cx = collection.find({'date':{'$gte':startDate}})
else:
cx = collection.find()
# 将TICK数据读入内存
self.listDataHistory = [data for data in cx]
self.writeLog(u'历史TICK数据载入完成')
else:
self.writeLog(u'MongoDB未连接,请检查')
#----------------------------------------------------------------------
def processLimitOrder(self):
"""处理限价单"""
for ref, order in self.dictOrder.items():
# 如果是买单,且限价大于等于当前TICK的卖一价,则假设成交
if order.direction == DIRECTION_BUY and \
order.price >= self.currentData['AskPrice1']:
self.executeLimitOrder(ref, order, self.currentData['AskPrice1'])
# 如果是卖单,且限价低于当前TICK的买一价,则假设全部成交
if order.direction == DIRECTION_SELL and \
order.price <= self.currentData['BidPrice1']:
self.executeLimitOrder(ref, order, self.currentData['BidPrice1'])
#----------------------------------------------------------------------
def executeLimitOrder(self, ref, order, price):
"""限价单成交处理"""
# 成交回报
self.tradeID = self.tradeID + 1
tradeData = {}
tradeData['InstrumentID'] = order.symbol
tradeData['OrderRef'] = ref
tradeData['TradeID'] = str(self.tradeID)
tradeData['Direction'] = order.direction
tradeData['OffsetFlag'] = order.offset
tradeData['Price'] = price
tradeData['Volume'] = order.volume
tradeEvent = Event()
tradeEvent.dict_['data'] = tradeData
self.strategyEngine.updateTrade(tradeEvent)
        # Order report
orderData = {}
orderData['InstrumentID'] = order.symbol
orderData['OrderRef'] = ref
orderData['Direction'] = order.direction
orderData['CombOffsetFlag'] = order.offset
orderData['LimitPrice'] = price
orderData['VolumeTotalOriginal'] = order.volume
orderData['VolumeTraded'] = order.volume
orderData['InsertTime'] = ''
orderData['CancelTime'] = ''
orderData['FrontID'] = ''
orderData['SessionID'] = ''
orderData['OrderStatus'] = ''
orderEvent = Event()
orderEvent.dict_['data'] = orderData
self.strategyEngine.updateOrder(orderEvent)
        # Record the trade in the trade list
        self.listTrade.append(tradeData)
        # Remove the limit order
del self.dictOrder[ref]
#----------------------------------------------------------------------
def startBacktesting(self):
"""开始回测"""
self.writeLog(u'开始回测')
for data in self.listDataHistory:
# 记录最新的TICK数据
self.currentData = data
# 处理限价单
self.processLimitOrder()
# 推送到策略引擎中
event = Event()
event.dict_['data'] = data
self.strategyEngine.updateMarketData(event)
self.saveTradeData()
self.writeLog(u'回测结束')
#----------------------------------------------------------------------
def sendOrder(self, instrumentid, exchangeid, price, pricetype, volume, direction, offset):
"""回测发单"""
order = LimitOrder(instrumentid)
order.price = price
order.direction = direction
order.volume = volume
order.offset = offset
self.orderRef = self.orderRef + 1
self.dictOrder[str(self.orderRef)] = order
return str(self.orderRef)
#----------------------------------------------------------------------
def cancelOrder(self, instrumentid, exchangeid, orderref, frontid, sessionid):
"""回测撤单"""
try:
del self.dictOrder[orderref]
except KeyError:
pass
#----------------------------------------------------------------------
def writeLog(self, log):
"""写日志"""
print log
#----------------------------------------------------------------------
def selectInstrument(self, symbol):
"""读取合约数据"""
d = {}
d['ExchangeID'] = 'BackTesting'
return d
#----------------------------------------------------------------------
def saveTradeData(self):
"""保存交易记录"""
f = shelve.open('result.vn')
f['listTrade'] = self.listTrade
f.close()
#----------------------------------------------------------------------
def subscribe(self, symbol, exchange):
"""仿真订阅合约"""
pass
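# A minimal wiring sketch (illustrative; the StrategyEngine constructor
# signature below is an assumption, inferred only from how the engine is
# used above, so this is left as comments rather than executable code):
#
#   engine = BacktestingEngine()
#   strategy = StrategyEngine(engine.eventEngine, engine)  # hypothetical signature
#   engine.setStrategyEngine(strategy)
#   engine.connectMongo()
#   engine.loadDataHistory('IF1505', '20150101', '20150401')  # placeholder symbol/dates
#   engine.startBacktesting()  # replays ticks, matches limit orders, saves result.vn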
|
roy-boy/python_scripts
|
refs/heads/master
|
html_reporter.py
|
1
|
"""html_reporter.py converts csv to a html test report."""
import datetime
from csv_handler import load_csv
from lib import HTML
import logger as tl
from test_config import TEST_OUTPUT_PATH, TEST_RESULT_CSV
def produce_report():
test_timestamp = datetime.datetime.now()
test_timestamp = test_timestamp.strftime('%y-%m-%d-%H-%M-%S')
tl.test_logger.info('Producing HTML test report...')
html_file = TEST_OUTPUT_PATH + test_timestamp + '-test_result.html'
# dict of colors for each result:
result_colors = {
'PASSED': 'lime',
'FAILED': 'red',
'UNKNOWN': 'yellow',
}
# to load test result csv file:
load_test_result_list = load_csv(TEST_RESULT_CSV)
result_header = load_test_result_list[0]
test_result_list = load_test_result_list[1]
t = HTML.Table(header_row=result_header)
for test_case in test_result_list:
fill_color = result_colors[test_case['TEST_RESULT']] # to set color to the cell
colored_result = HTML.TableCell(test_case['TEST_RESULT'], bgcolor=fill_color)
t.rows.append([test_case['TEST_FLAG'], test_case['TEST_CASE_ID'], test_case['TEST_CASE_NAME'],
test_case['REGION'], test_case['VENUE'], test_case['TRADE_TYPE'], test_case['TRADE_FILE'],
test_case['TRADE_ID'], test_case['STP'], colored_result])
html_code = str(t)
try:
with open(html_file, 'w') as html_report:
html_report.write('<H2 style="font-family:verdana">Integration Test Result</H2>')
html_report.write('<p style="font-family:verdana">Execution time: ' + test_timestamp)
html_report.write(html_code)
html_report.write('</p>')
except OSError as err:
        print('Failed to open the file:', err)
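# A minimal sketch of the table construction used above (assuming the
# HTML.py library's Table/TableCell API imported here as `HTML`):
#
#   t = HTML.Table(header_row=['TEST_CASE_ID', 'TEST_RESULT'])
#   t.rows.append(['TC-01', HTML.TableCell('PASSED', bgcolor='lime')])
#   str(t)  # -> '<table>...</table>' markup ready to embed in the report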
|
AutorestCI/azure-sdk-for-python
|
refs/heads/master
|
azure-mgmt-network/azure/mgmt/network/v2017_11_01/operations/load_balancer_backend_address_pools_operations.py
|
1
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class LoadBalancerBackendAddressPoolsOperations(object):
"""LoadBalancerBackendAddressPoolsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2017-11-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-11-01"
self.config = config
def list(
self, resource_group_name, load_balancer_name, custom_headers=None, raw=False, **operation_config):
"""Gets all the load balancer backed address pools.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of BackendAddressPool
:rtype:
~azure.mgmt.network.v2017_11_01.models.BackendAddressPoolPaged[~azure.mgmt.network.v2017_11_01.models.BackendAddressPool]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/backendAddressPools'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.BackendAddressPoolPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.BackendAddressPoolPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def get(
self, resource_group_name, load_balancer_name, backend_address_pool_name, custom_headers=None, raw=False, **operation_config):
"""Gets load balancer backend address pool.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param backend_address_pool_name: The name of the backend address
pool.
:type backend_address_pool_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: BackendAddressPool or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2017_11_01.models.BackendAddressPool or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/backendAddressPools/{backendAddressPoolName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'backendAddressPoolName': self._serialize.url("backend_address_pool_name", backend_address_pool_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BackendAddressPool', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
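# Typical access path (illustrative): this operations class is normally
# reached through a NetworkManagementClient rather than constructed directly.
# 'my_rg' and 'my_lb' below are placeholder resource names.
#
#   from azure.mgmt.network import NetworkManagementClient
#   client = NetworkManagementClient(credentials, subscription_id)
#   for pool in client.load_balancer_backend_address_pools.list('my_rg', 'my_lb'):
#       print(pool.name)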
|
nonhermitian/scipy
|
refs/heads/master
|
scipy/interpolate/polyint.py
|
45
|
from __future__ import division, print_function, absolute_import
import warnings
import numpy as np
from scipy.special import factorial
from scipy._lib.six import xrange
from scipy._lib._util import _asarray_validated
__all__ = ["KroghInterpolator", "krogh_interpolate", "BarycentricInterpolator",
"barycentric_interpolate", "approximate_taylor_polynomial"]
def _isscalar(x):
"""Check whether x is if a scalar type, or 0-dim"""
return np.isscalar(x) or hasattr(x, 'shape') and x.shape == ()
class _Interpolator1D(object):
"""
Common features in univariate interpolation
Deal with input data type and interpolation axis rolling. The
actual interpolator can assume the y-data is of shape (n, r) where
`n` is the number of x-points, and `r` the number of variables,
and use self.dtype as the y-data type.
Attributes
----------
_y_axis
Axis along which the interpolation goes in the original array
_y_extra_shape
Additional trailing shape of the input arrays, excluding
the interpolation axis.
dtype
Dtype of the y-data arrays. Can be set via set_dtype, which
forces it to be float or complex.
Methods
-------
__call__
_prepare_x
_finish_y
_reshape_yi
_set_yi
_set_dtype
_evaluate
"""
__slots__ = ('_y_axis', '_y_extra_shape', 'dtype')
def __init__(self, xi=None, yi=None, axis=None):
self._y_axis = axis
self._y_extra_shape = None
self.dtype = None
if yi is not None:
self._set_yi(yi, xi=xi, axis=axis)
def __call__(self, x):
"""
Evaluate the interpolant
Parameters
----------
x : array_like
Points to evaluate the interpolant at.
Returns
-------
y : array_like
Interpolated values. Shape is determined by replacing
the interpolation axis in the original array with the shape of x.
"""
x, x_shape = self._prepare_x(x)
y = self._evaluate(x)
return self._finish_y(y, x_shape)
def _evaluate(self, x):
"""
Actually evaluate the value of the interpolator.
"""
raise NotImplementedError()
def _prepare_x(self, x):
"""Reshape input x array to 1-D"""
x = _asarray_validated(x, check_finite=False, as_inexact=True)
x_shape = x.shape
return x.ravel(), x_shape
def _finish_y(self, y, x_shape):
"""Reshape interpolated y back to n-d array similar to initial y"""
y = y.reshape(x_shape + self._y_extra_shape)
if self._y_axis != 0 and x_shape != ():
nx = len(x_shape)
ny = len(self._y_extra_shape)
s = (list(range(nx, nx + self._y_axis))
+ list(range(nx)) + list(range(nx+self._y_axis, nx+ny)))
y = y.transpose(s)
return y
def _reshape_yi(self, yi, check=False):
yi = np.rollaxis(np.asarray(yi), self._y_axis)
if check and yi.shape[1:] != self._y_extra_shape:
ok_shape = "%r + (N,) + %r" % (self._y_extra_shape[-self._y_axis:],
self._y_extra_shape[:-self._y_axis])
raise ValueError("Data must be of shape %s" % ok_shape)
return yi.reshape((yi.shape[0], -1))
def _set_yi(self, yi, xi=None, axis=None):
if axis is None:
axis = self._y_axis
if axis is None:
raise ValueError("no interpolation axis specified")
yi = np.asarray(yi)
shape = yi.shape
if shape == ():
shape = (1,)
if xi is not None and shape[axis] != len(xi):
raise ValueError("x and y arrays must be equal in length along "
"interpolation axis.")
self._y_axis = (axis % yi.ndim)
self._y_extra_shape = yi.shape[:self._y_axis]+yi.shape[self._y_axis+1:]
self.dtype = None
self._set_dtype(yi.dtype)
def _set_dtype(self, dtype, union=False):
if np.issubdtype(dtype, np.complexfloating) \
or np.issubdtype(self.dtype, np.complexfloating):
self.dtype = np.complex_
else:
if not union or self.dtype != np.complex_:
self.dtype = np.float_
class _Interpolator1DWithDerivatives(_Interpolator1D):
def derivatives(self, x, der=None):
"""
Evaluate many derivatives of the polynomial at the point x
Produce an array of all derivative values at the point x.
Parameters
----------
x : array_like
Point or points at which to evaluate the derivatives
der : int or None, optional
How many derivatives to extract; None for all potentially
nonzero derivatives (that is a number equal to the number
of points). This number includes the function value as 0th
derivative.
Returns
-------
d : ndarray
Array with derivatives; d[j] contains the j-th derivative.
Shape of d[j] is determined by replacing the interpolation
axis in the original array with the shape of x.
Examples
--------
>>> from scipy.interpolate import KroghInterpolator
>>> KroghInterpolator([0,0,0],[1,2,3]).derivatives(0)
array([1.0,2.0,3.0])
>>> KroghInterpolator([0,0,0],[1,2,3]).derivatives([0,0])
array([[1.0,1.0],
[2.0,2.0],
[3.0,3.0]])
"""
x, x_shape = self._prepare_x(x)
y = self._evaluate_derivatives(x, der)
y = y.reshape((y.shape[0],) + x_shape + self._y_extra_shape)
if self._y_axis != 0 and x_shape != ():
nx = len(x_shape)
ny = len(self._y_extra_shape)
s = ([0] + list(range(nx+1, nx + self._y_axis+1))
+ list(range(1,nx+1)) +
list(range(nx+1+self._y_axis, nx+ny+1)))
y = y.transpose(s)
return y
def derivative(self, x, der=1):
"""
Evaluate one derivative of the polynomial at the point x
Parameters
----------
x : array_like
Point or points at which to evaluate the derivatives
der : integer, optional
Which derivative to extract. This number includes the
function value as 0th derivative.
Returns
-------
d : ndarray
Derivative interpolated at the x-points. Shape of d is
determined by replacing the interpolation axis in the
original array with the shape of x.
Notes
-----
This is computed by evaluating all derivatives up to the desired
one (using self.derivatives()) and then discarding the rest.
"""
x, x_shape = self._prepare_x(x)
y = self._evaluate_derivatives(x, der+1)
return self._finish_y(y[der], x_shape)
class KroghInterpolator(_Interpolator1DWithDerivatives):
"""
Interpolating polynomial for a set of points.
The polynomial passes through all the pairs (xi,yi). One may
additionally specify a number of derivatives at each point xi;
this is done by repeating the value xi and specifying the
derivatives as successive yi values.
Allows evaluation of the polynomial and all its derivatives.
For reasons of numerical stability, this function does not compute
the coefficients of the polynomial, although they can be obtained
by evaluating all the derivatives.
Parameters
----------
xi : array_like, length N
Known x-coordinates. Must be sorted in increasing order.
yi : array_like
Known y-coordinates. When an xi occurs two or more times in
a row, the corresponding yi's represent derivative values.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Notes
-----
Be aware that the algorithms implemented here are not necessarily
the most numerically stable known. Moreover, even in a world of
exact computation, unless the x coordinates are chosen very
carefully - Chebyshev zeros (e.g. cos(i*pi/n)) are a good choice -
polynomial interpolation itself is a very ill-conditioned process
due to the Runge phenomenon. In general, even with well-chosen
x values, degrees higher than about thirty cause problems with
numerical instability in this code.
Based on [1]_.
References
----------
.. [1] Krogh, "Efficient Algorithms for Polynomial Interpolation
and Numerical Differentiation", 1970.
Examples
--------
To produce a polynomial that is zero at 0 and 1 and has
derivative 2 at 0, call
>>> from scipy.interpolate import KroghInterpolator
>>> KroghInterpolator([0,0,1],[0,2,0])
This constructs the quadratic 2*X**2-2*X. The derivative condition
is indicated by the repeated zero in the xi array; the corresponding
yi values are 0, the function value, and 2, the derivative value.
For another example, given xi, yi, and a derivative ypi for each
point, appropriate arrays can be constructed as:
>>> xi = np.linspace(0, 1, 5)
>>> yi, ypi = np.random.rand(2, 5)
>>> xi_k, yi_k = np.repeat(xi, 2), np.ravel(np.dstack((yi,ypi)))
>>> KroghInterpolator(xi_k, yi_k)
To produce a vector-valued polynomial, supply a higher-dimensional
array for yi:
>>> KroghInterpolator([0,1],[[2,3],[4,5]])
This constructs a linear polynomial giving (2,3) at 0 and (4,5) at 1.
"""
def __init__(self, xi, yi, axis=0):
_Interpolator1DWithDerivatives.__init__(self, xi, yi, axis)
self.xi = np.asarray(xi)
self.yi = self._reshape_yi(yi)
self.n, self.r = self.yi.shape
c = np.zeros((self.n+1, self.r), dtype=self.dtype)
c[0] = self.yi[0]
Vk = np.zeros((self.n, self.r), dtype=self.dtype)
for k in xrange(1,self.n):
s = 0
while s <= k and xi[k-s] == xi[k]:
s += 1
s -= 1
Vk[0] = self.yi[k]/float(factorial(s))
for i in xrange(k-s):
if xi[i] == xi[k]:
raise ValueError("Elements if `xi` can't be equal.")
if s == 0:
Vk[i+1] = (c[i]-Vk[i])/(xi[i]-xi[k])
else:
Vk[i+1] = (Vk[i+1]-Vk[i])/(xi[i]-xi[k])
c[k] = Vk[k-s]
self.c = c
def _evaluate(self, x):
pi = 1
p = np.zeros((len(x), self.r), dtype=self.dtype)
p += self.c[0,np.newaxis,:]
for k in range(1, self.n):
w = x - self.xi[k-1]
pi = w*pi
p += pi[:,np.newaxis] * self.c[k]
return p
def _evaluate_derivatives(self, x, der=None):
n = self.n
r = self.r
if der is None:
der = self.n
pi = np.zeros((n, len(x)))
w = np.zeros((n, len(x)))
pi[0] = 1
p = np.zeros((len(x), self.r))
p += self.c[0,np.newaxis,:]
for k in xrange(1,n):
w[k-1] = x - self.xi[k-1]
pi[k] = w[k-1]*pi[k-1]
p += pi[k,:,np.newaxis]*self.c[k]
cn = np.zeros((max(der,n+1), len(x), r), dtype=self.dtype)
cn[:n+1,:,:] += self.c[:n+1,np.newaxis,:]
cn[0] = p
for k in xrange(1,n):
for i in xrange(1,n-k+1):
pi[i] = w[k+i-1]*pi[i-1]+pi[i]
cn[k] = cn[k]+pi[i,:,np.newaxis]*cn[k+i]
cn[k] *= factorial(k)
cn[n,:,:] = 0
return cn[:der]
def krogh_interpolate(xi, yi, x, der=0, axis=0):
"""
Convenience function for polynomial interpolation.
See `KroghInterpolator` for more details.
Parameters
----------
xi : array_like
Known x-coordinates.
yi : array_like
Known y-coordinates, of shape ``(xi.size, R)``. Interpreted as
vectors of length R, or scalars if R=1.
x : array_like
Point or points at which to evaluate the derivatives.
der : int or list, optional
How many derivatives to extract; None for all potentially
nonzero derivatives (that is a number equal to the number
of points), or a list of derivatives to extract. This number
includes the function value as 0th derivative.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Returns
-------
d : ndarray
If the interpolator's values are R-dimensional then the
returned array will be the number of derivatives by N by R.
If `x` is a scalar, the middle dimension will be dropped; if
the `yi` are scalars then the last dimension will be dropped.
See Also
--------
KroghInterpolator
Notes
-----
Construction of the interpolating polynomial is a relatively expensive
process. If you want to evaluate it repeatedly consider using the class
KroghInterpolator (which is what this function uses).
"""
P = KroghInterpolator(xi, yi, axis=axis)
if der == 0:
return P(x)
elif _isscalar(der):
return P.derivative(x,der=der)
else:
return P.derivatives(x,der=np.amax(der)+1)[der]
def approximate_taylor_polynomial(f,x,degree,scale,order=None):
"""
Estimate the Taylor polynomial of f at x by polynomial fitting.
Parameters
----------
f : callable
The function whose Taylor polynomial is sought. Should accept
a vector of `x` values.
x : scalar
The point at which the polynomial is to be evaluated.
degree : int
The degree of the Taylor polynomial
scale : scalar
The width of the interval to use to evaluate the Taylor polynomial.
Function values spread over a range this wide are used to fit the
polynomial. Must be chosen carefully.
order : int or None, optional
The order of the polynomial to be used in the fitting; `f` will be
evaluated ``order+1`` times. If None, use `degree`.
Returns
-------
p : poly1d instance
The Taylor polynomial (translated to the origin, so that
for example p(0)=f(x)).
Notes
-----
The appropriate choice of "scale" is a trade-off; too large and the
function differs from its Taylor polynomial too much to get a good
answer, too small and round-off errors overwhelm the higher-order terms.
The algorithm used becomes numerically unstable around order 30 even
under ideal circumstances.
Choosing order somewhat larger than degree may improve the higher-order
terms.
"""
if order is None:
order = degree
n = order+1
# Choose n points that cluster near the endpoints of the interval in
# a way that avoids the Runge phenomenon. Ensure, by including the
# endpoint or not as appropriate, that one point always falls at x
# exactly.
xs = scale*np.cos(np.linspace(0,np.pi,n,endpoint=n % 1)) + x
P = KroghInterpolator(xs, f(xs))
d = P.derivatives(x,der=degree+1)
return np.poly1d((d/factorial(np.arange(degree+1)))[::-1])
class BarycentricInterpolator(_Interpolator1D):
"""The interpolating polynomial for a set of points
Constructs a polynomial that passes through a given set of points.
Allows evaluation of the polynomial, efficient changing of the y
values to be interpolated, and updating by adding more x values.
For reasons of numerical stability, this function does not compute
the coefficients of the polynomial.
The values yi need to be provided before the function is
evaluated, but none of the preprocessing depends on them, so rapid
updates are possible.
Parameters
----------
xi : array_like
1-d array of x coordinates of the points the polynomial
should pass through
yi : array_like, optional
The y coordinates of the points the polynomial should pass through.
If None, the y values will be supplied later via the `set_y` method.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Notes
-----
This class uses a "barycentric interpolation" method that treats
the problem as a special case of rational function interpolation.
This algorithm is quite stable, numerically, but even in a world of
exact computation, unless the x coordinates are chosen very
carefully - Chebyshev zeros (e.g. cos(i*pi/n)) are a good choice -
polynomial interpolation itself is a very ill-conditioned process
due to the Runge phenomenon.
Based on Berrut and Trefethen 2004, "Barycentric Lagrange Interpolation".
"""
def __init__(self, xi, yi=None, axis=0):
_Interpolator1D.__init__(self, xi, yi, axis)
self.xi = np.asarray(xi)
self.set_yi(yi)
self.n = len(self.xi)
self.wi = np.zeros(self.n)
self.wi[0] = 1
for j in xrange(1,self.n):
self.wi[:j] *= (self.xi[j]-self.xi[:j])
self.wi[j] = np.multiply.reduce(self.xi[:j]-self.xi[j])
self.wi **= -1
def set_yi(self, yi, axis=None):
"""
Update the y values to be interpolated
The barycentric interpolation algorithm requires the calculation
of weights, but these depend only on the xi. The yi can be changed
at any time.
Parameters
----------
yi : array_like
The y coordinates of the points the polynomial should pass through.
If None, the y values will be supplied later.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
"""
if yi is None:
self.yi = None
return
self._set_yi(yi, xi=self.xi, axis=axis)
self.yi = self._reshape_yi(yi)
self.n, self.r = self.yi.shape
def add_xi(self, xi, yi=None):
"""
Add more x values to the set to be interpolated
The barycentric interpolation algorithm allows easy updating by
adding more points for the polynomial to pass through.
Parameters
----------
xi : array_like
The x coordinates of the points that the polynomial should pass
through.
yi : array_like, optional
The y coordinates of the points the polynomial should pass through.
Should have shape ``(xi.size, R)``; if R > 1 then the polynomial is
vector-valued.
If `yi` is not given, the y values will be supplied later. `yi` should
be given if and only if the interpolator has y values specified.
"""
if yi is not None:
if self.yi is None:
raise ValueError("No previous yi value to update!")
yi = self._reshape_yi(yi, check=True)
self.yi = np.vstack((self.yi,yi))
else:
if self.yi is not None:
raise ValueError("No update to yi provided!")
old_n = self.n
self.xi = np.concatenate((self.xi,xi))
self.n = len(self.xi)
self.wi **= -1
old_wi = self.wi
self.wi = np.zeros(self.n)
self.wi[:old_n] = old_wi
for j in xrange(old_n,self.n):
self.wi[:j] *= (self.xi[j]-self.xi[:j])
self.wi[j] = np.multiply.reduce(self.xi[:j]-self.xi[j])
self.wi **= -1
def __call__(self, x):
"""Evaluate the interpolating polynomial at the points x
Parameters
----------
x : array_like
Points to evaluate the interpolant at.
Returns
-------
y : array_like
Interpolated values. Shape is determined by replacing
the interpolation axis in the original array with the shape of x.
Notes
-----
Currently the code computes an outer product between x and the
weights, that is, it constructs an intermediate array of size
N by len(x), where N is the degree of the polynomial.
"""
return _Interpolator1D.__call__(self, x)
def _evaluate(self, x):
if x.size == 0:
p = np.zeros((0, self.r), dtype=self.dtype)
else:
c = x[...,np.newaxis]-self.xi
z = c == 0
c[z] = 1
c = self.wi/c
p = np.dot(c,self.yi)/np.sum(c,axis=-1)[...,np.newaxis]
# Now fix where x==some xi
r = np.nonzero(z)
if len(r) == 1: # evaluation at a scalar
if len(r[0]) > 0: # equals one of the points
p = self.yi[r[0][0]]
else:
p[r[:-1]] = self.yi[r[-1]]
return p
def barycentric_interpolate(xi, yi, x, axis=0):
"""
Convenience function for polynomial interpolation.
Constructs a polynomial that passes through a given set of points,
then evaluates the polynomial. For reasons of numerical stability,
this function does not compute the coefficients of the polynomial.
This function uses a "barycentric interpolation" method that treats
the problem as a special case of rational function interpolation.
This algorithm is quite stable, numerically, but even in a world of
exact computation, unless the `x` coordinates are chosen very
carefully - Chebyshev zeros (e.g. cos(i*pi/n)) are a good choice -
polynomial interpolation itself is a very ill-conditioned process
due to the Runge phenomenon.
Parameters
----------
xi : array_like
1-d array of x coordinates of the points the polynomial should
pass through
yi : array_like
The y coordinates of the points the polynomial should pass through.
x : scalar or array_like
Points to evaluate the interpolator at.
axis : int, optional
Axis in the yi array corresponding to the x-coordinate values.
Returns
-------
y : scalar or array_like
Interpolated values. Shape is determined by replacing
the interpolation axis in the original array with the shape of x.
See Also
--------
BarycentricInterpolator
Notes
-----
Construction of the interpolation weights is a relatively slow process.
If you want to call this many times with the same xi (but possibly
varying yi or x) you should use the class `BarycentricInterpolator`.
This is what this function uses internally.
"""
return BarycentricInterpolator(xi, yi, axis=axis)(x)
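# Illustrative demo (not part of scipy): both interpolators reproduce the same
# polynomial through well-conditioned Chebyshev-type nodes.
if __name__ == '__main__':
    xs = np.cos(np.linspace(np.pi, 0, 8))  # Chebyshev-style nodes, increasing order
    ys = np.exp(xs)
    print(KroghInterpolator(xs, ys)(0.3), np.exp(0.3))  # close agreement
    print(barycentric_interpolate(xs, ys, 0.3))         # same polynomial, barycentric form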
|
liuche/prox-server
|
refs/heads/master
|
samples/yelp-test.py
|
1
|
import json
import pprint
from app.clients import yelpClient, yelp3Client
from app.representation import venueRecord
# categories="beaches"
# def getLocality(lat, lon, **kwargs):
# return yelpClient.search_by_coordinates(lat, lon, **kwargs)
# locality = getLocality(19.915403, -155.887403,
# radius_filter=25000,
# sort=1,
# limit=20,
# offset=0,
# category_filter=categories
# )
# businesses = locality.businesses
# print(json.dumps([venueRecord(b) for b in businesses], indent=2))
#yelpID = "north-india-restaurant-san-francisco"
yelpID = "holoholokai-beach-park-waimea"
yelp3Biz = yelp3Client.request("/businesses/%s" % yelpID)
print(json.dumps(yelp3Biz, indent=2))
|
prakashmishra1598/gdeploy
|
refs/heads/3.0
|
gdeployfeatures/firewalld/firewalld.py
|
1
|
"""
Add functions corresponding to each of the actions in the JSON file.
Each function should be named as follows: <feature_name>_<action_name>.
"""
from gdeploylib import defaults, Global
def firewalld_add(section_dict):
section_dict['firewall_state'] = 'enabled'
Global.logger.info("Enabling firewalld service")
return get_yml_lists(section_dict)
def firewalld_delete(section_dict):
section_dict['firewall_state'] = 'disabled'
Global.logger.info("Disabling firewalld service")
return get_yml_lists(section_dict)
def get_yml_lists(section_dict):
ymls = {'ports': defaults.PORT_OP,
'services': defaults.SERVICE_OP
}
yml_list = [v for k,v in ymls.iteritems() if section_dict[k]]
return section_dict, yml_list
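# A minimal usage sketch (hypothetical section values; PORT_OP and SERVICE_OP
# come from gdeploylib.defaults). Left as comments since Global.logger needs
# gdeploy's runtime setup:
#
#   section = {'ports': ['2049/tcp'], 'services': ['glusterfs']}
#   section, ymls = firewalld_add(section)
#   # section['firewall_state'] == 'enabled'
#   # ymls holds defaults.PORT_OP and defaults.SERVICE_OP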
|
ChrisThoung/fsic
|
refs/heads/main
|
examples/godley-lavoie_2007/6_reg.py
|
1
|
# -*- coding: utf-8 -*-
"""
6_reg
=====
FSIC implementation of Model *REG*, a model of a two-region economy with a
single government, fiscal and monetary system, and currency, from Chapter 6 of
Godley and Lavoie (2007). This model disaggregates Model *PC* (see '4_pc.py')
into two regions: 'North' and 'South'. Parameter values come from Zezza (2006).
Godley and Lavoie (2007) analyse Model *REG* beginning from an initial
stationary state. This script first finds that stationary state, matching (more
or less) the starting values in Zezza's (2006) EViews script.
This example also shows how to use the `offset` keyword argument in the solve
methods to copy over values from another period before solving. This can
substantially improve solution times.
While FSIC only requires NumPy, this example also uses:
* `pandas`, to generate a DataFrame of results using `fsictools`
* `matplotlib`, to replicate, from Godley and Lavoie (2007), Figures 6.1, 6.2,
6.3, 6.4, 6.5, 6.6 and 6.7. These figures consist of four pairs of charts in
a (4 x 2) grid that shows the results of four experiments with Model *REG*:
1. an increase in the propensity to import by the South
2. an increase in government expenditure in the South
3. an increase in the propensity to save of Southern households
4. a decrease in Southern households' liquidity preference
Each pair of charts consists of:
1. a comparison of financial balances (household net acquisition of
financial assets; government budget, trade) relative to the original
stationary state baseline
2. evolution of North and South GDP in the experiment (scenario)
(Godley and Lavoie, 2007, don't report a GDP chart for the fourth
experiment.)
Outputs:
1. Replicates Figures 6.1, 6.2, 6.3, 6.4, 6.5, 6.6 and 6.7 of Godley and Lavoie
(2007), saving the charts to 'figures-6.1t6.7.png'
References:
Godley, W., Lavoie, M. (2007),
*Monetary economics: an integrated approach to
credit, money, income, production and wealth*,
Palgrave Macmillan
Zezza, G. (2006),
'EViews macros for building models in *Wynne Godley and Marc Lavoie*
Monetary Economics: an integrated approach to
credit, money, income, production and wealth',
http://gennaro.zezza.it/software/eviews/glch06.php
"""
import matplotlib.pyplot as plt
from pandas import DataFrame
import pandas as pd
import fsic
import fsictools
# Inline comments give the corresponding equation numbers from Godley and
# Lavoie (2007) - for reference only; FSIC ignores comments, just as Python
# does.
# 'A' suffix indicates a slight amendment to be compatible with the FSIC
# parser.
script = '''
Y_N = C_N + G_N + X_N - IM_N # 6.1
Y_S = C_S + G_S + X_S - IM_S # 6.2
IM_N = {mu_N} * Y_N # 6.3
IM_S = {mu_S} * Y_S # 6.4
X_N = IM_S # 6.5
X_S = IM_N # 6.6
YD_N = Y_N - T_N + r[-1] * Bh_N[-1] # 6.7
YD_S = Y_S - T_S + r[-1] * Bh_S[-1] # 6.8
T_N = {theta} * (Y_N + r[-1] * Bh_N[-1]) # 6.9
T_S = {theta} * (Y_S + r[-1] * Bh_S[-1]) # 6.10
V_N = V_N[-1] + (YD_N - C_N) # 6.11
V_S = V_S[-1] + (YD_S - C_S) # 6.12
C_N = {alpha_1_N} * YD_N + {alpha_2_N} * V_N[-1] # 6.13
C_S = {alpha_1_S} * YD_S + {alpha_2_S} * V_S[-1] # 6.14
Hh_N = V_N - Bh_N # 6.15
Hh_S = V_S - Bh_S # 6.16
Bh_N = V_N * ({lambda_0_N} + {lambda_1_N} * r - {lambda_2_N} * (YD_N / V_N)) # 6.17A
Bh_S = V_S * ({lambda_0_S} + {lambda_1_S} * r - {lambda_2_S} * (YD_S / V_S)) # 6.18A
T = T_N + T_S # 6.19
G = G_N + G_S # 6.20
Bh = Bh_N + Bh_S # 6.21
Hh = Hh_N + Hh_S # 6.22
Bs = Bs[-1] + (G + r[-1] * Bs[-1]) - (T + r[-1] * Bcb[-1]) # 6.23A
Hs = Hs[-1] + (Bcb - Bcb[-1]) # 6.24A
Bcb = Bs - Bh # 6.25
r = r_bar # 6.26
'''
symbols = fsic.parse_model(script)
REG = fsic.build_model(symbols)
def make_model_results(model: fsic.BaseModel) -> DataFrame:
"""Return the model results, with supplementary variables, as a `pandas` DataFrame."""
results = fsictools.model_to_dataframe(model)[model.names]
# Take first difference of household wealth to construct a flow measure
results['D(V_N)'] = results['V_N'].diff()
results['D(V_S)'] = results['V_S'].diff()
results['GovtBal_N'] = results.eval('T_N - G_N') - results['r'].shift() * results['Bh_N'].shift()
results['GovtBal_S'] = results.eval('T_S - G_S') - results['r'].shift() * results['Bh_S'].shift()
results['NX_N'] = results.eval('X_N - IM_N')
results['NX_S'] = results.eval('X_S - IM_S')
return results
def make_scenario_charts(financial_balances_plot: 'AxesSubplot', gdp_plot: 'AxesSubplot', scenario_results: DataFrame, baseline_results: DataFrame) -> None:
"""Create plots (Southern financial balances and both regions' GDP)."""
# Calculate difference from baseline
difference_from_baseline = scenario_results - baseline_results
# Financial balances plot
financial_balances_plot.plot(difference_from_baseline.index, [0] * len(difference_from_baseline.index),
color='k', linewidth=0.75)
financial_balances_plot.plot(difference_from_baseline.index, difference_from_baseline['D(V_S)'],
label='Change in household wealth of the South region', color='#33C3F0', linestyle='-')
financial_balances_plot.plot(difference_from_baseline.index, difference_from_baseline['GovtBal_S'],
label='Government balance with the South region', color='#FF4F2E', linestyle=':')
financial_balances_plot.plot(difference_from_baseline.index, difference_from_baseline['NX_S'],
label='Trade balance of the South region', color='#77C3AF', linestyle='--')
financial_balances_plot.set_xlim(min(difference_from_baseline.index), max(difference_from_baseline.index))
# GDP plot
gdp_plot.plot(scenario_results.index, [scenario_results['Y_N'].iloc[0]] * len(difference_from_baseline.index),
color='k', linewidth=0.75)
gdp_plot.plot(scenario_results.index, scenario_results['Y_N'],
label='North region GDP', color='#33C3F0', linestyle='-')
gdp_plot.plot(scenario_results.index, scenario_results['Y_S'],
label='South region GDP', color='#FF4F2E', linestyle='--')
gdp_plot.set_xlim(min(scenario_results.index), max(scenario_results.index))
if __name__ == '__main__':
# 1. Find the stationary state of the model from an initial set of
# parameter values (from Zezza, 2006)
starting_from_zero = REG(
range(500), # Enough periods to reach the stationary state
alpha_1_N=0.6, alpha_2_N=0.4, lambda_0_N=0.635, lambda_1_N=5, lambda_2_N=0.01, mu_N=0.18781,
alpha_1_S=0.7, alpha_2_S=0.3, lambda_0_S=0.670, lambda_1_S=6, lambda_2_S=0.07, mu_S=0.18781)
# Fiscal policy
starting_from_zero.G_N = starting_from_zero.G_S = 20
starting_from_zero.theta = 0.2
# Monetary policy
starting_from_zero.r_bar = 0.025
starting_from_zero.r[0] = starting_from_zero.r_bar[0]
# Solve the model:
# - increase the maximum number of iterations for convergence
# - copying the values from the previous period (with `offset=-1`) before
# solution improves solution times as the model approaches its
# stationary state
# - Equations 6.17A and 6.18A have division operations: starting from
# zero, this may initially generate a NaN in solution - ignore this
# because it should be overwritten in a later iteration
starting_from_zero.solve(max_iter=2000, offset=-1, errors='ignore')
# Take the results from the last period as the stationary state
stationary_state = dict(zip(starting_from_zero.names,
starting_from_zero.values[:, -1]))
# Copy the stationary state to a new baseline model instance
baseline = REG(range(1945, 2010 + 1), **stationary_state)
baseline.solve(offset=-1)
baseline_results = make_model_results(baseline).loc[1950:2000, :]
# 2. Experiments with Model *REG*
# (Input values come from Zezza, 2006)
# 2.1 An increase in the propensity to import of the South
# (from Section 6.5.1 of Godley and Lavoie, 2007)
import_propensity_scenario = baseline.copy()
import_propensity_scenario['mu_S', 1960:] = 0.20781
import_propensity_scenario.solve(max_iter=3500, offset=-1)
imports_results = make_model_results(import_propensity_scenario).loc[1950:2000, :]
# 2.2 An increase in the government expenditures of the South
# (from Section 6.5.2 of Godley and Lavoie, 2007)
government_expenditure_scenario = baseline.copy()
government_expenditure_scenario['G_S', 1960:] = 25
government_expenditure_scenario.solve(max_iter=2000, offset=-1)
government_results = make_model_results(government_expenditure_scenario).loc[1950:2000, :]
# 2.3 An increase in the propensity to save of the Southern households
# (from Section 6.5.3 of Godley and Lavoie, 2007)
consumption_propensity_scenario = baseline.copy()
consumption_propensity_scenario['alpha_1_S', 1960:] = 0.6
consumption_propensity_scenario.solve(max_iter=2000, offset=-1)
consumption_results = make_model_results(consumption_propensity_scenario).loc[1950:2000, :]
# 2.4 A change in the liquidity preference of the Southern households
# (from Section 6.5.4 of Godley and Lavoie, 2007)
liquidity_preference_scenario = baseline.copy()
liquidity_preference_scenario['lambda_0_S', 1960:] = 0.75
liquidity_preference_scenario.solve(max_iter=2000, offset=-1)
liquidity_results = make_model_results(liquidity_preference_scenario).loc[1950:2000, :]
# 3. Replicate Figures 6.1, 6.2, 6.3, 6.4, 6.5, 6.6 and 6.7 of Godley and
# Lavoie (2007)
# Set up plot area
_, axes = plt.subplots(4, 2, figsize=(12, 20))
    plt.suptitle(r'Experiments with Model $\it{REG}$')
# Create individual plots
make_scenario_charts(*axes[0], imports_results, baseline_results)
make_scenario_charts(*axes[1], government_results, baseline_results)
make_scenario_charts(*axes[2], consumption_results, baseline_results)
make_scenario_charts(*axes[3], liquidity_results, baseline_results)
# Add plot labels
axes[0, 0].set_title('Evolution of financial balances in the South region')
axes[0, 1].set_title('Evolution of GDP in the North and South regions')
# Use y-axis labels to name the individual experiments
axes[0, 0].set_ylabel('Increase in propensity to import of the South')
axes[1, 0].set_ylabel('Increase in government expenditure in the South')
axes[2, 0].set_ylabel('Increase in propensity to save of Southern households')
axes[3, 0].set_ylabel("Decrease in Southern households' liquidity preference")
# Add legends to the bottom of the chart
axes[-1, 0].legend(loc='upper left', bbox_to_anchor=(0.0, -0.1))
axes[-1, 1].legend(loc='upper left', bbox_to_anchor=(0.0, -0.1))
plt.savefig('figures-6.1t6.7.png')
|
vbraun/libXon
|
refs/heads/master
|
python/setup.py
|
1
|
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
cmdclass = {'build_ext': build_ext},
ext_modules = [
Extension('message_passing.message',
language='c++',
sources=['message_passing/message.pyx'],
libraries=['xon-objectxx', 'xon-clientxx']
)
]
)
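# Typical build invocation for a Cython extension like this (illustrative):
#
#   python setup.py build_ext --inplace
#
# which compiles message_passing/message.pyx to C++ and links it against the
# xon-objectxx and xon-clientxx libraries listed above.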
|
youdonghai/intellij-community
|
refs/heads/master
|
python/testData/findUsages/OverrideVariableByTupleInComprehension2.py
|
35
|
def f():
xst = [(3, 4)]
k = 3
return [(<caret>k, str(v)) for k, v in xst]
|
collinjackson/mojo
|
refs/heads/master
|
sky/tools/webkitpy/layout_tests/layout_package/bot_test_expectations_unittest.py
|
42
|
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from webkitpy.layout_tests.layout_package import bot_test_expectations
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.port import builders
class BotTestExpectationsFactoryTest(unittest.TestCase):
def fake_results_json_for_builder(self, builder):
return bot_test_expectations.ResultsJSON(builder, 'Dummy content')
def test_expectations_for_builder(self):
factory = bot_test_expectations.BotTestExpectationsFactory()
factory._results_json_for_builder = self.fake_results_json_for_builder
old_builders = builders._exact_matches
builders._exact_matches = {
"Dummy builder name": {"port_name": "dummy-port", "specifiers": []},
}
try:
self.assertIsNotNone(factory.expectations_for_builder('Dummy builder name'))
finally:
builders._exact_matches = old_builders
def test_expectations_for_port(self):
factory = bot_test_expectations.BotTestExpectationsFactory()
factory._results_json_for_builder = self.fake_results_json_for_builder
old_builders = builders._exact_matches
builders._exact_matches = {
"Dummy builder name": {"port_name": "dummy-port", "specifiers": []},
}
try:
self.assertIsNotNone(factory.expectations_for_port('dummy-port'))
finally:
builders._exact_matches = old_builders
class BotTestExpectationsTest(unittest.TestCase):
# FIXME: Find a way to import this map from Tools/TestResultServer/model/jsonresults.py.
FAILURE_MAP = {"A": "AUDIO", "C": "CRASH", "F": "TEXT", "I": "IMAGE", "O": "MISSING",
"N": "NO DATA", "P": "PASS", "T": "TIMEOUT", "Y": "NOTRUN", "X": "SKIP", "Z": "IMAGE+TEXT", "K": "LEAK"}
    # All result_string values in this file put the newest result on the
    # left: "PFF" means the test just passed after two failures.
def _assert_is_flaky(self, results_string, should_be_flaky):
results_json = self._results_json_from_test_data({})
expectations = bot_test_expectations.BotTestExpectations(results_json, set('test'))
length_encoded = self._results_from_string(results_string)['results']
num_actual_results = len(expectations._flaky_types_in_results(length_encoded, only_ignore_very_flaky=True))
if should_be_flaky:
self.assertGreater(num_actual_results, 1)
else:
self.assertEqual(num_actual_results, 1)
def test_basic_flaky(self):
self._assert_is_flaky('PFF', False) # Used to fail, but now passes.
self._assert_is_flaky('FFP', False) # Just started failing.
self._assert_is_flaky('PFPF', True) # Seen both failures and passes.
# self._assert_is_flaky('PPPF', True) # Should be counted as flaky but isn't yet.
self._assert_is_flaky('FPPP', False) # Just started failing, not flaky.
self._assert_is_flaky('PFFP', True) # Failed twice in a row, still flaky.
# Failing 3+ times in a row is unlikely to be flaky, but rather a transient failure on trunk.
# self._assert_is_flaky('PFFFP', False)
# self._assert_is_flaky('PFFFFP', False)
def _results_json_from_test_data(self, test_data):
test_data[bot_test_expectations.ResultsJSON.FAILURE_MAP_KEY] = self.FAILURE_MAP
json_dict = {
'builder': test_data,
}
return bot_test_expectations.ResultsJSON('builder', json_dict)
def _results_from_string(self, results_string):
results_list = []
last_char = None
        for char in results_string:
            if char != last_char:
                results_list.insert(0, [1, char])
                last_char = char  # track the current run's character so repeats collapse
            else:
                results_list[0][0] += 1
return {'results': results_list}
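    # Illustrative check of the encoding above: 'PFF' (newest result on the
    # left) encodes to {'results': [[2, 'F'], [1, 'P']]}.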
def _assert_expectations(self, test_data, expectations_string, only_ignore_very_flaky):
results_json = self._results_json_from_test_data(test_data)
expectations = bot_test_expectations.BotTestExpectations(results_json, set('test'))
self.assertEqual(expectations.flakes_by_path(only_ignore_very_flaky), expectations_string)
def _assert_unexpected_results(self, test_data, expectations_string):
results_json = self._results_json_from_test_data(test_data)
expectations = bot_test_expectations.BotTestExpectations(results_json, set('test'))
self.assertEqual(expectations.unexpected_results_by_path(), expectations_string)
def test_basic(self):
test_data = {
'tests': {
'foo': {
'veryflaky.html': self._results_from_string('FPFP'),
'maybeflaky.html': self._results_from_string('PPFP'),
'notflakypass.html': self._results_from_string('PPPP'),
'notflakyfail.html': self._results_from_string('FFFF'),
}
}
}
self._assert_expectations(test_data, {
'foo/veryflaky.html': sorted(["TEXT", "PASS"]),
}, only_ignore_very_flaky=True)
self._assert_expectations(test_data, {
'foo/veryflaky.html': sorted(["TEXT", "PASS"]),
'foo/maybeflaky.html': sorted(["TEXT", "PASS"]),
}, only_ignore_very_flaky=False)
def test_all_failure_types(self):
test_data = {
'tests': {
'foo': {
'allfailures.html': self._results_from_string('FPFPCNCNTXTXIZIZOCOCYKYK'),
'imageplustextflake.html': self._results_from_string('ZPZPPPPPPPPPPPPPPPPP'),
}
}
}
self._assert_expectations(test_data, {
'foo/imageplustextflake.html': sorted(["IMAGE+TEXT", "PASS"]),
'foo/allfailures.html': sorted(["TEXT", "PASS", "IMAGE+TEXT", "TIMEOUT", "CRASH", "IMAGE", "MISSING", "LEAK"]),
}, only_ignore_very_flaky=True)
def test_unexpected_results_no_unexpected(self):
test_data = {
'tests': {
'foo': {
'pass1.html': {'results': [[4, 'P']]},
'pass2.html': {'results': [[2, 'Z']], 'expected': 'PASS FAIL'},
'fail.html': {'results': [[2, 'P'], [1, 'F']], 'expected': 'PASS FAIL'},
'not_run.html': {'results': []},
'crash.html': {'results': [[2, 'F'], [1, 'C']], 'expected': 'CRASH FAIL WONTFIX'},
}
}
}
self._assert_unexpected_results(test_data, {})
def test_unexpected_results_all_unexpected(self):
test_data = {
'tests': {
'foo': {
'pass1.html': {'results': [[4, 'P']], 'expected': 'FAIL'},
'pass2.html': {'results': [[2, 'P']], 'expected': 'IMAGE'},
'fail.html': {'results': [[4, 'F']]},
'f_p.html': {'results': [[1, 'F'], [2, 'P']]},
'crash.html': {'results': [[2, 'F'], [1, 'C']], 'expected': 'WONTFIX'},
'image.html': {'results': [[2, 'F'], [1, 'I']], 'expected': 'CRASH FAIL'},
'i_f.html': {'results': [[1, 'F'], [5, 'I']], 'expected': 'PASS'},
'all.html': self._results_from_string('FPFPCNCNTXTXIZIZOCOCYKYK'),
}
}
}
self.maxDiff = None
self._assert_unexpected_results(test_data, {
'foo/pass1.html': sorted(["FAIL", "PASS"]),
'foo/pass2.html': sorted(["IMAGE", "PASS"]),
'foo/fail.html': sorted(["TEXT", "PASS"]),
'foo/f_p.html': sorted(["TEXT", "PASS"]),
'foo/crash.html': sorted(["WONTFIX", "CRASH", "TEXT"]),
'foo/image.html': sorted(["CRASH", "FAIL", "IMAGE"]),
'foo/i_f.html': sorted(["PASS", "IMAGE", "TEXT"]),
'foo/all.html': sorted(["TEXT", "PASS", "IMAGE+TEXT", "TIMEOUT", "CRASH", "IMAGE", "MISSING", "LEAK"]),
})
|
toregreijer/horse-battery
|
refs/heads/master
|
freebsd_api.py
|
4
|
"""
Author: Armon Dadgar
Start Date: April 7th, 2009
Description:
This file provides a Python interface to low-level system calls on the FreeBSD platform.
It is designed to abstract away the C-level details and provide a high-level way of doing
common management tasks.
"""
import ctypes # Allows us to make C calls
import ctypes.util # Helps to find the C library
import os # Provides some convenience functions
import time # Provides time.time
import freebsd_kinfo # Imports the kinfo structure, along with others
import nix_common_api as nix_api # Import the Common API
import textops # Import the seattlelib textops library
import portable_popen # Import for Popen
# Manually import the common functions we want
exists_outgoing_network_socket = nix_api.exists_outgoing_network_socket
exists_listening_network_socket = nix_api.exists_listening_network_socket
get_available_interfaces = nix_api.get_available_interfaces
get_ctypes_errno = nix_api.get_ctypes_errno
get_ctypes_error_str = nix_api.get_ctypes_error_str
# Get the standard library
libc = nix_api.libc
# Globals
# Cache the last process info struct so as to avoid redundant memory allocation
# and to fetch additional info without constantly updating
last_proc_info_struct = None
last_proc_info_size = 0 # Stores the size of the struct
# Functions
_sysctl = libc.sysctl # Makes system calls
_clock_gettime = libc.clock_gettime # Get the CPU time
# Constants
CTL_KERN = 1
KERN_PROC = 14
KERN_PROC_PID = 1
FourIntegers = ctypes.c_int * 4 # A C array with 4 ints, used for syscalls
PAGE_SIZE = libc.getpagesize() # Call into libc to get our page size
KERN_BOOTTIME = 21
TwoIntegers = ctypes.c_int * 2 # C array with 2 ints
CLOCK_THREAD_CPUTIME_ID = 14 # Get the CPU time for the current thread
# Structures
kinfo_proc = freebsd_kinfo.kinfo_proc # Import from the external file
class timeval(ctypes.Structure):
_fields_ = [("tv_sec", ctypes.c_long),
("tv_usec", ctypes.c_long)]
def _get_proc_info_by_pid(pid):
"""
<Purpose>
Immediately updates the internal kinfo_proc structure.
<Arguments>
pid: The Process Identifier for which data should be retrieved
<Exceptions>
Raises an Exception if there is an error.
<Returns>
Nothing
"""
global last_proc_info_struct
global last_proc_info_size
# Create the argument array
mib = FourIntegers(CTL_KERN, KERN_PROC, KERN_PROC_PID, pid)
# Check if we need to allocate a structure
  if last_proc_info_struct is None:
# Allocate a kinfo structure
last_proc_info_struct = kinfo_proc(0)
last_proc_info_size = ctypes.c_int(0)
  # Make a system call without a pointer to the kinfo structure; this sets
  # the proper size of the structure for future system calls
status = _sysctl(mib, 4, None, ctypes.byref(last_proc_info_size), None, 0)
# Check the status
if status != 0:
raise Exception,"Fatal error with sysctl. Errno:"+str(get_ctypes_errno())+", Error: "+get_ctypes_error_str()
# Make the call to update
status = _sysctl(mib, 4, ctypes.byref(last_proc_info_struct), ctypes.byref(last_proc_info_size), None, 0)
# Check the status
if status != 0:
raise Exception,"Fatal error with sysctl. Errno:"+str(get_ctypes_errno())+", Error: "+get_ctypes_error_str()
def get_process_cpu_time(pid):
"""
<Purpose>
Returns the total CPU time used by a process.
<Arguments>
pid: The process identifier for the process to query.
<Exceptions>
See _get_proc_info_by_pid.
<Returns>
The total cpu time.
"""
global last_proc_info_struct
# Update the info
_get_proc_info_by_pid(pid)
# Get the rusage field in the structure
ru = last_proc_info_struct.ki_rusage
  # Calculate user and system time, for the process and its children;
  # divide by 1 million since the usec field is in microseconds
utime = ru.ru_utime.tv_sec + ru.ru_utime.tv_usec/1000000.0
stime = ru.ru_stime.tv_sec + ru.ru_stime.tv_usec/1000000.0
# Switch ru to the child structure
ru = last_proc_info_struct.ki_rusage_ch
utime_ch = ru.ru_utime.tv_sec + ru.ru_utime.tv_usec/1000000.0
stime_ch = ru.ru_stime.tv_sec + ru.ru_stime.tv_usec/1000000.0
# Calculate the total time
total_time = utime + stime + utime_ch + stime_ch
return total_time
def get_process_rss(force_update=False, pid=None):
"""
<Purpose>
Returns the Resident Set Size of a process. By default, this will
return the information cached by the last call to _get_proc_info_by_pid.
This call is used in get_process_cpu_time.
<Arguments>
force_update:
Allows the caller to force a data update, instead of using the cached data.
pid:
If force_update is True, this parameter must be specified to force the update.
<Exceptions>
See _get_proc_info_by_pid.
<Returns>
The RSS of the process in bytes.
"""
global last_proc_info_struct
# Check if an update is being forced
  if force_update and pid is not None:
# Update the info
_get_proc_info_by_pid(pid)
# Get RSS
rss_pages = last_proc_info_struct.ki_rssize
rss_bytes = rss_pages * PAGE_SIZE
return rss_bytes
# Get the CPU time of the current thread
def get_current_thread_cpu_time():
"""
<Purpose>
Gets the total CPU time for the currently executing thread.
<Exceptions>
An AssertionError will be raised if the underlying system call fails.
<Returns>
A floating amount of time in seconds.
"""
# Allocate a structure
time_struct = timeval()
# Make the system call
result = _clock_gettime(CLOCK_THREAD_CPUTIME_ID, ctypes.byref(time_struct))
  # Sum up the CPU usage. clock_gettime fills a timespec, so the second
  # field of the reused timeval struct actually holds nanoseconds here.
  cpu_time = time_struct.tv_sec + time_struct.tv_usec / 1000000000.0
  # Safety check: clock_gettime returns 0 on success
assert(result == 0)
  # Return the computed CPU time
return cpu_time
# Return the timeval struct with our boottime
def _get_boottime_struct():
# Get an array with 2 elements, set the syscall parameters
mib = TwoIntegers(CTL_KERN, KERN_BOOTTIME)
# Get timeval structure, set the size
boottime = timeval()
size = ctypes.c_size_t(ctypes.sizeof(boottime))
# Make the syscall
libc.sysctl(mib, 2, ctypes.pointer(boottime), ctypes.pointer(size), None, 0)
return boottime
def get_system_uptime():
"""
<Purpose>
Returns the system uptime.
<Returns>
The system uptime.
"""
# Get the boot time struct
boottime = _get_boottime_struct()
# Calculate uptime from current time
  uptime = time.time() - (boottime.tv_sec + boottime.tv_usec * 1.0e-6)
return uptime
def get_uptime_granularity():
"""
<Purpose>
Determines the granularity of the get_system_uptime call.
<Returns>
A numerical representation of the minimum granularity.
E.g. 2 digits of granularity would return 0.01
"""
# Get the boot time struct
boottime = _get_boottime_struct()
  # Check if the number of microseconds is 0
  if boottime.tv_usec == 0:
    granularity = 0
  else:
    # Convert microseconds to a string; kern.boottime is a timeval, so the
    # sub-second field holds at most 6 digits of microseconds
    microsecondstr = str(boottime.tv_usec)
    # Justify with 0's to 6 digits
    microsecondstr = microsecondstr.rjust(6, "0")
    # Strip the 0's on the other side
    microsecondstr = microsecondstr.rstrip("0")
    # Get granularity from the length of the string
    granularity = len(microsecondstr)
# Convert granularity to a number
return pow(10, 0-granularity)
def get_system_thread_count():
"""
<Purpose>
Returns the number of active threads running on the system.
<Returns>
The thread count.
"""
  # Use ps since it can get the info for us
process = portable_popen.Popen(["ps", "axH"])
ps_output, _ = process.communicate()
  # Subtract 1 from the number of lines because the first line is a table
# header: " PID TTY STAT TIME COMMAND"
threads = len(textops.textops_rawtexttolines(ps_output)) - 1
return threads
def get_interface_ip_addresses(interfaceName):
"""
<Purpose>
Returns the IP address associated with the interface.
<Arguments>
interfaceName: The string name of the interface, e.g. eth0
<Returns>
A list of IP addresses associated with the interface.
"""
  # Run ifconfig with the interface name and read back its output.
ifconfig_process = portable_popen.Popen(["/sbin/ifconfig", interfaceName.strip()])
ifconfig_output, _ = ifconfig_process.communicate()
ifconfig_lines = textops.textops_rawtexttolines(ifconfig_output)
# Look for ipv4 addresses
target_lines = textops.textops_grep("inet", ifconfig_lines)
# and not ipv6
target_lines = textops.textops_grep("inet6", target_lines, exclude=True)
# Only take the ip(s)
target_lines = textops.textops_cut(target_lines, delimiter=" ", fields=[1])
# Create an array for the ip's
ipaddressList = []
for line in target_lines:
# Strip the newline and any spacing
line = line.strip("\n\t ")
ipaddressList.append(line)
  # Done, return the IP addresses
return ipaddressList
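# A minimal usage sketch (assumes a FreeBSD host with the modules imported
# above; the printed labels are illustrative only). Note the call order:
# get_process_cpu_time() refreshes the cached kinfo_proc struct, so the
# get_process_rss() call after it can reuse the cache without an update.
if __name__ == "__main__":
  my_pid = os.getpid()
  print "CPU time (s): " + str(get_process_cpu_time(my_pid))
  print "RSS (bytes): " + str(get_process_rss())
  print "Uptime (s): " + str(get_system_uptime())
  print "Threads: " + str(get_system_thread_count())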
|
kayzhou/tornado
|
refs/heads/master
|
maint/test/websocket/client.py
|
111
|
#!/usr/bin/env python
import logging
from tornado import gen
from tornado.ioloop import IOLoop
from tornado.options import define, options, parse_command_line
from tornado.websocket import websocket_connect
define('url', default='ws://localhost:9001')
define('name', default='Tornado')
@gen.engine
def run_tests():
url = options.url + '/getCaseCount'
control_ws = yield websocket_connect(url, None)
num_tests = int((yield control_ws.read_message()))
logging.info('running %d cases', num_tests)
msg = yield control_ws.read_message()
assert msg is None
for i in range(1, num_tests + 1):
logging.info('running test case %d', i)
url = options.url + '/runCase?case=%d&agent=%s' % (i, options.name)
test_ws = yield websocket_connect(url, None, compression_options={})
while True:
message = yield test_ws.read_message()
if message is None:
break
test_ws.write_message(message, binary=isinstance(message, bytes))
url = options.url + '/updateReports?agent=%s' % options.name
update_ws = yield websocket_connect(url, None)
msg = yield update_ws.read_message()
assert msg is None
IOLoop.instance().stop()
def main():
parse_command_line()
IOLoop.instance().add_callback(run_tests)
IOLoop.instance().start()
if __name__ == '__main__':
main()
|
PythonNut/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/example/hsts_wsh.py
|
486
|
# Copyright 2013, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def web_socket_do_extra_handshake(request):
request.extra_headers.append(
('Strict-Transport-Security', 'max-age=86400'))
def web_socket_transfer_data(request):
request.ws_stream.send_message('Hello', binary=False)
# vi:sts=4 sw=4 et
|
kouaw/CouchPotatoServer
|
refs/heads/develop
|
libs/pyasn1/type/tag.py
|
200
|
# ASN.1 type tags
from operator import getitem
from pyasn1 import error
tagClassUniversal = 0x00
tagClassApplication = 0x40
tagClassContext = 0x80
tagClassPrivate = 0xC0
tagFormatSimple = 0x00
tagFormatConstructed = 0x20
tagCategoryImplicit = 0x01
tagCategoryExplicit = 0x02
tagCategoryUntagged = 0x04
class Tag:
def __init__(self, tagClass, tagFormat, tagId):
if tagId < 0:
raise error.PyAsn1Error(
'Negative tag ID (%s) not allowed' % (tagId,)
)
self.__tag = (tagClass, tagFormat, tagId)
self.uniq = (tagClass, tagId)
self.__hashedUniqTag = hash(self.uniq)
def __repr__(self):
return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
(self.__class__.__name__,) + self.__tag
)
    # This is really a hotspot -- expose the public "uniq" attribute to save
    # on function calls
def __eq__(self, other): return self.uniq == other.uniq
def __ne__(self, other): return self.uniq != other.uniq
def __lt__(self, other): return self.uniq < other.uniq
def __le__(self, other): return self.uniq <= other.uniq
def __gt__(self, other): return self.uniq > other.uniq
def __ge__(self, other): return self.uniq >= other.uniq
def __hash__(self): return self.__hashedUniqTag
def __getitem__(self, idx): return self.__tag[idx]
def __and__(self, otherTag):
(tagClass, tagFormat, tagId) = otherTag
return self.__class__(
self.__tag&tagClass, self.__tag&tagFormat, self.__tag&tagId
)
def __or__(self, otherTag):
(tagClass, tagFormat, tagId) = otherTag
return self.__class__(
self.__tag[0]|tagClass,
self.__tag[1]|tagFormat,
self.__tag[2]|tagId
)
def asTuple(self): return self.__tag # __getitem__() is slow
class TagSet:
def __init__(self, baseTag=(), *superTags):
self.__baseTag = baseTag
self.__superTags = superTags
self.__hashedSuperTags = hash(superTags)
_uniq = ()
for t in superTags:
_uniq = _uniq + t.uniq
self.uniq = _uniq
self.__lenOfSuperTags = len(superTags)
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join([repr(x) for x in self.__superTags])
)
def __add__(self, superTag):
return self.__class__(
self.__baseTag, *self.__superTags + (superTag,)
)
def __radd__(self, superTag):
return self.__class__(
self.__baseTag, *(superTag,) + self.__superTags
)
def tagExplicitly(self, superTag):
tagClass, tagFormat, tagId = superTag
if tagClass == tagClassUniversal:
raise error.PyAsn1Error(
'Can\'t tag with UNIVERSAL-class tag'
)
if tagFormat != tagFormatConstructed:
superTag = Tag(tagClass, tagFormatConstructed, tagId)
return self + superTag
def tagImplicitly(self, superTag):
tagClass, tagFormat, tagId = superTag
if self.__superTags:
superTag = Tag(tagClass, self.__superTags[-1][1], tagId)
return self[:-1] + superTag
def getBaseTag(self): return self.__baseTag
def __getitem__(self, idx):
if isinstance(idx, slice):
return self.__class__(
self.__baseTag, *getitem(self.__superTags, idx)
)
return self.__superTags[idx]
def __eq__(self, other): return self.uniq == other.uniq
def __ne__(self, other): return self.uniq != other.uniq
def __lt__(self, other): return self.uniq < other.uniq
def __le__(self, other): return self.uniq <= other.uniq
def __gt__(self, other): return self.uniq > other.uniq
def __ge__(self, other): return self.uniq >= other.uniq
def __hash__(self): return self.__hashedSuperTags
def __len__(self): return self.__lenOfSuperTags
def isSuperTagSetOf(self, tagSet):
if len(tagSet) < self.__lenOfSuperTags:
return
idx = self.__lenOfSuperTags - 1
while idx >= 0:
if self.__superTags[idx] != tagSet[idx]:
return
idx = idx - 1
return 1
def initTagSet(tag): return TagSet(tag, tag)
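# A minimal sketch of how Tag and TagSet compose (using only the constants
# defined above): start from a UNIVERSAL INTEGER tag set, then derive
# implicitly and explicitly tagged variants with context-class tags.
if __name__ == '__main__':
    integerTagSet = initTagSet(Tag(tagClassUniversal, tagFormatSimple, 0x02))
    implicit = integerTagSet.tagImplicitly(Tag(tagClassContext, tagFormatSimple, 0))
    explicit = integerTagSet.tagExplicitly(Tag(tagClassContext, tagFormatSimple, 1))
    print(implicit)
    print(explicit)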
|
alexgorban/models
|
refs/heads/master
|
research/deep_speech/data/dataset.py
|
2
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generate tf.data.Dataset object for deep speech training/evaluation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import random
# pylint: disable=g-bad-import-order
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import soundfile
import tensorflow as tf
# pylint: enable=g-bad-import-order
import data.featurizer as featurizer # pylint: disable=g-bad-import-order
class AudioConfig(object):
"""Configs for spectrogram extraction from audio."""
def __init__(self,
sample_rate,
window_ms,
stride_ms,
normalize=False):
"""Initialize the AudioConfig class.
Args:
sample_rate: an integer denoting the sample rate of the input waveform.
window_ms: an integer for the length of a spectrogram frame, in ms.
stride_ms: an integer for the frame stride, in ms.
      normalize: a boolean for whether to apply normalization to the audio feature.
"""
self.sample_rate = sample_rate
self.window_ms = window_ms
self.stride_ms = stride_ms
self.normalize = normalize
class DatasetConfig(object):
"""Config class for generating the DeepSpeechDataset."""
def __init__(self, audio_config, data_path, vocab_file_path, sortagrad):
"""Initialize the configs for deep speech dataset.
Args:
audio_config: AudioConfig object specifying the audio-related configs.
data_path: a string denoting the full path of a manifest file.
vocab_file_path: a string specifying the vocabulary file path.
sortagrad: a boolean, if set to true, audio sequences will be fed by
increasing length in the first training epoch, which will
expedite network convergence.
Raises:
      RuntimeError: file path does not exist.
"""
self.audio_config = audio_config
assert tf.gfile.Exists(data_path)
assert tf.gfile.Exists(vocab_file_path)
self.data_path = data_path
self.vocab_file_path = vocab_file_path
self.sortagrad = sortagrad
def _normalize_audio_feature(audio_feature):
"""Perform mean and variance normalization on the spectrogram feature.
Args:
audio_feature: a numpy array for the spectrogram feature.
Returns:
a numpy array of the normalized spectrogram.
"""
mean = np.mean(audio_feature, axis=0)
var = np.var(audio_feature, axis=0)
normalized = (audio_feature - mean) / (np.sqrt(var) + 1e-6)
return normalized
def _preprocess_audio(audio_file_path, audio_featurizer, normalize):
"""Load the audio file and compute spectrogram feature."""
data, _ = soundfile.read(audio_file_path)
feature = featurizer.compute_spectrogram_feature(
data, audio_featurizer.sample_rate, audio_featurizer.stride_ms,
audio_featurizer.window_ms)
# Feature normalization
if normalize:
feature = _normalize_audio_feature(feature)
# Adding Channel dimension for conv2D input.
feature = np.expand_dims(feature, axis=2)
return feature
def _preprocess_data(file_path):
"""Generate a list of tuples (wav_filename, wav_filesize, transcript).
Each dataset file contains three columns: "wav_filename", "wav_filesize",
and "transcript". This function parses the csv file and stores each example
by the increasing order of audio length (indicated by wav_filesize).
AS the waveforms are ordered in increasing length, audio samples in a
mini-batch have similar length.
Args:
file_path: a string specifying the csv file path for a dataset.
Returns:
A list of tuples (wav_filename, wav_filesize, transcript) sorted by
file_size.
"""
tf.logging.info("Loading data set {}".format(file_path))
with tf.gfile.Open(file_path, "r") as f:
lines = f.read().splitlines()
# Skip the csv header in lines[0].
lines = lines[1:]
# The metadata file is tab separated.
lines = [line.split("\t", 2) for line in lines]
# Sort input data by the length of audio sequence.
lines.sort(key=lambda item: int(item[1]))
return [tuple(line) for line in lines]
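# For illustration, a (hypothetical) manifest row after the header looks like
# "/data/sample1.wav<TAB>51200<TAB>the quick brown fox"; the returned tuples
# are ordered by the middle wav_filesize column.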
class DeepSpeechDataset(object):
"""Dataset class for training/evaluation of DeepSpeech model."""
def __init__(self, dataset_config):
"""Initialize the DeepSpeechDataset class.
Args:
dataset_config: DatasetConfig object.
"""
self.config = dataset_config
# Instantiate audio feature extractor.
self.audio_featurizer = featurizer.AudioFeaturizer(
sample_rate=self.config.audio_config.sample_rate,
window_ms=self.config.audio_config.window_ms,
stride_ms=self.config.audio_config.stride_ms)
# Instantiate text feature extractor.
self.text_featurizer = featurizer.TextFeaturizer(
vocab_file=self.config.vocab_file_path)
self.speech_labels = self.text_featurizer.speech_labels
self.entries = _preprocess_data(self.config.data_path)
# The generated spectrogram will have 161 feature bins.
self.num_feature_bins = 161
def batch_wise_dataset_shuffle(entries, epoch_index, sortagrad, batch_size):
"""Batch-wise shuffling of the data entries.
Each data entry is in the format of (audio_file, file_size, transcript).
If epoch_index is 0 and sortagrad is true, we don't perform shuffling and
return entries in sorted file_size order. Otherwise, do batch_wise shuffling.
Args:
entries: a list of data entries.
    epoch_index: an integer for the epoch index.
    sortagrad: a boolean controlling whether to sort the audio by length in
      the first training epoch.
batch_size: an integer for the batch size.
Returns:
The shuffled data entries.
"""
shuffled_entries = []
if epoch_index == 0 and sortagrad:
# No need to shuffle.
shuffled_entries = entries
else:
# Shuffle entries batch-wise.
max_buckets = int(math.floor(len(entries) / batch_size))
total_buckets = [i for i in xrange(max_buckets)]
random.shuffle(total_buckets)
shuffled_entries = []
for i in total_buckets:
shuffled_entries.extend(entries[i * batch_size : (i + 1) * batch_size])
# If the last batch doesn't contain enough batch_size examples,
# just append it to the shuffled_entries.
shuffled_entries.extend(entries[max_buckets * batch_size:])
return shuffled_entries
def input_fn(batch_size, deep_speech_dataset, repeat=1):
"""Input function for model training and evaluation.
Args:
batch_size: an integer denoting the size of a batch.
deep_speech_dataset: DeepSpeechDataset object.
repeat: an integer for how many times to repeat the dataset.
Returns:
a tf.data.Dataset object for model to consume.
"""
# Dataset properties
data_entries = deep_speech_dataset.entries
num_feature_bins = deep_speech_dataset.num_feature_bins
audio_featurizer = deep_speech_dataset.audio_featurizer
feature_normalize = deep_speech_dataset.config.audio_config.normalize
text_featurizer = deep_speech_dataset.text_featurizer
def _gen_data():
"""Dataset generator function."""
for audio_file, _, transcript in data_entries:
features = _preprocess_audio(
audio_file, audio_featurizer, feature_normalize)
labels = featurizer.compute_label_feature(
transcript, text_featurizer.token_to_index)
input_length = [features.shape[0]]
label_length = [len(labels)]
# Yield a tuple of (features, labels) where features is a dict containing
# all info about the actual data features.
yield (
{
"features": features,
"input_length": input_length,
"label_length": label_length
},
labels)
dataset = tf.data.Dataset.from_generator(
_gen_data,
output_types=(
{
"features": tf.float32,
"input_length": tf.int32,
"label_length": tf.int32
},
tf.int32),
output_shapes=(
{
"features": tf.TensorShape([None, num_feature_bins, 1]),
"input_length": tf.TensorShape([1]),
"label_length": tf.TensorShape([1])
},
tf.TensorShape([None]))
)
# Repeat and batch the dataset
dataset = dataset.repeat(repeat)
# Padding the features to its max length dimensions.
dataset = dataset.padded_batch(
batch_size=batch_size,
padded_shapes=(
{
"features": tf.TensorShape([None, num_feature_bins, 1]),
"input_length": tf.TensorShape([1]),
"label_length": tf.TensorShape([1])
},
tf.TensorShape([None]))
)
# Prefetch to improve speed of input pipeline.
dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
return dataset
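# A minimal wiring sketch (the file paths below are hypothetical; assumes the
# featurizer module imported above). It shows how the pieces compose into a
# batched tf.data pipeline for a single pass over the training manifest.
def _example_pipeline():
  audio_conf = AudioConfig(sample_rate=16000, window_ms=20, stride_ms=10,
                           normalize=True)
  dataset_conf = DatasetConfig(audio_conf, "train.csv", "vocabulary.txt",
                               sortagrad=True)
  speech_dataset = DeepSpeechDataset(dataset_conf)
  return input_fn(batch_size=32, deep_speech_dataset=speech_dataset)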
|
dwrpayne/zulip
|
refs/heads/master
|
zilencer/tests.py
|
126
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ujson
from django.test import TestCase
class EndpointDiscoveryTest(TestCase):
def test_staging_user(self):
response = self.client.get("/api/v1/deployments/endpoints", {"email": "lfaraone@zulip.com"})
data = ujson.loads(response.content)
self.assertEqual(data["result"]["base_site_url"], "https://zulip.com/")
self.assertEqual(data["result"]["base_api_url"], "https://zulip.com/api/")
def test_prod_user(self):
response = self.client.get("/api/v1/deployments/endpoints", {"email": "lfaraone@mit.edu"})
data = ujson.loads(response.content)
self.assertEqual(data["result"]["base_site_url"], "https://zulip.com/")
self.assertEqual(data["result"]["base_api_url"], "https://api.zulip.com/")
|
jswope00/griffinx
|
refs/heads/master
|
lms/djangoapps/notes/utils.py
|
88
|
from django.conf import settings
def notes_enabled_for_course(course):
'''
Returns True if the notes app is enabled for the course, False otherwise.
In order for the app to be enabled it must be:
1) enabled globally via FEATURES.
2) present in the course tab configuration.
'''
tab_found = next((True for t in course.tabs if t['type'] == 'notes'), False)
feature_enabled = settings.FEATURES.get('ENABLE_STUDENT_NOTES')
return feature_enabled and tab_found
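# For example (hypothetical values): with FEATURES = {'ENABLE_STUDENT_NOTES': True}
# in settings and a course whose tabs include {'type': 'notes'}, this returns
# True; if either condition fails, a falsy value is returned instead.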
|
vietch2612/phantomjs
|
refs/heads/master
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/style/main.py
|
177
|
# Copyright (C) 2010 Chris Jerdonek (cjerdonek@webkit.org)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import codecs
import logging
import sys
import webkitpy.style.checker as checker
from webkitpy.style.patchreader import PatchReader
from webkitpy.style.checker import StyleProcessor
from webkitpy.style.filereader import TextFileReader
from webkitpy.common.host import Host
_log = logging.getLogger(__name__)
def change_directory(filesystem, checkout_root, paths):
"""Change the working directory to the WebKit checkout root, if possible.
If every path in the paths parameter is below the checkout root (or if
the paths parameter is empty or None), this method changes the current
working directory to the checkout root and converts the paths parameter
as described below.
This allows the paths being checked to be displayed relative to the
checkout root, and for path-specific style checks to work as expected.
Path-specific checks include whether files should be skipped, whether
custom style rules should apply to certain files, etc.
Returns:
paths: A copy of the paths parameter -- possibly converted, as follows.
If this method changed the current working directory to the
checkout root, then the list is the paths parameter converted to
normalized paths relative to the checkout root.
Args:
paths: A list of paths to the files that should be checked for style.
This argument can be None or the empty list if a git commit
or all changes under the checkout root should be checked.
checkout_root: The path to the root of the WebKit checkout.
"""
if paths is not None:
paths = list(paths)
if paths:
# Then try converting all of the paths to paths relative to
# the checkout root.
rel_paths = []
for path in paths:
rel_path = filesystem.relpath(path, checkout_root)
if rel_path.startswith(filesystem.pardir):
# Then the path is not below the checkout root. Since all
# paths should be interpreted relative to the same root,
# do not interpret any of the paths as relative to the
# checkout root. Interpret all of them relative to the
# current working directory, and do not change the current
# working directory.
_log.warn(
"""Path-dependent style checks may not work correctly:
One of the given paths is outside the WebKit checkout of the current
working directory:
Path: %s
Checkout root: %s
Pass only files below the checkout root to ensure correct results.
See the help documentation for more info.
"""
% (path, checkout_root))
return paths
rel_paths.append(rel_path)
# If we got here, the conversion was successful.
paths = rel_paths
_log.debug("Changing to checkout root: " + checkout_root)
filesystem.chdir(checkout_root)
return paths
class CheckWebKitStyle(object):
def _engage_awesome_stderr_hacks(self):
# Change stderr to write with replacement characters so we don't die
# if we try to print something containing non-ASCII characters.
stderr = codecs.StreamReaderWriter(sys.stderr,
codecs.getreader('utf8'),
codecs.getwriter('utf8'),
'replace')
# Setting an "encoding" attribute on the stream is necessary to
# prevent the logging module from raising an error. See
# the checker.configure_logging() function for more information.
stderr.encoding = "UTF-8"
# FIXME: Change webkitpy.style so that we do not need to overwrite
# the global sys.stderr. This involves updating the code to
# accept a stream parameter where necessary, and not calling
# sys.stderr explicitly anywhere.
sys.stderr = stderr
return stderr
def main(self):
args = sys.argv[1:]
host = Host()
host.initialize_scm()
stderr = self._engage_awesome_stderr_hacks()
# Checking for the verbose flag before calling check_webkit_style_parser()
# lets us enable verbose logging earlier.
is_verbose = "-v" in args or "--verbose" in args
checker.configure_logging(stream=stderr, is_verbose=is_verbose)
_log.debug("Verbose logging enabled.")
parser = checker.check_webkit_style_parser()
(paths, options) = parser.parse(args)
configuration = checker.check_webkit_style_configuration(options)
paths = change_directory(host.filesystem, checkout_root=host.scm().checkout_root, paths=paths)
style_processor = StyleProcessor(configuration)
file_reader = TextFileReader(host.filesystem, style_processor)
if paths and not options.diff_files:
file_reader.process_paths(paths)
else:
changed_files = paths if options.diff_files else None
patch = host.scm().create_patch(options.git_commit, changed_files=changed_files)
patch_checker = PatchReader(file_reader)
patch_checker.check(patch)
error_count = style_processor.error_count
file_count = file_reader.file_count
delete_only_file_count = file_reader.delete_only_file_count
_log.info("Total errors found: %d in %d files" % (error_count, file_count))
# We fail when style errors are found or there are no checked files.
return error_count > 0 or (file_count == 0 and delete_only_file_count == 0)
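# A minimal entry-point sketch: the check-webkit-style wrapper script
# effectively runs the following, exiting non-zero when errors were found
# or when no files were checked.
if __name__ == '__main__':
    sys.exit(CheckWebKitStyle().main())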
|
Dannnno/flask-pymongo
|
refs/heads/master
|
tests/test_config.py
|
9
|
from tests import util
import flask
import flask.ext.pymongo
import warnings
class CustomDict(dict):
pass
class FlaskPyMongoConfigTest(util.FlaskRequestTest):
def setUp(self):
self.app = flask.Flask('test')
self.context = self.app.test_request_context('/')
self.context.push()
def tearDown(self):
self.context.pop()
def test_default_config_prefix(self):
self.app.config['MONGO_DBNAME'] = 'flask_pymongo_test_db'
self.app.config['MONGO_HOST'] = 'localhost'
self.app.config['MONGO_PORT'] = 27017
mongo = flask.ext.pymongo.PyMongo(self.app)
assert mongo.db.name == 'flask_pymongo_test_db', 'wrong dbname: %s' % mongo.db.name
assert mongo.cx.host == 'localhost'
assert mongo.cx.port == 27017
def test_custom_config_prefix(self):
self.app.config['CUSTOM_DBNAME'] = 'flask_pymongo_test_db'
self.app.config['CUSTOM_HOST'] = 'localhost'
self.app.config['CUSTOM_PORT'] = 27017
mongo = flask.ext.pymongo.PyMongo(self.app, 'CUSTOM')
assert mongo.db.name == 'flask_pymongo_test_db', 'wrong dbname: %s' % mongo.db.name
assert mongo.cx.host == 'localhost'
assert mongo.cx.port == 27017
def test_converts_str_to_int(self):
self.app.config['MONGO_DBNAME'] = 'flask_pymongo_test_db'
self.app.config['MONGO_HOST'] = 'localhost'
self.app.config['MONGO_PORT'] = '27017'
mongo = flask.ext.pymongo.PyMongo(self.app)
assert mongo.db.name == 'flask_pymongo_test_db', 'wrong dbname: %s' % mongo.db.name
assert mongo.cx.host == 'localhost'
assert mongo.cx.port == 27017
def test_rejects_invalid_string(self):
self.app.config['MONGO_PORT'] = '27017x'
self.assertRaises(TypeError, flask.ext.pymongo.PyMongo, self.app)
def test_multiple_pymongos(self):
for prefix in ('ONE', 'TWO'):
self.app.config['%s_DBNAME' % prefix] = prefix
for prefix in ('ONE', 'TWO'):
flask.ext.pymongo.PyMongo(self.app, config_prefix=prefix)
# this test passes if it raises no exceptions
def test_config_with_uri(self):
self.app.config['MONGO_URI'] = 'mongodb://localhost:27017/flask_pymongo_test_db'
with warnings.catch_warnings():
# URI connections without a username and password
# work, but warn that auth should be supplied
warnings.simplefilter('ignore')
mongo = flask.ext.pymongo.PyMongo(self.app)
assert mongo.db.name == 'flask_pymongo_test_db', 'wrong dbname: %s' % mongo.db.name
assert mongo.cx.host == 'localhost'
assert mongo.cx.port == 27017
def test_config_with_uri_no_port(self):
self.app.config['MONGO_URI'] = 'mongodb://localhost/flask_pymongo_test_db'
with warnings.catch_warnings():
# URI connections without a username and password
# work, but warn that auth should be supplied
warnings.simplefilter('ignore')
mongo = flask.ext.pymongo.PyMongo(self.app)
assert mongo.db.name == 'flask_pymongo_test_db', 'wrong dbname: %s' % mongo.db.name
assert mongo.cx.host == 'localhost'
assert mongo.cx.port == 27017
def test_config_with_document_class(self):
self.app.config['MONGO_DOCUMENT_CLASS'] = CustomDict
mongo = flask.ext.pymongo.PyMongo(self.app)
assert mongo.cx.document_class == CustomDict
def test_config_without_document_class(self):
mongo = flask.ext.pymongo.PyMongo(self.app)
assert mongo.cx.document_class == dict
def test_host_with_port_does_not_get_overridden_by_separate_port_config_value(self):
self.app.config['MONGO_HOST'] = 'localhost:27017'
self.app.config['MONGO_PORT'] = 27018
with warnings.catch_warnings():
# URI connections without a username and password
# work, but warn that auth should be supplied
warnings.simplefilter('ignore')
mongo = flask.ext.pymongo.PyMongo(self.app)
assert mongo.cx.host == 'localhost'
assert mongo.cx.port == 27017
def test_uri_prioritised_over_host_and_port(self):
self.app.config['MONGO_URI'] = 'mongodb://localhost:27017/database_name'
self.app.config['MONGO_HOST'] = 'some_other_host'
self.app.config['MONGO_PORT'] = 27018
self.app.config['MONGO_DBNAME'] = 'not_the_correct_db_name'
with warnings.catch_warnings():
# URI connections without a username and password
# work, but warn that auth should be supplied
warnings.simplefilter('ignore')
mongo = flask.ext.pymongo.PyMongo(self.app)
assert mongo.cx.host == 'localhost'
assert mongo.cx.port == 27017
assert mongo.db.name == 'database_name'
def test_uri_without_database_errors_sensibly(self):
self.app.config['MONGO_URI'] = 'mongodb://localhost:27017/'
self.assertRaises(ValueError, flask.ext.pymongo.PyMongo, self.app)
class CustomDocumentClassTest(util.FlaskPyMongoTest):
""" Class that tests reading from DB with custom document_class """
def test_create_with_document_class(self):
""" This test doesn't use self.mongo, because it has to change config
It uses second mongo connection, using a CUSTOM prefix to avoid
duplicate config_prefix exception. To make use of tearDown and thus DB
deletion even in case of failure, it uses same DBNAME.
"""
# copying standard DBNAME, so this DB gets also deleted by tearDown
self.app.config['CUSTOM_DBNAME'] = self.app.config['MONGO_DBNAME']
self.app.config['CUSTOM_DOCUMENT_CLASS'] = CustomDict
# not using self.mongo, because we want to use updated config
# also using CUSTOM, to avoid duplicate config_prefix exception
mongo = flask.ext.pymongo.PyMongo(self.app, 'CUSTOM')
assert mongo.db.things.find_one() == None
# write document and retrieve, to check if type is really CustomDict
mongo.db.things.insert({'_id': 'thing', 'val': 'foo'}, safe=True)
assert type(mongo.db.things.find_one()) == CustomDict
def test_create_without_document_class(self):
""" This uses self.mongo, which uses config without document_class """
assert self.mongo.db.things.find_one() == None
# write document and retrieve, to check if type is dict (default)
self.mongo.db.things.insert({'_id': 'thing', 'val': 'foo'}, safe=True)
assert type(self.mongo.db.things.find_one()) == dict
|
switchkiller/Python-and-Algorithms-and-Data-Structures
|
refs/heads/master
|
src/USEFUL/advanced/lru_cache.py
|
2
|
#!/usr/bin/env python3
__author__ = "bt3"
from functools import lru_cache
@lru_cache(maxsize=20)
def fib(n):
if n < 2:
return n
return fib(n-1) + fib(n-2)
if __name__ == '__main__':
print([fib(n) for n in range(10)])
print(fib.cache_info())
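# Expected cache behaviour for this run (a sketch, not asserted): each
# distinct n in range(10) is a miss and every n >= 2 contributes two hits,
# so the line above should print roughly
# CacheInfo(hits=16, misses=10, maxsize=20, currsize=10)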
|
JFriel/honours_project
|
refs/heads/master
|
logistic-regression.py
|
1
|
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import app.parser.getData as importArticles
import app.parser.articleRetrieval.getArticles as getContent
import app.parser.sentences as sent
import app.parser.getChunks as gc
import app.analytics.tag as tag
import app.parser.articleRetrieval.wikipediaParse as wp
import app.analytics.features as fe
from sklearn import tree, feature_extraction, svm, linear_model
from sklearn.feature_extraction.text import CountVectorizer
from multiprocessing import Pool
import numpy as np
import datetime
import app.analytics.filterSentences as fl
import networkx as nx
import matplotlib.pyplot as plt
G=nx.DiGraph()
np.seterr(divide='ignore',invalid='ignore')
trainData = eval(open('trainDoubleSet','r').readlines()[0])
testData = open('testDoubleSet','r').readlines()
listOfYears = []
clf = linear_model.LogisticRegression()#svm.SVC(probability=True)
probs = []
titles = []
def train(features):
features = [item for item in features if len(item[0]) != 0]
feats = [item[0] for item in features]
A = len(features)
B = min(map(len,feats))
X = np.ones((A,B))
Y = np.ones((A))
for feature in range(len(features)):
Y[feature] = features[feature][2]#label
for item in range(0,B):
X[feature][item] = features[feature][0][item]
clf.fit(X,Y)
return B
def test(features,B):
correct = 0
probs = []
features = [item for item in features if (len(item[0]) == B)]
for feature in features:
temp = np.array(feature[0][0:B]).reshape((1, -1))
predict = clf.predict(temp[0][0:B])
#prob = max(clf.predict_proba(temp)[0])
probs.append([predict, feature[2]])
if(feature[2] == predict[0]):
correct +=1
print "Accuracy = " + str(correct) + '/' + str(len(features))
print datetime.datetime.now()
p = Pool(20)
#Used to get Article Content
#articles = (p.map(getArticle,trainData))
trainFeatures = []
for I in range(len(trainData)):
if trainData[I] is not None:
trainFeatures.append(trainData[I])
B = train(trainFeatures)
print datetime.datetime.now()
#train(generateDataPoints(trainArticles))
print "Training Complere. Now For Testing"
testData = eval(testData[0])
testFeatures = []
for I in range(len(testData)):
if testData[I] is not None:
testFeatures.append(testData[I])
test(testFeatures,B)
|
ConZ27/ansible-modules-core
|
refs/heads/devel
|
cloud/openstack/quantum_router_gateway.py
|
99
|
#!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013, Benno Joy <benno@ansible.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
try:
from neutronclient.neutron import client
except ImportError:
from quantumclient.quantum import client
from keystoneclient.v2_0 import client as ksclient
HAVE_DEPS = True
except ImportError:
HAVE_DEPS = False
DOCUMENTATION = '''
---
module: quantum_router_gateway
version_added: "1.2"
author: "Benno Joy (@bennojoy)"
short_description: set/unset a gateway interface for the router with the specified external network
description:
    - Creates/Removes a gateway interface from the router, used to associate an external network with a router to route external traffic.
options:
login_username:
description:
- login username to authenticate to keystone
required: true
default: admin
login_password:
description:
- Password of login user
required: true
default: 'yes'
login_tenant_name:
description:
- The tenant name of the login user
required: true
default: 'yes'
auth_url:
description:
- The keystone URL for authentication
required: false
default: 'http://127.0.0.1:35357/v2.0/'
region_name:
description:
- Name of the region
required: false
default: None
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
router_name:
description:
- Name of the router to which the gateway should be attached.
required: true
default: None
network_name:
description:
- Name of the external network which should be attached to the router.
required: true
default: None
requirements:
- "python >= 2.6"
- "python-neutronclient or python-quantumclient"
- "python-keystoneclient"
'''
EXAMPLES = '''
# Attach an external network with a router to allow flow of external traffic
- quantum_router_gateway: state=present login_username=admin login_password=admin
login_tenant_name=admin router_name=external_router
network_name=external_network
'''
_os_keystone = None
def _get_ksclient(module, kwargs):
try:
kclient = ksclient.Client(username=kwargs.get('login_username'),
password=kwargs.get('login_password'),
tenant_name=kwargs.get('login_tenant_name'),
auth_url=kwargs.get('auth_url'))
except Exception, e:
module.fail_json(msg = "Error authenticating to the keystone: %s " % e.message)
global _os_keystone
_os_keystone = kclient
return kclient
def _get_endpoint(module, ksclient):
try:
endpoint = ksclient.service_catalog.url_for(service_type='network', endpoint_type='publicURL')
except Exception, e:
module.fail_json(msg = "Error getting network endpoint: %s" % e.message)
return endpoint
def _get_neutron_client(module, kwargs):
_ksclient = _get_ksclient(module, kwargs)
token = _ksclient.auth_token
endpoint = _get_endpoint(module, _ksclient)
kwargs = {
'token': token,
'endpoint_url': endpoint
}
try:
neutron = client.Client('2.0', **kwargs)
except Exception, e:
module.fail_json(msg = "Error in connecting to neutron: %s " % e.message)
return neutron
def _get_router_id(module, neutron):
kwargs = {
'name': module.params['router_name'],
}
try:
routers = neutron.list_routers(**kwargs)
except Exception, e:
module.fail_json(msg = "Error in getting the router list: %s " % e.message)
if not routers['routers']:
return None
return routers['routers'][0]['id']
def _get_net_id(neutron, module):
kwargs = {
'name': module.params['network_name'],
'router:external': True
}
try:
networks = neutron.list_networks(**kwargs)
except Exception, e:
module.fail_json("Error in listing neutron networks: %s" % e.message)
if not networks['networks']:
return None
return networks['networks'][0]['id']
def _get_port_id(neutron, module, router_id, network_id):
kwargs = {
'device_id': router_id,
'network_id': network_id,
}
try:
ports = neutron.list_ports(**kwargs)
except Exception, e:
module.fail_json( msg = "Error in listing ports: %s" % e.message)
if not ports['ports']:
return None
return ports['ports'][0]['id']
def _add_gateway_router(neutron, module, router_id, network_id):
kwargs = {
'network_id': network_id
}
try:
neutron.add_gateway_router(router_id, kwargs)
except Exception, e:
module.fail_json(msg = "Error in adding gateway to router: %s" % e.message)
return True
def _remove_gateway_router(neutron, module, router_id):
try:
neutron.remove_gateway_router(router_id)
except Exception, e:
module.fail_json(msg = "Error in removing gateway to router: %s" % e.message)
return True
def main():
argument_spec = openstack_argument_spec()
argument_spec.update(dict(
router_name = dict(required=True),
network_name = dict(required=True),
state = dict(default='present', choices=['absent', 'present']),
))
module = AnsibleModule(argument_spec=argument_spec)
if not HAVE_DEPS:
module.fail_json(msg='python-keystoneclient and either python-neutronclient or python-quantumclient are required')
neutron = _get_neutron_client(module, module.params)
router_id = _get_router_id(module, neutron)
if not router_id:
module.fail_json(msg="failed to get the router id, please check the router name")
network_id = _get_net_id(neutron, module)
if not network_id:
module.fail_json(msg="failed to get the network id, please check the network name and make sure it is external")
if module.params['state'] == 'present':
port_id = _get_port_id(neutron, module, router_id, network_id)
if not port_id:
_add_gateway_router(neutron, module, router_id, network_id)
module.exit_json(changed=True, result="created")
module.exit_json(changed=False, result="success")
if module.params['state'] == 'absent':
port_id = _get_port_id(neutron, module, router_id, network_id)
if not port_id:
module.exit_json(changed=False, result="Success")
_remove_gateway_router(neutron, module, router_id)
module.exit_json(changed=True, result="Deleted")
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
|
raychorn/knowu
|
refs/heads/master
|
django/djangononrelsample2/django/contrib/localflavor/za/__init__.py
|
12133432
| |
eduNEXT/edx-platform
|
refs/heads/master
|
openedx/features/course_experience/api/v1/__init__.py
|
12133432
| |
vincepandolfo/django
|
refs/heads/master
|
django/contrib/gis/geos/io.py
|
588
|
"""
Module that holds classes for performing I/O operations on GEOS geometry
objects. Specifically, this has Python implementations of WKB/WKT
reader and writer classes.
"""
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.prototypes.io import (
WKBWriter, WKTWriter, _WKBReader, _WKTReader,
)
__all__ = ['WKBWriter', 'WKTWriter', 'WKBReader', 'WKTReader']
# Public classes for (WKB|WKT)Reader, which return GEOSGeometry
class WKBReader(_WKBReader):
def read(self, wkb):
"Returns a GEOSGeometry for the given WKB buffer."
return GEOSGeometry(super(WKBReader, self).read(wkb))
class WKTReader(_WKTReader):
def read(self, wkt):
"Returns a GEOSGeometry for the given WKT string."
return GEOSGeometry(super(WKTReader, self).read(wkt))
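# A minimal usage sketch (assumes GEOS is available to GeoDjango):
# geom = WKTReader().read('POINT (1 2)')  # -> a GEOSGeometry point
# wkt = WKTWriter().write(geom)           # -> WKT bytes for the same point
# WKBReader/WKBWriter behave analogously for binary (WKB) buffers.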
|
svn2github/gyp
|
refs/heads/master
|
pylib/gyp/MSVSProject.py
|
2736
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio project reader/writer."""
import gyp.common
import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------
class Tool(object):
"""Visual Studio tool."""
def __init__(self, name, attrs=None):
"""Initializes the tool.
Args:
name: Tool name.
attrs: Dict of tool attributes; may be None.
"""
self._attrs = attrs or {}
self._attrs['Name'] = name
def _GetSpecification(self):
"""Creates an element for the tool.
Returns:
A new xml.dom.Element for the tool.
"""
return ['Tool', self._attrs]
class Filter(object):
"""Visual Studio filter - that is, a virtual folder."""
def __init__(self, name, contents=None):
"""Initializes the folder.
Args:
name: Filter (folder) name.
contents: List of filenames and/or Filter objects contained.
"""
self.name = name
self.contents = list(contents or [])
#------------------------------------------------------------------------------
class Writer(object):
"""Visual Studio XML project writer."""
def __init__(self, project_path, version, name, guid=None, platforms=None):
"""Initializes the project.
Args:
project_path: Path to the project file.
version: Format version to emit.
name: Name of the project.
guid: GUID to use for project, if not None.
      platforms: Array of strings, the supported platforms. If None, defaults to ['Win32'].
"""
self.project_path = project_path
self.version = version
self.name = name
self.guid = guid
# Default to Win32 for platforms.
if not platforms:
platforms = ['Win32']
# Initialize the specifications of the various sections.
self.platform_section = ['Platforms']
for platform in platforms:
self.platform_section.append(['Platform', {'Name': platform}])
self.tool_files_section = ['ToolFiles']
self.configurations_section = ['Configurations']
self.files_section = ['Files']
# Keep a dict keyed on filename to speed up access.
self.files_dict = dict()
def AddToolFile(self, path):
"""Adds a tool file to the project.
Args:
path: Relative path from project to tool file.
"""
self.tool_files_section.append(['ToolFile', {'RelativePath': path}])
def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
"""Returns the specification for a configuration.
Args:
config_type: Type of configuration node.
config_name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
    Returns:
      The specification for the configuration node, as a nested list suitable
      for easy_xml.
"""
# Handle defaults
if not attrs:
attrs = {}
if not tools:
tools = []
# Add configuration node and its attributes
node_attrs = attrs.copy()
node_attrs['Name'] = config_name
specification = [config_type, node_attrs]
# Add tool nodes and their attributes
if tools:
for t in tools:
if isinstance(t, Tool):
specification.append(t._GetSpecification())
else:
specification.append(Tool(t)._GetSpecification())
return specification
def AddConfig(self, name, attrs=None, tools=None):
"""Adds a configuration to the project.
Args:
name: Configuration name.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
"""
spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
self.configurations_section.append(spec)
def _AddFilesToNode(self, parent, files):
"""Adds files and/or filters to the parent node.
Args:
parent: Destination node
files: A list of Filter objects and/or relative paths to files.
Will call itself recursively, if the files list contains Filter objects.
"""
for f in files:
if isinstance(f, Filter):
node = ['Filter', {'Name': f.name}]
self._AddFilesToNode(node, f.contents)
else:
node = ['File', {'RelativePath': f}]
self.files_dict[f] = node
parent.append(node)
def AddFiles(self, files):
"""Adds files to the project.
Args:
files: A list of Filter objects and/or relative paths to files.
This makes a copy of the file/filter tree at the time of this call. If you
later add files to a Filter object which was passed into a previous call
to AddFiles(), it will not be reflected in this project.
"""
self._AddFilesToNode(self.files_section, files)
# TODO(rspangler) This also doesn't handle adding files to an existing
# filter. That is, it doesn't merge the trees.
def AddFileConfig(self, path, config, attrs=None, tools=None):
"""Adds a configuration to a file.
Args:
path: Relative path to the file.
config: Name of configuration to add.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
Raises:
ValueError: Relative path does not match any file added via AddFiles().
"""
# Find the file node with the right relative path
parent = self.files_dict.get(path)
if not parent:
raise ValueError('AddFileConfig: file "%s" not in project.' % path)
# Add the config to the file node
spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
tools)
parent.append(spec)
def WriteIfChanged(self):
"""Writes the project file."""
# First create XML content definition
content = [
'VisualStudioProject',
{'ProjectType': 'Visual C++',
'Version': self.version.ProjectVersion(),
'Name': self.name,
'ProjectGUID': self.guid,
'RootNamespace': self.name,
'Keyword': 'Win32Proj'
},
self.platform_section,
self.tool_files_section,
self.configurations_section,
['References'], # empty section
self.files_section,
['Globals'] # empty section
]
easy_xml.WriteXmlIfChanged(content, self.project_path,
encoding="Windows-1252")
|
davidbuzz/ardupilot
|
refs/heads/master
|
Tools/LogAnalyzer/tests/TestNaN.py
|
34
|
from LogAnalyzer import Test,TestResult
import math
class TestNaN(Test):
'''test for NaNs present in log'''
def __init__(self):
Test.__init__(self)
self.name = "NaNs"
def run(self, logdata, verbose):
self.result = TestResult()
self.result.status = TestResult.StatusType.GOOD
def FAIL():
self.result.status = TestResult.StatusType.FAIL
nans_ok = {
"CTUN": [ "DSAlt", "TAlt" ],
"POS": [ "RelOriginAlt"],
}
for channel in logdata.channels.keys():
for field in logdata.channels[channel].keys():
if channel in nans_ok and field in nans_ok[channel]:
continue
try:
for tupe in logdata.channels[channel][field].listData:
(ts, val) = tupe
if isinstance(val, float) and math.isnan(val):
FAIL()
self.result.statusMessage += "Found NaN in %s.%s\n" % (channel, field,)
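                            # A NaN was found in this field: raise to break out
                            # of the data loop and move on to the next field
                            # (caught by the except/continue below).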
raise ValueError()
except ValueError as e:
continue
|
campenberger/boto
|
refs/heads/develop
|
tests/unit/cloudsearch/test_connection.py
|
114
|
#!/usr/bin/env python
from tests.unit import AWSMockServiceTestCase
from boto.cloudsearch.domain import Domain
from boto.cloudsearch.layer1 import Layer1
class TestCloudSearchCreateDomain(AWSMockServiceTestCase):
connection_class = Layer1
def default_body(self):
return b"""
<CreateDomainResponse xmlns="http://cloudsearch.amazonaws.com/doc/2011-02-01">
<CreateDomainResult>
<DomainStatus>
<SearchPartitionCount>0</SearchPartitionCount>
<SearchService>
<Arn>arn:aws:cs:us-east-1:1234567890:search/demo</Arn>
<Endpoint>search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com</Endpoint>
</SearchService>
<NumSearchableDocs>0</NumSearchableDocs>
<Created>true</Created>
<DomainId>1234567890/demo</DomainId>
<Processing>false</Processing>
<SearchInstanceCount>0</SearchInstanceCount>
<DomainName>demo</DomainName>
<RequiresIndexDocuments>false</RequiresIndexDocuments>
<Deleted>false</Deleted>
<DocService>
<Arn>arn:aws:cs:us-east-1:1234567890:doc/demo</Arn>
<Endpoint>doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com</Endpoint>
</DocService>
</DomainStatus>
</CreateDomainResult>
<ResponseMetadata>
<RequestId>00000000-0000-0000-0000-000000000000</RequestId>
</ResponseMetadata>
</CreateDomainResponse>
"""
def test_create_domain(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_domain('demo')
self.assert_request_parameters({
'Action': 'CreateDomain',
'DomainName': 'demo',
'Version': '2011-02-01',
})
def test_cloudsearch_connect_result_endpoints(self):
"""Check that endpoints & ARNs are correctly returned from AWS"""
self.set_http_response(status_code=200)
api_response = self.service_connection.create_domain('demo')
domain = Domain(self, api_response)
self.assertEqual(domain.doc_service_arn,
"arn:aws:cs:us-east-1:1234567890:doc/demo")
self.assertEqual(
domain.doc_service_endpoint,
"doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")
self.assertEqual(domain.search_service_arn,
"arn:aws:cs:us-east-1:1234567890:search/demo")
self.assertEqual(
domain.search_service_endpoint,
"search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")
def test_cloudsearch_connect_result_statuses(self):
"""Check that domain statuses are correctly returned from AWS"""
self.set_http_response(status_code=200)
api_response = self.service_connection.create_domain('demo')
domain = Domain(self, api_response)
self.assertEqual(domain.created, True)
self.assertEqual(domain.processing, False)
self.assertEqual(domain.requires_index_documents, False)
self.assertEqual(domain.deleted, False)
def test_cloudsearch_connect_result_details(self):
"""Check that the domain information is correctly returned from AWS"""
self.set_http_response(status_code=200)
api_response = self.service_connection.create_domain('demo')
domain = Domain(self, api_response)
self.assertEqual(domain.id, "1234567890/demo")
self.assertEqual(domain.name, "demo")
def test_cloudsearch_documentservice_creation(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_domain('demo')
domain = Domain(self, api_response)
document = domain.get_document_service()
self.assertEqual(
document.endpoint,
"doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")
def test_cloudsearch_searchservice_creation(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_domain('demo')
domain = Domain(self, api_response)
search = domain.get_search_service()
self.assertEqual(
search.endpoint,
"search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")
class CloudSearchConnectionDeletionTest(AWSMockServiceTestCase):
connection_class = Layer1
def default_body(self):
return b"""
<DeleteDomainResponse xmlns="http://cloudsearch.amazonaws.com/doc/2011-02-01">
<DeleteDomainResult>
<DomainStatus>
<SearchPartitionCount>0</SearchPartitionCount>
<SearchService>
<Arn>arn:aws:cs:us-east-1:1234567890:search/demo</Arn>
<Endpoint>search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com</Endpoint>
</SearchService>
<NumSearchableDocs>0</NumSearchableDocs>
<Created>true</Created>
<DomainId>1234567890/demo</DomainId>
<Processing>false</Processing>
<SearchInstanceCount>0</SearchInstanceCount>
<DomainName>demo</DomainName>
<RequiresIndexDocuments>false</RequiresIndexDocuments>
<Deleted>false</Deleted>
<DocService>
<Arn>arn:aws:cs:us-east-1:1234567890:doc/demo</Arn>
<Endpoint>doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com</Endpoint>
</DocService>
</DomainStatus>
</DeleteDomainResult>
<ResponseMetadata>
<RequestId>00000000-0000-0000-0000-000000000000</RequestId>
</ResponseMetadata>
</DeleteDomainResponse>
"""
def test_cloudsearch_deletion(self):
"""
        Check that the correct arguments are sent to AWS when deleting a
        CloudSearch domain.
"""
self.set_http_response(status_code=200)
api_response = self.service_connection.delete_domain('demo')
self.assert_request_parameters({
'Action': 'DeleteDomain',
'DomainName': 'demo',
'Version': '2011-02-01',
})
class CloudSearchConnectionIndexDocumentTest(AWSMockServiceTestCase):
connection_class = Layer1
def default_body(self):
return b"""
<IndexDocumentsResponse xmlns="http://cloudsearch.amazonaws.com/doc/2011-02-01">
<IndexDocumentsResult>
<FieldNames>
<member>average_score</member>
<member>brand_id</member>
<member>colors</member>
<member>context</member>
<member>context_owner</member>
<member>created_at</member>
<member>creator_id</member>
<member>description</member>
<member>file_size</member>
<member>format</member>
<member>has_logo</member>
<member>has_messaging</member>
<member>height</member>
<member>image_id</member>
<member>ingested_from</member>
<member>is_advertising</member>
<member>is_photo</member>
<member>is_reviewed</member>
<member>modified_at</member>
<member>subject_date</member>
<member>tags</member>
<member>title</member>
<member>width</member>
</FieldNames>
</IndexDocumentsResult>
<ResponseMetadata>
<RequestId>eb2b2390-6bbd-11e2-ab66-93f3a90dcf2a</RequestId>
</ResponseMetadata>
</IndexDocumentsResponse>
"""
def test_cloudsearch_index_documents(self):
"""
Check that the correct arguments are sent to AWS when indexing a
domain.
"""
self.set_http_response(status_code=200)
api_response = self.service_connection.index_documents('demo')
self.assert_request_parameters({
'Action': 'IndexDocuments',
'DomainName': 'demo',
'Version': '2011-02-01',
})
def test_cloudsearch_index_documents_resp(self):
"""
Check that the AWS response is being parsed correctly when indexing a
domain.
"""
self.set_http_response(status_code=200)
api_response = self.service_connection.index_documents('demo')
self.assertEqual(api_response, ['average_score', 'brand_id', 'colors',
'context', 'context_owner',
'created_at', 'creator_id',
'description', 'file_size', 'format',
'has_logo', 'has_messaging', 'height',
'image_id', 'ingested_from',
'is_advertising', 'is_photo',
'is_reviewed', 'modified_at',
'subject_date', 'tags', 'title',
'width'])
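# --- Hedged usage sketch (not part of the original tests) ---
# The mocked flow above mirrors what a live call would look like; the
# connection keyword arguments are assumptions based on boto's common
# connection interface:
#
#   from boto.cloudsearch.layer1 import Layer1
#   from boto.cloudsearch.domain import Domain
#   conn = Layer1(aws_access_key_id='...', aws_secret_access_key='...')
#   resp = conn.create_domain('demo')
#   domain = Domain(conn, resp)
#   doc_service = domain.get_document_service()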
|
atzorvas/droughtmeteo
|
refs/heads/master
|
languages/default.py
|
180
|
# coding: utf8
{
'!langcode!': 'en-us',
'!langname!': 'English (US)',
'%s %%(shop)': '%s %%(shop)',
'%s %%(shop[0])': '%s %%(shop[0])',
'%s %%{quark[0]}': '%s %%{quark[0]}',
'%s %%{shop[0]}': '%s %%{shop[0]}',
'%s %%{shop}': '%s %%{shop}',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'@markmin\x01**Hello World**': '**Hello World**',
'About': 'About',
'Access Control': 'Access Control',
'Administrative Interface': 'Administrative Interface',
'Ajax Recipes': 'Ajax Recipes',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Buy this book': 'Buy this book',
'Cannot be empty': 'Cannot be empty',
'Check to delete': 'Check to delete',
'Client IP': 'Client IP',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Created By': 'Created By',
'Created On': 'Created On',
'customize me!': 'customize me!',
'Database': 'Database',
'DB Model': 'DB Model',
'Demo': 'Demo',
'Deployment Recipes': 'Deployment Recipes',
'Description': 'Description',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'Download': 'Download',
'E-mail': 'E-mail',
'Email and SMS': 'Email and SMS',
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
'enter date and time as %(format)s': 'enter date and time as %(format)s',
'Errors': 'Errors',
'FAQ': 'FAQ',
'First name': 'First name',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Group %(group_id)s created': 'Group %(group_id)s created',
'Group ID': 'Group ID',
'Group uniquely assigned to user %(id)s': 'Group uniquely assigned to user %(id)s',
'Groups': 'Groups',
'Hello World': 'Hello World',
'Hello World ## comment': 'Hello World ',
'Hello World## comment': 'Hello World',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'Introduction': 'Introduction',
'Invalid email': 'Invalid email',
'Is Active': 'Is Active',
'Last name': 'Last name',
'Layout': 'Layout',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'Live Chat': 'Live Chat',
'Logged in': 'Logged in',
'Logged out': 'Logged out',
'Login': 'Login',
'Logout': 'Logout',
'Lost Password': 'Lost Password',
'Lost password?': 'Lost password?',
'Menu Model': 'Menu Model',
'Modified By': 'Modified By',
'Modified On': 'Modified On',
'My Sites': 'My Sites',
'Name': 'Name',
'Object or table name': 'Object or table name',
'Online examples': 'Online examples',
'Origin': 'Origin',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': 'Overview',
'Password': 'Password',
"Password fields don't match": "Password fields don't match",
'please input your password again': 'please input your password again',
'Plugins': 'Plugins',
'Powered by': 'Powered by',
'Preface': 'Preface',
'Profile': 'Profile',
'Python': 'Python',
'Quick Examples': 'Quick Examples',
'Recipes': 'Recipes',
'Record ID': 'Record ID',
'Register': 'Register',
'Registration identifier': 'Registration identifier',
'Registration key': 'Registration key',
'Registration successful': 'Registration successful',
'Remember me (for 30 days)': 'Remember me (for 30 days)',
'Reset Password key': 'Reset Password key',
'Role': 'Role',
'Semantic': 'Semantic',
'Services': 'Services',
'Stylesheet': 'Stylesheet',
'Support': 'Support',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': 'The Views',
'This App': 'This App',
'Timestamp': 'Timestamp',
'Twitter': 'Twitter',
'User %(id)s Logged-in': 'User %(id)s Logged-in',
'User %(id)s Logged-out': 'User %(id)s Logged-out',
'User %(id)s Registered': 'User %(id)s Registered',
'User ID': 'User ID',
'value already in database or empty': 'value already in database or empty',
'Verify Password': 'Verify Password',
'Videos': 'Videos',
'View': 'View',
'Welcome': 'Welcome',
'Welcome to web2py!': 'Welcome to web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'You are successfully running web2py': 'You are successfully running web2py',
'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs',
'You visited the url %s': 'You visited the url %s',
}
|
chokribr/inveniotest
|
refs/heads/master
|
modules/bibformat/lib/elements/bfe_date.py
|
25
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints imprint publication date
"""
__revision__ = "$Id$"
import time
def format_element(bfo, date_format='%d %B %Y'):
"""
    Prints the imprint publication date.
    Parameter <code>date_format</code> allows specifying the string representation of the output.
The format string has the same behaviour as the strftime() function::
<pre>Eg: 1982-09-24 07:32:00
"%d %B %Y" -> 24 September 1982
"%I:%M" -> 07:32
</pre>
@see: pagination.py, publisher.py, reprints.py, imprint.py, place.py
    @param date_format: output date format (strftime-style)
"""
date = bfo.field('260__c')
if date_format != '':
try:
date_time = time.strptime(date, "%Y-%m-%d")
return time.strftime(date_format, date_time)
except ValueError:
return date
else:
return date
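# --- Hedged illustration (not part of the original element) ---
# The fallback documented above, in plain time calls; the input values are
# hypothetical examples of what field '260__c' might hold:
#
#   import time
#   dt = time.strptime('1982-09-24', '%Y-%m-%d')
#   time.strftime('%d %B %Y', dt)        # -> '24 September 1982'
#   # 'Sept. 1982' does not parse as '%Y-%m-%d', so format_element
#   # would return it unchanged.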
|
rwightman/tensorflow-litterbox
|
refs/heads/master
|
litterbox/models/google/nets/overfeat_test.py
|
6
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slim.nets.overfeat."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from nets import overfeat
slim = tf.contrib.slim
class OverFeatTest(tf.test.TestCase):
def testBuild(self):
batch_size = 5
height, width = 231, 231
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = overfeat.overfeat(inputs, num_classes)
self.assertEquals(logits.op.name, 'overfeat/fc8/squeezed')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
def testFullyConvolutional(self):
batch_size = 1
height, width = 281, 281
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = overfeat.overfeat(inputs, num_classes, spatial_squeeze=False)
self.assertEquals(logits.op.name, 'overfeat/fc8/BiasAdd')
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, 2, 2, num_classes])
def testEndPoints(self):
batch_size = 5
height, width = 231, 231
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
_, end_points = overfeat.overfeat(inputs, num_classes)
expected_names = ['overfeat/conv1',
'overfeat/pool1',
'overfeat/conv2',
'overfeat/pool2',
'overfeat/conv3',
'overfeat/conv4',
'overfeat/conv5',
'overfeat/pool5',
'overfeat/fc6',
'overfeat/fc7',
'overfeat/fc8'
]
self.assertSetEqual(set(end_points.keys()), set(expected_names))
def testModelVariables(self):
batch_size = 5
height, width = 231, 231
num_classes = 1000
with self.test_session():
inputs = tf.random_uniform((batch_size, height, width, 3))
overfeat.overfeat(inputs, num_classes)
expected_names = ['overfeat/conv1/weights',
'overfeat/conv1/biases',
'overfeat/conv2/weights',
'overfeat/conv2/biases',
'overfeat/conv3/weights',
'overfeat/conv3/biases',
'overfeat/conv4/weights',
'overfeat/conv4/biases',
'overfeat/conv5/weights',
'overfeat/conv5/biases',
'overfeat/fc6/weights',
'overfeat/fc6/biases',
'overfeat/fc7/weights',
'overfeat/fc7/biases',
'overfeat/fc8/weights',
'overfeat/fc8/biases',
]
model_variables = [v.op.name for v in slim.get_model_variables()]
self.assertSetEqual(set(model_variables), set(expected_names))
def testEvaluation(self):
batch_size = 2
height, width = 231, 231
num_classes = 1000
with self.test_session():
eval_inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = overfeat.overfeat(eval_inputs, is_training=False)
self.assertListEqual(logits.get_shape().as_list(),
[batch_size, num_classes])
predictions = tf.argmax(logits, 1)
self.assertListEqual(predictions.get_shape().as_list(), [batch_size])
def testTrainEvalWithReuse(self):
train_batch_size = 2
eval_batch_size = 1
train_height, train_width = 231, 231
eval_height, eval_width = 281, 281
num_classes = 1000
with self.test_session():
train_inputs = tf.random_uniform(
(train_batch_size, train_height, train_width, 3))
logits, _ = overfeat.overfeat(train_inputs)
self.assertListEqual(logits.get_shape().as_list(),
[train_batch_size, num_classes])
tf.get_variable_scope().reuse_variables()
eval_inputs = tf.random_uniform(
(eval_batch_size, eval_height, eval_width, 3))
logits, _ = overfeat.overfeat(eval_inputs, is_training=False,
spatial_squeeze=False)
self.assertListEqual(logits.get_shape().as_list(),
[eval_batch_size, 2, 2, num_classes])
logits = tf.reduce_mean(logits, [1, 2])
predictions = tf.argmax(logits, 1)
self.assertEquals(predictions.get_shape().as_list(), [eval_batch_size])
def testForward(self):
batch_size = 1
height, width = 231, 231
with self.test_session() as sess:
inputs = tf.random_uniform((batch_size, height, width, 3))
logits, _ = overfeat.overfeat(inputs)
sess.run(tf.initialize_all_variables())
output = sess.run(logits)
self.assertTrue(output.any())
if __name__ == '__main__':
tf.test.main()
|
beacloudgenius/edx-platform
|
refs/heads/master
|
lms/djangoapps/courseware/features/staff_debug_info.py
|
173
|
"""
Steps for staff_debug_info.feature lettuce tests
"""
from django.contrib.auth.models import User
from lettuce import world, step
from common import create_course, course_id
from courseware.courses import get_course_by_id
from instructor.access import allow_access
@step(u'i am staff member for the course "([^"]*)"$')
def i_am_staff_member_for_the_course(step, course_number):
# Create the course
create_course(step, course_number)
course = get_course_by_id(course_id(course_number))
# Create the user
world.create_user('robot', 'test')
user = User.objects.get(username='robot')
# Add user as a course staff.
allow_access(course, user, "staff")
world.log_in(username='robot', password='test')
@step(u'I can view staff debug info')
def view_staff_debug_info(step):
css_selector = "a.instructor-info-action"
world.css_click(css_selector)
world.wait_for_visible("section.staff-modal")
@step(u'I can reset student attempts')
def reset_student_attempts(step):
css_selector = "a.staff-debug-reset"
world.css_click(css_selector)
world.wait_for_ajax_complete()
@step(u'I cannot see delete student state link')
def cannot_see_delete_student_state_link(step):
css_selector = "a.staff-debug-sdelete"
world.is_css_not_present(css_selector)
@step(u'I cannot see rescore student submission link')
def cannot_see_rescore_submission_link(step):
css_selector = "a.staff-debug-rescore"
world.is_css_not_present(css_selector)
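# --- Hedged sketch (not part of the original file) ---
# The step regexes above match scenario lines (minus the Given/When/Then
# keyword) of roughly this shape in staff_debug_info.feature; the course
# number is a hypothetical example:
#
#   i am staff member for the course "6.002x"
#   I can view staff debug info
#   I can reset student attempts
#   I cannot see delete student state link
#   I cannot see rescore student submission link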
|
txomon/vdsm
|
refs/heads/master
|
tests/functional/firewall.py
|
1
|
#
# Copyright 2013-2014 Red Hat, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import logging
from nose.plugins.skip import SkipTest
from vdsm.utils import CommandPath
from vdsm.utils import execCmd
_FIREWALLD_BINARY = CommandPath('firewall-cmd', '/bin/firewall-cmd')
_IPTABLES_BINARY = CommandPath('iptables', '/sbin/iptables')
_SERVICE_BINARY = CommandPath('service', '/sbin/service')
class FirewallError(Exception):
pass
def allowDhcp(veth):
"""Allows DHCP traffic on a testing veth interface.
When using the iptables service, no other traffic is allowed.
With firewalld, the whole interface is moved to the 'trusted',
unrestricted zone.
"""
try:
if _serviceRunning('iptables'):
_execCmdChecker([_IPTABLES_BINARY.cmd, '-I', 'INPUT', '-i',
veth, '-p', 'udp', '--sport', '68', '--dport',
'67', '-j', 'ACCEPT']) # DHCPv4
_execCmdChecker([_IPTABLES_BINARY.cmd, '-I', 'INPUT', '-i',
veth, '-p', 'udp', '--sport', '546', '--dport',
'547', '-j', 'ACCEPT']) # DHCPv6
elif _serviceRunning('firewalld'):
_execCmdChecker([_FIREWALLD_BINARY.cmd, '--zone=trusted',
'--change-interface=' + veth])
else:
logging.info('No firewall service detected.')
except FirewallError as e:
raise SkipTest('Failed to allow DHCP traffic in firewall: %s' % e)
def stopAllowingDhcp(veth):
"""Removes the rules allowing DHCP on the testing veth interface.
As the interface is expected to be removed from the system,
this function merely reverses the effect of the 'allowDhcp' function
just to clean up.
For iptables, it deletes the rule introduced. For firewalld, it removes
the interface from the 'trusted' zone.
If cleaning up fails the affected test must fail too (with FirewallError).
"""
if _serviceRunning('iptables'):
_execCmdChecker([_IPTABLES_BINARY.cmd, '-D', 'INPUT', '-i',
veth, '-p', 'udp', '--sport', '68', '--dport',
'67', '-j', 'ACCEPT']) # DHCPv4
_execCmdChecker([_IPTABLES_BINARY.cmd, '-D', 'INPUT', '-i',
veth, '-p', 'udp', '--sport', '546', '--dport',
'547', '-j', 'ACCEPT']) # DHCPv6
elif _serviceRunning('firewalld'):
_execCmdChecker([_FIREWALLD_BINARY.cmd, '--zone=trusted',
'--remove-interface=' + veth])
else:
logging.warning('No firewall service detected.')
def _serviceRunning(name):
ret, _, _ = execCmd([_SERVICE_BINARY.cmd, name, 'status'])
# return code 0 means service is running
return not ret
def _execCmdChecker(command):
ret, out, err = execCmd(command)
if ret:
raise FirewallError('Command {0} failed with {1}; {2}'.format(
command, out, err))
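# --- Hedged usage sketch (not part of the original module) ---
# Typical use in a functional test: open the firewall for a test veth
# device before exercising DHCP and always reverse it afterwards. The
# interface name and the test methods are hypothetical.
#
#   def setUp(self):
#       allowDhcp('veth_test')          # raises SkipTest on failure
#
#   def tearDown(self):
#       stopAllowingDhcp('veth_test')   # FirewallError fails the test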
|
tumbl3w33d/ansible
|
refs/heads/devel
|
lib/ansible/modules/utilities/logic/include_tasks.py
|
45
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'
}
DOCUMENTATION = r'''
---
author: Ansible Core Team (@ansible)
module: include_tasks
short_description: Dynamically include a task list
description:
- Includes a file with a list of tasks to be executed in the current playbook.
version_added: '2.4'
options:
file:
description:
- The name of the imported file is specified directly without any other option.
- Unlike M(import_tasks), most keywords, including loop, with_items, and conditionals, apply to this statement.
- The do until loop is not supported on M(include_tasks).
type: str
version_added: '2.7'
apply:
description:
- Accepts a hash of task keywords (e.g. C(tags), C(become)) that will be applied to the tasks within the include.
type: str
version_added: '2.7'
free-form:
description:
- |
Supplying a file name via free-form C(- include_tasks: file.yml) of a file to be included is the equivalent
of specifying an argument of I(file).
notes:
  - This is a core feature of Ansible, rather than a module, and cannot be overridden like a module.
seealso:
- module: import_playbook
- module: import_role
- module: import_tasks
- module: include_role
- ref: playbooks_reuse_includes
description: More information related to including and importing playbooks, roles and tasks.
'''
EXAMPLES = r'''
- hosts: all
tasks:
- debug:
msg: task1
- name: Include task list in play
include_tasks: stuff.yaml
- debug:
msg: task10
- hosts: all
tasks:
- debug:
msg: task1
- name: Include task list in play only if the condition is true
include_tasks: "{{ hostvar }}.yaml"
when: hostvar is defined
- name: Apply tags to tasks within included file
include_tasks:
file: install.yml
apply:
tags:
- install
tags:
- always
- name: Apply tags to tasks within included file when using free-form
include_tasks: install.yml
args:
apply:
tags:
- install
tags:
- always
'''
RETURN = r'''
# This module does not return anything except tasks to execute.
'''
|
ceramos/micropython
|
refs/heads/master
|
tests/basics/int_big_mod.py
|
55
|
# test % operation on big integers
delta = 100000000000000000000000000000012345
for i in range(11):
for j in range(11):
x = delta * (i)# - 5) # TODO reinstate negative number test when % is working with sign correctly
y = delta * (j)# - 5) # TODO reinstate negative number test when % is working with sign correctly
if y != 0:
print(x % y)
|
yangming85/lettuce
|
refs/heads/master
|
tests/integration/django/chive/manage.py
|
2072
|
#!/usr/bin/env python
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
|
toooooper/oppia
|
refs/heads/master
|
integrations/gcb_oppia_tag_20141119_v0.0.1/coursebuilder/modules/oppia_tag/oppia_tag.py
|
102
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for implementing question tags."""
__author__ = 'sll@google.com (Sean Lip)'
import os
import jinja2
from common import jinja_utils
from common import schema_fields
from common import tags
from controllers import lessons
from models import custom_modules
from models import progress
RESOURCES_PATH = '/modules/oppia_tag/resources'
OPPIA_TAG_BINDING_NAME = 'oppia-tag'
EXPLORATION_COMPLETED_EVENT_NAME = 'tag-oppia-exploration-completed'
class OppiaTag(tags.BaseTag):
"""Custom tag for an Oppia embedder."""
binding_name = OPPIA_TAG_BINDING_NAME
def get_icon_url(self):
return os.path.join(RESOURCES_PATH, 'oppia.png')
@classmethod
def name(cls):
return 'Oppia exploration'
@classmethod
def vendor(cls):
return 'oppia'
def render(self, node, handler):
instanceid = node.attrib.get('instanceid')
template_values = {
'RESOURCES_PATH': RESOURCES_PATH,
'exploration_id': node.attrib.get('exploration_id'),
'instanceid': instanceid,
'src': node.attrib.get('src'),
}
cpt_progress = None
if (hasattr(handler, 'student') and not handler.student.is_transient
and not handler.lesson_is_scored):
cpt_progress = handler.get_course().get_progress_tracker(
).get_component_progress(
handler.student, handler.unit_id, handler.lesson_id,
instanceid)
template_values['progress'] = cpt_progress
template = jinja_utils.get_template(
'templates/oppia_template.html', [os.path.dirname(__file__)])
html_string = jinja2.utils.Markup(template.render(template_values))
return tags.html_string_to_element_tree(html_string)
def get_schema(self, unused_handler):
reg = schema_fields.FieldRegistry(OppiaTag.name())
reg.add_property(
schema_fields.SchemaField(
'src', 'URL of the Oppia server', 'string', optional=False,
description=(
'Provide the full URL of the Oppia server\'s domain, '
'e.g. \'https://www.oppia.org\'')))
reg.add_property(
schema_fields.SchemaField(
'exploration_id', 'Exploration ID', 'string', optional=False,
description=('The Oppia exploration id.')))
return reg
custom_module = None
def register_module():
"""Registers this module in the registry."""
def when_module_enabled():
# Register custom tags.
tags.Registry.add_tag_binding(
OppiaTag.binding_name, OppiaTag)
tags.EditorBlacklists.register(
OppiaTag.binding_name, tags.EditorBlacklists.COURSE_SCOPE)
# Allow Oppia tag events to be recorded and to count towards progress.
if (EXPLORATION_COMPLETED_EVENT_NAME not in
lessons.TAGS_THAT_TRIGGER_COMPONENT_COMPLETION):
lessons.TAGS_THAT_TRIGGER_COMPONENT_COMPLETION.append(
EXPLORATION_COMPLETED_EVENT_NAME)
if OPPIA_TAG_BINDING_NAME not in progress.TRACKABLE_COMPONENTS:
progress.TRACKABLE_COMPONENTS.append(OPPIA_TAG_BINDING_NAME)
def when_module_disabled():
# Unregister custom tags.
tags.Registry.remove_tag_binding(OppiaTag.binding_name)
tags.EditorBlacklists.unregister(
OppiaTag.binding_name, tags.EditorBlacklists.COURSE_SCOPE)
# Stop recording any Oppia tag events.
if (EXPLORATION_COMPLETED_EVENT_NAME in
lessons.TAGS_THAT_TRIGGER_COMPONENT_COMPLETION):
lessons.TAGS_THAT_TRIGGER_COMPONENT_COMPLETION.remove(
EXPLORATION_COMPLETED_EVENT_NAME)
if OPPIA_TAG_BINDING_NAME in progress.TRACKABLE_COMPONENTS:
progress.TRACKABLE_COMPONENTS.remove(OPPIA_TAG_BINDING_NAME)
# Add a static handler for icons shown in the rich text editor.
global_routes = [(
os.path.join(RESOURCES_PATH, '.*'), tags.ResourcesHandler)]
global custom_module
custom_module = custom_modules.Module(
'Oppia tag',
'A tag for rendering Oppia explorations within a lesson body.',
global_routes,
[],
notify_module_enabled=when_module_enabled,
notify_module_disabled=when_module_disabled)
return custom_module
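# --- Hedged usage sketch (not part of the original module) ---
# OppiaTag.render() reads 'src', 'exploration_id' and 'instanceid' off the
# tag node, which suggests embedded lesson markup of roughly this shape;
# the attribute values are hypothetical:
#
#   <oppia-tag src="https://www.oppia.org"
#              exploration_id="abc123"
#              instanceid="xyz789"></oppia-tag>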
|
CSCI-462-01-2017/bedrock
|
refs/heads/master
|
tests/functional/firefox/__init__.py
|
12133432
| |
KentaYamada/Siphon
|
refs/heads/master
|
app/tests/controller/__init__.py
|
12133432
| |
wenhuizhang/neutron
|
refs/heads/master
|
neutron/tests/unit/ipam/__init__.py
|
12133432
| |
TedaLIEz/sentry
|
refs/heads/master
|
tests/sentry/tasks/process_buffer/__init__.py
|
12133432
| |
carlosp420/VoSeq
|
refs/heads/master
|
voseq/blast_local_full/__init__.py
|
12133432
| |
foobarbazblarg/stayclean
|
refs/heads/master
|
stayclean-2018-april/reinstate.py
|
49
|
#!/usr/bin/python
# TODO: issues with new oauth2 stuff. Keep using older version of Python for now.
# #!/usr/bin/env python
import sys
from participantCollection import ParticipantCollection
names = sys.argv[1::]
participants = ParticipantCollection()
for name in names:
if participants.hasParticipantNamed(name):
participants.participantNamed(name).isStillIn = True
print "just reinstated " + name
else:
print "*** WARNING: " + name + " is not present in participants.txt"
participants.save()
|
dflemin3/ICgen
|
refs/heads/master
|
make_snapshotSType.py
|
2
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 21 15:11:31 2014
@author: ibackus
@editor: dflemin3
-Note: indentation is 4 spaces in this file, not a tab!
This module initializes an S-type binary system in which the gas disk is around
the primary, not both stars! Assumes a_bin >> r_disk such that the disk's
velocity is dominated by the influence of the primary.
"""
__version__ = "$Revision: 1 $"
# $Source$
import pynbody
SimArray = pynbody.array.SimArray
import numpy as np
import binaryUtils
import gc
import os
import AddBinary
import isaac
import calc_velocity
import ICgen_utils
import ICglobal_settings
global_settings = ICglobal_settings.global_settings
def snapshot_gen(ICobj):
"""
Generates a tipsy snapshot from the initial conditions object ICobj.
    Returns snapshot, param, director
    snapshot: tipsy snapshot
    param: dictionary containing info for a .param file
    director: dictionary containing info for a .director file
    Note: Code has been edited (dflemin3) so that it now returns a snapshot for an S-type binary
    system where the initial conditions are generated assuming a single star of mass M at the
    origin. After the gas is initialized, the star at the origin is replaced by a binary system
    whose center of mass lies at the origin and whose total mass is m1 + m2 = M.
"""
print 'Generating snapshot...'
# Constants
G = SimArray(1.0,'G')
# ------------------------------------
# Load in things from ICobj
# ------------------------------------
print 'Accessing data from ICs'
settings = ICobj.settings
# snapshot file name
snapshotName = settings.filenames.snapshotName
paramName = settings.filenames.paramName
# particle positions
r = ICobj.pos.r
xyz = ICobj.pos.xyz
# Number of particles
nParticles = ICobj.pos.nParticles
# molecular mass
m = settings.physical.m
# star mass
m_star = settings.physical.M.copy()
# disk mass
m_disk = ICobj.sigma.m_disk.copy()
m_disk = isaac.match_units(m_disk, m_star)[0]
# mass of the gas particles
m_particles = m_disk / float(nParticles)
# re-scale the particles (allows making of low-mass disk)
m_particles *= settings.snapshot.mScale
# -------------------------------------------------
# Assign output
# -------------------------------------------------
print 'Assigning data to snapshot'
# Get units all set up
m_unit = m_star.units
pos_unit = r.units
if xyz.units != r.units:
xyz.convert_units(pos_unit)
# time units are sqrt(L^3/GM)
t_unit = np.sqrt((pos_unit**3)*np.power((G*m_unit), -1)).units
# velocity units are L/t
v_unit = (pos_unit/t_unit).ratio('km s**-1')
# Make it a unit, save value for future conversion
v_unit_vel = v_unit
#Ensure v_unit_vel is the same as what I assume it is.
assert(np.fabs(AddBinary.VEL_UNIT-v_unit_vel)<AddBinary.SMALL),"VEL_UNIT not equal to ChaNGa unit! Why??"
v_unit = pynbody.units.Unit('{0} km s**-1'.format(v_unit))
# Other settings
metals = settings.snapshot.metals
star_metals = metals
# Generate snapshot
# Note that empty pos, vel, and mass arrays are created in the snapshot
snapshot = pynbody.new(star=1,gas=nParticles)
snapshot['vel'].units = v_unit
snapshot['eps'] = 0.01*SimArray(np.ones(nParticles+1, dtype=np.float32), pos_unit)
snapshot['metals'] = SimArray(np.zeros(nParticles+1, dtype=np.float32))
snapshot['rho'] = SimArray(np.zeros(nParticles+1, dtype=np.float32))
snapshot.gas['pos'] = xyz
snapshot.gas['temp'] = ICobj.T(r)
snapshot.gas['mass'] = m_particles
snapshot.gas['metals'] = metals
snapshot.star['pos'] = SimArray([[ 0., 0., 0.]],pos_unit)
snapshot.star['vel'] = SimArray([[ 0., 0., 0.]], v_unit)
snapshot.star['mass'] = m_star
snapshot.star['metals'] = SimArray(star_metals)
# Estimate the star's softening length as the closest particle distance
eps = r.min()
# Make param file
param = isaac.make_param(snapshot, snapshotName)
param['dMeanMolWeight'] = m
gc.collect()
# CALCULATE VELOCITY USING calc_velocity.py. This also estimates the
# gravitational softening length eps
print 'Calculating circular velocity'
preset = settings.changa_run.preset
max_particles = global_settings['misc']['max_particles']
calc_velocity.v_xy(snapshot, param, changa_preset=preset, max_particles=max_particles)
gc.collect()
# -------------------------------------------------
# Estimate time step for changa to use
# -------------------------------------------------
# Save param file
isaac.configsave(param, paramName, 'param')
# Save snapshot
snapshot.write(filename=snapshotName, fmt=pynbody.tipsy.TipsySnap)
# est dDelta
dDelta = ICgen_utils.est_time_step(paramName, preset)
param['dDelta'] = dDelta
# -------------------------------------------------
# Create director file
# -------------------------------------------------
# largest radius to plot
r_director = float(0.9 * r.max())
# Maximum surface density
sigma_min = float(ICobj.sigma(r_director))
# surface density at largest radius
sigma_max = float(ICobj.sigma.input_dict['sigma'].max())
# Create director dict
director = isaac.make_director(sigma_min, sigma_max, r_director, filename=param['achOutName'])
## Save .director file
#isaac.configsave(director, directorName, 'director')
"""
    Now that the gas disk is initialized around the primary (M=m1), add in the
second star as specified by the user.
"""
#Now that velocities and everything are all initialized for gas particles, create new snapshot to return in which
#single star particle is replaced by 2, same units as above
snapshotBinary = pynbody.new(star=2,gas=nParticles)
snapshotBinary['eps'] = 0.01*SimArray(np.ones(nParticles+2, dtype=np.float32), pos_unit)
snapshotBinary['metals'] = SimArray(np.zeros(nParticles+2, dtype=np.float32))
snapshotBinary['vel'].units = v_unit
snapshotBinary['pos'].units = pos_unit
snapshotBinary['mass'].units = snapshot['mass'].units
snapshotBinary['rho'] = SimArray(np.zeros(nParticles+2, dtype=np.float32))
#Assign gas particles with calculated/given values from above
snapshotBinary.gas['pos'] = snapshot.gas['pos']
snapshotBinary.gas['vel'] = snapshot.gas['vel']
snapshotBinary.gas['temp'] = snapshot.gas['temp']
snapshotBinary.gas['rho'] = snapshot.gas['rho']
snapshotBinary.gas['eps'] = snapshot.gas['eps']
snapshotBinary.gas['mass'] = snapshot.gas['mass']
snapshotBinary.gas['metals'] = snapshot.gas['metals']
#Load Binary system obj to initialize system
binsys = ICobj.settings.physical.binsys
m_disk = isaac.strip_units(np.sum(snapshotBinary.gas['mass']))
binsys.m1 = binsys.m1 + m_disk
#Recompute cartesian coords considering primary as m1+m_disk
binsys.computeCartesian()
x1,x2,v1,v2 = binsys.generateICs()
#Assign position, velocity assuming CCW orbit
snapshotBinary.star[0]['pos'] = SimArray(x1,pos_unit)
snapshotBinary.star[0]['vel'] = SimArray(v1,v_unit)
snapshotBinary.star[1]['pos'] = SimArray(x2,pos_unit)
snapshotBinary.star[1]['vel'] = SimArray(v2,v_unit)
"""
We have the binary positions about their center of mass, (0,0,0), so
shift the position, velocity of the gas disk to be around the primary.
"""
snapshotBinary.gas['pos'] += snapshotBinary.star[0]['pos']
snapshotBinary.gas['vel'] += snapshotBinary.star[0]['vel']
#Set stellar masses: Create simArray for mass, convert units to simulation mass units
snapshotBinary.star[0]['mass'] = SimArray(binsys.m1-m_disk,m_unit)
snapshotBinary.star[1]['mass'] = SimArray(binsys.m2,m_unit)
snapshotBinary.star['metals'] = SimArray(star_metals)
#Now that everything has masses and positions, adjust positions so the
#system center of mass corresponds to the origin
"""
com = binaryUtils.computeCOM(snapshotBinary.stars,snapshotBinary.gas)
print com
snapshotBinary.stars['pos'] -= com
snapshotBinary.gas['pos'] -= com
"""
print 'Wrapping up'
# Now set the star particle's tform to a negative number. This allows
    # UW ChaNGa to treat it as a sink particle.
snapshotBinary.star['tform'] = -1.0
#Set sink radius, stellar smoothing length as fraction of distance
#from primary to inner edge of the disk
r_sink = eps
snapshotBinary.star[0]['eps'] = SimArray(r_sink/2.0,pos_unit)
snapshotBinary.star[1]['eps'] = SimArray(r_sink/2.0,pos_unit)
param['dSinkBoundOrbitRadius'] = r_sink
param['dSinkRadius'] = r_sink
param['dSinkMassMin'] = 0.9 * binsys.m2
param['bDoSinks'] = 1
return snapshotBinary, param, director
def make_director(ICobj, res=1200):
director = {}
director['render'] = 'tsc'
director['FOV'] = 45.0
director['clip'] = [0.0001, 500]
director['up'] = [1, 0, 0]
director['project'] = 'ortho'
director['softgassph'] = 'softgassph'
director['physical'] = 'physical'
director['size'] = [res, res]
sig_set = ICobj.settings.sigma
mScale = ICobj.settings.snapshot.mScale
snapshot_name = ICobj.settings.filenames.snapshotName
f_prefix = os.path.splitext(os.path.basename(snapshot_name))[0]
director['file'] = f_prefix
if sig_set.kind == 'MQWS':
rmax = sig_set.rout + 3*sig_set.rin
zmax = float(rmax)
director['eye'] = [0, 0, zmax]
vmin = float(ICobj.rho(0, rmax))
vmax = float(ICobj.rho.rho_binned[0,:].max())
vmax *= mScale
director['logscale'] = [vmin, 10*vmax]
director['colgas'] = [1, 1, 1]
return director
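# --- Hedged usage sketch (not part of the original module) ---
# snapshot_gen is driven by an initial-conditions object built elsewhere in
# the ICgen package; how ICobj is obtained is an assumption here:
#
#   ICobj = ...  # an ICgen initial-conditions object, loaded or generated
#   snapshot, param, director = snapshot_gen(ICobj)
#   isaac.configsave(director, param['achOutName'] + '.director', 'director')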
|
nacc/cobbler
|
refs/heads/master
|
cobbler/item_file.py
|
8
|
"""
Copyright 2006-2009, MadHatter
Kelsey Hightower <kelsey.hightower@gmail.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import resource
import utils
from utils import _
from cexceptions import CX
# this datastructure is described in great detail in item_distro.py -- read the comments there.
FIELDS = [
[ "uid","",0,"",False,"",0,"str"],
["depth",2,0,"",False,"",0,"float"],
["comment","",0,"Comment",True,"Free form text description",0,"str"],
["ctime",0,0,"",False,"",0,"float"],
["mtime",0,0,"",False,"",0,"float"],
["owners","SETTINGS:default_ownership",0,"Owners",False,"Owners list for authz_ownership (space delimited)",[],"list"],
["name","",0,"Name",True,"Name of file resource",0,"str"],
["is_dir",False,0,"Is Directory",True,"Treat file resource as a directory",0,"bool"],
["action","create",0,"Action",True,"Create or remove file resource",0,"str"],
["group","",0,"Group",True,"The group owner of the file",0,"str"],
["mode","",0,"Mode",True,"The mode of the file",0,"str"],
["owner","",0,"Owner",True,"The owner for the file",0,"str"],
["path","",0,"Path",True,"The path for the file",0,"str"],
["template","",0,"Template",True,"The template for the file",0,"str"]
]
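# Hedged reading of the FIELDS columns above (the authoritative description
# lives in item_distro.py, as the comment notes); each row appears to be:
#   [name, default, subobject_default, display_name, editable,
#    tooltip, valid_values, type]
# e.g. ["mode","",0,"Mode",True,"The mode of the file",0,"str"] declares an
# editable string field named "mode" with an empty default.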
class File(resource.Resource):
TYPE_NAME = _("file")
COLLECTION_TYPE = "file"
def make_clone(self):
ds = self.to_datastruct()
cloned = File(self.config)
cloned.from_datastruct(ds)
return cloned
def get_fields(self):
return FIELDS
def set_is_dir(self,is_dir):
"""
If true, treat file resource as a directory. Templates are ignored.
"""
self.is_dir = utils.input_boolean(is_dir)
return True
def check_if_valid(self):
"""
        Ensure name, path, owner, group, and mode are set.
        A template is only required for files (is_dir == False).
"""
if self.name is None or self.name == "":
raise CX("name is required")
if self.path is None or self.path == "":
raise CX("path is required")
if self.owner is None or self.owner == "":
raise CX("owner is required")
if self.group is None or self.group == "":
raise CX("group is required")
if self.mode is None or self.mode == "":
raise CX("mode is required")
if self.is_dir == False and self.template == "":
raise CX("Template is required when not a directory")
|
faulkner/swampdragon
|
refs/heads/master
|
swampdragon/serializers/serializer.py
|
14
|
from swampdragon.serializers.validation import ValidationError
class SerializerMeta(object):
def __init__(self, options):
self.publish_fields = getattr(options, 'publish_fields', None)
if isinstance(self.publish_fields, str):
self.publish_fields = (self.publish_fields, )
self.update_fields = getattr(options, 'update_fields', ())
if isinstance(self.update_fields, str):
self.update_fields = (self.update_fields, )
class Serializer(object):
def __init__(self, data=None, initial=None):
if data and not isinstance(data, dict):
raise Exception('data needs to be a dictionary')
self.opts = SerializerMeta(self.Meta)
self.data = data
self.clean_data = {}
self.initial = initial or {}
self.errors = {}
def save(self):
self.deserialize()
return self.clean_data
def deserialize(self):
for key, val in self.initial.items():
self.clean_data[key] = val
# Deserialize base fields
for key, val in self.data.items():
if key not in self.opts.update_fields:
continue
try:
self.validate_field(key, val, self.data)
self._deserialize_field(key, val)
except ValidationError as err:
self.errors.update(err.get_error_dict())
def validate_field(self, field, value, data):
validation_name = 'validate_{}'.format(field)
if hasattr(self, validation_name):
validator = getattr(self, validation_name)
validator(value)
return None
def _get_custom_field_serializers(self):
"""
        Get all custom serialize_<field> functions defined on this
        serializer, paired with the name of the field each one serializes.
"""
functions = [(
getattr(self, f),
f.replace('serialize_', '')
) for f in dir(self) if f.startswith('serialize_')]
return functions
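# --- Hedged usage sketch (not part of the original module) ---
# A minimal subclass wired to the hooks above. UserSerializer and its
# fields are hypothetical, and the ValidationError constructor argument is
# an assumption:
#
#   class UserSerializer(Serializer):
#       class Meta:
#           publish_fields = ('username',)
#           update_fields = ('username',)
#
#       def validate_username(self, value):   # picked up by validate_field()
#           if not value:
#               raise ValidationError({'username': 'may not be empty'})
#
#   s = UserSerializer(data={'username': 'alice'})
#   clean = s.save()   # runs deserialize(); validation errors land in s.errors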
|
andrew-szymanski/gae_django
|
refs/heads/master
|
django/contrib/admin/media/js/compress.py
|
784
|
#!/usr/bin/env python
import os
import optparse
import subprocess
import sys
here = os.path.dirname(__file__)
def main():
usage = "usage: %prog [file1..fileN]"
description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
parser = optparse.OptionParser(usage, description=description)
parser.add_option("-c", dest="compiler", default="~/bin/compiler.jar",
help="path to Closure Compiler jar file")
parser.add_option("-v", "--verbose",
action="store_true", dest="verbose")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose")
(options, args) = parser.parse_args()
compiler = os.path.expanduser(options.compiler)
if not os.path.exists(compiler):
sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler)
if not args:
if options.verbose:
sys.stdout.write("No filenames given; defaulting to admin scripts\n")
args = [os.path.join(here, f) for f in [
"actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]
for arg in args:
if not arg.endswith(".js"):
arg = arg + ".js"
to_compress = os.path.expanduser(arg)
if os.path.exists(to_compress):
to_compress_min = "%s.min.js" % "".join(arg.rsplit(".js"))
cmd = "java -jar %s --js %s --js_output_file %s" % (compiler, to_compress, to_compress_min)
if options.verbose:
sys.stdout.write("Running: %s\n" % cmd)
subprocess.call(cmd.split())
else:
sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)
if __name__ == '__main__':
main()
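# --- Hedged usage sketch (not part of the original script) ---
# Example invocations; the compiler path is hypothetical (run from this
# directory, or pass full paths):
#
#   python compress.py                # compress the default admin scripts
#   python compress.py -v -c ~/closure/compiler.jar actions inlines
#
# Each input foo.js is written alongside itself as foo.min.js.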
|
popazerty/e2-dmm
|
refs/heads/master
|
lib/python/Plugins/Extensions/PicturePlayer/plugin.py
|
5
|
from enigma import ePicLoad, eTimer, getDesktop
from Screens.Screen import Screen
from Tools.Directories import resolveFilename, pathExists, fileExists, SCOPE_MEDIA
from Plugins.Plugin import PluginDescriptor
from Components.Pixmap import Pixmap, MovingPixmap
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Sources.StaticText import StaticText
from Components.FileList import FileList
from Components.AVSwitch import AVSwitch
from Components.Sources.List import List
from Components.ConfigList import ConfigList, ConfigListScreen
from Components.config import config, ConfigSubsection, ConfigInteger, ConfigSelection, ConfigText, ConfigEnableDisable, KEY_LEFT, KEY_RIGHT, KEY_0, getConfigListEntry
def getScale():
return AVSwitch().getFramebufferScale()
config.pic = ConfigSubsection()
config.pic.framesize = ConfigInteger(default=30, limits=(5, 99))
config.pic.slidetime = ConfigInteger(default=10, limits=(10, 60))
config.pic.resize = ConfigSelection(default="1", choices = [("0", _("simple")), ("1", _("better"))])
config.pic.cache = ConfigEnableDisable(default=True)
config.pic.lastDir = ConfigText(default=resolveFilename(SCOPE_MEDIA))
config.pic.infoline = ConfigEnableDisable(default=True)
config.pic.loop = ConfigEnableDisable(default=True)
config.pic.bgcolor = ConfigSelection(default="#00000000", choices = [("#00000000", _("black")),("#009eb9ff", _("blue")),("#00ff5a51", _("red")), ("#00ffe875", _("yellow")), ("#0038FF48", _("green"))])
config.pic.textcolor = ConfigSelection(default="#0038FF48", choices = [("#00000000", _("black")),("#009eb9ff", _("blue")),("#00ff5a51", _("red")), ("#00ffe875", _("yellow")), ("#0038FF48", _("green"))])
class picshow(Screen):
skin = """
<screen name="picshow" position="center,center" size="560,440" title="PicturePlayer" >
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
<widget source="key_blue" render="Label" position="420,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" transparent="1" />
<widget source="label" render="Label" position="5,55" size="350,140" font="Regular;19" backgroundColor="#25062748" transparent="1" />
<widget name="thn" position="360,40" size="180,160" alphatest="on" />
<widget name="filelist" position="5,205" zPosition="2" size="550,230" scrollbarMode="showOnDemand" />
</screen>"""
def __init__(self, session):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions"],
{
"cancel": self.KeyExit,
"red": self.KeyExit,
"green": self.KeyGreen,
"yellow": self.KeyYellow,
"blue": self.KeyBlue,
"ok": self.KeyOk
}, -1)
self["key_red"] = StaticText(_("Close"))
self["key_green"] = StaticText(_("Thumbnails"))
self["key_yellow"] = StaticText("")
self["key_blue"] = StaticText(_("Setup"))
self["label"] = StaticText("")
self["thn"] = Pixmap()
currDir = config.pic.lastDir.value
if not pathExists(currDir):
currDir = "/"
self.filelist = FileList(currDir, matchingPattern = "(?i)^.*\.(jpeg|jpg|jpe|png|bmp|gif)")
self["filelist"] = self.filelist
self["filelist"].onSelectionChanged.append(self.selectionChanged)
self.ThumbTimer = eTimer()
self.ThumbTimer.callback.append(self.showThumb)
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.showPic)
self.onLayoutFinish.append(self.setConf)
def showPic(self, picInfo=""):
ptr = self.picload.getData()
if ptr != None:
self["thn"].instance.setPixmap(ptr.__deref__())
self["thn"].show()
text = picInfo.split('\n',1)
self["label"].setText(text[1])
self["key_yellow"].setText(_("Exif"))
def showThumb(self):
if not self.filelist.canDescent():
if self.filelist.getCurrentDirectory() and self.filelist.getFilename():
if self.picload.getThumbnail(self.filelist.getCurrentDirectory() + self.filelist.getFilename()) == 1:
self.ThumbTimer.start(500, True)
def selectionChanged(self):
if not self.filelist.canDescent():
self.ThumbTimer.start(500, True)
else:
self["label"].setText("")
self["thn"].hide()
self["key_yellow"].setText("")
def KeyGreen(self):
#if not self.filelist.canDescent():
self.session.openWithCallback(self.callbackView, Pic_Thumb, self.filelist.getFileList(), self.filelist.getSelectionIndex(), self.filelist.getCurrentDirectory())
def KeyYellow(self):
if not self.filelist.canDescent():
self.session.open(Pic_Exif, self.picload.getInfo(self.filelist.getCurrentDirectory() + self.filelist.getFilename()))
def KeyBlue(self):
        self.session.openWithCallback(self.setConf, Pic_Setup)
def KeyOk(self):
if self.filelist.canDescent():
self.filelist.descent()
else:
self.session.openWithCallback(self.callbackView, Pic_Full_View, self.filelist.getFileList(), self.filelist.getSelectionIndex(), self.filelist.getCurrentDirectory())
def setConf(self):
self.setTitle(_("PicturePlayer"))
sc = getScale()
#0=Width 1=Height 2=Aspect 3=use_cache 4=resize_type 5=Background(#AARRGGBB)
self.picload.setPara((self["thn"].instance.size().width(), self["thn"].instance.size().height(), sc[0], sc[1], config.pic.cache.value, int(config.pic.resize.value), "#00000000"))
def callbackView(self, val=0):
if val > 0:
self.filelist.moveToIndex(val)
def KeyExit(self):
del self.picload
if self.filelist.getCurrentDirectory() is None:
config.pic.lastDir.value = "/"
else:
config.pic.lastDir.value = self.filelist.getCurrentDirectory()
config.pic.save()
self.close()
#------------------------------------------------------------------------------------------
class Pic_Setup(Screen, ConfigListScreen):
def __init__(self, session):
Screen.__init__(self, session)
# for the skin: first try MediaPlayerSettings, then Setup, this allows individual skinning
self.skinName = ["PicturePlayerSetup", "Setup" ]
self.setup_title = _("Settings")
self.onChangedEntry = [ ]
self.session = session
self["actions"] = ActionMap(["SetupActions"],
{
"cancel": self.keyCancel,
"save": self.keySave,
"ok": self.keySave,
}, -2)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("OK"))
self.list = []
ConfigListScreen.__init__(self, self.list, session = self.session, on_change = self.changedEntry)
self.createSetup()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(self.setup_title)
def createSetup(self):
self.list = []
self.list.append(getConfigListEntry(_("Slideshow Interval (sec.)"), config.pic.slidetime))
self.list.append(getConfigListEntry(_("Scaling Mode"), config.pic.resize))
self.list.append(getConfigListEntry(_("Cache Thumbnails"), config.pic.cache))
self.list.append(getConfigListEntry(_("show Infoline"), config.pic.infoline))
self.list.append(getConfigListEntry(_("Frame size in full view"), config.pic.framesize))
self.list.append(getConfigListEntry(_("slide picture in loop"), config.pic.loop))
self.list.append(getConfigListEntry(_("backgroundcolor"), config.pic.bgcolor))
self.list.append(getConfigListEntry(_("textcolor"), config.pic.textcolor))
self["config"].list = self.list
self["config"].l.setList(self.list)
def keyLeft(self):
ConfigListScreen.keyLeft(self)
def keyRight(self):
ConfigListScreen.keyRight(self)
# for summary:
def changedEntry(self):
for x in self.onChangedEntry:
x()
def getCurrentEntry(self):
return self["config"].getCurrent()[0]
def getCurrentValue(self):
return str(self["config"].getCurrent()[1].getText())
def createSummary(self):
from Screens.Setup import SetupSummary
return SetupSummary
#---------------------------------------------------------------------------
class Pic_Exif(Screen):
skin = """
<screen name="Pic_Exif" position="center,center" size="560,360" title="Info" >
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
<widget source="menu" render="Listbox" position="5,50" size="550,310" scrollbarMode="showOnDemand" selectionDisabled="1" >
<convert type="TemplatedMultiContent">
{
"template": [ MultiContentEntryText(pos = (5, 5), size = (250, 30), flags = RT_HALIGN_LEFT, text = 0), MultiContentEntryText(pos = (260, 5), size = (290, 30), flags = RT_HALIGN_LEFT, text = 1)],
"fonts": [gFont("Regular", 20)],
"itemHeight": 30
}
</convert>
</widget>
</screen>"""
def __init__(self, session, exiflist):
Screen.__init__(self, session)
self["actions"] = ActionMap(["SetupActions", "ColorActions"],
{
"cancel": self.close
}, -1)
self["key_red"] = StaticText(_("Close"))
exifdesc = [_("filename")+':', "EXIF-Version:", "Make:", "Camera:", "Date/Time:", "Width / Height:", "Flash used:", "Orientation:", "User Comments:", "Metering Mode:", "Exposure Program:", "Light Source:", "CompressedBitsPerPixel:", "ISO Speed Rating:", "X-Resolution:", "Y-Resolution:", "Resolution Unit:", "Brightness:", "Exposure Time:", "Exposure Bias:", "Distance:", "CCD-Width:", "ApertureFNumber:"]
list = []
for x in range(len(exiflist)):
if x>0:
list.append((exifdesc[x], exiflist[x]))
else:
name = exiflist[x].split('/')[-1]
list.append((exifdesc[x], name))
self["menu"] = List(list)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(_("Info"))
#----------------------------------------------------------------------------------------
T_INDEX = 0
T_FRAME_POS = 1
T_PAGE = 2
T_NAME = 3
T_FULL = 4
class Pic_Thumb(Screen):
def __init__(self, session, piclist, lastindex, path):
self.textcolor = config.pic.textcolor.value
self.color = config.pic.bgcolor.value
textsize = 20
self.spaceX = 35
self.picX = 190
self.spaceY = 30
self.picY = 200
size_w = getDesktop(0).size().width()
size_h = getDesktop(0).size().height()
self.thumbsX = size_w / (self.spaceX + self.picX) # thumbnails in X
self.thumbsY = size_h / (self.spaceY + self.picY) # thumbnails in Y
self.thumbsC = self.thumbsX * self.thumbsY # all thumbnails
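		# e.g. on a 1280x720 desktop: 1280 // (35+190) = 5 columns and
		# 720 // (30+200) = 3 rows, i.e. 15 thumbnails per page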
self.positionlist = []
skincontent = ""
posX = -1
for x in range(self.thumbsC):
posY = x / self.thumbsX
posX += 1
if posX >= self.thumbsX:
posX = 0
absX = self.spaceX + (posX*(self.spaceX + self.picX))
absY = self.spaceY + (posY*(self.spaceY + self.picY))
self.positionlist.append((absX, absY))
skincontent += "<widget source=\"label" + str(x) + "\" render=\"Label\" position=\"" + str(absX+5) + "," + str(absY+self.picY-textsize) + "\" size=\"" + str(self.picX - 10) + "," + str(textsize) + "\" font=\"Regular;14\" zPosition=\"2\" transparent=\"1\" noWrap=\"1\" foregroundColor=\"" + self.textcolor + "\" />"
skincontent += "<widget name=\"thumb" + str(x) + "\" position=\"" + str(absX+5)+ "," + str(absY+5) + "\" size=\"" + str(self.picX -10) + "," + str(self.picY - (textsize*2)) + "\" zPosition=\"2\" transparent=\"1\" alphatest=\"on\" />"
# Screen, backgroundlabel and MovingPixmap
self.skin = "<screen position=\"0,0\" size=\"" + str(size_w) + "," + str(size_h) + "\" flags=\"wfNoBorder\" > \
<eLabel position=\"0,0\" zPosition=\"0\" size=\""+ str(size_w) + "," + str(size_h) + "\" backgroundColor=\"" + self.color + "\" /><widget name=\"frame\" position=\"35,30\" size=\"190,200\" pixmap=\"pic_frame.png\" zPosition=\"1\" alphatest=\"on\" />" + skincontent + "</screen>"
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "MovieSelectionActions"],
{
"cancel": self.Exit,
"ok": self.KeyOk,
"left": self.key_left,
"right": self.key_right,
"up": self.key_up,
"down": self.key_down,
"showEventInfo": self.StartExif,
}, -1)
self["frame"] = MovingPixmap()
for x in range(self.thumbsC):
self["label"+str(x)] = StaticText()
self["thumb"+str(x)] = Pixmap()
self.Thumbnaillist = []
self.filelist = []
self.currPage = -1
self.dirlistcount = 0
self.path = path
index = 0
framePos = 0
Page = 0
for x in piclist:
if x[0][1] == False:
self.filelist.append((index, framePos, Page, x[0][0], path + x[0][0]))
index += 1
framePos += 1
if framePos > (self.thumbsC -1):
framePos = 0
Page += 1
else:
self.dirlistcount += 1
self.maxentry = len(self.filelist)-1
self.index = lastindex - self.dirlistcount
if self.index < 0:
self.index = 0
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.showPic)
self.onLayoutFinish.append(self.setPicloadConf)
self.ThumbTimer = eTimer()
self.ThumbTimer.callback.append(self.showPic)
def setPicloadConf(self):
sc = getScale()
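		# setPara arguments appear to be: target width, target height, the
		# aspect-scale pair returned by getScale(), the cache flag, the
		# resize mode and the background colour (same order as in
		# Pic_Full_View.setPicloadConf below)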
self.picload.setPara([self["thumb0"].instance.size().width(), self["thumb0"].instance.size().height(), sc[0], sc[1], config.pic.cache.value, int(config.pic.resize.value), self.color])
self.paintFrame()
def paintFrame(self):
#print "index=" + str(self.index)
if self.maxentry < self.index or self.index < 0:
return
pos = self.positionlist[self.filelist[self.index][T_FRAME_POS]]
self["frame"].moveTo( pos[0], pos[1], 1)
self["frame"].startMoving()
if self.currPage != self.filelist[self.index][T_PAGE]:
self.currPage = self.filelist[self.index][T_PAGE]
self.newPage()
def newPage(self):
self.Thumbnaillist = []
#clear Labels and Thumbnail
for x in range(self.thumbsC):
self["label"+str(x)].setText("")
self["thumb"+str(x)].hide()
#paint Labels and fill Thumbnail-List
for x in self.filelist:
if x[T_PAGE] == self.currPage:
self["label"+str(x[T_FRAME_POS])].setText("(" + str(x[T_INDEX]+1) + ") " + x[T_NAME])
self.Thumbnaillist.append([0, x[T_FRAME_POS], x[T_FULL]])
#paint Thumbnail start
self.showPic()
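	# Thumbnaillist entries are [state, frame position, full path] with
	# state 0 = queued, 1 = decode requested, 2 = painted; showPic()
	# advances one entry per call and re-arms ThumbTimer while the
	# decoder is still busy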
def showPic(self, picInfo=""):
for x in range(len(self.Thumbnaillist)):
if self.Thumbnaillist[x][0] == 0:
				if self.picload.getThumbnail(self.Thumbnaillist[x][2]) == 1: # TODO: decoder busy, try again
self.ThumbTimer.start(500, True)
else:
self.Thumbnaillist[x][0] = 1
break
elif self.Thumbnaillist[x][0] == 1:
self.Thumbnaillist[x][0] = 2
ptr = self.picload.getData()
				if ptr is not None:
self["thumb" + str(self.Thumbnaillist[x][1])].instance.setPixmap(ptr.__deref__())
self["thumb" + str(self.Thumbnaillist[x][1])].show()
def key_left(self):
self.index -= 1
if self.index < 0:
self.index = self.maxentry
self.paintFrame()
def key_right(self):
self.index += 1
if self.index > self.maxentry:
self.index = 0
self.paintFrame()
def key_up(self):
self.index -= self.thumbsX
if self.index < 0:
			self.index = self.maxentry
self.paintFrame()
def key_down(self):
self.index += self.thumbsX
if self.index > self.maxentry:
self.index = 0
self.paintFrame()
def StartExif(self):
if self.maxentry < 0:
return
self.session.open(Pic_Exif, self.picload.getInfo(self.filelist[self.index][T_FULL]))
def KeyOk(self):
if self.maxentry < 0:
return
self.old_index = self.index
self.session.openWithCallback(self.callbackView, Pic_Full_View, self.filelist, self.index, self.path)
def callbackView(self, val=0):
self.index = val
if self.old_index != self.index:
self.paintFrame()
def Exit(self):
del self.picload
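		# translate the local picture index back to the caller's list
		# indexing, which also counts directory entries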
self.close(self.index + self.dirlistcount)
#---------------------------------------------------------------------------
class Pic_Full_View(Screen):
def __init__(self, session, filelist, index, path):
self.textcolor = config.pic.textcolor.value
self.bgcolor = config.pic.bgcolor.value
space = config.pic.framesize.value
size_w = getDesktop(0).size().width()
size_h = getDesktop(0).size().height()
self.skin = "<screen position=\"0,0\" size=\"" + str(size_w) + "," + str(size_h) + "\" flags=\"wfNoBorder\" > \
<eLabel position=\"0,0\" zPosition=\"0\" size=\""+ str(size_w) + "," + str(size_h) + "\" backgroundColor=\""+ self.bgcolor +"\" /><widget name=\"pic\" position=\"" + str(space) + "," + str(space) + "\" size=\"" + str(size_w-(space*2)) + "," + str(size_h-(space*2)) + "\" zPosition=\"1\" alphatest=\"on\" /> \
<widget name=\"point\" position=\""+ str(space+5) + "," + str(space+2) + "\" size=\"20,20\" zPosition=\"2\" pixmap=\"skin_default/icons/record.png\" alphatest=\"on\" /> \
<widget name=\"play_icon\" position=\""+ str(space+25) + "," + str(space+2) + "\" size=\"20,20\" zPosition=\"2\" pixmap=\"skin_default/icons/ico_mp_play.png\" alphatest=\"on\" /> \
<widget source=\"file\" render=\"Label\" position=\""+ str(space+45) + "," + str(space) + "\" size=\""+ str(size_w-(space*2)-50) + ",25\" font=\"Regular;20\" halign=\"left\" foregroundColor=\"" + self.textcolor + "\" zPosition=\"2\" noWrap=\"1\" transparent=\"1\" /></screen>"
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "MovieSelectionActions"],
{
"cancel": self.Exit,
"green": self.PlayPause,
"yellow": self.PlayPause,
"blue": self.nextPic,
"red": self.prevPic,
"left": self.prevPic,
"right": self.nextPic,
"showEventInfo": self.StartExif,
}, -1)
self["point"] = Pixmap()
self["pic"] = Pixmap()
self["play_icon"] = Pixmap()
self["file"] = StaticText(_("please wait, loading picture..."))
self.old_index = 0
self.filelist = []
self.lastindex = index
self.currPic = []
self.shownow = True
self.dirlistcount = 0
for x in filelist:
if len(filelist[0]) == 3: #orig. filelist
if x[0][1] == False:
self.filelist.append(path + x[0][0])
else:
self.dirlistcount += 1
elif len(filelist[0]) == 2: #scanlist
if x[0][1] == False:
self.filelist.append(x[0][0])
else:
self.dirlistcount += 1
else: # thumbnaillist
self.filelist.append(x[T_FULL])
self.maxentry = len(self.filelist)-1
self.index = index - self.dirlistcount
if self.index < 0:
self.index = 0
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.finish_decode)
self.slideTimer = eTimer()
self.slideTimer.callback.append(self.slidePic)
if self.maxentry >= 0:
self.onLayoutFinish.append(self.setPicloadConf)
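	# Decode pipeline: start_decode() feeds ePicLoad, which calls
	# finish_decode() asynchronously; ShowPicture() displays the buffered
	# picture and immediately pre-decodes the following one, so slideshow
	# transitions never wait for decoding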
def setPicloadConf(self):
sc = getScale()
self.picload.setPara([self["pic"].instance.size().width(), self["pic"].instance.size().height(), sc[0], sc[1], 0, int(config.pic.resize.value), self.bgcolor])
self["play_icon"].hide()
if config.pic.infoline.value == False:
self["file"].setText("")
self.start_decode()
def ShowPicture(self):
if self.shownow and len(self.currPic):
self.shownow = False
self["file"].setText(self.currPic[0])
self.lastindex = self.currPic[1]
self["pic"].instance.setPixmap(self.currPic[2].__deref__())
self.currPic = []
self.next()
self.start_decode()
def finish_decode(self, picInfo=""):
self["point"].hide()
ptr = self.picload.getData()
		if ptr is not None:
text = ""
try:
text = picInfo.split('\n',1)
text = "(" + str(self.index+1) + "/" + str(self.maxentry+1) + ") " + text[0].split('/')[-1]
except:
pass
self.currPic = []
self.currPic.append(text)
self.currPic.append(self.index)
self.currPic.append(ptr)
self.ShowPicture()
def start_decode(self):
self.picload.startDecode(self.filelist[self.index])
self["point"].show()
def next(self):
self.index += 1
if self.index > self.maxentry:
self.index = 0
def prev(self):
self.index -= 1
if self.index < 0:
self.index = self.maxentry
def slidePic(self):
print "slide to next Picture index=" + str(self.lastindex)
		if config.pic.loop.value == False and self.lastindex == self.maxentry:
self.PlayPause()
self.shownow = True
self.ShowPicture()
def PlayPause(self):
if self.slideTimer.isActive():
self.slideTimer.stop()
self["play_icon"].hide()
else:
self.slideTimer.start(config.pic.slidetime.value*1000)
self["play_icon"].show()
self.nextPic()
def prevPic(self):
self.currPic = []
self.index = self.lastindex
self.prev()
self.start_decode()
self.shownow = True
def nextPic(self):
self.shownow = True
self.ShowPicture()
def StartExif(self):
if self.maxentry < 0:
return
self.session.open(Pic_Exif, self.picload.getInfo(self.filelist[self.lastindex]))
def Exit(self):
del self.picload
self.close(self.lastindex + self.dirlistcount)
#------------------------------------------------------------------------------------------
def main(session, **kwargs):
session.open(picshow)
def filescan_open(list, session, **kwargs):
# Recreate List as expected by PicView
filelist = [((file.path, False), None) for file in list]
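	# NB: in Python 2 the list comprehension above leaks its loop variable,
	# so 'file' below refers to the last scanned entry, whose path serves
	# as the base directory for the viewer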
session.open(Pic_Full_View, filelist, 0, file.path)
def filescan(**kwargs):
from Components.Scanner import Scanner, ScanPath
	# Override checkFile so that only local files are detected
class LocalScanner(Scanner):
def checkFile(self, file):
return fileExists(file.path)
return \
LocalScanner(mimetypes = ["image/jpeg", "image/png", "image/gif", "image/bmp"],
paths_to_scan =
[
ScanPath(path = "DCIM", with_subdirs = True),
ScanPath(path = "", with_subdirs = False),
],
name = "Pictures",
description = _("View Photos..."),
openfnc = filescan_open,
)
def Plugins(**kwargs):
return \
	[PluginDescriptor(name=_("PicturePlayer"), description=_("file formats (BMP, PNG, JPG, GIF)"), icon="pictureplayer.png", where = PluginDescriptor.WHERE_PLUGINMENU, needsRestart = False, fnc=main),
PluginDescriptor(name=_("PicturePlayer"), where = PluginDescriptor.WHERE_FILESCAN, needsRestart = False, fnc = filescan)]
|
jminuscula/littleprinter
|
refs/heads/master
|
littleprinter/littleprinter/urls.py
|
1
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
import medical_eponyms.urls
urlpatterns = patterns('',
url(r'^eponyms/', include(medical_eponyms.urls)),
url(r'^admin/', include(admin.site.urls)),
)
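# Note: patterns('') is the pre-Django-1.8 URLconf style; on newer Django
# versions the equivalent (untested here) would be a plain list of url()
# entries, e.g. urlpatterns = [url(r'^admin/', include(admin.site.urls))]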
|
MaPePeR/numpy
|
refs/heads/master
|
numpy/distutils/tests/test_fcompiler_intel.py
|
146
|
from __future__ import division, absolute_import, print_function
import numpy.distutils.fcompiler
from numpy.testing import TestCase, run_module_suite, assert_
intel_32bit_version_strings = [
("Intel(R) Fortran Intel(R) 32-bit Compiler Professional for applications"
"running on Intel(R) 32, Version 11.1", '11.1'),
]
intel_64bit_version_strings = [
("Intel(R) Fortran IA-64 Compiler Professional for applications"
"running on IA-64, Version 11.0", '11.0'),
("Intel(R) Fortran Intel(R) 64 Compiler Professional for applications"
"running on Intel(R) 64, Version 11.1", '11.1')
]
class TestIntelFCompilerVersions(TestCase):
def test_32bit_version(self):
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='intel')
for vs, version in intel_32bit_version_strings:
v = fc.version_match(vs)
assert_(v == version)
class TestIntelEM64TFCompilerVersions(TestCase):
def test_64bit_version(self):
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='intelem')
for vs, version in intel_64bit_version_strings:
v = fc.version_match(vs)
assert_(v == version)
if __name__ == '__main__':
run_module_suite()
|
ran5515/DeepDecision
|
refs/heads/master
|
tensorflow/contrib/copy_graph/__init__.py
|
94
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions to copy elements between graphs.
See the @{$python/contrib.copy_graph} guide.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.copy_graph.python.util import copy_elements
# pylint: disable=wildcard-import
from tensorflow.contrib.copy_graph.python.util.copy_elements import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__, doc_string_modules=[copy_elements])
|
mazcheng/snippets
|
refs/heads/master
|
spider/csdn/csdn/spiders/__init__.py
|
2415
|
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
|
Motorhead1991/samsung_att_kernel_source-msm7x30
|
refs/heads/master
|
tools/perf/scripts/python/sctop.py
|
895
|
# system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
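# Example invocations (hypothetical command lines following the usage
# string below):
#   perf trace -s sctop.py           # all comms, default 3s refresh
#   perf trace -s sctop.py 1         # all comms, 1s refresh
#   perf trace -s sctop.py bash 5    # only 'bash', 5s refresh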
import thread
import time
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf trace -s sctop.py [comm] [interval]\n"
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
	thread.start_new_thread(print_syscall_totals, (interval,))
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40d %10d\n" % (id, val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
bjorand/django-allauth
|
refs/heads/master
|
allauth/socialaccount/providers/dropbox/urls.py
|
80
|
from allauth.socialaccount.providers.oauth.urls import default_urlpatterns
from .provider import DropboxProvider
urlpatterns = default_urlpatterns(DropboxProvider)
|
blarghmatey/pip
|
refs/heads/develop
|
pip/_vendor/requests/packages/chardet/euckrfreq.py
|
3120
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials, including literature and computer technology
# 128 --> 0.79
# 256 --> 0.92
# 512 --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ratio = 512 / (2350-512) = 0.279.
#
# Typical Distribution Ratio
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
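# Note: 6.0 is far below the ideal ratio of 73.24 derived above; the
# analyzer presumably uses this deliberately conservative threshold so
# that short or atypical samples are not rejected too eagerly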
EUCKR_TABLE_SIZE = 2352
# Char to FreqOrder table
EUCKRCharToFreqOrder = (
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
# Everything below is of no interest for detection purposes
2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024
6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
8736,8737,8738,8739,8740,8741)
# flake8: noqa
|
Jgarcia-IAS/localizacion
|
refs/heads/master
|
openerp/addons/purchase_analytic_plans/__init__.py
|
441
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#----------------------------------------------------------
# Init Purchase
#----------------------------------------------------------
import purchase_analytic_plans
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Guts/Metadator
|
refs/heads/master
|
test/test_ttk_ListBox.py
|
1
|
from Tkinter import *
import ttk
root = Tk()
# Initialize our country "databases":
# - the list of country codes (a subset anyway)
# - a parallel list of country names, in the same order as the country codes
# - a hash table mapping country code to population
countrycodes = ('ar', 'au', 'be', 'br', 'ca', 'cn', 'dk', 'fi', 'fr', 'gr', 'in', 'it', 'jp', 'mx', 'nl', 'no', 'es', 'se', 'ch')
countrynames = ('Argentina', 'Australia', 'Belgium', 'Brazil', 'Canada', 'China', 'Denmark', \
'Finland', 'France', 'Greece', 'India', 'Italy', 'Japan', 'Mexico', 'Netherlands', 'Norway', 'Spain', \
'Sweden', 'Switzerland')
cnames = StringVar(value=countrynames)
populations = {'ar':41000000, 'au':21179211, 'be':10584534, 'br':185971537, \
'ca':33148682, 'cn':1323128240, 'dk':5457415, 'fi':5302000, 'fr':64102140, 'gr':11147000, \
'in':1131043000, 'it':59206382, 'jp':127718000, 'mx':106535000, 'nl':16402414, \
'no':4738085, 'es':45116894, 'se':9174082, 'ch':7508700}
# Names of the gifts we can send
gifts = { 'card':'Greeting card', 'flowers':'Flowers', 'nastygram':'Nastygram'}
# State variables
gift = StringVar()
sentmsg = StringVar()
statusmsg = StringVar()
# Called when the selection in the listbox changes; figure out
# which country is currently selected, and then lookup its country
# code, and from that, its population. Update the status message
# with the new population. As well, clear the message about the
# gift being sent, so it doesn't stick around after we start doing
# other things.
def showPopulation(*args):
idxs = lbox.curselection()
if len(idxs)==1:
idx = int(idxs[0])
code = countrycodes[idx]
name = countrynames[idx]
popn = populations[code]
statusmsg.set("The population of %s (%s) is %d" % (name, code, popn))
sentmsg.set('')
# Called when the user double clicks an item in the listbox, presses
# the "Send Gift" button, or presses the Return key. In case the selected
# item is scrolled out of view, make sure it is visible.
#
# Figure out which country is selected, which gift is selected with the
# radiobuttons, "send the gift", and provide feedback that it was sent.
def sendGift(*args):
idxs = lbox.curselection()
if len(idxs)==1:
idx = int(idxs[0])
lbox.see(idx)
name = countrynames[idx]
# Gift sending left as an exercise to the reader
sentmsg.set("Sent %s to leader of %s" % (gifts[gift.get()], name))
# Create and grid the outer content frame
c = ttk.Frame(root, padding=(5, 5, 12, 0))
c.grid(column=0, row=0, sticky=(N,W,E,S))
root.grid_columnconfigure(0, weight=1)
root.grid_rowconfigure(0,weight=1)
# Create the different widgets; note the variables that many
# of them are bound to, as well as the button callback.
# Note we're using the StringVar() 'cnames', constructed from 'countrynames'
lbox = Listbox(c, listvariable=cnames, height=5)
lbl = ttk.Label(c, text="Send to country's leader:")
g1 = ttk.Radiobutton(c, text=gifts['card'], variable=gift, value='card')
g2 = ttk.Radiobutton(c, text=gifts['flowers'], variable=gift, value='flowers')
g3 = ttk.Radiobutton(c, text=gifts['nastygram'], variable=gift, value='nastygram')
send = ttk.Button(c, text='Send Gift', command=sendGift, default='active')
sentlbl = ttk.Label(c, textvariable=sentmsg, anchor='center')
status = ttk.Label(c, textvariable=statusmsg, anchor=W)
# Grid all the widgets
lbox.grid(column=0, row=0, rowspan=6, sticky=(N,S,E,W))
lbl.grid(column=1, row=0, padx=10, pady=5)
g1.grid(column=1, row=1, sticky=W, padx=20)
g2.grid(column=1, row=2, sticky=W, padx=20)
g3.grid(column=1, row=3, sticky=W, padx=20)
send.grid(column=2, row=4, sticky=E)
sentlbl.grid(column=1, row=5, columnspan=2, sticky=N, pady=5, padx=5)
status.grid(column=0, row=6, columnspan=2, sticky=(W,E))
c.grid_columnconfigure(0, weight=1)
c.grid_rowconfigure(5, weight=1)
# Set event bindings for when the selection in the listbox changes,
# when the user double clicks the list, and when they hit the Return key
lbox.bind('<<ListboxSelect>>', showPopulation)
lbox.bind('<Double-1>', sendGift)
root.bind('<Return>', sendGift)
# Colorize alternating lines of the listbox
for i in range(0,len(countrynames),2):
lbox.itemconfigure(i, background='#f0f0ff')
# Set the starting state of the interface, including selecting the
# default gift to send, and clearing the messages. Select the first
# country in the list; because the <<ListboxSelect>> event is only
# generated when the user makes a change, we explicitly call showPopulation.
gift.set('card')
sentmsg.set('')
statusmsg.set('')
lbox.selection_set(0)
showPopulation()
root.mainloop()
|
ucrcsedept/galah
|
refs/heads/master
|
galah/web/api/__init__.py
|
12133432
| |
rohitwaghchaure/erpnext_develop
|
refs/heads/develop
|
erpnext/education/doctype/assessment_plan_criteria/__init__.py
|
12133432
| |
fosfataza/protwis
|
refs/heads/master
|
construct/__init__.py
|
12133432
| |
cjgibson/hkvguqktacuranriagqecvebgwbjnlakvhaqytvtbyuvxt
|
refs/heads/master
|
48097711/052/i.py
|
12133432
| |
Vixionar/django
|
refs/heads/master
|
tests/file_storage/__init__.py
|
12133432
| |
mdanielwork/intellij-community
|
refs/heads/master
|
python/lib/Lib/site-packages/django/conf/locale/cy/__init__.py
|
12133432
| |
JustArchi/program-y
|
refs/heads/master
|
src/test/aiml_tests/response_tests/__init__.py
|
12133432
| |
rds0751/colinkers
|
refs/heads/master
|
env/Lib/site-packages/django/conf/locale/sl/__init__.py
|
12133432
| |
devcline/mtasa-blue
|
refs/heads/master
|
vendor/google-breakpad/src/tools/gyp/test/library/gyptest-shared.py
|
430
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple build of a "Hello, world!" program with shared libraries,
including verifying that libraries are rebuilt correctly when functions
move between libraries.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('library.gyp',
'-Dlibrary=shared_library',
'-Dmoveable_function=lib1',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib1_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.run_gyp('library.gyp',
'-Dlibrary=shared_library',
'-Dmoveable_function=lib2',
chdir='relocate/src')
# Update program.c to force a rebuild.
test.sleep()
contents = test.read('relocate/src/program.c')
contents = contents.replace('Hello', 'Hello again')
test.write('relocate/src/program.c', contents)
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello again from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib2_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.run_gyp('library.gyp',
'-Dlibrary=shared_library',
'-Dmoveable_function=lib1',
chdir='relocate/src')
# Update program.c to force a rebuild.
test.sleep()
contents = test.read('relocate/src/program.c')
contents = contents.replace('again', 'again again')
test.write('relocate/src/program.c', contents)
# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
# the "moved" module. This should be done in gyp by adding a dependency
# on the generated .vcproj file itself.
test.touch('relocate/src/lib2.c')
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello again again from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib1_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.pass_test()
|
Jusedawg/SickRage
|
refs/heads/develop
|
lib/sqlalchemy/testing/assertsql.py
|
78
|
# testing/assertsql.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from ..engine.default import DefaultDialect
from .. import util
import re
class AssertRule(object):
def process_execute(self, clauseelement, *multiparams, **params):
pass
def process_cursor_execute(self, statement, parameters, context,
executemany):
pass
def is_consumed(self):
"""Return True if this rule has been consumed, False if not.
Should raise an AssertionError if this rule's condition has
definitely failed.
"""
raise NotImplementedError()
def rule_passed(self):
"""Return True if the last test of this rule passed, False if
failed, None if no test was applied."""
raise NotImplementedError()
def consume_final(self):
"""Return True if this rule has been consumed.
Should raise an AssertionError if this rule's condition has not
been consumed or has failed.
"""
if self._result is None:
assert False, 'Rule has not been consumed'
return self.is_consumed()
class SQLMatchRule(AssertRule):
def __init__(self):
self._result = None
self._errmsg = ""
def rule_passed(self):
return self._result
def is_consumed(self):
if self._result is None:
return False
assert self._result, self._errmsg
return True
class ExactSQL(SQLMatchRule):
def __init__(self, sql, params=None):
SQLMatchRule.__init__(self)
self.sql = sql
self.params = params
def process_cursor_execute(self, statement, parameters, context,
executemany):
if not context:
return
_received_statement = \
_process_engine_statement(context.unicode_statement,
context)
_received_parameters = context.compiled_parameters
# TODO: remove this step once all unit tests are migrated, as
# ExactSQL should really be *exact* SQL
sql = _process_assertion_statement(self.sql, context)
equivalent = _received_statement == sql
if self.params:
if util.callable(self.params):
params = self.params(context)
else:
params = self.params
if not isinstance(params, list):
params = [params]
equivalent = equivalent and params \
== context.compiled_parameters
else:
params = {}
self._result = equivalent
if not self._result:
self._errmsg = \
'Testing for exact statement %r exact params %r, '\
'received %r with params %r' % (sql, params,
_received_statement, _received_parameters)
class RegexSQL(SQLMatchRule):
def __init__(self, regex, params=None):
SQLMatchRule.__init__(self)
self.regex = re.compile(regex)
self.orig_regex = regex
self.params = params
def process_cursor_execute(self, statement, parameters, context,
executemany):
if not context:
return
_received_statement = \
_process_engine_statement(context.unicode_statement,
context)
_received_parameters = context.compiled_parameters
equivalent = bool(self.regex.match(_received_statement))
if self.params:
if util.callable(self.params):
params = self.params(context)
else:
params = self.params
if not isinstance(params, list):
params = [params]
# do a positive compare only
for param, received in zip(params, _received_parameters):
for k, v in param.items():
if k not in received or received[k] != v:
equivalent = False
break
else:
params = {}
self._result = equivalent
if not self._result:
self._errmsg = \
'Testing for regex %r partial params %r, received %r '\
'with params %r' % (self.orig_regex, params,
_received_statement,
_received_parameters)
class CompiledSQL(SQLMatchRule):
def __init__(self, statement, params=None):
SQLMatchRule.__init__(self)
self.statement = statement
self.params = params
def process_cursor_execute(self, statement, parameters, context,
executemany):
if not context:
return
from sqlalchemy.schema import _DDLCompiles
_received_parameters = list(context.compiled_parameters)
# recompile from the context, using the default dialect
if isinstance(context.compiled.statement, _DDLCompiles):
compiled = \
context.compiled.statement.compile(dialect=DefaultDialect())
else:
compiled = \
context.compiled.statement.compile(dialect=DefaultDialect(),
column_keys=context.compiled.column_keys)
_received_statement = re.sub(r'[\n\t]', '', str(compiled))
equivalent = self.statement == _received_statement
if self.params:
if util.callable(self.params):
params = self.params(context)
else:
params = self.params
if not isinstance(params, list):
params = [params]
else:
params = list(params)
all_params = list(params)
all_received = list(_received_parameters)
while params:
param = dict(params.pop(0))
for k, v in context.compiled.params.items():
param.setdefault(k, v)
if param not in _received_parameters:
equivalent = False
break
else:
_received_parameters.remove(param)
if _received_parameters:
equivalent = False
else:
params = {}
all_params = {}
all_received = []
self._result = equivalent
if not self._result:
print('Testing for compiled statement %r partial params '\
'%r, received %r with params %r' % (self.statement,
all_params, _received_statement, all_received))
self._errmsg = \
'Testing for compiled statement %r partial params %r, '\
'received %r with params %r' % (self.statement,
all_params, _received_statement, all_received)
# print self._errmsg
class CountStatements(AssertRule):
def __init__(self, count):
self.count = count
self._statement_count = 0
def process_execute(self, clauseelement, *multiparams, **params):
self._statement_count += 1
def process_cursor_execute(self, statement, parameters, context,
executemany):
pass
def is_consumed(self):
return False
def consume_final(self):
assert self.count == self._statement_count, \
'desired statement count %d does not match %d' \
% (self.count, self._statement_count)
return True
class AllOf(AssertRule):
def __init__(self, *rules):
self.rules = set(rules)
def process_execute(self, clauseelement, *multiparams, **params):
for rule in self.rules:
rule.process_execute(clauseelement, *multiparams, **params)
def process_cursor_execute(self, statement, parameters, context,
executemany):
for rule in self.rules:
rule.process_cursor_execute(statement, parameters, context,
executemany)
def is_consumed(self):
if not self.rules:
return True
for rule in list(self.rules):
if rule.rule_passed(): # a rule passed, move on
self.rules.remove(rule)
return len(self.rules) == 0
assert False, 'No assertion rules were satisfied for statement'
def consume_final(self):
return len(self.rules) == 0
def _process_engine_statement(query, context):
if util.jython:
# oracle+zxjdbc passes a PyStatement when returning into
query = str(query)
if context.engine.name == 'mssql' \
and query.endswith('; select scope_identity()'):
query = query[:-25]
query = re.sub(r'\n', '', query)
return query
def _process_assertion_statement(query, context):
paramstyle = context.dialect.paramstyle
if paramstyle == 'named':
pass
elif paramstyle == 'pyformat':
query = re.sub(r':([\w_]+)', r"%(\1)s", query)
else:
# positional params
repl = None
if paramstyle == 'qmark':
repl = "?"
elif paramstyle == 'format':
repl = r"%s"
elif paramstyle == 'numeric':
repl = None
query = re.sub(r':([\w_]+)', repl, query)
return query
class SQLAssert(object):
rules = None
def add_rules(self, rules):
self.rules = list(rules)
def statement_complete(self):
for rule in self.rules:
if not rule.consume_final():
assert False, \
'All statements are complete, but pending '\
'assertion rules remain'
def clear_rules(self):
del self.rules
def execute(self, conn, clauseelement, multiparams, params, result):
if self.rules is not None:
if not self.rules:
assert False, \
'All rules have been exhausted, but further '\
'statements remain'
rule = self.rules[0]
rule.process_execute(clauseelement, *multiparams, **params)
if rule.is_consumed():
self.rules.pop(0)
def cursor_execute(self, conn, cursor, statement, parameters,
context, executemany):
if self.rules:
rule = self.rules[0]
rule.process_cursor_execute(statement, parameters, context,
executemany)
asserter = SQLAssert()
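
# Editor's sketch (not part of the original module): the intended consumption
# pattern for the rules above. `conn` is a hypothetical connection whose
# statements reach `asserter.execute` / `asserter.cursor_execute` through
# engine event hooks wired up elsewhere by the test framework.
def _example_assert_statement_count(conn):
    asserter.add_rules([CountStatements(2)])
    conn.execute("SELECT 1")  # assumed to be routed to asserter.execute(...)
    conn.execute("SELECT 2")
    asserter.statement_complete()  # raises AssertionError on a count mismatch
    asserter.clear_rules()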
|
ebukoz/thrive
|
refs/heads/develop
|
erpnext/patches/v8_1/set_delivery_date_in_so_item.py
|
11
|
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doctype("Sales Order")
frappe.reload_doctype("Sales Order Item")
if "final_delivery_date" in frappe.db.get_table_columns("Sales Order"):
frappe.db.sql("""
update `tabSales Order`
set delivery_date = final_delivery_date
where (delivery_date is null or delivery_date = '0000-00-00')
and order_type = 'Sales'""")
frappe.db.sql("""
update `tabSales Order` so, `tabSales Order Item` so_item
set so_item.delivery_date = so.delivery_date
where so.name = so_item.parent
and so.order_type = 'Sales'
and (so_item.delivery_date is null or so_item.delivery_date = '0000-00-00')
and (so.delivery_date is not null and so.delivery_date != '0000-00-00')
""")
|
acshi/osf.io
|
refs/heads/develop
|
admin_tests/base/test_forms.py
|
41
|
from nose.tools import * # flake8: noqa
from tests.base import AdminTestCase
from admin.base.forms import GuidForm
class TestGuidForm(AdminTestCase):
def setUp(self):
super(TestGuidForm, self).setUp()
def test_valid_data(self):
guid = '12345'
form = GuidForm({
'guid': guid,
})
assert_true(form.is_valid())
assert_equal(form.cleaned_data.get('guid'), guid)
def test_blank_data(self):
form = GuidForm({})
assert_false(form.is_valid())
assert_equal(form.errors, {
'guid': [u'This field is required.'],
})
|
schmidtc/pysal
|
refs/heads/master
|
pysal/contrib/handler/tests/test_ols_sparse.py
|
1
|
import unittest
import numpy as np
import pysal
#import pysal.spreg as EC
from scipy import sparse
from pysal.contrib.handler import Model
from functools import partial
OLS = partial(Model, mtype='OLS')
BaseOLS = partial(Model, mtype='BaseOLS')
PEGP = pysal.examples.get_path
class TestBaseOLS(unittest.TestCase):
def setUp(self):
db = pysal.open(PEGP('columbus.dbf'),'r')
y = np.array(db.by_col("HOVAL"))
self.y = np.reshape(y, (49,1))
X = []
X.append(db.by_col("INC"))
X.append(db.by_col("CRIME"))
self.X = np.array(X).T
self.w = pysal.weights.rook_from_shapefile(PEGP("columbus.shp"))
def test_ols(self):
self.X = np.hstack((np.ones(self.y.shape),self.X))
self.X = sparse.csr_matrix(self.X)
ols = BaseOLS(self.y,self.X)
np.testing.assert_array_almost_equal(ols.betas, np.array([[
46.42818268], [ 0.62898397], [ -0.48488854]]))
vm = np.array([[ 1.74022453e+02, -6.52060364e+00, -2.15109867e+00],
[ -6.52060364e+00, 2.87200008e-01, 6.80956787e-02],
[ -2.15109867e+00, 6.80956787e-02, 3.33693910e-02]])
np.testing.assert_array_almost_equal(ols.vm, vm,6)
def test_OLS(self):
self.X = sparse.csr_matrix(self.X)
ols = OLS(self.y, self.X, self.w, spat_diag=True, moran=True, \
name_y='home value', name_x=['income','crime'], \
name_ds='columbus', nonspat_diag=True, white_test=True)
np.testing.assert_array_almost_equal(ols.aic, \
408.73548964604873 ,7)
np.testing.assert_array_almost_equal(ols.ar2, \
0.32123239427957662 ,7)
np.testing.assert_array_almost_equal(ols.betas, \
np.array([[ 46.42818268], [ 0.62898397], \
[ -0.48488854]]), 7)
bp = np.array([2, 5.7667905131212587, 0.05594449410070558])
ols_bp = np.array([ols.breusch_pagan['df'], ols.breusch_pagan['bp'], ols.breusch_pagan['pvalue']])
np.testing.assert_array_almost_equal(bp, ols_bp, 7)
np.testing.assert_array_almost_equal(ols.f_stat, \
(12.358198885356581, 5.0636903313953024e-05), 7)
jb = np.array([2, 39.706155069114878, 2.387360356860208e-09])
ols_jb = np.array([ols.jarque_bera['df'], ols.jarque_bera['jb'], ols.jarque_bera['pvalue']])
np.testing.assert_array_almost_equal(ols_jb,jb, 7)
white = np.array([5, 2.90606708, 0.71446484])
ols_white = np.array([ols.white['df'], ols.white['wh'], ols.white['pvalue']])
np.testing.assert_array_almost_equal(ols_white,white, 7)
np.testing.assert_equal(ols.k, 3)
kb = {'df': 2, 'kb': 2.2700383871478675, 'pvalue': 0.32141595215434604}
for key in kb:
self.assertAlmostEqual(ols.koenker_bassett[key], kb[key], 7)
np.testing.assert_array_almost_equal(ols.lm_error, \
(4.1508117035117893, 0.041614570655392716),7)
np.testing.assert_array_almost_equal(ols.lm_lag, \
(0.98279980617162233, 0.32150855529063727), 7)
np.testing.assert_array_almost_equal(ols.lm_sarma, \
(4.3222725729143736, 0.11519415308749938), 7)
np.testing.assert_array_almost_equal(ols.logll, \
-201.3677448230244 ,7)
np.testing.assert_array_almost_equal(ols.mean_y, \
38.436224469387746,7)
np.testing.assert_array_almost_equal(ols.moran_res[0], \
0.20373540938,7)
np.testing.assert_array_almost_equal(ols.moran_res[1], \
2.59180452208,7)
np.testing.assert_array_almost_equal(ols.moran_res[2], \
0.00954740031251,7)
np.testing.assert_array_almost_equal(ols.mulColli, \
12.537554873824675 ,7)
np.testing.assert_equal(ols.n, 49)
np.testing.assert_equal(ols.name_ds, 'columbus')
np.testing.assert_equal(ols.name_gwk, None)
np.testing.assert_equal(ols.name_w, 'unknown')
np.testing.assert_equal(ols.name_x, ['CONSTANT', 'income', 'crime'])
np.testing.assert_equal(ols.name_y, 'home value')
np.testing.assert_array_almost_equal(ols.predy[3], np.array([
33.53969014]),7)
np.testing.assert_array_almost_equal(ols.r2, \
0.34951437785126105 ,7)
np.testing.assert_array_almost_equal(ols.rlm_error, \
(3.3394727667427513, 0.067636278225568919),7)
np.testing.assert_array_almost_equal(ols.rlm_lag, \
(0.17146086940258459, 0.67881673703455414), 7)
np.testing.assert_equal(ols.robust, 'unadjusted')
np.testing.assert_array_almost_equal(ols.schwarz, \
414.41095054038061,7 )
np.testing.assert_array_almost_equal(ols.sig2, \
231.4568494392652,7 )
np.testing.assert_array_almost_equal(ols.sig2ML, \
217.28602192257551,7 )
np.testing.assert_array_almost_equal(ols.sig2n, \
217.28602192257551, 7)
np.testing.assert_array_almost_equal(ols.t_stat[2][0], \
-2.65440864272,7)
np.testing.assert_array_almost_equal(ols.t_stat[2][1], \
0.0108745049098,7)
if __name__ == '__main__':
unittest.main()
|
osvalr/odoo
|
refs/heads/8.0
|
addons/l10n_in_hr_payroll/report/__init__.py
|
424
|
#-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import report_payslip_details
import report_payroll_advice
import report_hr_salary_employee_bymonth
import payment_advice_report
import report_hr_yearly_salary_detail
import payslip_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
prospwro/odoo
|
refs/heads/8.0
|
addons/account_anglo_saxon/purchase.py
|
427
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class purchase_order(osv.osv):
_name = "purchase.order"
_inherit = "purchase.order"
_description = "Purchase Order"
def _choose_account_from_po_line(self, cr, uid, order_line, context=None):
account_id = super(purchase_order, self)._choose_account_from_po_line(cr, uid, order_line, context=context)
if order_line.product_id and not order_line.product_id.type == 'service':
acc_id = order_line.product_id.property_stock_account_input and order_line.product_id.property_stock_account_input.id
if not acc_id:
acc_id = order_line.product_id.categ_id.property_stock_account_input_categ and order_line.product_id.categ_id.property_stock_account_input_categ.id
if acc_id:
fpos = order_line.order_id.fiscal_position or False
account_id = self.pool.get('account.fiscal.position').map_account(cr, uid, fpos, acc_id)
return account_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
petecummings/django
|
refs/heads/master
|
tests/datetimes/models.py
|
245
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Article(models.Model):
title = models.CharField(max_length=100)
pub_date = models.DateTimeField()
categories = models.ManyToManyField("Category", related_name="articles")
def __str__(self):
return self.title
@python_2_unicode_compatible
class Comment(models.Model):
article = models.ForeignKey(Article, models.CASCADE, related_name="comments")
text = models.TextField()
pub_date = models.DateTimeField()
approval_date = models.DateTimeField(null=True)
def __str__(self):
return 'Comment to %s (%s)' % (self.article.title, self.pub_date)
class Category(models.Model):
name = models.CharField(max_length=255)
|
lixiangning888/whole_project
|
refs/heads/master
|
modules/signatures/antiemu_wine.py
|
3
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Claudio "nex" Guarnieri (@botherder)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class WineDetectReg(Signature):
name = "antiemu_wine_reg"
    description = "Detects the presence of the Wine emulator via a registry key"
severity = 3
categories = ["anti-emulation"]
authors = ["nex"]
minimum = "0.5"
def run(self):
return self.check_key(pattern="HKEY_CURRENT_USER\\Software\\Wine")
|
ashishdeshpande/robotframework
|
refs/heads/master
|
atest/testdata/test_libraries/InitLogging.py
|
26
|
import sys
from robot.api import logger
class InitLogging:
called = 0
def __init__(self):
InitLogging.called += 1
print '*WARN* Warning via stdout in init', self.called
print >> sys.stderr, 'Info via stderr in init', self.called
logger.warn('Warning via API in init %d' % self.called)
def keyword(self):
pass
|
sikuli/sikuli
|
refs/heads/develop
|
sikuli-script/src/test/python/old/test-wait.py
|
3
|
# Copyright 2010-2011, Sikuli.org
# Released under the MIT License.
from __future__ import with_statement
from sikuli.Sikuli import *
print wait("test-res/apple.png",0)
assert( waitVanish("test-res/about-this-mac.png", 0) == True)
with Region(10, 0, 200,200) as r:
p = Pattern("test-res/apple.png").targetOffset(30,5)
r.setAutoWaitTimeout(1)
m = r.wait(p)
print m
print "center: " + str(r.getLastMatch().getCenter())
print "target(+30,+5): " + str(r.getLastMatch().getTarget())
print "click 1"
click(m)
wait(1)
click(m)
apple = find(Pattern("test-res/apple.png").similar(0.96))
click(apple)
wait(1)
p = Pattern("test-res/about-this-mac.png").similar(0.8)
assert( find(p) != None)
wait(1)
click(apple)
assert( waitVanish(p, 2) == True)
apple = Pattern("test-res/apple.png").similar(0.96)
print "ctrl-click below the apple icon"
apple.targetOffset(0,30)
click(apple, KEY_CTRL)
|
fhennecker/semiteleporter
|
refs/heads/master
|
research/triangulation_4/filter.py
|
1
|
from sys import argv
import cv2
import numpy as np
RedMask = np.array([[[0, 0, 1]]])
def calibrationMask(img_with, img_without):
res = substract(img_with, img_without)
res = filterNoise(res)
return (res-1)/255
def substract(image_with_lasers, image_without_lasers):
"""
    Subtract the image without lasers from the one with lasers.
    @param image_with_lasers A (height, width, 3) shaped NumPy array
    @param image_without_lasers A (height, width, 3) shaped NumPy array
"""
assert image_with_lasers.shape == image_without_lasers.shape
assert len(image_with_lasers.shape) == 3
assert image_with_lasers.shape[2] == 3
global RedMask
    if RedMask.shape != image_with_lasers.shape:
        # np.array(shape) would build a 1-D array of the dimensions, not an
        # array of that shape, so allocate the mask with np.zeros so the
        # shape check above actually caches it between calls.
        RedMask = np.zeros(image_with_lasers.shape, dtype=np.int16)
        RedMask[:] = [0, 0, 1]
#RedMask = np.zeros(image_with_lasers.shape, dtype=np.int16)
#RedMask[270:800,730:1120] = [0, 0, 1]
res = np.array(image_with_lasers*RedMask - image_without_lasers*RedMask, dtype=np.int16)
return np.array(res.clip(0), dtype=np.uint8)
def filterNoise(img):
"""Apply filters to remove lonesome points"""
img = cv2.GaussianBlur(img,(5,5),0)
ret, img = cv2.threshold(img, 27, 255, cv2.THRESH_TOZERO)
return img
def massCenter(img, limit=None, output=None):
"""
    Find the mass center of the red channel on each line, within each area delimited by limit
"""
height,x,y = 0,0,0
parts = []
res = [[],[]]
    if limit is None:
height = img.shape[0]
else:
height = len(limit)
for line in range(height):
        if limit is None:
x,y = img.shape[1], line
parts = [img[y,:,2]]
else:
x,y = limit[line]
parts = [img[y,:x,2], img[y,x:,2]]
for side in range(len(parts)):
moments = cv2.moments(parts[side])
if(moments['m00'] != 0):
point = [round(moments['m01']/moments['m00']+x*side), y]
res[side].append(point)
                if output is not None:
output[point[1]][point[0]] = np.array([0,255,0], dtype=np.uint8)
return res[0]+res[1]
def linearRegression(points, output=None):
"""
Apply linear regression on all points
"""
x,y = np.array(points).T
param = np.linalg.lstsq(np.array([y, np.ones(y.shape)]).T, x)[0]
line = np.array([param[0]*y+param[1],y]).T
    if output is not None:
for x,y in line:
output[y][x] = np.array([255,0,0], dtype=np.uint8)
return line
def display(img, title):
"""Show results (demo)"""
cv2.imshow(title, cv2.resize(img, (640, 360), interpolation=cv2.INTER_AREA))
cv2.waitKey(0)
def findPoints(_with, without):
img = filterNoise(substract(_with, without))
points = massCenter(img, None, img)
points = linearRegression(points, img)
return massCenter(img, points, img)
if(__name__ == "__main__"):
import matplotlib.pyplot as plt
wi, wo = cv2.imread(argv[1]), cv2.imread(argv[2])
    display(wi, "With lasers")
    display(wo, "Without lasers")
    img = substract(wi, wo)
    display(img, "Subtraction result")
img = filterNoise(img)
display(img, "soft filter to delete noise")
points = massCenter(img, None, img)
display(img,"First mass center step")
#points = linearRegression(points, img)
#display(img, "linear regression result")
#wi *= 0.25
#points = massCenter(img, points, wi)
#display(wi,"Second mass center step to fit lasers")
X, Y = map(np.array, zip(*points))
plt.scatter(X, -Y)
plt.xlim(0, 1920)
plt.ylim(-1080, 0)
plt.show()
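
# Editor's note (not in the original file): typical end-to-end use of the
# pipeline above, assuming two photographs of the same scene taken with and
# without the laser lines:
#   with_img = cv2.imread("with_lasers.png")
#   without_img = cv2.imread("without_lasers.png")
#   laser_points = findPoints(with_img, without_img)  # list of [x, y] pixels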
|
jk1/intellij-community
|
refs/heads/master
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/__init__.py
|
30
|
from ._pkg0_0_0_0 import *
from ._pkg0_0_0_1 import *
|
sublime1809/django
|
refs/heads/master
|
django/utils/itercompat.py
|
712
|
"""
Providing iterator functions that are not in all versions of Python we support.
Where possible, we try to use the system-native version and only fall back to
these implementations if necessary.
"""
def is_iterable(x):
"A implementation independent way of checking for iterables"
try:
iter(x)
except TypeError:
return False
else:
return True
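
# Editor's note (not in the original file): example behaviour of is_iterable --
#   is_iterable([1, 2])  -> True   (lists implement __iter__)
#   is_iterable("abc")   -> True   (strings are iterable too)
#   is_iterable(42)      -> False  (iter() raises TypeError for ints)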
|
Emaasit/PredictionIO
|
refs/heads/develop
|
examples/scala-parallel-similarproduct/add-rateevent/data/send_query.py
|
283
|
"""
Send sample query to prediction engine
"""
import predictionio
engine_client = predictionio.EngineClient(url="http://localhost:8000")
print engine_client.send_query({"items": ["i1", "i3"], "num": 4})
|
Zackory/CopterLab
|
refs/heads/Basics
|
lib/cflib/crazyflie/log.py
|
2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Enables logging of variables from the Crazyflie.
When a Crazyflie is connected it's possible to download a table of contents
(TOC) of all the variables that can be logged. Using this it's possible to
add logging configurations where selected variables are sent to the client
at a specified period.
"""
__author__ = 'Bitcraze AB'
__all__ = ['Log', 'LogTocElement']
import struct
from cflib.crtp.crtpstack import CRTPPacket, CRTPPort
from cflib.utils.callbacks import Caller
from .toc import Toc, TocFetcher
# Channels used for the logging port
CHAN_TOC = 0
CHAN_SETTINGS = 1
CHAN_LOGDATA = 2
# Commands used when accessing the Table of Contents
CMD_TOC_ELEMENT = 0
CMD_TOC_INFO = 1
# Commands used when accessing the Log configurations
CMD_CREATE_BLOCK = 0
CMD_APPEND_BLOCK = 1
CMD_DELETE_BLOCK = 2
CMD_START_LOGGING = 3
CMD_STOP_LOGGING = 4
CMD_RESET_LOGGING = 5
# Possible states when receiving TOC
IDLE = "IDLE"
GET_TOC_INF = "GET_TOC_INFO"
GET_TOC_ELEMENT = "GET_TOC_ELEMENT"
# The max size of a CRTP packet payload
MAX_LOG_DATA_PACKET_SIZE = 30
import logging
logger = logging.getLogger(__name__)
class LogEntry:
blockIdCounter = 1
def __init__(self, crazyflie, logconf):
self.dataReceived = Caller()
self.error = Caller()
self.logconf = logconf
self.blockId = LogEntry.blockIdCounter
LogEntry.blockIdCounter += 1
self.cf = crazyflie
self.period = logconf.getPeriod() / 10
self.blockCreated = False
def start(self):
if (self.cf.link is not None):
if (self.blockCreated is False):
logger.debug("First time block is started, add block")
self.blockCreated = True
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_CREATE_BLOCK, self.blockId)
for v in self.logconf.getVariables():
if (v.isTocVariable() is False): # Memory location
logger.debug("Logging to raw memory %d, 0x%04X",
v.getStoredFetchAs(), v.getAddress())
pk.data += struct.pack('<B', v.getStoredFetchAs())
pk.data += struct.pack('<I', v.getAddress())
else: # Item in TOC
logger.debug("Adding %s with id=%d and type=0x%02X",
v.getName(),
self.cf.log.toc.get_element_id(
v.getName()), v.getStoredFetchAs())
pk.data += struct.pack('<B', v.getStoredFetchAs())
pk.data += struct.pack('<B', self.cf.log.toc.
get_element_id(v.getName()))
logger.debug("Adding log block id {}".format(self.blockId))
self.cf.send_packet(pk)
else:
logger.debug("Block already registered, starting logging"
" for %d", self.blockId)
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_START_LOGGING, self.blockId, self.period)
self.cf.send_packet(pk)
def stop(self):
if (self.cf.link is not None):
if (self.blockId is None):
logger.warning("Stopping block, but no block registered")
else:
logger.debug("Sending stop logging for block %d", self.blockId)
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_STOP_LOGGING, self.blockId)
self.cf.send_packet(pk)
def close(self):
if (self.cf.link is not None):
if (self.blockId is None):
logger.warning("Delete block, but no block registered")
else:
logger.debug("LogEntry: Sending delete logging for block %d"
% self.blockId)
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_DELETE_BLOCK, self.blockId)
self.cf.send_packet(pk)
self.blockId = None # Wait until we get confirmation of delete
def unpack_log_data(self, logData):
retData = {}
dataIndex = 0
for v in self.logconf.getVariables():
size = LogTocElement.get_size_from_id(v.getFetchAs())
name = v.getName()
unpackstring = LogTocElement.get_unpack_string_from_id(
v.getFetchAs())
value = struct.unpack(unpackstring,
logData[dataIndex:dataIndex + size])[0]
dataIndex += size
retData[name] = value
self.dataReceived.call(retData)
class LogTocElement:
"""An element in the Log TOC."""
types = {0x01: ("uint8_t", '<B', 1),
0x02: ("uint16_t", '<H', 2),
0x03: ("uint32_t", '<L', 4),
0x04: ("int8_t", '<b', 1),
0x05: ("int16_t", '<h', 2),
0x06: ("int32_t", '<i', 4),
0x08: ("FP16", '<h', 2),
0x07: ("float", '<f', 4)}
@staticmethod
def get_id_from_cstring(s):
"""Return variable type id given the C-storage name"""
for t in LogTocElement.types.keys():
if (LogTocElement.types[t][0] == s):
return t
raise KeyError("Type [%s] not found in LogTocElement.types!" % s)
@staticmethod
def get_cstring_from_id(ident):
"""Return the C-storage name given the variable type id"""
try:
return LogTocElement.types[ident][0]
except KeyError:
raise KeyError("Type [%d] not found in LogTocElement.types"
"!" % ident)
@staticmethod
def get_size_from_id(ident):
"""Return the size in bytes given the variable type id"""
try:
return LogTocElement.types[ident][2]
except KeyError:
raise KeyError("Type [%d] not found in LogTocElement.types"
"!" % ident)
@staticmethod
def get_unpack_string_from_id(ident):
"""Return the Python unpack string given the variable type id"""
try:
return LogTocElement.types[ident][1]
except KeyError:
raise KeyError("Type [%d] not found in LogTocElement.types"
"!" % ident)
def __init__(self, data=None):
"""TocElement creator. Data is the binary payload of the element."""
if (data):
strs = struct.unpack("s" * len(data[2:]), data[2:])
strs = ("{}" * len(strs)).format(*strs).split("\0")
self.group = strs[0]
self.name = strs[1]
self.ident = ord(data[0])
self.ctype = LogTocElement.get_cstring_from_id(ord(data[1]))
self.pytype = LogTocElement.get_unpack_string_from_id(ord(data[1]))
self.access = ord(data[1]) & 0x10
class Log():
"""Create log configuration"""
def __init__(self, crazyflie=None):
self.logBlocks = []
self.cf = crazyflie
self.cf.add_port_callback(CRTPPort.LOGGING, self._new_packet_cb)
self.tocUpdated = Caller()
self.state = IDLE
self.fakeTOCCRC = 0xBABEBABA
def create_log_packet(self, logconf):
"""Create a new log configuration"""
size = 0
period = logconf.getPeriod() / 10
for v in logconf.getVariables():
size += LogTocElement.get_size_from_id(v.getFetchAs())
# Check that we are able to find the variable in the TOC so
            # we can return an error now instead of when the config is sent
if (v.isTocVariable()):
if (self.toc.get_element_by_complete_name(
v.getName()) is None):
logger.warning("Log: %s not in TOC, this block cannot be"
" used!", v.getName())
return None
if (size <= MAX_LOG_DATA_PACKET_SIZE and period > 0 and period < 0xFF):
block = LogEntry(self.cf, logconf)
self.logBlocks.append(block)
return block
else:
return None
def refresh_toc(self, refreshDoneCallback, toc_cache):
pk = CRTPPacket()
pk.set_header(CRTPPort.LOGGING, CHAN_SETTINGS)
pk.data = (CMD_RESET_LOGGING, )
self.cf.send_packet(pk)
self.toc = Toc()
tocFetcher = TocFetcher(self.cf, LogTocElement, CRTPPort.LOGGING,
self.toc, refreshDoneCallback, toc_cache)
tocFetcher.start()
def _new_packet_cb(self, packet):
chan = packet.channel
cmd = packet.datal[0]
payload = struct.pack("B" * (len(packet.datal) - 1), *packet.datal[1:])
if (chan == CHAN_SETTINGS):
newBlockId = ord(payload[0])
errorStatus = ord(payload[1])
if (cmd == CMD_CREATE_BLOCK):
block = None
for b in self.logBlocks:
if (b.blockId == newBlockId):
block = b
if (block is not None):
if (errorStatus == 0): # No error
logger.debug("Have successfully added blockId=%d",
newBlockId)
pk = CRTPPacket()
pk.set_header(5, CHAN_SETTINGS)
pk.data = (CMD_START_LOGGING, newBlockId, block.period)
self.cf.send_packet(pk)
else:
logger.warning("Error when adding blockId=%d, should"
" tell listenders...", newBlockId)
else:
logger.warning("No LogEntry to assign block to !!!")
if (cmd == CMD_START_LOGGING):
if (errorStatus == 0x00):
logger.info("Have successfully logging for block=%d",
newBlockId)
else:
logger.warning("Error=%d when starting logging for "
"block=%d", errorStatus, newBlockId)
if (chan == CHAN_LOGDATA):
chan = packet.channel
blockId = ord(packet.data[0])
# timestamp = packet.data[0:4] # Not currently used
logdata = packet.data[4:]
block = None
for b in self.logBlocks:
if (b.blockId == blockId):
block = b
if (block is not None):
block.unpack_log_data(logdata)
else:
logger.warning("Error no LogEntry to handle block=%d", blockId)
|
Therp/odoo
|
refs/heads/8.0
|
addons/l10n_ve/__openerp__.py
|
260
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
##############################################################################
# Module programmed and financed by:
# Vauxoo, C.A. (<http://vauxoo.com>).
# Our Community team maintains this module:
# https://launchpad.net/~openerp-venezuela
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Venezuela - Accounting',
'version': '1.0',
'author': ['OpenERP SA', 'Vauxoo'],
'category': 'Localization/Account Charts',
'description':
"""
Chart of Accounts for Venezuela.
================================
Venezuela doesn't mandate any chart of accounts by law, but the default
proposed in OpenERP should comply with accepted best practices in
Venezuela, and this chart follows those practices.
This module has been tested as a base for more than 1000 companies, because
it is based on a mixture of the most common software in the Venezuelan
market, which should help accountants feel comfortable taking their first
steps with OpenERP.
This module doesn't pretend to be the complete localization for Venezuela,
but it will help you start really quickly with OpenERP in this country.
This module gives you:
----------------------
- Basic taxes for Venezuela.
- Basic data to run tests with the community localization.
- A way to start a company from scratch if your accounting needs are basic.
We recommend installing account_anglo_saxon if you want your stock valued
as it is done in Venezuela, with outgoing invoices.
If you install this module and select a custom chart, a basic chart will be
proposed, but you will need to set the account defaults for taxes manually.
""",
'depends': ['account',
'base_vat',
'account_chart'
],
'demo': [],
'data': ['data/account_tax_code.xml',
'data/account_user_types.xml',
'data/account_chart.xml',
'data/account_tax.xml',
'data/l10n_chart_ve_wizard.xml'
],
'auto_install': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
paulnovo/yaml-cpp
|
refs/heads/master
|
test/gmock-1.7.0/gtest/test/gtest_uninitialized_test.py
|
2901
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test warns the user when not initialized properly."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import gtest_test_utils
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_uninitialized_test_')
def Assert(condition):
if not condition:
raise AssertionError
def AssertEq(expected, actual):
if expected != actual:
print 'Expected: %s' % (expected,)
print ' Actual: %s' % (actual,)
raise AssertionError
def TestExitCodeAndOutput(command):
"""Runs the given command and verifies its exit code and output."""
# Verifies that 'command' exits with code 1.
p = gtest_test_utils.Subprocess(command)
Assert(p.exited)
AssertEq(1, p.exit_code)
Assert('InitGoogleTest' in p.output)
class GTestUninitializedTest(gtest_test_utils.TestCase):
def testExitCodeAndOutput(self):
TestExitCodeAndOutput(COMMAND)
if __name__ == '__main__':
gtest_test_utils.Main()
|
atzengin/OCC
|
refs/heads/master
|
occ/gui/PropsDialog.py
|
1
|
"""
Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
This file is part of GNU Radio
OpenCV Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
OpenCV Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import pygtk
pygtk.require('2.0')
import gtk
from Dialogs import TextDisplay
from Constants import MIN_DIALOG_WIDTH, MIN_DIALOG_HEIGHT
def get_title_label(title):
"""
Get a title label for the params window.
The title will be bold, underlined, and left justified.
Args:
title: the text of the title
Returns:
a gtk object
"""
label = gtk.Label()
label.set_markup('\n<b><span underline="low">%s</span>:</b>\n'%title)
hbox = gtk.HBox()
hbox.pack_start(label, False, False, padding=11)
return hbox
class PropsDialog(gtk.Dialog):
"""
A dialog to set block parameters, view errors, and view documentation.
"""
def __init__(self, block):
"""
        Properties dialog constructor.
Args:
block: a block instance
"""
self._hash = 0
LABEL_SPACING = 7
gtk.Dialog.__init__(self,
title='Properties: %s'%block.get_name(),
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT, gtk.STOCK_OK, gtk.RESPONSE_ACCEPT),
)
self._block = block
self.set_size_request(MIN_DIALOG_WIDTH, MIN_DIALOG_HEIGHT)
vbox = gtk.VBox()
#Create the scrolled window to hold all the parameters
scrolled_window = gtk.ScrolledWindow()
scrolled_window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
scrolled_window.add_with_viewport(vbox)
self.vbox.pack_start(scrolled_window, True)
#Params box for block parameters
self._params_box = gtk.VBox()
self._params_box.pack_start(get_title_label('Parameters'), False)
self._input_object_params = list()
#Error Messages for the block
self._error_box = gtk.VBox()
self._error_messages_text_display = TextDisplay()
self._error_box.pack_start(gtk.Label(), False, False, LABEL_SPACING)
self._error_box.pack_start(get_title_label('Error Messages'), False)
self._error_box.pack_start(self._error_messages_text_display, False)
#Docs for the block
        self._docs_box = gtk.VBox()
self._docs_text_display = TextDisplay()
self._docs_box.pack_start(gtk.Label(), False, False, LABEL_SPACING)
self._docs_box.pack_start(get_title_label('Documentation'), False)
self._docs_box.pack_start(self._docs_text_display, False)
#Add the boxes
vbox.pack_start(self._params_box, False)
vbox.pack_start(self._error_box, False)
vbox.pack_start(self._docs_box, False)
#connect events
self.connect('key-press-event', self._handle_key_press)
self.connect('show', self._update_gui)
#show all (performs initial gui update)
self.show_all()
def _params_changed(self):
"""
Have the params in this dialog changed?
Ex: Added, removed, type change, hide change...
        To the props dialog, the hide settings of 'none' and 'part' are identical.
        Therefore, the props dialog only cares whether the hide setting is 'all'.
Make a hash that uniquely represents the params' state.
Returns:
true if changed
"""
old_hash = self._hash
#create a tuple of things from each param that affects the params box
self._hash = hash(tuple([(
hash(param), param.get_type(), param.get_hide() == 'all',
) for param in self._block.get_params()]))
return self._hash != old_hash
def _handle_changed(self, *args):
"""
        A change occurred within a param:
Rewrite/validate the block and update the gui.
"""
#update for the block
self._block.rewrite()
self._block.validate()
self._update_gui()
def _update_gui(self, *args):
"""
Repopulate the parameters box (if changed).
Update all the input parameters.
Update the error messages box.
Hide the box if there are no errors.
Update the documentation block.
Hide the box if there are no docs.
"""
#update the params box
if self._params_changed():
#hide params box before changing
self._params_box.hide_all()
#empty the params box
for io_param in list(self._input_object_params):
self._params_box.remove(io_param)
self._input_object_params.remove(io_param)
io_param.destroy()
#repopulate the params box
for param in self._block.get_params():
if param.get_hide() == 'all': continue
io_param = param.get_input(self._handle_changed)
self._input_object_params.append(io_param)
self._params_box.pack_start(io_param, False)
#show params box with new params
self._params_box.show_all()
#update the errors box
if self._block.is_valid(): self._error_box.hide()
else: self._error_box.show()
messages = '\n\n'.join(self._block.get_error_messages())
self._error_messages_text_display.set_text(messages)
#update the docs box
if self._block.get_doc(): self._docs_box.show()
else: self._docs_box.hide()
self._docs_text_display.set_text(self._block.get_doc())
def _handle_key_press(self, widget, event):
"""
Handle key presses from the keyboard.
Call the ok response when enter is pressed.
Returns:
false to forward the keypress
"""
if event.keyval == gtk.keysyms.Return:
self.response(gtk.RESPONSE_ACCEPT)
return True #handled here
return False #forward the keypress
def run(self):
"""
Run the dialog and get its response.
Returns:
true if the response was accept
"""
response = gtk.Dialog.run(self)
self.destroy()
return response == gtk.RESPONSE_ACCEPT
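# --- Usage sketch (illustrative; not part of the original file) ---
# A minimal, hedged example of driving the dialog. `my_block` is a
# hypothetical block instance providing the get_name()/get_params()/
# rewrite()/validate()/is_valid()/get_doc() methods assumed above.
#
#   dialog = PropsDialog(my_block)
#   if dialog.run():  # blocks until a response; True when accepted
#       pass  # caller persists the block's updated parameter values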
|
PulsePod/old-www-do-not-use
|
refs/heads/master
|
lib/python2.7/site-packages/flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.py
|
628
|
from flask import Module, render_template
frontend = Module(__name__)
@frontend.route('/')
def index():
return render_template('frontend/index.html')
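# --- Usage sketch (illustrative; not part of the original fixture) ---
# A Module (the pre-Blueprint API this test app exercises) is attached
# to an application with register_module:
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.register_module(frontend)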
|
nuagenetworks/tempest
|
refs/heads/master
|
tempest/api/database/versions/__init__.py
|
12133432
| |
faun/django_test
|
refs/heads/master
|
build/lib/django/contrib/gis/management/__init__.py
|
12133432
| |
blighj/django
|
refs/heads/master
|
tests/admin_scripts/__init__.py
|
12133432
| |
OctavianLee/thriftpy
|
refs/heads/develop
|
thriftpy/__init__.py
|
3
|
# -*- coding: utf-8 -*-
import sys
from .hook import install_import_hook, remove_import_hook
from .parser import load, load_module
__version__ = '0.3.1'
__python__ = sys.version_info
__all__ = ["install_import_hook", "remove_import_hook", "load", "load_module"]
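# --- Usage sketch (illustrative; not part of the original module) ---
# The two common entry points exported above. `pingpong.thrift` is a
# hypothetical IDL file used purely for illustration.
#
#   import thriftpy
#   pingpong = thriftpy.load("pingpong.thrift", module_name="pingpong_thrift")
#
# Or install the import hook so a .thrift file on sys.path can be
# imported like a normal module (the module name must end in `_thrift`):
#
#   thriftpy.install_import_hook()
#   import pingpong_thrift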
|
nandhp/youtube-dl
|
refs/heads/master
|
youtube_dl/extractor/clubic.py
|
18
|
# coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..utils import (
clean_html,
qualities,
)
class ClubicIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?clubic\.com/video/(?:[^/]+/)*video.*-(?P<id>[0-9]+)\.html'
_TESTS = [{
'url': 'http://www.clubic.com/video/clubic-week/video-clubic-week-2-0-le-fbi-se-lance-dans-la-photo-d-identite-448474.html',
'md5': '1592b694ba586036efac1776b0b43cd3',
'info_dict': {
'id': '448474',
'ext': 'mp4',
'title': 'Clubic Week 2.0 : le FBI se lance dans la photo d\u0092identité',
'description': 're:Gueule de bois chez Nokia. Le constructeur a indiqué cette.*',
'thumbnail': 're:^http://img\.clubic\.com/.*\.jpg$',
}
}, {
'url': 'http://www.clubic.com/video/video-clubic-week-2-0-apple-iphone-6s-et-plus-mais-surtout-le-pencil-469792.html',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
player_url = 'http://player.m6web.fr/v1/player/clubic/%s.html' % video_id
player_page = self._download_webpage(player_url, video_id)
config_json = self._search_regex(
r'(?m)M6\.Player\.config\s*=\s*(\{.+?\});$', player_page,
'configuration')
config = json.loads(config_json)
video_info = config['videoInfo']
sources = config['sources']
quality_order = qualities(['sd', 'hq'])
formats = [{
'format_id': src['streamQuality'],
'url': src['src'],
'quality': quality_order(src['streamQuality']),
} for src in sources]
self._sort_formats(formats)
return {
'id': video_id,
'title': video_info['title'],
'formats': formats,
'description': clean_html(video_info.get('description')),
'thumbnail': config.get('poster'),
}
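# --- Usage sketch (illustrative; not part of the original extractor) ---
# Extractors are normally exercised through YoutubeDL rather than
# instantiated directly; a minimal, hedged sketch:
#
#   from youtube_dl import YoutubeDL
#   with YoutubeDL() as ydl:
#       info = ydl.extract_info(
#           'http://www.clubic.com/video/clubic-week/video-clubic-week-2-0-'
#           'le-fbi-se-lance-dans-la-photo-d-identite-448474.html',
#           download=False)
#       print(info['title'], [f['format_id'] for f in info['formats']])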
|
cosmiclattes/TPBviz
|
refs/heads/master
|
torrent/lib/python2.7/site-packages/south/migration/base.py
|
57
|
from __future__ import print_function
from collections import deque
import datetime
from imp import reload
import os
import re
import sys
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.conf import settings
from django.utils import importlib
from south import exceptions
from south.migration.utils import depends, dfs, flatten, get_app_label
from south.orm import FakeORM
from south.utils import memoize, ask_for_it_by_name, datetime_utils
from south.migration.utils import app_label_to_app_module
from south.utils.py3 import string_types, with_metaclass
def all_migrations(applications=None):
"""
Returns all Migrations for all `applications` that are migrated.
"""
if applications is None:
applications = models.get_apps()
for model_module in applications:
# The app they've passed is the models module - go up one level
app_path = ".".join(model_module.__name__.split(".")[:-1])
app = ask_for_it_by_name(app_path)
try:
yield Migrations(app)
except exceptions.NoMigrations:
pass
def application_to_app_label(application):
"Works out the app label from either the app label, the app name, or the module"
if isinstance(application, string_types):
app_label = application.split('.')[-1]
else:
app_label = application.__name__.split('.')[-1]
return app_label
class MigrationsMetaclass(type):
"""
Metaclass which ensures there is only one instance of a Migrations for
any given app.
"""
def __init__(self, name, bases, dict):
super(MigrationsMetaclass, self).__init__(name, bases, dict)
self.instances = {}
def __call__(self, application, **kwds):
app_label = application_to_app_label(application)
# If we don't already have an instance, make one
if app_label not in self.instances:
self.instances[app_label] = super(MigrationsMetaclass, self).__call__(app_label_to_app_module(app_label), **kwds)
return self.instances[app_label]
def _clear_cache(self):
"Clears the cache of Migration objects."
self.instances = {}
class Migrations(with_metaclass(MigrationsMetaclass, list)):
"""
Holds a list of Migration objects for a particular app.
"""
if getattr(settings, "SOUTH_USE_PYC", False):
MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py
r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them
r'(\.pyc?)?$') # Match .py or .pyc files, or module dirs
else:
MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py
r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them
r'(\.py)?$') # Match only .py files, or module dirs
def __init__(self, application, force_creation=False, verbose_creation=True):
"Constructor. Takes the module of the app, NOT its models (like get_app returns)"
self._cache = {}
self.set_application(application, force_creation, verbose_creation)
def create_migrations_directory(self, verbose=True):
"Given an application, ensures that the migrations directory is ready."
migrations_dir = self.migrations_dir()
# Make the directory if it's not already there
if not os.path.isdir(migrations_dir):
if verbose:
print("Creating migrations directory at '%s'..." % migrations_dir)
os.mkdir(migrations_dir)
# Same for __init__.py
init_path = os.path.join(migrations_dir, "__init__.py")
if not os.path.isfile(init_path):
# Touch the init py file
if verbose:
print("Creating __init__.py in '%s'..." % migrations_dir)
open(init_path, "w").close()
def migrations_dir(self):
"""
Returns the full path of the migrations directory.
If it doesn't exist yet, returns where it would exist, based on the
app's migrations module (defaults to app.migrations)
"""
module_path = self.migrations_module()
try:
module = importlib.import_module(module_path)
except ImportError:
# There's no migrations module made yet; guess!
try:
parent = importlib.import_module(".".join(module_path.split(".")[:-1]))
except ImportError:
# The parent doesn't even exist, that's an issue.
raise exceptions.InvalidMigrationModule(
application = self.application.__name__,
module = module_path,
)
else:
# Good guess.
return os.path.join(os.path.dirname(parent.__file__), module_path.split(".")[-1])
else:
# Get directory directly
return os.path.dirname(module.__file__)
def migrations_module(self):
"Returns the module name of the migrations module for this"
app_label = application_to_app_label(self.application)
if hasattr(settings, "SOUTH_MIGRATION_MODULES"):
if app_label in settings.SOUTH_MIGRATION_MODULES:
# There's an override.
return settings.SOUTH_MIGRATION_MODULES[app_label]
return self._application.__name__ + '.migrations'
def get_application(self):
return self._application
def set_application(self, application, force_creation=False, verbose_creation=True):
"""
Called when the application for this Migrations is set.
        Imports the migrations module object, and raises an error if it can't.
"""
self._application = application
if not hasattr(application, 'migrations'):
try:
module = importlib.import_module(self.migrations_module())
self._migrations = application.migrations = module
except ImportError:
if force_creation:
self.create_migrations_directory(verbose_creation)
module = importlib.import_module(self.migrations_module())
self._migrations = application.migrations = module
else:
raise exceptions.NoMigrations(application)
self._load_migrations_module(application.migrations)
application = property(get_application, set_application)
def _load_migrations_module(self, module):
self._migrations = module
filenames = []
dirname = self.migrations_dir()
for f in os.listdir(dirname):
if self.MIGRATION_FILENAME.match(os.path.basename(f)):
full_path = os.path.join(dirname, f)
# If it's a .pyc file, only append if the .py isn't already around
if f.endswith(".pyc") and (os.path.isfile(full_path[:-1])):
continue
# If it's a module directory, only append if it contains __init__.py[c].
if os.path.isdir(full_path):
if not (os.path.isfile(os.path.join(full_path, "__init__.py")) or \
(getattr(settings, "SOUTH_USE_PYC", False) and \
os.path.isfile(os.path.join(full_path, "__init__.pyc")))):
continue
filenames.append(f)
filenames.sort()
self.extend(self.migration(f) for f in filenames)
def migration(self, filename):
name = Migration.strip_filename(filename)
if name not in self._cache:
self._cache[name] = Migration(self, name)
return self._cache[name]
def __getitem__(self, value):
if isinstance(value, string_types):
return self.migration(value)
return super(Migrations, self).__getitem__(value)
def _guess_migration(self, prefix):
prefix = Migration.strip_filename(prefix)
matches = [m for m in self if m.name().startswith(prefix)]
if len(matches) == 1:
return matches[0]
elif len(matches) > 1:
raise exceptions.MultiplePrefixMatches(prefix, matches)
else:
raise exceptions.UnknownMigration(prefix, None)
def guess_migration(self, target_name):
if target_name == 'zero' or not self:
return
elif target_name is None:
return self[-1]
else:
return self._guess_migration(prefix=target_name)
def app_label(self):
return self._application.__name__.split('.')[-1]
def full_name(self):
return self._migrations.__name__
@classmethod
def calculate_dependencies(cls, force=False):
"Goes through all the migrations, and works out the dependencies."
if getattr(cls, "_dependencies_done", False) and not force:
return
for migrations in all_migrations():
for migration in migrations:
migration.calculate_dependencies()
cls._dependencies_done = True
@staticmethod
def invalidate_all_modules():
"Goes through all the migrations, and invalidates all cached modules."
for migrations in all_migrations():
for migration in migrations:
migration.invalidate_module()
def next_filename(self, name):
"Returns the fully-formatted filename of what a new migration 'name' would be"
highest_number = 0
for migration in self:
try:
number = int(migration.name().split("_")[0])
highest_number = max(highest_number, number)
except ValueError:
pass
# Work out the new filename
return "%04i_%s.py" % (
highest_number + 1,
name,
)
class Migration(object):
"""
Class which represents a particular migration file on-disk.
"""
def __init__(self, migrations, filename):
"""
        Constructor. Takes the owning Migrations collection and the migration's filename.
"""
self.migrations = migrations
self.filename = filename
self.dependencies = set()
self.dependents = set()
def __str__(self):
return self.app_label() + ':' + self.name()
def __repr__(self):
return '<Migration: %s>' % str(self)
def __eq__(self, other):
return self.app_label() == other.app_label() and self.name() == other.name()
def __hash__(self):
return hash(str(self))
def app_label(self):
return self.migrations.app_label()
@staticmethod
def strip_filename(filename):
return os.path.splitext(os.path.basename(filename))[0]
def name(self):
return self.strip_filename(os.path.basename(self.filename))
def full_name(self):
return self.migrations.full_name() + '.' + self.name()
def migration(self):
"Tries to load the actual migration module"
full_name = self.full_name()
try:
migration = sys.modules[full_name]
except KeyError:
try:
migration = __import__(full_name, {}, {}, ['Migration'])
except ImportError as e:
raise exceptions.UnknownMigration(self, sys.exc_info())
except Exception as e:
raise exceptions.BrokenMigration(self, sys.exc_info())
# Override some imports
migration._ = lambda x: x # Fake i18n
migration.datetime = datetime_utils
return migration
migration = memoize(migration)
def migration_class(self):
"Returns the Migration class from the module"
return self.migration().Migration
def migration_instance(self):
"Instantiates the migration_class"
return self.migration_class()()
migration_instance = memoize(migration_instance)
def previous(self):
"Returns the migration that comes before this one in the sequence."
index = self.migrations.index(self) - 1
if index < 0:
return None
return self.migrations[index]
previous = memoize(previous)
def next(self):
"Returns the migration that comes after this one in the sequence."
index = self.migrations.index(self) + 1
if index >= len(self.migrations):
return None
return self.migrations[index]
next = memoize(next)
def _get_dependency_objects(self, attrname):
"""
Given the name of an attribute (depends_on or needed_by), either yields
a list of migration objects representing it, or errors out.
"""
for app, name in getattr(self.migration_class(), attrname, []):
try:
migrations = Migrations(app)
except ImproperlyConfigured:
raise exceptions.DependsOnUnmigratedApplication(self, app)
migration = migrations.migration(name)
try:
migration.migration()
except exceptions.UnknownMigration:
raise exceptions.DependsOnUnknownMigration(self, migration)
            # is_before() returns None for cross-app migrations, so the explicit
            # `== False` comparison (rather than `not ...`) is deliberate here.
            if migration.is_before(self) == False:
raise exceptions.DependsOnHigherMigration(self, migration)
yield migration
def calculate_dependencies(self):
"""
Loads dependency info for this migration, and stores it in itself
and any other relevant migrations.
"""
# Normal deps first
for migration in self._get_dependency_objects("depends_on"):
self.dependencies.add(migration)
migration.dependents.add(self)
# And reverse deps
for migration in self._get_dependency_objects("needed_by"):
self.dependents.add(migration)
migration.dependencies.add(self)
# And implicit ordering deps
previous = self.previous()
if previous:
self.dependencies.add(previous)
previous.dependents.add(self)
def invalidate_module(self):
"""
Removes the cached version of this migration's module import, so we
have to re-import it. Used when south.db.db changes.
"""
reload(self.migration())
self.migration._invalidate()
def forwards(self):
return self.migration_instance().forwards
def backwards(self):
return self.migration_instance().backwards
def forwards_plan(self):
"""
Returns a list of Migration objects to be applied, in order.
This list includes `self`, which will be applied last.
"""
return depends(self, lambda x: x.dependencies)
def _backwards_plan(self):
return depends(self, lambda x: x.dependents)
def backwards_plan(self):
"""
Returns a list of Migration objects to be unapplied, in order.
This list includes `self`, which will be unapplied last.
"""
return list(self._backwards_plan())
def is_before(self, other):
if self.migrations == other.migrations:
if self.filename < other.filename:
return True
return False
def is_after(self, other):
if self.migrations == other.migrations:
if self.filename > other.filename:
return True
return False
def prev_orm(self):
if getattr(self.migration_class(), 'symmetrical', False):
return self.orm()
previous = self.previous()
if previous is None:
# First migration? The 'previous ORM' is empty.
return FakeORM(None, self.app_label())
return previous.orm()
prev_orm = memoize(prev_orm)
def orm(self):
return FakeORM(self.migration_class(), self.app_label())
orm = memoize(orm)
def no_dry_run(self):
migration_class = self.migration_class()
try:
return migration_class.no_dry_run
except AttributeError:
return False
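# --- Usage sketch (illustrative; not part of the original module) ---
# A hedged example of how these classes fit together; 'books' stands in
# for any migrated app label in INSTALLED_APPS.
#
#   from south.migration.base import Migrations
#   migrations = Migrations('books')             # one shared instance per app
#   Migrations.calculate_dependencies()          # wire up inter-app dependencies
#   target = migrations.guess_migration('0002')  # prefix match, e.g. 0002_add_author
#   plan = target.forwards_plan()                # ordered list ending with `target`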
|
remiremi/django_bigautohack
|
refs/heads/master
|
setup.py
|
1
|
# coding=utf-8
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-bigautohack',
version='0.1',
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='Nice working solution to make up for the absence of BigAutoField in django (#14286).',
long_description=README,
url='https://github.com/Remiremi/django_bigautohack',
author='Rémi Koenig',
author_email='remi.koenig@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
],
)
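# --- Usage note (illustrative; not part of the original file) ---
# Standard setuptools workflow for a package laid out like this:
#
#   pip install .           # install from the repo root
#   python setup.py sdist   # build a source distribution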
|
benchisell/photostream-bc
|
refs/heads/master
|
flask/lib/python2.7/site-packages/whoosh/filedb/__init__.py
|
12133432
| |
brianrodri/oppia
|
refs/heads/develop
|
core/storage/config/__init__.py
|
12133432
|