repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
hartwork/wnpp.debian.net | wnpp_debian_net/tests/factories.py | Python | agpl-3.0 | 984 | 0.001016 | # Copyright (C) 2021 Sebastian Pipping <sebastian@pipping.org>
# Licensed under GNU Affero GPL v3 or later
from django.utils.timezone import now
from factory import LazyFunction, Sequence
from factory.django import DjangoModelFactory
from ..models import DebianLogIndex, DebianLogMods, DebianPopcon, DebianWnpp, IssueKind
class DebianLogIndexFactory(DjangoModelFactory):
    """Factory for DebianLogIndex rows; both timestamps default to now()."""

    class Meta:
        model = DebianLogIndex

    # Lazy so each created row gets a fresh timestamp, not import time.
    event_stamp = LazyFunction(now)
    log_stamp = LazyFunction(now)
class DebianLogModsFactory(DjangoModelFactory):
    """Factory for DebianLogMods rows; all fields use model defaults."""

    class Meta:
        model = DebianLogMods
class DebianPopconFactory(DjangoModelFactory):
    """Factory for DebianPopcon rows; all fields use model defaults."""

    class Meta:
        model = DebianPopcon
class DebianWnppFactory(DjangoModelFactory):
    """Factory for DebianWnpp issues with sequential idents and fresh stamps."""

    class Meta:
        model = DebianWnpp

    # Monotonically increasing primary identifier per created instance.
    ident = Sequence(int)
    cron_stamp = LazyFunction(now)
    mod_stamp = LazyFunction(now)
    open_stamp = LazyFunction(now)
    kind = IssueKind.RFA.value  # anything that matches the default filters
|
sidgan/whats_in_a_question | caffe/monitor/extract_seconds.py | Python | gpl-3.0 | 2,142 | 0.002334 | #!/usr/bin/env python
############################
### Reusing Existing code ##
# Reference:https://github.com/BVLC/caffe/blob/master/tools/extra/extract_seconds.py #
############################
import datetime
import os
import sys
def extract_datetime_from_line(line, year):
    """Parse a Caffe log line into a datetime.

    Expected format:
    I0210 13:39:22.381027 25210 solver.cpp:204] Iteration 100, lr = 0.00992565

    The log line carries no year, so it must be supplied by the caller.
    """
    fields = line.strip().split()
    # Field 0 is e.g. "I0210": a level letter, 2-digit month, then the day.
    month = int(fields[0][1:3])
    day = int(fields[0][3:])
    # Field 1 is "HH:MM:SS.microseconds".
    clock, _, micros = fields[1].rpartition('.')
    hour, minute, second = (int(part) for part in clock.split(':'))
    return datetime.datetime(year, month, day, hour, minute, second, int(micros))
def get_log_created_year(input_file):
    """Get year from log file system timestamp
    """
    created = os.path.getctime(input_file)
    return datetime.datetime.fromtimestamp(created).year
def get_start_time(line_iterable, year):
    """Find start time from group of lines.

    Returns the datetime of the first line containing 'Solving',
    or None when no such line exists.
    """
    for raw_line in line_iterable:
        if 'Solving' in raw_line:
            return extract_datetime_from_line(raw_line.strip(), year)
    return None
def extract_seconds(input_file, output_file):
    """Write elapsed seconds for every 'Iteration' line of a Caffe log.

    Each elapsed value is measured from the 'Solving' line of the log and
    written to output_file as one float per line.

    Raises AssertionError when the log contains no 'Solving' line.
    """
    with open(input_file, 'r') as f:
        lines = f.readlines()
    log_created_year = get_log_created_year(input_file)
    start_datetime = get_start_time(lines, log_created_year)
    assert start_datetime, 'Start time not found'
    # Context manager guarantees the output file is closed even if a
    # malformed log line makes parsing raise mid-loop (the original
    # leaked the handle in that case).
    with open(output_file, 'w') as out:
        for line in lines:
            line = line.strip()
            if line.find('Iteration') != -1:
                dt = extract_datetime_from_line(line, log_created_year)
                elapsed_seconds = (dt - start_datetime).total_seconds()
                out.write('%f\n' % elapsed_seconds)
if __name__ == '__main__':
    # CLI entry point: extract_seconds <input_log> <output_file>
    if len(sys.argv) < 3:
        print('Usage: ./extract_seconds input_file output_file')
        exit(1)
    extract_seconds(sys.argv[1], sys.argv[2])
|
langerhans/dogecoin | qa/rpc-tests/rpcbind_test.py | Python | mit | 5,804 | 0.006203 | #!/usr/bin/env python
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test for -rpcbind, as well as -rpcallowip and -rpcconnect
# Add python-bitcoinrpc to module search path:
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-bitcoinrpc"))
import json
import shutil
import subprocess
import tempfile
import traceback
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
from netutil import *
def run_bind_test(tmpdir, allow_ips, connect_to, addresses, expected):
    '''
    Start a node with requested rpcallowip and rpcbind parameters,
    then try to connect, and check if the set of bound addresses
    matches the expected set.
    '''
    # Normalise expected endpoints to the (hex-encoded-addr, port) form
    # that get_bind_addrs() reports.
    expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
    base_args = ['-disablewallet', '-nolisten']
    if allow_ips:
        base_args += ['-rpcallowip=' + x for x in allow_ips]
    binds = ['-rpcbind='+addr for addr in addresses]
    nodes = start_nodes(1, tmpdir, [base_args + binds], connect_to)
    try:
        pid = dogecoind_processes[0].pid
        assert_equal(set(get_bind_addrs(pid)), set(expected))
    finally:
        # Always stop the node, even when the assertion above fails.
        stop_nodes(nodes)
        wait_dogecoinds()
def run_allowip_test(tmpdir, allow_ips, rpchost, rpcport):
    '''
    Start a node with rpcwallow IP, and request getinfo
    at a non-localhost IP.
    '''
    base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
    nodes = start_nodes(1, tmpdir, [base_args])
    try:
        # connect to node through non-loopback interface
        url = "http://wowsuchtest:3kt4yEUdDJ4YGzsGNADvjYwubwaFhEEYjotPJDU2XMgG@%s:%d" % (rpchost, rpcport,)
        node = AuthServiceProxy(url)
        node.getinfo()
    finally:
        node = None # make sure connection will be garbage collected and closed
        stop_nodes(nodes)
        wait_dogecoinds()
def run_test(tmpdir):
    """Exercise -rpcbind/-rpcallowip combinations against a local node.

    Linux-only: bound-address discovery reads OS-specific network stats.
    """
    assert(sys.platform == 'linux2') # due to OS-specific network stats queries, this test works only on Linux
    # find the first non-loopback interface for testing
    non_loopback_ip = None
    for name,ip in all_interfaces():
        if ip != '127.0.0.1':
            non_loopback_ip = ip
            break
    if non_loopback_ip is None:
        assert(not 'This test requires at least one non-loopback IPv4 interface')
    print("Using interface %s for testing" % non_loopback_ip)
    defaultport = rpc_port(0)
    # check default without rpcallowip (IPv4 and IPv6 localhost)
    run_bind_test(tmpdir, None, '127.0.0.1', [],
                  [('127.0.0.1', defaultport), ('::1', defaultport)])
    # check default with rpcallowip (IPv6 any)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', [],
                  [('::0', defaultport)])
    # check only IPv4 localhost (explicit)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
                  [('127.0.0.1', defaultport)])
    # check only IPv4 localhost (explicit) with alternative port
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
                  [('127.0.0.1', 32171)])
    # check only IPv4 localhost (explicit) with multiple alternative ports on same host
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
                  [('127.0.0.1', 32171), ('127.0.0.1', 32172)])
    # check only IPv6 localhost (explicit)
    run_bind_test(tmpdir, ['[::1]'], '[::1]', ['[::1]'],
                  [('::1', defaultport)])
    # check both IPv4 and IPv6 localhost (explicit)
    run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
                  [('127.0.0.1', defaultport), ('::1', defaultport)])
    # check only non-loopback interface
    run_bind_test(tmpdir, [non_loopback_ip], non_loopback_ip, [non_loopback_ip],
                  [(non_loopback_ip, defaultport)])
    # Check that with invalid rpcallowip, we are denied
    run_allowip_test(tmpdir, [non_loopback_ip], non_loopback_ip, defaultport)
    try:
        run_allowip_test(tmpdir, ['1.1.1.1'], non_loopback_ip, defaultport)
        # Reaching this point means the connection was NOT refused.
        assert(not 'Connection not denied by rpcallowip as expected')
    except ValueError:
        pass
def main():
    """Parse CLI options, set up a test datadir, and run the bind tests."""
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
                      help="Leave dogecoinds and test.* datadir on exit or error")
    parser.add_option("--srcdir", dest="srcdir", default="../../src",
                      help="Source directory containing dogecoind/dogecoin-cli (default: %default%)")
    parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
                      help="Root directory for datadirs")
    (options, args) = parser.parse_args()
    # Make the freshly built daemon/cli binaries take precedence on PATH.
    os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']
    check_json_precision()
    success = False
    nodes = []
    try:
        print("Initializing test directory "+options.tmpdir)
        if not os.path.isdir(options.tmpdir):
            os.makedirs(options.tmpdir)
        initialize_chain(options.tmpdir)
        run_test(options.tmpdir)
        success = True
    except AssertionError as e:
        # NOTE(review): e.message is Python 2 only — this script predates py3.
        print("Assertion failed: "+e.message)
    except Exception as e:
        print("Unexpected exception caught during testing: "+str(e))
        traceback.print_tb(sys.exc_info()[2])
    if not options.nocleanup:
        print("Cleaning up")
        wait_dogecoinds()
        shutil.rmtree(options.tmpdir)
    if success:
        print("Tests successful")
        sys.exit(0)
    else:
        print("Failed")
        sys.exit(1)
|
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_09_30/operations/_disk_encryption_sets_operations.py | Python | mit | 40,114 | 0.004961 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_create_or_update_request_initial(
    subscription_id: str,
    resource_group_name: str,
    disk_encryption_set_name: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the initial PUT request for the disk-encryption-set
    create-or-update long-running operation (api-version 2020-09-30).
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    api_version = "2020-09-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/diskEncryptionSets/{diskEncryptionSetName}')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "diskEncryptionSetName": _SERIALIZER.url("disk_encryption_set_name", disk_encryption_set_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="PUT",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        json=json,
        content=content,
        **kwargs
    )
def build_update_request_initial(
    subscription_id: str,
    resource_group_name: str,
    disk_encryption_set_name: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the initial PATCH request for the disk-encryption-set
    update long-running operation (api-version 2020-09-30).
    """
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]

    api_version = "2020-09-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/diskEncryptionSets/{diskEncryptionSetName}')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "diskEncryptionSetName": _SERIALIZER.url("disk_encryption_set_name", disk_encryption_set_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="PATCH",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        json=json,
        content=content,
        **kwargs
    )
def build_get_request(
    subscription_id: str,
    resource_group_name: str,
    disk_encryption_set_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that retrieves a single disk encryption set
    (api-version 2020-09-30).
    """
    api_version = "2020-09-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/diskEncryptionSets/{diskEncryptionSetName}')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "diskEncryptionSetName": _SERIALIZER.url("disk_encryption_set_name", disk_encryption_set_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_delete_request_initial(
    subscription_id: str,
    resource_group_name: str,
    disk_encryption_set_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the initial DELETE request for the disk-encryption-set
    delete long-running operation (api-version 2020-09-30).
    """
    api_version = "2020-09-30"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/diskEncryptionSets/{diskEncryptionSetName}')
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "diskEncryptionSetName": _SERIALIZER.url("disk_encryption_set_name", disk_encryption_set_name, 'str'),
    }
    url = _format_url_section(url, **path_format_arguments)
    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="DELETE",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_list_by_resource_group_request(
subscription_id: str,
resource_group_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-09-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/diskEncryptionSets')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # |
cloudcomputinghust/IoT | test-component/receive_data_only.py | Python | mit | 1,140 | 0.003509 | #!/usr/bin/env python
import pika
import os

# Broker coordinates come from the environment:
#   CLOUD_BROKER_HOST, CLOUD_BROKER_PORT, CLOUD_BROKER_AUTH ("user:password")
cloud_broker_host = os.environ.get('CLOUD_BROKER_HOST')
cloud_broker_port = os.environ.get('CLOUD_BROKER_PORT')
cloud_broker_auth = os.environ.get('CLOUD_BROKER_AUTH')
# Fail fast with a clear message instead of an AttributeError further down
# when any of the settings is missing.
if not (cloud_broker_host and cloud_broker_port and cloud_broker_auth):
    raise SystemExit('CLOUD_BROKER_HOST, CLOUD_BROKER_PORT and '
                     'CLOUD_BROKER_AUTH environment variables must be set')
# Split on the first ':' only, so passwords may themselves contain colons
# (split(':')[1] silently truncated such passwords).
cloud_username, _, cloud_password = cloud_broker_auth.partition(':')

# Connect to the cloud broker service
credentials = pika.PlainCredentials(cloud_username, cloud_password)
connection = pika.BlockingConnection(pika.ConnectionParameters(
    host=cloud_broker_host, port=int(cloud_broker_port), credentials=credentials))
channel = connection.channel()
channel.exchange_declare(exchange='sensor_collector',
                         exchange_type='fanout')

# Server-named queue bound to the fanout exchange.
result = channel.queue_declare(durable=True, exclusive=False)
queue_name = result.method.queue
channel.queue_bind(exchange='sensor_collector',
                   queue=queue_name)

print(' [*] Waiting for data. To exit press CTRL+C')


def callback(ch, method, properties, body):
    """Print every message delivered from the exchange."""
    print(" [x] %r" % body)


channel.basic_consume(callback,
                      queue=queue_name,
                      no_ack=True)
channel.start_consuming()
ak110/pytoolkit | pytoolkit/applications/__init__.py | Python | mit | 117 | 0 | """Kerasの各種モデル。"""
# pylint: sk | ip-file
# flake8: noq | a
from . import darknet53, efficientnet, xception
|
usc-isi/extra-specs | nova/tests/test_xenapi.py | Python | apache-2.0 | 83,206 | 0.001058 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test suite for XenAPI."""
import ast
import contextlib
import datetime
import functools
import os
import re
import mox
from nova.compute import aggregate_states
from nova.compute import instance_types
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import log as logging
from nova.openstack.common import importutils
from nova import test
from nova.tests.db import fakes as db_fakes
from nova.tests import fake_network
from nova.tests import fake_utils
from nova.tests.glance import stubs as glance_stubs
from nova.tests.xenapi import stubs
from nova.virt.xenapi import connection as xenapi_conn
from nova.virt.xenapi import fake as xenapi_fake
from nova.virt.xenapi import vm_utils
from nova.virt.xenapi import vmops
from nova.virt.xenapi import volume_utils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
def stub_vm_utils_with_vdi_attached_here(function, should_return=True):
    """
    vm_utils.with_vdi_attached_here needs to be stubbed out because it
    calls down to the filesystem to attach a vdi. This provides a
    decorator to handle that.
    """
    @functools.wraps(function)
    def decorated_function(self, *args, **kwargs):
        @contextlib.contextmanager
        def fake_vdi_attached_here(*args, **kwargs):
            # Yield a dummy device name instead of attaching a real VDI.
            fake_dev = 'fakedev'
            yield fake_dev

        def fake_stream_disk(*args, **kwargs):
            pass

        def fake_is_vdi_pv(*args, **kwargs):
            # should_return lets callers choose the PV-detection outcome.
            return should_return

        # Save the real implementations so they can be restored afterwards.
        orig_vdi_attached_here = vm_utils.vdi_attached_here
        orig_stream_disk = vm_utils._stream_disk
        orig_is_vdi_pv = vm_utils._is_vdi_pv
        try:
            vm_utils.vdi_attached_here = fake_vdi_attached_here
            vm_utils._stream_disk = fake_stream_disk
            vm_utils._is_vdi_pv = fake_is_vdi_pv
            return function(self, *args, **kwargs)
        finally:
            # Restore even if the wrapped test raises, in reverse order.
            vm_utils._is_vdi_pv = orig_is_vdi_pv
            vm_utils._stream_disk = orig_stream_disk
            vm_utils.vdi_attached_here = orig_vdi_attached_here
    return decorated_function
class XenAPIVolumeTestCase(test.TestCase):
    """Unit tests for Volume operations."""

    def setUp(self):
        # Fake out config flags, the DB layer and XenAPI itself so no real
        # hypervisor or database is required.
        super(XenAPIVolumeTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)
        self.flags(target_host='127.0.0.1',
                   xenapi_connection_url='test_url',
                   xenapi_connection_password='test_pass',
                   firewall_driver='nova.virt.xenapi.firewall.'
                                   'Dom0IptablesFirewallDriver')
        db_fakes.stub_out_db_instance_api(self.stubs)
        xenapi_fake.reset()
        # Template values for instances created by individual tests.
        self.instance_values = {'id': 1,
                  'project_id': self.user_id,
                  'user_id': 'fake',
                  'image_ref': 1,
                  'kernel_id': 2,
                  'ramdisk_id': 3,
                  'root_gb': 20,
                  'instance_type_id': '3',  # m1.large
                  'os_type': 'linux',
                  'architecture': 'x86-64'}

    def _create_volume(self, size='0'):
        """Create a volume object."""
        vol = {}
        vol['size'] = size
        vol['user_id'] = 'fake'
        vol['project_id'] = 'fake'
        vol['host'] = 'localhost'
        vol['availability_zone'] = FLAGS.storage_availability_zone
        vol['status'] = "creating"
        vol['attach_status'] = "detached"
        return db.volume_create(self.context, vol)

    @staticmethod
    def _make_info():
        # Minimal iSCSI connection-info dict as passed to attach_volume().
        # NOTE(review): 'auth_method' is assigned three times below, so only
        # the last value survives; the later keys were presumably meant to be
        # 'auth_username'/'auth_password' — confirm before relying on them.
        return {
            'driver_volume_type': 'iscsi',
            'data': {
                'volume_id': 1,
                'target_iqn': 'iqn.2010-10.org.openstack:volume-00000001',
                'target_portal': '127.0.0.1:3260,fake',
                'target_lun': None,
                'auth_method': 'CHAP',
                'auth_method': 'fake',
                'auth_method': 'fake',
            }
        }

    def test_mountpoint_to_number(self):
        # Map device names to device numbers; -1 marks unsupported names.
        cases = {
            'sda': 0,
            'sdp': 15,
            'hda': 0,
            'hdp': 15,
            'vda': 0,
            'xvda': 0,
            '0': 0,
            '10': 10,
            'vdq': -1,
            'sdq': -1,
            'hdq': -1,
            'xvdq': -1,
        }
        for (input, expected) in cases.iteritems():
            func = volume_utils.VolumeHelper.mountpoint_to_number
            actual = func(input)
            self.assertEqual(actual, expected,
                    '%s yielded %s, not %s' % (input, actual, expected))

    def test_parse_volume_info_raise_exception(self):
        """This shows how to test helper classes' methods."""
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVolumeTests)
        session = xenapi_conn.XenAPISession('test_url', 'root', 'test_pass')
        helper = volume_utils.VolumeHelper
        helper.XenAPI = session.get_imported_xenapi()
        vol = self._create_volume()
        # oops, wrong mount point!
        self.assertRaises(volume_utils.StorageError,
                          helper.parse_volume_info,
                          self._make_info(),
                          'dev/sd'
                          )
        db.volume_destroy(context.get_admin_context(), vol['id'])

    def test_attach_volume(self):
        """This shows how to test Ops classes' methods."""
        stubs.stubout_session(self.stubs, stubs.FakeSessionForVolumeTests)
        conn = xenapi_conn.get_connection(False)
        volume = self._create_volume()
        instance = db.instance_create(self.context, self.instance_values)
        vm = xenapi_fake.create_vm(instance.name, 'Running')
        result = conn.attach_volume(self._make_info(),
                                    instance.name, '/dev/sdc')
        # check that the VM has a VBD attached to it
        # Get XenAPI record for VBD
        vbds = xenapi_fake.get_all('VBD')
        vbd = xenapi_fake.get_record('VBD', vbds[0])
        vm_ref = vbd['VM']
        self.assertEqual(vm_ref, vm)

    def test_attach_volume_raise_exception(self):
        """This shows how to test when exceptions are raised."""
        stubs.stubout_session(self.stubs,
                              stubs.FakeSessionForVolumeFailedTests)
        conn = xenapi_conn.get_connection(False)
        volume = self._create_volume()
        instance = db.instance_create(self.context, self.instance_values)
        xenapi_fake.create_vm(instance.name, 'Running')
        self.assertRaises(exception.VolumeDriverNotFound,
                          conn.attach_volume,
                          {'driver_volume_type': 'nonexist'},
                          instance.name,
                          '/dev/sdc')
class XenAPIVMTestCase(test.TestCase):
"""Unit tests for VM operations."""
def setUp(self):
super(XenAPIVMTestCase, self).setUp()
self.network = importutils.import_object(FLAGS.network_manager)
self.flags(xenapi_connection_url='test_url',
xenapi_connection_password='test_pass',
instance_name_template='%d',
firewall_driver='nova.virt.xenapi.firewall.'
'Dom0IptablesFirewallDriver')
xenapi_fake.reset()
|
huangshiyu13/funnyLittleProgram | WhatKindofGirlYouLIke/labelImg/libs/pascal_voc_io.py | Python | apache-2.0 | 4,750 | 0.007789 | import sys
from xml.etree import ElementTree
from xml.etree.ElementTree import Element, SubElement
from xml.dom import minidom
from lxml import etree
class PascalVocWriter:
    """Serialises labelled bounding boxes to a PASCAL VOC annotation XML file."""

    def __init__(self, foldername, filename, imgSize, databaseSrc='Unknown', localImgPath=None):
        # imgSize is (height, width[, depth]) — see genXML().
        self.foldername = foldername
        self.filename = filename
        self.databaseSrc = databaseSrc
        self.imgSize = imgSize
        self.boxlist = []
        self.localImgPath = localImgPath

    def prettify(self, elem):
        """
        Return a pretty-printed XML string for the Element.
        """
        rough_string = ElementTree.tostring(elem, 'utf8')
        reparsed = minidom.parseString(rough_string)
        return reparsed.toprettyxml(indent="\t")

    def genXML(self):
        """
        Return the <annotation> XML root, or None if mandatory data
        (filename, foldername, image size or at least one box) is missing.
        """
        # Check conditions
        if self.filename is None or \
                self.foldername is None or \
                self.imgSize is None or \
                len(self.boxlist) <= 0:
            return None
        top = Element('annotation')
        folder = SubElement(top, 'folder')
        folder.text = self.foldername
        filename = SubElement(top, 'filename')
        filename.text = self.filename
        localImgPath = SubElement(top, 'path')
        localImgPath.text = self.localImgPath
        source = SubElement(top, 'source')
        database = SubElement(source, 'database')
        database.text = self.databaseSrc
        size_part = SubElement(top, 'size')
        width = SubElement(size_part, 'width')
        height = SubElement(size_part, 'height')
        depth = SubElement(size_part, 'depth')
        # imgSize is stored (height, width[, depth]).
        width.text = str(self.imgSize[1])
        height.text = str(self.imgSize[0])
        if len(self.imgSize) == 3:
            depth.text = str(self.imgSize[2])
        else:
            depth.text = '1'
        segmented = SubElement(top, 'segmented')
        segmented.text = '0'
        return top

    def addBndBox(self, xmin, ymin, xmax, ymax, name):
        """Queue one labelled box for the next genXML()/save() call."""
        bndbox = {'xmin': xmin, 'ymin': ymin, 'xmax': xmax, 'ymax': ymax}
        bndbox['name'] = name
        self.boxlist.append(bndbox)

    def appendObjects(self, top):
        """Append one <object> element per queued box to *top*."""
        for each_object in self.boxlist:
            object_item = SubElement(top, 'object')
            name = SubElement(object_item, 'name')
            name.text = str(each_object['name'])
            pose = SubElement(object_item, 'pose')
            pose.text = "Unspecified"
            truncated = SubElement(object_item, 'truncated')
            truncated.text = "0"
            difficult = SubElement(object_item, 'difficult')
            difficult.text = "0"
            bndbox = SubElement(object_item, 'bndbox')
            xmin = SubElement(bndbox, 'xmin')
            xmin.text = str(each_object['xmin'])
            ymin = SubElement(bndbox, 'ymin')
            ymin.text = str(each_object['ymin'])
            xmax = SubElement(bndbox, 'xmax')
            xmax.text = str(each_object['xmax'])
            ymax = SubElement(bndbox, 'ymax')
            ymax.text = str(each_object['ymax'])

    def save(self, targetFile=None):
        """Write the annotation XML; default path is '<filename>.xml'.

        Uses a context manager so the file handle is closed even when
        serialisation raises (the original leaked the handle on error).
        """
        root = self.genXML()
        self.appendObjects(root)
        out_path = self.filename + '.xml' if targetFile is None else targetFile
        with open(out_path, 'w') as out_file:
            out_file.write(self.prettify(root))
class PascalVocReader:
    """Reads labelled bounding boxes back from a PASCAL VOC annotation file."""

    def __init__(self, filepath):
        ## shapes type:
        ## [labbel, [(x1,y1), (x2,y2), (x3,y3), (x4,y4)], color, color]
        self.shapes = []
        self.filepath = filepath
        self.parseXML()

    def getShapes(self):
        """Return the list of parsed shape tuples."""
        return self.shapes

    def addShape(self, label, rect):
        """Append one box, expanding (xmin, ymin, xmax, ymax) to 4 corners."""
        xmin, ymin, xmax, ymax = rect[0], rect[1], rect[2], rect[3]
        corners = [(xmin, ymin), (xmin, ymax), (xmax, ymax), (xmax, ymin)]
        self.shapes.append((label, corners, None, None))

    def parseXML(self):
        """Populate self.shapes from every <object> in the annotation file."""
        assert self.filepath.endswith('.xml'), "Unsupport file format"
        root = ElementTree.parse(self.filepath).getroot()
        filename = root.find('filename').text
        for obj_node in root.findall('object'):
            label = obj_node.find('name').text
            box = obj_node.find("bndbox")
            # Children are read in document order: xmin, ymin, xmax, ymax.
            coords = [int(child.text) for child in box]
            self.addShape(label, coords)
        return True
# tempParseReader = PascalVocReader('test.xml')
# print tempParseReader.getShapes()
"""
# Test
tmp = PascalVocWriter('temp','test', (10,20,3))
tmp.addBndBox(10,10,20,30,'chair')
tmp.addBndBox(1,1,600,600,'car')
tmp.save()
"""
|
betzw/mbed-os | tools/targets/REALTEK_RTL8195AM.py | Python | apache-2.0 | 5,555 | 0.0036 | """
Realtek Semiconductor Corp.
RTL8195A elf2bin script
"""
import sys, array, struct, os, re, subprocess
import hashlib
import shutil
import time
import binascii
import elftools
from tools.paths import TOOLS_BOOTLOADERS
from tools.toolchains import TOOLCHAIN_PATHS
# Constant Variables
TAG = 0x81950001  # image tag magic written into the RAM2 header
VER = 0x81950001  # image version written into the RAM2 header
# NOTE(review): binascii.hexlify on a str literal is Python 2 only; on
# Python 3 this call requires bytes — confirm the intended interpreter.
CAMPAIGN = binascii.hexlify('FFFFFFFFFFFFFFFF')
# Template for the 72-byte RAM2 image header; the placeholder fields are
# filled in by create_daplink() before being written at offset 0xb000.
RAM2_HEADER = {
    'tag': 0,
    'ver': 0,
    'timestamp': 0,
    'size': 72,
    'hash': 'FF',
    'campaign': 'FF',
    'crc32': 0xFFFFFFFF,
}
def format_number(number, width):
    """Encode *number* as raw little-endian bytes spanning *width* hex digits.

    The value is zero-padded to *width* hex digits, split into byte pairs,
    byte-reversed (little-endian) and converted to raw bytes. Exits the
    process if the value does not fit in *width* digits.
    """
    # convert to string
    line = format(number, '0%dx' % (width))
    if len(line) > width:
        print "[ERROR] 0x%s cannot fit in width %d" % (line, width)
        sys.exit(-1)
    # cut string to list & reverse
    line = [line[i:i+2] for i in range(0, len(line), 2)]
    line.reverse()
    return binascii.a2b_hex("".join(line))
def format_string(string):
    """Decode a hex-digit string into its raw byte representation."""
    return binascii.unhexlify(string)
def write_number(value, width, output):
    """Write *value* to *output* as little-endian bytes over *width* hex digits."""
    output.write(format_number(value, width))
def write_string(value, width, output):
    """Write the hex string *value* as raw bytes.

    NOTE(review): *width* is accepted but never used here — confirm
    whether callers expect padding/truncation to *width*.
    """
    output.write(format_string(value))
def append_image_file(image, output):
    """Append the entire contents of file *image* to the open *output* stream.

    Uses a context manager so the input handle is closed even if the
    write raises (the original leaked it in that case).
    """
    with open(image, "rb") as source:
        output.write(source.read())
def write_padding_bytes(output_name, size):
    """Grow the file *output_name* to *size* bytes by appending 0xFF filler.

    Exits the process if the file is already larger than *size*.
    """
    current_size = os.stat(output_name).st_size
    padcount = size - current_size
    if padcount < 0:
        print "[ERROR] image is larger than expected size"
        sys.exit(-1)
    output = open(output_name, "ab")
    output.write('\377' * padcount)  # '\377' == 0xFF
    output.close()
def crc32_checksum(string):
    """Return the CRC32 of *string* as an unsigned 32-bit integer."""
    return binascii.crc32(string) % (1 << 32)
def sha256_checksum(filename, block_size=65536):
    """Return the hex SHA-256 digest of *filename*, read in block_size chunks."""
    digest = hashlib.sha256()
    with open(filename, 'rb') as stream:
        while True:
            chunk = stream.read(block_size)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def epoch_timestamp():
    """Return the current Unix time, truncated to whole seconds."""
    return int(time.time())
def find_symbol(toolchain, mapfile, symbol):
    """Look up *symbol*'s address in a linker map file.

    The line layout differs per toolchain, hence one regex each for
    GCC_ARM, the ARM family and IAR. Returns the address with bit 0 set
    (presumably the Thumb-mode entry-point bit — confirm against the boot
    code), or 0 when the symbol is not found.
    """
    ret = None
    HEX = '0x0{,8}(?P<addr>[0-9A-Fa-f]{8})'
    if toolchain == "GCC_ARM":
        SYM = re.compile(r'^\s+' + HEX + r'\s+' + symbol + '\r?$')
    elif toolchain in ["ARM_STD", "ARM", "ARM_MICRO"]:
        SYM = re.compile(r'^\s+' + HEX + r'\s+0x[0-9A-Fa-f]{8}\s+Code.*\s+i\.' + symbol + r'\s+.*$')
    elif toolchain == "IAR":
        SYM = re.compile(r'^' + symbol + r'\s+' + HEX + '\s+.*$')
    with open(mapfile, 'r') as infile:
        for line in infile:
            match = re.match(SYM, line)
            if match:
                ret = match.group("addr")
    if not ret:
        print "[ERROR] cannot find the address of symbol " + symbol
        return 0
    return int(ret,16) | 1
def _parse_load_segment_inner(image_elf):
    """Yield (file_offset, vaddr, size) for each non-empty PT_LOAD segment."""
    with open(image_elf, "rb") as fd:
        elffile = elftools.elf.elffile.ELFFile(fd)
        for segment in elffile.iter_segments():
            offset = segment['p_offset']
            addr = segment['p_vaddr']
            size = segment['p_filesz']
            # Skip empty or zero-address segments; only loadable ones matter.
            if (addr != 0 and size != 0 and segment['p_type'] == 'PT_LOAD'):
                yield offset, addr, size
def parse_load_segment(toolchain, image_elf):
    """Return the list of (offset, vaddr, size) load segments of *image_elf*.

    NOTE: *toolchain* is accepted for interface symmetry but unused here.
    Materialises the generator so the ELF file can be closed before use.
    """
    return list(_parse_load_segment_inner(image_elf))
def create_payload(image_elf, ram2_bin, entry, segment):
    """Pack the ELF load segments into the RAM2 payload binary.

    Layout: a 16-byte header (entry address, segment count, two 0xFFFFFFFF
    fillers), then per segment an (addr, size) pair followed by the raw
    segment bytes, padded to a 4-byte boundary.
    """
    file_elf = open(image_elf, "rb")
    file_bin = open(ram2_bin, "wb")
    write_number(int(entry), 8, file_bin)
    write_number(int(len(segment)), 8, file_bin)
    write_number(0xFFFFFFFF, 8, file_bin)
    write_number(0xFFFFFFFF, 8, file_bin)
    for (offset, addr, size) in segment:
        file_elf.seek(offset)
        # write image header - size & addr
        write_number(addr, 8, file_bin)
        write_number(size, 8, file_bin)
        # write load segment
        file_bin.write(file_elf.read(size))
        # Pad each segment to a 4-byte boundary.
        delta = size % 4
        if delta != 0:
            padding = 4 - delta
            write_number(0x0, padding * 2, file_bin)
    file_bin.close()
    file_elf.close()
def create_daplink(image_bin, ram1_bin, ram2_bin):
    """Concatenate RAM1 + RAM2 payloads and patch the RAM2 header at 0xb000.

    The header fields (tag, version, timestamp, size, SHA-256 of the
    payload, campaign id) are written in order, then a CRC32 over those
    header bytes.
    """
    # remove target binary file/path
    # NOTE(review): if image_bin does not exist at all, rmtree raises —
    # presumably an existence check was intended; confirm.
    if os.path.isfile(image_bin):
        os.remove(image_bin)
    else:
        shutil.rmtree(image_bin)
    RAM2_HEADER['tag'] = format_number(TAG, 8)
    RAM2_HEADER['ver'] = format_number(VER, 8)
    RAM2_HEADER['timestamp'] = format_number(epoch_timestamp(), 16)
    # Total size includes the 72-byte header itself.
    RAM2_HEADER['size'] = format_number(os.stat(ram2_bin).st_size + 72, 8)
    RAM2_HEADER['hash'] = format_string(sha256_checksum(ram2_bin))
    RAM2_HEADER['campaign'] = format_string(CAMPAIGN)
    output = open(image_bin, "wb")
    append_image_file(ram1_bin, output)
    append_image_file(ram2_bin, output)
    output.seek(0xb000)
    line = ""
    # Accumulate the header bytes so the trailing CRC32 covers all of them.
    for key in ['tag', 'ver', 'timestamp', 'size', 'hash', 'campaign']:
        line += RAM2_HEADER[key]
        output.write(RAM2_HEADER[key])
    RAM2_HEADER['crc32'] = format_number(crc32_checksum(line), 8)
    output.write(RAM2_HEADER['crc32'])
    output.close()
# ----------------------------
# main function
# ----------------------------
def rtl8195a_elf2bin(t_self, image_elf, image_bin):
    """Convert an RTL8195A ELF image into a DAPLink-flashable binary.

    Locates the PLAT_Start entry symbol via the linker map file, extracts
    the loadable segments into a payload, and fuses the payload with the
    prebuilt ram_1.bin bootloader.
    """
    root = os.path.splitext(image_elf)[0]
    image_map = root + '.map'
    ram1_bin = os.path.join(TOOLS_BOOTLOADERS, "REALTEK_RTL8195AM", "ram_1.bin")
    ram2_bin = root + '-payload.bin'
    entry = find_symbol(t_self.name, image_map, "PLAT_Start")
    segment = parse_load_segment(t_self.name, image_elf)
    create_payload(image_elf, ram2_bin, entry, segment)
    create_daplink(image_bin, ram1_bin, ram2_bin)
|
chenkianwee/envuo | py4design/py3dmodel/export_collada.py | Python | gpl-3.0 | 18,035 | 0.014084 | # ==================================================================================================
#
# Copyright (c) 2016, Chen Kian Wee (chenkianwee@gmail.com)
#
# This file is part of py4design
#
# py4design is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# py4design is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with py4design. If not, see <http://www.gnu.org/licenses/>.
#
# ==================================================================================================
from . import construct
from . import calculate
from . import fetch
from . import modify
from . import utility
def occtopo_2_collada(dae_filepath, occface_list = None, face_rgb_colour_list=None,
occedge_list = None):
"""
This function converts OCCtopologies into a pycollada Collada class. The units are in meter.
Parameters
----------
dae_filepath : str
The file path of the DAE (Collada) file.
occface_list : list of OCCfaces
The geometries to be visualised with the results. The list of geometries must correspond to the list of results. Other OCCtopologies
are also accepted, but the OCCtopology must contain OCCfaces. OCCtopology includes: OCCshape, OCCcompound, OCCcompsolid,
OCCsolid, OCCshell, OCCface.
face_rgb_colour_list : list of tuple of floats, optional
Each tuple is a r,g,b that is specifying the colour of the face. The number of colours must correspond to the number of OCCfaces.
occedge_list : list of OCCedges, optional
OCCedges to be visualised together, Default = None.
Returns
-------
mesh : pycollada Collada class object
The collada object from pycollada library.
"""
import collada
from collada import asset, material, source, geometry, scene
import numpy
mesh = collada.Collada()
mesh.assetInfo.upaxis = asset.UP_AXIS.Z_UP
mesh.assetInfo.unitmeter = 1.0
mesh.assetInfo.unitname = "meter"
if face_rgb_colour_list != None:
mat_list = []
colour_cnt = 0
for rgb_colour in face_rgb_colour_list:
effect = material.Effect("effect" + str(colour_cnt), [], "phong", diffuse=rgb_colour, specular=rgb_colour, double_sided = True)
mat = material.Material("material" + str(colour_cnt), "mymaterial" + str(colour_cnt), effect)
mesh.effects.append(effect)
mesh.materials.append(mat)
mat_list.append(mat)
colour_cnt+=1
else:
effect = material.Effect("effect0", [], "phong", diffuse=(1,1,1), specular=(1,1,1))
mat = material.Material("material0", "mymaterial", effect)
mesh.effects.append(effect)
mesh.materials.append(mat)
edgeeffect = material.Effect("edgeeffect0", [], "phong", diffuse=(1,1,1), specular=(1,1,1), double_sided = True)
edgemat = material.Material("edgematerial0", "myedgematerial", effect)
mesh.effects.append(edgeeffect)
mesh.materials.append(edgemat)
geomnode_list = []
shell_cnt = 0
if occface_list:
for occshell in occface_list:
vert_floats = []
normal_floats = []
vcnt = []
indices = []
print('OCCSHELL', occshell)
face_list = fetch.topo_explorer(occshell, "face")
vert_cnt = 0
for face in face_list:
wire_list = fetch.topo_explorer(face, "wire")
nwire = len(wire_list)
if nwire == 1:
pyptlist = fetch.points_frm_occface(face)
vcnt.append(len(pyptlist))
face_nrml = calculate.face_normal(face)
#pyptlist.reverse()
for pypt in pyptlist:
vert_floats.append(pypt[0])
vert_floats.append(pypt[1])
vert_floats.append(pypt[2])
normal_floats.append(face_nrml[0])
normal_floats.append(face_nrml[1])
normal_floats.append(face_nrml[2])
indices.append(vert_cnt)
vert_cnt+=1
if nwire >1:
tri_face_list = construct.simple_mesh(face)
for tface in tri_face_list:
pyptlist = fetch.points_frm_occface(tface)
vcnt.append(len(pyptlist))
face_nrml = calculate.face_normal(tface)
#pyptlist.reverse()
for pypt in pyptlist:
vert_floats.append(pypt[0])
vert_floats.append(pypt[1])
vert_floats.append(pypt[2])
normal_floats.append(face_nrml[0])
normal_floats.append(face_nrml[1])
normal_floats.append(face_nrml[2])
indices.append(vert_cnt)
vert_cnt+=1
vert_id = "ID"+str(shell_cnt) + "1"
vert_src = source.FloatSource(vert_id, numpy.array(vert_floats), ('X', 'Y', 'Z'))
normal_id = "ID"+str(shell_cnt) + "2"
normal_src = source.FloatSource | (normal_id, numpy.array(normal_floats), ('X', 'Y', 'Z'))
geom = geometry.Geometry(mesh, "geometry" + str(shell_cnt), "geometry" + str(shell_cnt), [vert_src, normal_src])
input_list = source.InputList()
input_list.addInput(0, 'VERTEX', "#" | +vert_id)
#input_list.addInput(1, 'NORMAL', "#"+normal_id)
vcnt = numpy.array(vcnt)
indices = numpy.array(indices)
if face_rgb_colour_list!=None:
mat_name="materialref"+ str(shell_cnt)
polylist = geom.createPolylist(indices, vcnt, input_list, mat_name)
geom.primitives.append(polylist)
mesh.geometries.append(geom)
matnode = scene.MaterialNode(mat_name, mat_list[shell_cnt], inputs=[])
geomnode = scene.GeometryNode(geom, [matnode])
geomnode_list.append(geomnode)
else:
mat_name="materialref"
polylist = geom.createPolylist(indices, vcnt, input_list, mat_name)
geom.primitives.append(polylist)
mesh.geometries.append(geom)
matnode = scene.MaterialNode(mat_name, mat, inputs=[])
geomnode = scene.GeometryNode(geom, [matnode])
geomnode_list.append(geomnode)
shell_cnt +=1
if occedge_list:
edge_cnt = 0
for occedge in occedge_list:
vert_floats = []
indices = []
pypt_list =fetch.points_frm_edge(occedge)
if len(pypt_list) == 2:
vert_cnt = 0
for pypt in pypt_list:
vert_floats.append(pypt[0])
vert_floats.append(pypt[1])
vert_floats.append(pypt[2])
indices.append(vert_cnt)
vert_cnt+=1
vert_id = "ID"+str(edge_cnt+shell_cnt) + "1"
vert_src = source.FloatSource(vert_id, numpy.array(vert_floats), ('X', 'Y', 'Z'))
geom = geometry.Geometry(mesh, "geometry" + str(edge_cnt+ shell_cnt), "ge |
sivakuna-aap/superdesk-core | superdesk/io/commands/update_ingest.py | Python | agpl-3.0 | 22,300 | 0.003722 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
from datetime import timedelta, timezone, datetime
from flask import current_app as app
from werkzeug.exceptions import HTTPException
import superdesk
from superdesk.activity import ACTIVITY_EVENT, notify_and_add_activity
from superdesk.celery_app import celery
from superdesk.celery_task_utils import get_lock_id, get_host_id
from superdesk.errors import ProviderError
from superdesk.io import registered_feeding_services, registered_feed_parsers
from superdesk.io.iptc import subject_codes
from superdesk.lock import lock, unlock
from superdesk.media.media_operations import download_file_from_url, process_file
from superdesk.media.renditions import generate_renditions
from superdesk.metadata.item import GUID_NEWSML, GUID_FIELD, FAMILY_ID, ITEM_TYPE, CONTENT_TYPE, CONTENT_STATE, \
ITEM_STATE
from superdesk.metadata.utils import generate_guid
from superdesk.notification import push_notification
from superdesk.stats import stats
from superdesk.upload import url_for_media
from superdesk.utc import utcnow, get_expiry_date
from superdesk.workflow import set_default_state
UPDATE_SCHEDULE_DEFAULT = {'minutes': 5}
LAST_UPDATED = 'last_updated'
LAST_ITEM_UPDATE = 'last_item_update'
IDLE_TIME_DEFAULT = {'hours': 0, 'minutes': 0}
logger = logging.getLogger(__name__)
def is_service_and_parser_registered(provider):
    """
    Tests if the Feed Service and Feed Parser associated with are registered with application.

    :param provider:
    :type provider: dict :py:class:`superdesk.io.ingest_provider_model.IngestProviderResource`
    :return: True if both Feed Service and Feed Parser are registered. False otherwise.
    :rtype: bool
    """
    # Parentheses matter here: without them the expression parsed as
    # "(service registered AND no parser) OR parser registered", which
    # wrongly accepted providers whose feeding service is not registered
    # as long as their parser was.
    return provider.get('feeding_service') in registered_feeding_services and \
        (provider.get('feed_parser') is None or
         provider.get('feed_parser') in registered_feed_parsers)
def is_scheduled(provider):
    """Test if given provider should be scheduled for update.

    :param provider: ingest provider
    """
    now = utcnow()
    schedule = provider.get('update_schedule', UPDATE_SCHEDULE_DEFAULT)
    # a provider that was never updated gets a timestamp far in the past
    # so that it is picked up immediately
    fallback = now - timedelta(days=100)
    return provider.get(LAST_UPDATED, fallback) + timedelta(**schedule) < now
def is_closed(provider):
    """Test if provider is marked as closed.

    :param provider: ingest provider
    """
    try:
        return provider['is_closed']
    except KeyError:
        return False
def filter_expired_items(provider, items):
    """
    Filters out the item from the list of articles to be ingested
    if they are expired and item['type'] not in provider['content_types'].

    :param provider: Ingest Provider Details.
    :type provider: dict :py:class: `superdesk.io.ingest_provider_model.IngestProviderResource`
    :param items: list of items received from the provider
    :type items: list
    :return: list of items which can be saved into ingest collection
    :rtype: list
    """
    def still_valid(item, delta):
        # items lacking both 'expiry' and 'versioncreated' are dropped;
        # naive (tz-less) expiry timestamps are treated as expired
        if item.get('expiry') or item.get('versioncreated'):
            expiry = item.get('expiry', item['versioncreated'] + delta)
            if expiry.tzinfo:
                return expiry > utcnow()
        return False

    try:
        minutes = provider.get('content_expiry', app.config['INGEST_EXPIRY_MINUTES'])
        delta = timedelta(minutes=minutes)
        filtered_items = [
            item for item in items
            if still_valid(item, delta) and item[ITEM_TYPE] in provider['content_types']
        ]
        if len(filtered_items) != len(items):
            logger.debug('Received {0} articles from provider {1}, but only {2} are eligible to be saved in ingest'
                         .format(len(items), provider['name'], len(filtered_items)))
        return filtered_items
    except Exception as ex:
        raise ProviderError.providerFilterExpiredContentError(ex, provider)
def get_provider_rule_set(provider):
    """Return the provider's rule set document, or None when none is set."""
    rule_set_id = provider.get('rule_set')
    if not rule_set_id:
        return None
    service = superdesk.get_resource_service('rule_sets')
    return service.find_one(_id=rule_set_id, req=None)
def get_provider_routing_scheme(provider):
    """Return the ingest provider's routing scheme configuration.

    If the provider references a routing scheme, it is fetched from the
    database; each scheme rule that references a content filter gets that
    filter's configuration fetched and embedded in place of its id.

    :param dict provider: ingest provider configuration
    :return: fetched provider's routing scheme configuration (if any)
    :rtype: dict or None
    """
    scheme_id = provider.get('routing_scheme')
    if not scheme_id:
        return None

    schemes_service = superdesk.get_resource_service('routing_schemes')
    filters_service = superdesk.get_resource_service('content_filters')

    scheme = schemes_service.find_one(_id=scheme_id, req=None)

    # embed each referenced content filter into its routing rule
    for rule in scheme['rules']:
        if rule.get('filter'):
            filter_id = str(rule['filter'])
            rule['filter'] = filters_service.find_one(_id=filter_id, req=None)

    return scheme
def get_task_ttl(provider):
update_schedule = provider.get('update_schedule', UPDATE_SCHEDULE_DEFAULT)
return update_schedule.get('minutes', 0) * 60 + update_schedule.get('hours', 0) * 3600
def get_is_idle(provider):
    """Tell whether the provider has been idle longer than its configured
    ``idle_time``.

    Returns False when the provider never ingested an item or when the
    configured idle time is zero in both fields.

    :param provider: ingest provider
    """
    last_item = provider.get(LAST_ITEM_UPDATE)
    idle_time = provider.get('idle_time', IDLE_TIME_DEFAULT)
    # NOTE(review): hours/minutes stored as datetime objects are coerced
    # to 0 -- presumably guarding against malformed values persisted by
    # the editor; confirm the intent before changing.
    if isinstance(idle_time['hours'], datetime):
        idle_hours = 0
    else:
        idle_hours = idle_time['hours']
    if isinstance(idle_time['minutes'], datetime):
        idle_minutes = 0
    else:
        idle_minutes = idle_time['minutes']
    # there is an update time and the idle time is none zero
    if last_item and (idle_hours != 0 or idle_minutes != 0):
        if utcnow() > last_item + timedelta(hours=idle_hours, minutes=idle_minutes):
            return True
    return False
def get_task_id(provider):
    """Build a unique celery task id for the provider's update task."""
    provider_id = provider.get(superdesk.config.ID_FIELD)
    return 'update-ingest-{0}-{1}'.format(provider.get('name'), provider_id)
class UpdateIngest(superdesk.Command):
    """Update ingest providers."""

    option_list = {superdesk.Option('--provider', '-p', dest='provider_name')}

    def run(self, provider_name=None):
        """Schedule an asynchronous update task for every eligible provider.

        A provider is eligible when it is open, its feeding service and
        feed parser are registered, and its update schedule says it is due.

        :param provider_name: when given, restrict the run to that provider
        """
        lookup = {'name': provider_name} if provider_name else {}
        providers = superdesk.get_resource_service('ingest_providers').get(req=None, lookup=lookup)
        for provider in providers:
            eligible = (not is_closed(provider) and
                        is_service_and_parser_registered(provider) and
                        is_scheduled(provider))
            if not eligible:
                continue
            task_kwargs = {
                'provider': provider,
                'rule_set': get_provider_rule_set(provider),
                'routing_scheme': get_provider_routing_scheme(provider),
            }
            update_provider.apply_async(expires=get_task_ttl(provider), kwargs=task_kwargs)
@celery.task(soft_time_limit=1800, bind=True)
def update_provider(self, provider, rule_set=None, routing_scheme=None):
"""
Fetches items from inges | t provider as per the configuration, ingests them into Superdesk and
updates the provider.
:param self:
:type self:
:param provider: Ingest Provider Details
:type provider: dict :py:class:`superdesk.io.ingest_provider_model.IngestProviderResource`
:param rule_set: Translation Rule Set if one is associated with Ingest Provider.
:type rule_set: dict :py:class:`apps.rules.rule_sets.RuleSetsResource`
:param routing_scheme: Routing Scheme if one is associated with Ingest Provider.
:type routing_scheme: dict :py:class:`apps.rules.routing_rules.RoutingRuleSchemeResour |
mshunshin/SegNetCMR | pydicom/contrib/dicom_dao.py | Python | mit | 14,663 | 0.000341 | #!/usr/bin/python
""" dicom_dao
Data Access Objects for persisting PyDicom DataSet objects.
Currently we support couchdb through the DicomCouch class.
Limitations:
- Private tags are discarded
TODO:
- Unit tests with multiple objects open at a time
- Unit tests with rtstruct objects
- Support for mongodb (mongo has more direct support for binary data)
Dependencies:
- PyDicom
- python-couchdb
Tested with:
- PyDicom 0.9.4-1
- python-couchdb 0.6
- couchdb 0.10.1
"""
#
# Copyright (c) 2010 Michael Wallace
# This file is released under the pydicom license.
# See the file license.txt included with the pydicom distribution, also
# available at https://github.com/darcymason/pydicom
#
import hashlib
import os
import string
import couchdb
import pydicom
def uid2str(uid):
    """Convert a PyDicom UID to a plain string.

    The UID's repr() is wrapped in single quotes; stripping those quotes
    yields the bare dotted-number form.
    """
    quoted = repr(uid)
    return quoted.strip("'")
# When reading files a VR of 'US or SS' is left as binary, because we
# don't know how to interpret the values different numbers. We therefore
# treat it as binary and will continue to until either pydicom works it out
# for us, or we figure out a test.
BINARY_VR_VALUES = ['OW', 'OB', 'OW/OB', 'US or SS']
class DicomCouch(dict):
    """A Data Access Object for persisting PyDicom objects into CouchDB.

    We follow the same pattern as the python-couchdb library for getting and
    setting documents, for example storing pydicom.dataset.Dataset object dcm:

        db = DicomCouch('http://localhost:5984/', 'dbname')
        db[dcm.SeriesInstanceUID] = dcm

    The only constraints on the key are that it must be json-serializable and
    unique within the database instance. In theory it should be possible to
    use any DICOM UID. Unfortunately this code was written under the
    assumption that SeriesInstanceUID will always be used. This will be fixed.

    Retrieving object with key 'foo':
        dcm = db['foo']

    Deleting object with key 'foo':
        dcm = db['foo']
        db.delete(dcm)

    Binary elements (pixel data etc.) are stored as couchdb attachments;
    MD5 hashes of uploaded attachments are kept in the per-series ``_meta``
    dict so unchanged attachments are not re-uploaded.

    TODO:
     - It is possible to have couchdb assign a uid when adding objects. This
       should be supported.
    """

    def __init__(self, server, db):
        """Connect to *server* and open (or create) database *db*."""
        super(DicomCouch, self).__init__()
        # per-SeriesInstanceUID bookkeeping: last couch doc and attachment hashes
        self._meta = {}
        server = couchdb.Server(server)
        try:
            self._db = server[db]
        except couchdb.client.ResourceNotFound:
            # database does not exist yet -- create it
            self._db = server.create(db)

    def __getitem__(self, key):
        """Retrieve the DICOM object with the specified SeriesInstanceUID."""
        doc = self._db[key]
        dcm = json2pydicom(doc)
        if dcm.SeriesInstanceUID not in self._meta:
            self._meta[dcm.SeriesInstanceUID] = {}
            self._meta[dcm.SeriesInstanceUID]['hashes'] = {}
        if '_attachments' in doc:
            # restore binary elements stored as couch attachments
            self.__get_attachments(dcm, doc)
        _set_meta_info_dcm(dcm)
        # Keep a copy of the couch doc for use in DELETE operations
        self._meta[dcm.SeriesInstanceUID]['doc'] = doc
        return dcm

    def __setitem__(self, key, dcm):
        """Write the supplied DICOM object to the database under *key*."""
        # Normalize PixelData from the decoded pixel_array when possible.
        try:
            dcm.PixelData = dcm.pixel_array.tostring()
        except AttributeError:
            pass  # Silently ignore errors due to pixel_array not existing
        except NotImplementedError:
            pass  # Silently ignore attempts to modify compressed pixel data
        except TypeError:
            pass  # Silently ignore errors due to PixelData not existing
        jsn, binary_elements, file_meta_binary_elements = pydicom2json(dcm)
        # binary values live as attachments, not inline in the json doc
        _strip_elements(jsn, binary_elements)
        _strip_elements(jsn['file_meta'], file_meta_binary_elements)
        if dcm.SeriesInstanceUID in self._meta:
            # carry over _rev/_attachments so couch accepts the update
            self.__set_meta_info_jsn(jsn, dcm)
        try:  # Actually write to the db
            self._db[key] = jsn
        except TypeError as type_error:
            # NOTE(review): the message check has no effect -- any TypeError
            # is swallowed here since the except block ends normally either
            # way; presumably only this specific library quirk was meant to
            # be ignored. Confirm before tightening.
            if str(type_error) == 'string indices must be integers, not str':
                pass
        if dcm.SeriesInstanceUID not in self._meta:
            self._meta[dcm.SeriesInstanceUID] = {}
            self._meta[dcm.SeriesInstanceUID]['hashes'] = {}
        self.__put_attachments(dcm, binary_elements, jsn)
        # Re-fetch the document from couch so the cached copy carries the
        # _id, _rev and _attachments keys and we don't overwrite the
        # attachments that were just uploaded. The extra HTTP GET could in
        # principle be avoided by building those keys locally.
        self._meta[dcm.SeriesInstanceUID]['doc'] = \
            self._db[dcm.SeriesInstanceUID]

    def __str__(self):
        """Return the string representation of the couchdb client."""
        return str(self._db)

    def __repr__(self):
        """Return the canonical string representation of the couchdb client."""
        return repr(self._db)

    def __get_attachments(self, dcm, doc):
        """Set binary tags by retrieving attachments from couchdb.

        Values are hashed so they are only updated if they have changed.
        """
        for id in doc['_attachments'].keys():
            # attachment id encodes the tag path inside the dataset
            tagstack = id.split(':')
            value = self._db.get_attachment(doc['_id'], id)
            _add_element(dcm, tagstack, value)
            self._meta[dcm.SeriesInstanceUID]['hashes'][id] = hashlib.md5(value)

    def __put_attachments(self, dcm, binary_elements, jsn):
        """Upload all new and modified attachments."""
        elements_to_update = \
            [(tagstack, item)
             for tagstack, item in binary_elements
             if self.__attachment_update_needed(dcm,
                _tagstack2id(tagstack + [item.tag]), item)
             ]  # nopep8
        for tagstack, element in elements_to_update:
            id = _tagstack2id(tagstack + [element.tag])
            self._db.put_attachment(jsn, element.value, id)
            # remember the hash so unchanged values skip the next upload
            self._meta[dcm.SeriesInstanceUID]['hashes'][id] = \
                hashlib.md5(element.value)

    def delete(self, dcm):
        """Delete from database and remove meta info from the DAO."""
        self._db.delete(self._meta[dcm.SeriesInstanceUID]['doc'])
        self._meta.pop(dcm.SeriesInstanceUID)

    def __set_meta_info_jsn(self, jsn, dcm):
        """Copy couch-specific meta data (_rev, _attachments) into *jsn*."""
        jsn['_rev'] = self._meta[dcm.SeriesInstanceUID]['doc']['_rev']
        if '_attachments' in self._meta[dcm.SeriesInstanceUID]['doc']:
            jsn['_attachments'] = \
                self._meta[dcm.SeriesInstanceUID]['doc']['_attachments']

    def __attachment_update_needed(self, dcm, id, binary_element):
        """Compare hashes for a binary element; return True if different."""
        try:
            hashes = self._meta[dcm.SeriesInstanceUID]['hashes']
        except KeyError:
            return True  # If no hashes dict then attachments do not exist
        if id not in hashes or hashes[id].digest() != \
                hashlib.md5(binary_element.value).digest():
            return True
        else:
            return False
def _add_element(dcm, tagstack, value):
    """ Add element with tag, vr and value to dcm at location tagstack """
    node = dcm
    # walk down to the parent of the target element; numeric path steps
    # index into sequences, everything else is a DICOM tag
    for step in tagstack[:-1]:
        try:
            index = int(step)
        except ValueError:
            index = pydicom.tag.Tag(__str2tag(step))
        node = node[index]
    tag = __str2tag(tagstack[-1])
    vr = pydicom.datadict.dictionaryVR(tag)
    node[tag] = pydicom.dataelem.DataElement(tag, vr, value)
def _tagstack2id(tagstack):
""" Convert a list of tags to a unique (within document) attachment id """
return string.join([str(tag) for tag in tagstack], ':')
def _strip_elements(jsn, elements):
""" Remove supplied elements from the dict object
We use this with a list of binary elements so that we don't store
empty tags in couchdb when we are already storing the binary data as
attachments.
"""
for tagstack, element in elements:
if len(tagstac |
rebase-helper/rebase-helper | rebasehelper/helpers/koji_helper.py | Python | gpl-2.0 | 14,918 | 0.001006 | # -*- coding: utf-8 -*-
#
# This tool helps you rebase your package to the latest version
# Copyright (C) 2013-2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hráček <phracek@redhat.com>
# Tomáš Hozza <thozza@redhat.com>
# Nikola Forró <nforro@redhat.com>
# František Nečas <fifinecas@seznam.cz>
import logging
import os
import random
import string
import sys
import time
from typing import List, cast
from rebasehelper.exceptions import RebaseHelperError
from rebasehelper.helpers.console_helper import ConsoleHelper
from rebasehelper.helpers.rpm_helper import RpmHelper
from rebasehelper.helpers.download_helper import DownloadHelper
from rebasehelper.logger import CustomLogger
koji_helper_functional: bool
try:
import koji # type: ignore
from koji_cli.lib import TaskWatcher # type: ignore
except ImportError:
koji_helper_functional = False
else:
koji_helper_functional = True
logger: CustomLogger = cast(CustomLogger, logging.getLogger(__name__))
class KojiHelper:
functional: bool = koji_helper_functional
@classmethod
def create_session(cls, login=False, profile='koji'):
"""Creates new Koji session and immediately logs in to a Koji hub.
Args:
login (bool): Whether to perform a login.
profile (str): Koji profile to use.
Returns:
koji.ClientSession: Newly created session instance.
Raises:
RebaseHelperError: If login failed.
"""
config = koji.read_config(profile)
session = koji.ClientSession(config['server'], opts=config)
if not login:
return session
try:
session.gssapi_login()
except Exception: # pylint: disable=broad-except
pass
else:
return session
# fall back to kerberos login (doesn't work with python3)
exc = (koji.AuthError, koji.krbV.Krb5Error) if koji.krbV else koji.AuthError
try:
session.krb_login()
except exc as e:
raise RebaseHelperError('Login failed: {}'.format(str(e))) from e
else:
return session
@classmethod
def upload_srpm(cls, session, srpm):
"""Uploads SRPM to a Koji hub.
Args:
session (koji.ClientSession): Active Koji session instance.
srpm (str): Valid path to SRPM.
Returns:
str: Remote path to the uploaded SRPM.
Raises:
RebaseHelperError: If upload failed.
"""
def progress(uploaded, total, chunksize, t1, t2): # pylint: disable=unused-argument
DownloadHelper.progress(total, uploaded, upload_start)
suffix = ''.join(random.choice(string.ascii_letters) for _ in range(8))
path = os.path.join('cli-build', str(time.time()), suffix)
logger.info('Uploading SRPM')
try:
try:
upload_start = time.time()
session.uploadWrapper(srpm, path, callback=progress)
except koji.GenericError as e:
raise RebaseHelperError('Upload failed: {}'.format(str(e))) from e
finally:
sys.stdout.write('\n')
sys.stdout.flush()
return os.path.join(path, os.path.basename(srpm))
@classmethod
def get_task_url(cls, session, task_id):
return '/'.join([session.opts['weburl'], 'taskinfo?taskID={}'.format(task_id)])
@classmethod
def display_task_results(cls, tasks):
"""Prints states of Koji tasks.
Args:
tasks (list): List of koji.TaskWatcher instances.
"""
for task in [t for t in tasks if t.level == 0]:
state = task.info['state']
task_label = task.str()
logger.info('State %s (%s)', state, task_label)
if state == koji.TASK_STATES['CLOSED']:
logger.info('%s completed successfully', ta | sk_label)
elif state == koji.TASK_STATES['FAILED']:
logger.info('%s failed', task_label)
elif state == koji.TASK_STATES['CANCELED']:
logger.info(' | %s was canceled', task_label)
else:
# shouldn't happen
logger.info('%s has not completed', task_label)
@classmethod
def watch_koji_tasks(cls, session, tasklist):
"""Waits for Koji tasks to finish and prints their states.
Args:
session (koji.ClientSession): Active Koji session instance.
tasklist (list): List of task IDs.
Returns:
dict: Dictionary mapping task IDs to their states or None if interrupted.
"""
if not tasklist:
return None
sys.stdout.flush()
rh_tasks = {}
try:
tasks = {}
for task_id in tasklist:
task_id = int(task_id)
tasks[task_id] = TaskWatcher(task_id, session, quiet=False)
while True:
all_done = True
for task_id, task in list(tasks.items()):
with ConsoleHelper.Capturer(stdout=True) as capturer:
changed = task.update()
for line in capturer.stdout.split('\n'):
if line:
logger.info(line)
info = session.getTaskInfo(task_id)
state = task.info['state']
if state == koji.TASK_STATES['FAILED']:
return {info['id']: state}
else:
# FIXME: multiple arches
if info['arch'] == 'x86_64' or info['arch'] == 'noarch':
rh_tasks[info['id']] = state
if not task.is_done():
all_done = False
else:
if changed:
# task is done and state just changed
cls.display_task_results(list(tasks.values()))
if not task.is_success():
rh_tasks = None
for child in session.getTaskChildren(task_id):
child_id = child['id']
if child_id not in list(tasks.keys()):
tasks[child_id] = TaskWatcher(child_id, session, task.level + 1, quiet=False)
with ConsoleHelper.Capturer(stdout=True) as capturer:
tasks[child_id].update()
for line in capturer.stdout.split('\n'):
if line:
logger.info(line)
info = session.getTaskInfo(child_id)
state = task.info['state']
if state == koji.TASK_STATES['FAILED']:
return {info['id']: state}
else:
# FIXME: multiple arches
if info['arch'] == 'x86_64' or info['arch'] == 'noarch':
rh_tasks[info['id']] = state
# If we found new children, go through the list again,
# in case they have children also
all_done = False
if all_done:
cls.display_task_results(lis |
pierky/ripe-atlas-monitor | pierky/ripeatlasmonitor/Helpers.py | Python | gpl-3.0 | 8,807 | 0.000114 | # Copyright (C) 2016 Pier Carlo Chiodi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import errno
import fcntl
import os
from multiprocessing.managers import BaseManager
from .Errors import ConfigError, ProgramError
from .Logging import logger
from pierky.ipdetailscache import IPDetailsCache, \
IPDetailsCacheIXPInformationError
class BasicConfigElement(object):
    """Base class for configuration elements.

    Subclasses declare their accepted fields via MANDATORY_CFG_FIELDS and
    OPTIONAL_CFG_FIELDS; the ``_enforce_*`` helpers coerce and validate the
    raw values found in ``self.cfg`` (a dict parsed from the config file).
    """

    # field names subclasses require / accept
    MANDATORY_CFG_FIELDS = []

    OPTIONAL_CFG_FIELDS = []

    @classmethod
    def get_cfg_fields(cls):
        """Return (mandatory, optional) field-name sets for this class."""
        m = set(cls.MANDATORY_CFG_FIELDS)
        o = set(cls.OPTIONAL_CFG_FIELDS)

        return m, o

    def __init__(self, cfg):
        self.cfg = cfg

    @classmethod
    def get_all_cfg_fields(cls):
        """Return the union of mandatory and optional field names."""
        m, o = cls.get_cfg_fields()

        return m.union(o)

    def _enforce_type(self, v, t):
        """Coerce *v* to type *t*, or raise ConfigError.

        None passes through unchanged. Strings are coerced to int (digits
        only, so negative numbers are rejected) and to bool (yes/no style
        keywords); any other mismatch raises ConfigError.
        """
        if v is None:
            return None
        else:
            if type(v) != t:
                if type(v) == str and t == int:
                    if v.isdigit():
                        return int(v)
                elif type(v) == str and t == bool:
                    if v.lower() in ["y", "yes", "t", "true", "on", "1"]:
                        return True
                    elif v.lower() in ["n", "no", "f", "false", "off", "0"]:
                        return False
                # falls through here whenever the string could not be
                # interpreted as the requested type
                raise ConfigError(
                    "Invalid type for '{}': "
                    "must be {}".format(v, t)
                )
            else:
                return v

    def _enforce_param(self, k, t):
        """Return cfg[k] coerced to type *t*; None when absent or blank.

        Raises ConfigError (with the attribute name prepended) for unknown
        attributes or type mismatches.
        """
        try:
            if k not in self.get_all_cfg_fields():
                raise ConfigError("Unknown attribute: {}".format(k))
            if k not in self.cfg:
                return None
            else:
                # blank strings are treated the same as a missing value
                if isinstance(self.cfg[k], str) and self.cfg[k].strip() == "":
                    return None
                else:
                    return self._enforce_type(self.cfg[k], t)
        except ConfigError as e:
            raise ConfigError("Error processing '{}' attribute: {}".format(
                k, str(e)
            ))

    def _enforce_list(self, k, t):
        """Return cfg[k] as a list with every element coerced to type *t*.

        Missing or None values yield []; a scalar value is wrapped in a
        one-element list. Coercion happens in place for list values.
        """
        if k not in self.cfg:
            return []
        if self.cfg[k] is None:
            return []
        else:
            if isinstance(self.cfg[k], list):
                for idx in range(0, len(self.cfg[k])):
                    try:
                        self.cfg[k][idx] = self._enforce_type(self.cfg[k][idx],
                                                              t)
                    except ConfigError as e:
                        raise ConfigError(
                            "Error processing '{}' attribute: {}".format(
                                k, str(e)
                            )
                        )
                return self.cfg[k]
            else:
                return [self._enforce_type(self.cfg[k], t)]

    def normalize_fields(self):
        """Validate cfg's keys and fill missing optional fields with None.

        Raises ConfigError for an empty/non-dict cfg, unknown fields, and
        missing or null mandatory fields.
        """
        CFG_FIELDS = self.get_all_cfg_fields()

        if self.cfg is None:
            raise ConfigError("Invalid configuration: it's empty.")
        if not isinstance(self.cfg, dict):
            raise ConfigError(
                "Invalid configuration element: {}".format(self.cfg)
            )

        # Any unknown field?

        for f in self.cfg:
            if f not in CFG_FIELDS:
                raise ConfigError(
                    "Unknown configuration field: {}".format(f)
                )

        # Mandatory fields missing?

        for f in sorted(self.get_cfg_fields()[0]):
            if f not in self.cfg:
                raise ConfigError("Missing mandatory field: {}".format(f))
            if self.cfg[f] is None:
                raise ConfigError("Mandatory field is null: {}".format(f))

        # Missing attributes are set to None

        for f in CFG_FIELDS:
            if f not in self.cfg:
                self.cfg[f] = None

    @staticmethod
    def _capitalize_first(s):
        # upper-case only the first character, leaving the rest untouched
        return s[0].upper() + s[1:]
class Probe(object):
    """A RIPE Atlas probe.

    *probe* is one element of a ripe-atlas-cousteau ProbeRequest result
    (JSON format, https://atlas.ripe.net/docs/rest/#probe); *af* selects
    which ASN (v4 or v6) is relevant for the current measurement.
    """

    def __init__(self, probe, af):
        self.id = int(probe["id"])
        self.country_code = probe["country_code"]
        self.asn_v4 = probe["asn_v4"]
        self.asn_v6 = probe["asn_v6"]
        # ASN matching the measurement's address family
        self.asn = probe["asn_v{}".format(af)]

    def __str__(self):
        if self.asn is None:
            template = "probe ID {id} ({cc})"
        else:
            template = "probe ID {id} (AS{asn}, {cc})"
        return template.format(id=self.id, asn=self.asn,
                               cc=self.country_code)
class ProbesFilter(object):
    """Filter for Probe objects by probe ID and/or country code.

    An empty filter matches every probe; otherwise a probe must satisfy
    every criterion that has been set.
    """

    def __init__(self, probe_ids=None, countries=None):
        self.probe_ids = probe_ids or []
        self.countries = countries or []

    def __contains__(self, probe):
        # probe is expected to be a Probe instance
        id_ok = not self.probe_ids or probe.id in self.probe_ids
        cc_ok = not self.countries or probe.country_code in self.countries
        return id_ok and cc_ok
class IPCache(object):
    """Wrapper around pierky.ipdetailscache.IPDetailsCache.

    Holds the cache instance created by setup() and exposes lookup/save
    helpers; until setup() succeeds, ``ip_cache`` is None.
    """

    def __init__(self):
        self.ip_cache = None

    def setup(self, **kwargs):
        """Create and configure the underlying IPDetailsCache.

        Recognized kwargs: ``_dir`` (base directory for default cache file
        paths), ``IP_ADDRESSES_CACHE_FILE``, ``IP_PREFIXES_CACHE_FILE``,
        ``IXP_CACHE_FILE`` (explicit paths), ``lifetime`` (max cache age)
        and ``use_ixps_info`` (whether to load IXP data).

        Raises ProgramError on cache setup failures and ConfigError when
        IXP information can't be fetched or setup is interrupted.
        """
        logger.debug("Initializing IP cache...")

        if "_dir" in kwargs:
            _dir = kwargs["_dir"]
        # NOTE(review): if '_dir' is absent and one of the *_CACHE_FILE
        # keys below is also absent, the fallback path raises NameError on
        # _dir -- callers appear to always pass _dir; confirm.

        if "IP_ADDRESSES_CACHE_FILE" in kwargs:
            IP_ADDRESSES_CACHE_FILE = kwargs["IP_ADDRESSES_CACHE_FILE"]
        else:
            IP_ADDRESSES_CACHE_FILE = "{}/ip_addr.cache".format(_dir)
        if "IP_PREFIXES_CACHE_FILE" in kwargs:
            IP_PREFIXES_CACHE_FILE = kwargs["IP_PREFIXES_CACHE_FILE"]
        else:
            IP_PREFIXES_CACHE_FILE = "{}/ip_pref.cache".format(_dir)
        if "IXP_CACHE_FILE" in kwargs:
            IXP_CACHE_FILE = kwargs["IXP_CACHE_FILE"]
        else:
            IXP_CACHE_FILE = "{}/ixps.cache".format(_dir)

        try:
            self.ip_cache = IPDetailsCache(
                IP_ADDRESSES_CACHE_FILE=IP_ADDRESSES_CACHE_FILE,
                IP_PREFIXES_CACHE_FILE=IP_PREFIXES_CACHE_FILE,
                MAX_CACHE=kwargs["lifetime"],
                dont_save_on_del=True
            )
        except Exception as e:
            raise ProgramError(
                "Error while setting up the IP cache: {}".format(str(e))
            )

        try:
            if kwargs["use_ixps_info"]:
                self.ip_cache.UseIXPs(
                    WhenUse=1,
                    IXP_CACHE_FILE=IXP_CACHE_FILE
                )
        except IPDetailsCacheIXPInformationError as e:
            raise ConfigError(
                "An error occurred while setting up the IP addresses cache: "
                "{} - "
                "IXPs information are not available at the moment; please "
                "consider setting the ip_cache.use_ixps_info to False to "
                "temporary avoid problems.".format(str(e))
            )
        except Exception as e:
            raise ProgramError(
                "Error while setting up the IXPs cache: {}".format(str(e))
            )
        # reachable despite following 'except Exception':
        # KeyboardInterrupt derives from BaseException, not Exception
        except KeyboardInterrupt:
            raise ConfigError(
                "Aborting IP cache initialization"
            )

        logger.debug("IP cache initialized.")

    def get_ip_info(self, IP):
        """Return cached details (ASN, prefix, IXP info) for *IP*."""
        return self.ip_cache.GetIPInformation(IP)

    def save(self):
        """Persist the cache to disk, if it has been initialized."""
        if self.ip_cache:
            self.ip_cache.SaveCache()
class IPCacheManager(BaseManager):
    # multiprocessing BaseManager subclass used to share one IPCache
    # across worker processes; proxy types are registered on it at
    # module level via IPCacheManager.register(...).
    pass
IPCacheManager.register( |
Aravinthu/odoo | addons/base_import/__manifest__.py | Python | agpl-3.0 | 1,164 | 0 | {
'name': 'Base import',
'description': """
New extensible file import for Odoo
======================================
Re-implement Odoo's file import system:
* Server side, the previous system forces most of the logic into the
client which duplicates the effort (between clients), makes the
import system much harder to use without a client (direct RPC or
other forms of automation) and makes knowledge about the
import/export system much harder to gather as it is spread over
3+ different projects.
* In a more extensible manner, so users and partners can bui | ld their
own front-end to import from other file formats (e.g. OpenDocument
files) which may be simpler to handle in their work flow or from
their data production sources.
* In a module, so that administrators and users of Odoo who do not
need or | want an online import can avoid it being available to users.
""",
'depends': ['web'],
'category': 'Extra Tools',
'installable': True,
'auto_install': True,
'data': [
'security/ir.model.access.csv',
'views/base_import_templates.xml',
],
'qweb': ['static/src/xml/base_import.xml'],
}
|
patrickhoefler/linked-data-wizards | ldva/apps/visbuilder/codevizbackup.py | Python | agpl-3.0 | 6,354 | 0.012118 | """
Copyright (C) 2014 Kompetenzzentrum fuer wissensbasierte Anwendungen und Systeme
Forschungs- und Entwicklungs GmbH (Know-Center), Graz, Austria
office@know-center.at
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import json
import mappingproposal
import inprocessor
import inprocessorauto
import parallelcoordinatesgenerator
import d3data
from ldva.libs.sparql.utils import SPARQLQuery
from django.http import HttpResponse
from django.template import Context, loader, RequestContext
from django.shortcuts import render_to_response, redirect
from django.views.decorators.csrf import csrf_exempt
@csrf_exempt # Ohne das funktioniert post nicht
def service(request):
cmd = ""
if request.method == 'GET':
cmd=request.GET['cmd']
else:
cmd=request.POST['cmd']
mappingProposalObject = mappingproposal.MappingProposal()
response = HttpResponse()
if(cmd=="getPreview"):
try:
ds=request.POST['dataset']
chart=request.POST['chart']
chartID=request.POST['chartid']
inProcessorObject=inprocessor.InProcessor(ds, chart, chartID)
inProcessorObject.process()
resultArray=inProcessorObject.resultArray
return HttpResponse(json.dumps(resultArray))
except Exception as inst:
msg = "ERROR (code - queryPreview): [%s] %s"%(type(inst),inst)
print msg
mappingProposalObject.reInit()
return HttpResponse(json.dumps({'error' : ''+msg+''}))
if(cmd=="getPreviewAuto"):
try:
ds=request.POST['dataset']
chart=request.POST['chart']
dimension= request.POST['dimension']
resultArray = []
inProcessorObject=inprocessorauto.InProcessorAuto(ds, chart, dimension )
resultArray = inProcessorObject.process()
'''for i in range(len(resultArray)):
supChart = resultArray[i]['chart']
supChartUri = resultArray[i]['charturi']
chartArray.append(supChart)'''
#print "KKKKKKKKKKKKKKKKKKKKKKKKKKKKKKKK", resultArray
return HttpResponse(json.dumps(resultArray))
except Exception as inst:
msg = "ERROR (code - queryPreviewAuto): [%s] %s"%(type(inst),inst)
print msg
#mappingProposalObject.reInit()
return HttpResponse(json.dumps({'error' : ''+msg+''}))
if(cmd=="getVisualization"):
try:
dimUriArray = []
ds=request.POST['dataset']
chart=request.POST['chart']
dimension= json.loads(request.POST['dimension'])
#print "HIERRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRr", dimension
| dimUri = ""
for elements in dimension:
dimUri = elements['dimensionuri']
dim = elements ['dim']
dimUriObject = {'dimensionuri':dimUri, 'dim': dim }
dimUriArray.append(dimUriObject)
inProcessorObject=inprocessorauto.InProcessorAuto(ds, char | t, dimUriArray)
resultArray = inProcessorObject.getVis()
#resultArray=inProcessorObject.resultArray
return HttpResponse(json.dumps(resultArray))
except Exception as inst:
msg = "ERROR (code - queryVisualization): [%s] %s"%(type(inst),inst)
print msg
mappingProposalObject.reInit()
return HttpResponse(json.dumps({'error' : ''+msg+''}))
if(cmd=="getDimension"):
try:
ds=request.POST['dataset']
sparqlqueryObjectD3 = ""
if ds == "http://data.lod2.eu/scoreboard/ds/indicator/i_iuolc_IND_TOTAL__ind_iu3":
sparqlqueryObjectD3=SPARQLQuery("http://open-data.europa.eu/en/sparqlep", 'regex')
else:
sparqlqueryObjectD3=SPARQLQuery('http://zaire.dimis.fim.uni-passau.de:8890/sparql', 'virtuoso')
dimensions=sparqlqueryObjectD3.get_cube_dimensions_for_auto_mapping(ds)
return HttpResponse(json.dumps(dimensions))
except Exception as inst:
msg = "ERROR (code - getDimension): [%s] %s"%(type(inst),inst)
print msg
mappingProposalObject.reInit()
return HttpResponse(json.dumps({'error' : ''+msg+''}))
if(cmd=="getMeasure"):
try:
ds = request.POST['dataset']
sparqlqueryObjectD3 = ""
if ds == "http://data.lod2.eu/scoreboard/ds/indicator/i_iuolc_IND_TOTAL__ind_iu3":
sparqlqueryObjectD3=SPARQLQuery("http://open-data.europa.eu/en/sparqlep", 'regex')
else:
sparqlqueryObjectD3=SPARQLQuery('http://zaire.dimis.fim.uni-passau.de:8890/sparql', 'virtuoso')
dimensions=sparqlqueryObjectD3.get_cube_measure_for_auto_mapping(ds)
#print "HIERRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRR", dimensions
return HttpResponse(json.dumps(dimensions))
except Exception as inst:
msg = "ERROR (code - getMeasure): [%s] %s"%(type(inst),inst)
print msg
mappingProposalObject.reInit()
return HttpResponse(json.dumps({'error' : ''+msg+''}))
if (cmd=="getD3Data"):
try:
#chart=request.GET['chart']
chartID=request.GET['chartid']
ds=request.GET['dataset']
inProcessorObject=inprocessor.InProcessor(ds, "d3data", chartID)
inProcessorObject.process()
resultArray=inProcessorObject.resultArray
return HttpResponse(json.dumps(resultArray))
except Exception as ex:
msg = "ERROR (code - getD3Data): [%s] %s"%(type(ex), ex)
print msg
|
flipjack/tecnoservicio | config/settings/celery.py | Python | bsd-3-clause | 588 | 0.003401 | from __future__ import absolute_import
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os | .environ.setdefault('DJANGO_S | ETTINGS_MODULE', 'config.settings.production')
from django.conf import settings
app = Celery('tecnoservicio.tareas')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
    # Minimal smoke-test task: bind=True exposes the task instance as
    # `self`, whose .request carries the current execution context.
    print('Request: {0!r}'.format(self.request))
qedsoftware/commcare-hq | corehq/apps/programs/forms.py | Python | bsd-3-clause | 1,704 | 0.000587 | from crispy_forms.helper import FormHelper
from crispy_forms import layout as crispy
from django import forms
from corehq.apps.programs.models import Program
from django.utils.translation import ugettext as _
class ProgramForm(forms.Form):
    """Rename form for a Program document.

    The default ("uncategorized") program may not be renamed: its name
    field is rendered read-only and is not required.
    """
    name = forms.CharField(max_length=100)

    def __init__(self, program, *args, **kwargs):
        self.program = program
        kwargs['initial'] = self.program._doc
        super(ProgramForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_tag = False
        self.helper.label_class = 'col-sm-3 col-md-4 col-lg-2'
        self.helper.field_class = 'col-sm-4 col-md-5 col-lg-3'
        # don't let users rename the uncategorized
        # program
        if program.default:
            self.fields['name'].required = False
            self.fields['name'].widget.attrs['readonly'] = True
        self.helper.layout = crispy.Layout('name')

    def clean_name(self):
        """Strip whitespace; require a non-empty, domain-unique name."""
        name = self.cleaned_data['name'].strip()
        if not name:
            raise forms.ValidationError(_('This field is required.'))
        # Names of every other program in the same domain.
        other_program_names = [
            p['name'] for p in Program.by_domain(self.program.domain, wrap=False)
            if p['_id'] != self.program._id
        ]
        if name in other_program_names:
            raise forms.ValidationError(_('Name already in use'))
        return name

    def save(self, instance=None, commit=True):
        """Apply the cleaned name to *instance* (defaults to self.program)."""
        if self.errors:
            raise ValueError(_('Form does not validate'))
        program = instance or self.program
        setattr(program, 'name', self.cleaned_data['name'])
        if commit:
            program.save()
        return program
|
jpardobl/monscale | monscale/mappings/aws.py | Python | bsd-3-clause | 295 | 0.020339 | from boto.sns import connect_to_region, SNSConnection |
def publish_msg_to_sns_topic(region, aws_access_key, aws_secret_key, topic, message, subject):
    """Publish a message to an AWS SNS topic via boto.

    :param region: AWS region name the topic lives in.
    :param aws_access_key: AWS access key id.
    :param aws_secret_key: AWS secret access key.
    :param topic: ARN of the SNS topic to publish to.
    :param message: message body.
    :param subject: subject used for e-mail endpoint deliveries.
    """
    # NOTE(review): the return value of connect_to_region() is discarded;
    # the connection actually used below is built from the raw keys.
    connect_to_region(region)
    conn = SNSConnection(aws_access_key, aws_secret_key)
    conn.publish(topic, message, subject)
|
ghbenjamin/TestingGui | TestingGui/TestWidget.py | Python | mit | 3,581 | 0.056967 | import wx
import Globals
from BaseWidget import BaseWidget
class TestWidget ( BaseWidget ):
    """Detail panel for a single test node.

    Shows a large pass/fail header (icon + label) above a small grid
    with the node's name, group, description and FB reference.
    """

    def __init__( self, parent ):
        BaseWidget.__init__ ( self, parent )
        m_mainSizer = wx.BoxSizer( wx.VERTICAL )
        m_mainSizer.AddSpacer( ( 0, 30), 0, 0, 5 )
        # Status header: icon + big label, horizontally centered.
        m_statusSizer = wx.BoxSizer( wx.VERTICAL )
        m_statusCont = wx.BoxSizer( wx.HORIZONTAL )
        self.m_testStatusIcon = wx.StaticBitmap( self, wx.ID_ANY, wx.NullBitmap, wx.DefaultPosition, wx.DefaultSize, 0 )
        m_statusCont.Add( self.m_testStatusIcon, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
        self.m_testStatusLabel = wx.StaticText( self, wx.ID_ANY, u"MyLabel", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_testStatusLabel.Wrap( -1 )
        self.m_testStatusLabel.SetFont( wx.Font( 22, 70, 90, 90, False, wx.EmptyString ) )
        m_statusCont.Add( self.m_testStatusLabel, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
        m_statusSizer.Add( m_statusCont, 1, wx.EXPAND|wx.ALIGN_CENTER_HORIZONTAL, 5 )
        m_mainSizer.Add( m_statusSizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 5 )
        m_mainSizer.AddSpacer( ( 0, 20), 0, 0, 5 )
        # Info grid: label/value pairs, offset 40px from the left edge.
        bSizer31 = wx.BoxSizer( wx.HORIZONTAL )
        bSizer31.AddSpacer( ( 40, 0), 1, wx.EXPAND, 5 )
        m_infoGrid = wx.FlexGridSizer( 4, 2, 0, 0 )
        m_infoGrid.SetFlexibleDirection( wx.BOTH )
        m_infoGrid.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )
        self.m_testLabel = wx.StaticText( self, wx.ID_ANY, u"Test:", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_testLabel.Wrap( -1 )
        m_infoGrid.Add( self.m_testLabel, 0, wx.ALL, 5 )
        # Restored "wx.DefaultSize" (source was garbled here).
        self.m_testValue = wx.StaticText( self, wx.ID_ANY, u"MyLabel", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_testValue.Wrap( -1 )
        m_infoGrid.Add( self.m_testValue, 0, wx.ALL, 5 )
        self.m_groupLabel = wx.StaticText( self, wx.ID_ANY, u"Group:", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_groupLabel.Wrap( -1 )
        m_infoGrid.Add( self.m_groupLabel, 0, wx.ALL, 5 )
        self.m_groupValue = wx.StaticText( self, wx.ID_ANY, u"MyLabel", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_groupValue.Wrap( -1 )
        m_infoGrid.Add( self.m_groupValue, 0, wx.ALL, 5 )
        self.m_descLabel = wx.StaticText( self, wx.ID_ANY, u"Description:", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_descLabel.Wrap( -1 )
        m_infoGrid.Add( self.m_descLabel, 0, wx.ALL, 5 )
        self.m_descValue = wx.StaticText( self, wx.ID_ANY, u"MyLabel", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_descValue.Wrap( 100 )
        m_infoGrid.Add( self.m_descValue, 0, wx.ALL, 5 )
        self.m_fbLabel = wx.StaticText( self, wx.ID_ANY, u"FB:", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_fbLabel.Wrap( -1 )
        m_infoGrid.Add( self.m_fbLabel, 0, wx.ALL, 5 )
        self.m_fbValue = wx.StaticText( self, wx.ID_ANY, u"MyLabel", wx.DefaultPosition, wx.DefaultSize, 0 )
        self.m_fbValue.Wrap( -1 )
        m_infoGrid.Add( self.m_fbValue, 0, wx.ALL, 5 )
        bSizer31.Add( m_infoGrid, 0, 0, 5 )
        m_mainSizer.Add( bSizer31, 0, 0, 5 )
        self.SetSizer( m_mainSizer )
        self.Layout()

    def __del__( self ):
        pass

    def setNode(self, node):
        """Populate the panel from *node* and refresh the layout."""
        BaseWidget.setNode(self, node)
        self.m_testValue.SetLabel( node.name )
        self.m_groupValue.SetLabel( node.group )
        self.m_descValue.SetLabel( node.description )
        # node.fb may be None/empty; show an explicit placeholder then.
        if node.fb:
            self.m_fbValue.SetLabel( node.fb )
        else:
            self.m_fbValue.SetLabel( "None" )
        if node.status == Globals.TestStatus.PASSED:
            self.m_testStatusLabel.SetLabel("Passed")
        elif node.status == Globals.TestStatus.DISABLED:
            self.m_testStatusLabel.SetLabel("Disabled")
        elif node.status == Globals.TestStatus.FAILED:
            self.m_testStatusLabel.SetLabel("FAILED")
        else:
            # Unknown status values indicate a programming error.
            assert False
        self.Layout()
raghuraju/Simple-Project-Management | src/users/migrations/0003_auto_20170414_1807.py | Python | apache-2.0 | 561 | 0.001783 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-14 18: | 07
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make Manager.reports_to nullable (default None) so a top-level
    manager does not need a parent Manager row."""

    dependencies = [
        ('users', '0002_auto_20170414_1806'),
    ]

    operations = [
        migrations.AlterField(
            model_name='manager',
            name='reports_to',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='users.Manager'),
        ),
    ]
|
petrutlucian94/nova | nova/tests/functional/v3/test_remote_consoles.py | Python | apache-2.0 | 3,556 | 0.005624 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.v3 import test_servers
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.extensions')
class ConsolesSampleJsonTests(test_servers.ServersSampleBase):
    """API sample tests for the os-remote-consoles extension.

    Each test boots a server, requests a console of one protocol
    (VNC/SPICE/RDP/serial) and verifies the response against the stored
    API sample; the non-deterministic console URL is matched by regex.
    """
    extension_name = "os-remote-consoles"
    extra_extensions_to_load = ["os-access-ips"]
    _api_version = 'v2'

    def _get_flags(self):
        f = super(ConsolesSampleJsonTests, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.consoles.Consoles')
        return f

    def setUp(self):
        super(ConsolesSampleJsonTests, self).setUp()
        # Every console protocol exercised below must be enabled.
        self.flags(vnc_enabled=True)
        self.flags(enabled=True, group='spice')
        self.flags(enabled=True, group='rdp')
        self.flags(enabled=True, group='serial_console')

    def test_get_vnc_console(self):
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-vnc-console-post-req',
                                 {'action': 'os-getVNCConsole'})
        subs = self._get_regexes()
        # Console URLs carry generated tokens; match them loosely.
        subs["url"] = \
            "((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
        self._verify_response('get-vnc-console-post-resp', subs, response, 200)

    def test_get_spice_console(self):
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-spice-console-post-req',
                                 {'action': 'os-getSPICEConsole'})
        subs = self._get_regexes()
        subs["url"] = \
            "((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
        self._verify_response('get-spice-console-post-resp', subs,
                              response, 200)

    def test_get_rdp_console(self):
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-rdp-console-post-req',
                                 {'action': 'os-getRDPConsole'})
        subs = self._get_regexes()
        subs["url"] = \
            "((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
        self._verify_response('get-rdp-console-post-resp', subs,
                              response, 200)

    def test_get_serial_console(self):
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-serial-console-post-req',
                                 {'action': 'os-getSerialConsole'})
        subs = self._get_regexes()
        # Serial consoles are exposed over websockets (ws/wss).
        subs["url"] = \
            "((ws?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
        self._verify_response('get-serial-console-post-resp', subs,
                              response, 200)
|
hlin117/statsmodels | statsmodels/datasets/stackloss/data.py | Python | bsd-3-clause | 1,907 | 0.007341 | """Stack loss data"""
__docformat__ = 'restructuredtext'
COPYRIGHT = """This is public domain. """
TITLE = __doc__
SOURCE = """
Brownlee, K. A. (1965), "Statistical Theory and Methodology in
Science and Engineering", 2nd edition, New York:Wiley.
"""
DESCRSHORT = """Stack loss plant data of Brownlee (1965)"""
DESCRLONG = """The stack loss plant data of Brownlee (1965) contains
21 days of measurements from a plant's oxidation of ammonia to nitric acid.
The nitric oxide pollutants are captured in an absorption tower."""
NOTE = """::
Number of Observations - 21
Number of Variables - 4
Variable name definitions::
STACKLOSS - 10 times the percentage of ammonia going into the plant
that escapes from the absoroption column
AIRFLOW - Rate of operation of the plant
WATERTEMP - Cooling water temperature in the absorption tower
ACIDCONC - Acid concentration of circulating acid minus 50 times 10.
"""
from numpy import recfromtxt, column_stack, array
from statsmodels.datasets import utils as du
from os.path import dirname, abspath
def load():
    """
    Load the stack loss data and returns a Dataset class instance.

    Returns
    --------
    Dataset instance:
        See DATASET_PROPOSAL.txt for more information.
    """
    data = _get_data()
    # Column 0 (STACKLOSS) is the endogenous variable.
    return du.process_recarray(data, endog_idx=0, dtype=float)
def load_pandas():
    """
    Load the stack loss data and returns a Dataset class instance.

    Returns
    --------
    Dataset instance:
        See DATASET_PROPOSAL.txt for more information.
    """
    # Column 0 (STACKLOSS) is the endogenous variable.
    return du.process_recarray_pandas(_get_data(), endog_idx=0, dtype=float)
def _get_data():
    """Read stackloss.csv (shipped next to this module) into a recarray."""
    filepath = dirname(abspath(__file__))
    # Use a context manager so the file handle is closed even if
    # recfromtxt raises (the original leaked the open handle).
    with open(filepath + '/stackloss.csv', "rb") as fd:
        data = recfromtxt(fd, delimiter=",", names=True, dtype=float)
    return data
|
BTY2684/gitPy-snippets | testProj/ggplot_test.py | Python | gpl-2.0 | 2,273 | 0.042675 | #!/usr/bin/env python
# - | *- coding: utf-8 -*-
#
# ggplot_test.py
#
# Copyright 2014 Yang <yang@Leo-FamilyGuy>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it wil | l be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import numpy as np
import pandas as pd
from ggplot import *
def main():
    """Demo of the (Python 2) ggplot port.

    Builds a few sample frames of N(0, 1) draws; most plot variants are
    left commented out and only a faceted density plot is rendered.
    """
    # testing ggplot
    #~ print ggplot(mtcars, aes('mpg', 'qsec')) + \
    #~ geom_point(colour='steelblue') + \
    #~ scale_x_continuous(breaks=[10,20,30], \
    #~ labels=["horrible", "ok", "awesome"])
    print type(mtcars)
    randn = np.random.randn
    s = pd.Series(randn(100))
    # Four independent standard-normal samples, melted to long form
    # so each series becomes a facet level in `variable`.
    d = {'one' : pd.Series(randn(100)), 'two' : pd.Series(randn(100)), 'three' : pd.Series(randn(100)), 'four' : pd.Series(randn(100))}
    df = pd.DataFrame(d)
    melt_df = pd.melt(df)
    # scatter plot
    p_scatter = ggplot(df, aes('one', 'two')) + \
        geom_point(colour='steelblue')
    # Histogram plot
    #~ p_hist = ggplot(aes('variable', 'value', fill = 'variable'), \
    #~ data=melt_df) + geom_histogram() + facet_wrap('variable')
    p = ggplot(melt_df, aes('value')) + geom_density() + \
        facet_grid("variable")
    #~ meat_lng = pd.melt(meat, id_vars=['date'])
    #~
    #~ p = ggplot(aes(x='date', y='value'), data=meat_lng)
    #~ p + geom_point() + \
    #~ stat_smooth(colour="red") + \
    #~ facet_wrap("variable")
    #~
    #~ p + geom_hist() + facet_wrap("color")
    #~
    #~ p = ggplot(diamonds, aes(x='price'))
    #~ p + geom_density() + \
    #~ facet_grid("cut", "clarity")
    #~
    #~ p = ggplot(diamonds, aes(x='carat', y='price'))
    #~ p + geom_point(alpha=0.25) + \
    #~ facet_grid("cut", "clarity")
    # Printing a ggplot object triggers the actual rendering.
    print p
    return 0
if __name__ == '__main__':
main()
|
datakortet/django-cms | cms/test_utils/cli.py | Python | bsd-3-clause | 9,013 | 0.004217 | # -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import dj_database_url
gettext = lambda s: s
urlpatterns = []
def configure(db_url, **extra):
from django.conf import settings
os.environ['DJANGO_SETTINGS_MODULE'] = 'cms.test_utils.cli'
if not 'DATABASES' in extra:
DB = dj_database_url.parse(db_url)
else:
DB = {}
defaults = dict(
CACHE_BACKEND='locmem:///',
DEBUG=True,
TEMPLATE_DEBUG=True,
DATABASE_SUPPORTS_TRANSACTIONS=True,
DATABASES={
'default': DB
},
SITE_ID=1,
USE_I18N=True,
MEDIA_ROOT='/media/',
STATIC_ROOT='/static/',
CMS_MEDIA_ROOT='/cms-media/',
CMS_MEDIA_URL='/cms-media/',
MEDIA_URL='/media/',
STATIC_URL='/static/',
ADMIN_MEDIA_PREFIX='/static/admin/',
EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend',
SECRET_KEY='key',
TEMPLATE_LOADERS=(
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
),
TEMPLATE_CONTEXT_PROCESSORS=[
"django.contrib.auth.context_processors.auth",
'django.contrib.messages.context_processors.messages',
"django.core.context_processors.i18n",
"django.core.context_processors.debug",
"django.core.context_processors.request",
"django.core.context_processors.media",
'django.core.context_processors.csrf',
"cms.context_processors.media",
"sekizai.context_processors.sekizai",
"django.core.context_processors.static",
],
TEMPLATE_DIRS=[
os.path.abspath(os.path.join(os.path.dirname(__file__), 'project', 'templates'))
],
MIDDLEWARE_CLASSES=[
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.doc.XViewMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
'cms.middleware.language.LanguageCookieMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
'cms.middleware.page.CurrentPageMiddleware',
'cms.middleware.toolbar.ToolbarMiddleware',
],
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.sites',
'django.contrib.staticfiles',
'django.contrib.messages',
'cms',
'menus',
'mptt',
'cms.plugins.text',
'cms.plugins.picture',
'cms.plugins.file',
'cms.plugins.flash',
'cms.plugins.link',
'cms.plugins.snippet',
'cms.plugins.googlemap',
'cms.plugins.teaser',
'cms.plugins.video',
'cms.plugins.twitter',
'cms.plugins.inherit',
'cms.test_utils.project.sampleapp',
'cms.test_utils.project.placeholderapp',
'cms.test_utils.project.pluginapp',
'cms.test_utils.project.pluginapp.plugins.manytomany_rel',
'cms.test_utils.project.pluginapp.plugins.extra_context',
'cms.test_utils.project.fakemlng',
'cms.test_utils.project.fileapp',
'south',
'reversion',
'sekizai',
'hvad',
],
LANGUAGE_CODE="en",
LANGUAGES=(
('en', gettext('English')),
('fr', gettext('French')),
('de', gettext('Germ | an')),
('pt-br', gettext('Brazilian Portuguese')),
('nl', gettext("Dutch")),
('es-mx', u'Español'),
),
CMS_LANGUAGES={
1: [
{
'code':'en',
'name':gettext | ('English'),
'fallbacks':['fr', 'de'],
'public':True,
},
{
'code':'de',
'name':gettext('German'),
'fallbacks':['fr', 'en'],
'public':True,
},
{
'code':'fr',
'name':gettext('French'),
'public':True,
},
{
'code':'pt-br',
'name':gettext('Brazilian Portuguese'),
'public':False,
},
{
'code':'es-mx',
'name':u'Español',
'public':True,
},
],
2: [
{
'code':'de',
'name':gettext('German'),
'fallbacks':['fr', 'en'],
'public':True,
},
{
'code':'fr',
'name':gettext('French'),
'public':True,
},
],
3: [
{
'code':'nl',
'name':gettext('Dutch'),
'fallbacks':['fr', 'en'],
'public':True,
},
{
'code':'de',
'name':gettext('German'),
'fallbacks':['fr', 'en'],
'public':False,
},
],
'default': {
'hide_untranslated':False,
},
},
CMS_TEMPLATES=(
('col_two.html', gettext('two columns')),
('col_three.html', gettext('three columns')),
('nav_playground.html', gettext('navigation examples')),
),
CMS_PLACEHOLDER_CONF={
'col_sidebar': {
'plugins': ('FilePlugin', 'FlashPlugin', 'LinkPlugin', 'PicturePlugin',
'TextPlugin', 'SnippetPlugin'),
'name': gettext("sidebar column")
},
'col_left': {
'plugins': ('FilePlugin', 'FlashPlugin', 'LinkPlugin', 'PicturePlugin',
'TextPlugin', 'SnippetPlugin', 'GoogleMapPlugin', 'MultiColumnPlugin'),
'name': gettext("left column")
},
'col_right': {
'plugins': ('FilePlugin', 'FlashPlugin', 'LinkPlugin', 'PicturePlugin',
'TextPlugin', 'SnippetPlugin', 'GoogleMapPlugin', 'MultiColumnPlugin'),
'name': gettext("right column")
},
'extra_context': {
"plugins": ('TextPlugin',),
"extra_context": {"width": 250},
"name": "extra context"
},
},
CMS_SOFTROOT=True,
CMS_PERMISSION=True,
CMS_PUBLIC_FOR='all',
CMS_CACHE_DURATIONS={
'menus': 0,
'content': 0,
'permissions': 0,
},
CMS_APPHOOKS=[],
CMS_REDIRECTS=True,
CMS_SEO_FIELDS=True,
CMS_MENU_TITLE_OVERWRITE=True,
CMS_URL_OVERWRITE=True,
CMS_SHOW_END_DATE=True,
CMS_SHOW_START_DATE=True,
CMS_PLUGIN_PROCESSORS=tuple(),
CMS_PLUGIN_CONTEXT_PROCESSORS=tuple(),
CMS_SITE_CHOICES_CACHE_KEY='CMS:site_choices',
CMS_PAGE_CHOICES_CACHE_KEY='CMS:page_choices',
SOUTH_TESTS_MIGRATE=False,
CMS_NAVIGATION_EXTENDERS=(
('cms.test_utils.project.sampleapp.menu_extender.get_nodes', 'SampleApp Menu'),
),
TEST_RUNNER='cms.test_utils.runners.NormalTestRunner',
JUNIT_OUTPUT_DIR='.',
|
Qwaz/solved-hacking-problem | SharifCTF/2016/elliptic.py | Python | gpl-2.0 | 1,402 | 0.009272 | p = 16857450949524777441941817393974784044780411511252189319
A = 16857450949524777441941817393974784044780411507861094535
B = 77986137112576
P = (5732560139258194764535999929325388041568732716579308775, 14532336890195013837874850588152996214121327870156054248)
Q = (2609506039090139098835068603396546214836589143940493046, 8637771092812212464887027788957801177574860926032421582)
def egcd(a, b):
    """Recursive extended Euclidean algorithm.

    Returns (g, x, y) such that a*x + b*y == g == gcd(a, b).
    """
    if a == 0:
        # Base case: gcd(0, b) = b = 0*a + 1*b.
        return (b, 0, 1)
    g, s, t = egcd(b % a, a)
    # Back-substitute: the gcd is unchanged, coefficients shift by the
    # quotient b // a.
    return (g, t - (b // a) * s, s)
def modinv(a, m):
    """Return the multiplicative inverse of a modulo m.

    Raises Exception when gcd(a, m) != 1, i.e. no inverse exists.
    """
    g, x, y = egcd(a, m)
    if g != 1:
        raise Exception('modular inverse does not exist')
    else:
        return x % m
def addPoint(P, Q):
    """Add two points on the curve y^2 = x^3 + A*x + B over GF(p).

    (0, 0) is used as the point at infinity (group identity).
    """
    if P == (0, 0) or Q == (0, 0):
        # Adding the identity returns the other operand unchanged.
        return (P[0]+Q[0], P[1]+Q[1])
    x_1, y_1, x_2, y_2 = P[0], P[1], Q[0], Q[1]
    if (x_1, y_1) == (x_2, y_2):
        if y_1 == 0:
            # Doubling a point with vertical tangent gives infinity.
            return (0, 0)
        # slope of the tangent line
        m = (3 * x_1 * x_1 + A) * modinv(2 * y_1, p)
    else:
        if x_1 == x_2:
            # Distinct points on a vertical line sum to infinity.
            return (0, 0)
        # slope of the secant line
        m = (y_2 - y_1) * modinv((x_2 - x_1 + p) % p, p)
    x_3 = (m*m - x_2 - x_1) % p
    y_3 = (m*(x_1 - x_3) - y_1) % p
    return (x_3, y_3)
def mulPoint(n, P):
    # Scalar multiplication n*P via binary double-and-add:
    # r accumulates the result, t holds P * 2^bit for the current bit.
    r = (0, 0)
    t = P
    while n:
        if n & 1:
            r = addPoint(r, t)
        t = addPoint(t, t)
        n >>= 1
    return r
|
gwangjin2/gwangcoin-core | share/qt/clean_mac_info_plist.py | Python | mit | 900 | 0.016667 | #!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Litecoin-Qt.app contains the right plist (including the right version)
# fix made because of serval bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date
bitcoinDir = "./";
inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "Gwangcoin-Qt.app/Contents | /Info.plist"
version = "unknown";
fileForGrabbingVersion = bitcoinDir+"gwangcoin-qt.pro"
for line in open(fileForGrabbingVersion):
lineArr = line.replace(" ", "").split("=");
if lineArr[0].startswith("VERSION"):
version = lineArr[1].replace("\n", "");
fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=da | te.today().year)
fOut = open(outFile, "w");
fOut.write(newFileContent);
print "Info.plist fresh created"
|
BlackVegetable/starcraft-oracle | sc2reader-master/sc2reader/engine/plugins/selection.py | Python | mit | 3,813 | 0.002098 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals, division
class SelectionTracker(object):
    """ Tracks a player's active selection as an input into other plugins.

        In some situations selection tracking isn't perfect. The plugin will
        detect these situations and report errors. For a player with a high
        level of selection errors, it may be best to ignore the selection
        results as they could have been severely compromised.

        Exposes the following interface, directly integrated into the player:

            for person in replay.entities:
                total_errors = person.selection_errors
                selection = person.selection
                control_group_0 = selection[0]
                ...
                control_group_9 = selection[9]
                active_selection = selection[10]
    """
    name = 'SelectionTracker'

    def handleInitGame(self, event, replay):
        # Slots 0-9 are control groups; slot 10 is the active selection.
        for person in replay.entities:
            person.selection = dict()
            for i in range(11):
                person.selection[i] = list()
            person.selection_errors = 0

    def handleSelectionEvent(self, event, replay):
        # Apply the event's deselection mask, then add the new objects.
        selection = event.player.selection[event.control_group]
        new_selection, error = self._deselect(selection, event.mask_type, event.mask_data)
        new_selection = self._select(new_selection, event.objects)
        event.player.selection[event.control_group] = new_selection
        if error:
            event.player.selection_errors += 1

    def handleGetFromHotkeyEvent(self, event, replay):
        # Recall a control group into the active selection (slot 10).
        selection = event.player.selection[event.control_group]
        new_selection, error = self._deselect(selection, event.mask_type, event.mask_data)
        event.player.selection[10] = new_selection
        if error:
            event.player.selection_errors += 1

    def handleSetToHotkeyEvent(self, event, replay):
        # Overwrite a control group with the active selection.
        event.player.selection[event.control_group] = event.player.selection[10]

    def handleAddToHotkeyEvent(self, event, replay):
        # Merge the active selection into a control group.
        selection = event.player.selection[event.control_group]
        new_selection, error = self._deselect(selection, event.mask_type, event.mask_data)
        new_selection = self._select(new_selection, event.player.selection[10])
        event.player.selection[event.control_group] = new_selection
        if error:
            event.player.selection_errors += 1

    def _select(self, selection, units):
        # Union of both unit lists, deduplicated and kept sorted.
        return sorted(set(selection+units))

    def _deselect(self, selection, mode, data):
        """Returns false if there was a data error when deselecting"""
        if mode == 'None':
            return selection, False
        selection_size, data_size = len(selection), len(data)
        if mode == 'Mask':
            # Deselect objects according to deselect mask
            sfilter = lambda bit_u: not bit_u[0]
            mask = data+[False]*(selection_size-data_size)
            new_selection = [u for (bit, u) in filter(sfilter, zip(mask, selection))]
            error = data_size > selection_size
        elif mode == 'OneIndices':
            # Deselect objects according to indexes
            clean_data = list(filter(lambda i: i < selection_size, data))
            # NOTE(review): `i < selection_size` is always true for
            # enumerate() indices, so nothing is removed here and
            # clean_data is unused -- looks suspicious; confirm intent
            # before changing behavior.
            new_selection = [u for i, u in enumerate(selection) if i < selection_size]
            error = len(list(filter(lambda i: i >= selection_size, data))) != 0
        elif mode == 'ZeroIndices':
            # Select objects according to indexes
            clean_data = list(filter(lambda i: i < selection_size, data))
            new_selection = [selection[i] for i in clean_data]
            error = len(clean_data) != data_size
        return new_selection, error
|
heyfaraday/rustcmb | py/examples/mcmc/image.py | Python | mit | 226 | 0 | from numpy import *
from pylab import *

# Visualise the MCMC chain written by the Rust example: a 2D histogram of
# the two sampled phase parameters (the v1/v2 columns are unused here).
plt.figure(figsize=(10, 8))
columns = genfromtxt(
    '../../../../data/out/rust-examples/mcmc/sample.dat').T
phi_1, phi_2, v1, v2 = columns
plt.hist2d(phi_1, phi_2, bins=100)
plt.colorbar()
plt.show()
| |
yt-project/unyt | unyt/_version.py | Python | bsd-3-clause | 18,446 | 0 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    # In a normal git checkout the "$Format:...$" placeholders stay literal;
    # git_versions_from_keywords() detects that case and bails out.
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
    return keywords
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Deliberately an empty namespace: get_config() populates instances
    attribute by attribute (VCS, style, tag_prefix, ...).
    """
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py -- do not hand-edit; rerun versioneer instead.
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "pep440"
    cfg.tag_prefix = ""  # git tags are bare version numbers (no "v" prefix)
    cfg.parentdir_prefix = "unyt-"  # sdists unpack into unyt-<version>/
    cfg.versionfile_source = "unyt/_version.py"
    cfg.verbose = False
    return cfg
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Raised by individual version-discovery strategies (e.g.
    versions_from_parentdir) so the caller can try the next strategy.
    """
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Run the first of ``commands`` that can be spawned, passing ``args``.

    Returns ``(stdout, returncode)``; ``(None, None)`` when none of the
    candidate commands could be launched, ``(None, returncode)`` on a
    non-zero exit.
    """
    assert isinstance(commands, list)
    process = None
    for command in commands:
        dispcmd = str([command] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [command] + args, cwd=cwd, env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None))
            break
        except EnvironmentError:
            # Python 2/3 compatible exception capture.
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                continue  # command not installed; try the next candidate
            if verbose:
                print("unable to run %s" % dispcmd)
                print(err)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string; walk up to two extra
    directory levels looking for such a name.
    """
    tried = []
    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(root)
        root = os.path.dirname(root)  # up a level
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"d |
nwangtw/statsite | integ/test_integ.py | Python | bsd-3-clause | 15,415 | 0.000843 | import os
import os.path
import socket
import textwrap
import shutil
import subprocess
import contextlib
import sys
import tempfile
import time
import random
try:
import pytest
except ImportError:
print >> sys.stderr, "Integ tests require pytests!"
sys.exit(1)
def pytest_funcarg__servers(request):
    "Returns a new APIHandler with a filter manager"
    # NOTE(review): the docstring looks copy-pasted; this fixture actually
    # boots a statsite process on a random port with a cat-to-file stream
    # sink, and yields (tcp_conn, udp_conn, sink_output_path).
    # Create tmpdir and delete after
    tmpdir = tempfile.mkdtemp()
    # Make the command
    output = "%s/output" % tmpdir
    cmd = "cat >> %s" % output
    # Write the configuration
    port = random.randrange(10000, 65000)
    config_path = os.path.join(tmpdir, "config.cfg")
    conf = """[statsite]
flush_interval = 1
port = %d
udp_port = %d
quantiles = 0.5, 0.9, 0.95, 0.99
[sink_stream_default]
command = %s
[histogram1]
prefix=has_hist
min=10
max=90
width=10
""" % (port, port, cmd)
    open(config_path, "w").write(conf)
    # Start the process
    proc = subprocess.Popen(['./statsite', '-f', config_path])
    proc.poll()
    # poll() only fails fast if the binary exited immediately.
    assert proc.returncode is None
    # Define a cleanup handler
    def cleanup():
        try:
            proc.kill()
            proc.wait()
            shutil.rmtree(tmpdir)
        except:
            print proc
            pass
    request.addfinalizer(cleanup)
    # Make a connection to the server, retrying while statsite starts up.
    connected = False
    for x in xrange(3):
        try:
            conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            conn.settimeout(1)
            conn.connect(("localhost", port))
            connected = True
            break
        except Exception, e:
            print e
            time.sleep(0.5)
    # Die now
    if not connected:
        raise EnvironmentError("Failed to connect!")
    # Make a second connection (UDP) to the same port
    conn2 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    conn2.connect(("localhost", port))
    # Return the connection
    return conn, conn2, output
def wait_file(path, timeout=5):
    """Block until *path* exists and is non-empty, or *timeout* seconds pass.

    Raises Exception if the file never appears; an empty file that never
    fills up just exhausts the timeout silently (historical behaviour).
    """
    deadline = time.time() + timeout
    while not os.path.isfile(path) and time.time() < deadline:
        time.sleep(0.1)
    if not os.path.isfile(path):
        raise Exception("Timed out waiting for file %s" % path)
    while os.path.getsize(path) == 0 and time.time() < deadline:
        time.sleep(0.1)
class TestInteg(object):
    def test_kv(self, servers):
        "Tests adding kv pairs"
        server, _, output = servers
        server.sendall("tubez:100|kv\n")
        wait_file(output)
        # Flushes land on 1s boundaries: accept a timestamp of now or now-1.
        now = time.time()
        out = open(output).read()
        assert out in ("kv.tubez|100.000000|%d\n" % now, "kv.tubez|100.000000|%d\n" % (now - 1))

    def test_gauges(self, servers):
        "Tests adding gauges"
        server, _, output = servers
        server.sendall("g1:1|g\n")
        server.sendall("g1:50|g\n")
        wait_file(output)
        now = time.time()
        out = open(output).read()
        # A plain gauge keeps only the last value written.
        assert out in ("gauges.g1|50.000000|%d\n" % now, "gauges.g1|50.000000|%d\n" % (now - 1))

    def test_gauges_delta(self, servers):
        "Tests adding gauges"
        server, _, output = servers
        server.sendall("gd:+50|g\n")
        server.sendall("gd:+50|g\n")
        wait_file(output)
        now = time.time()
        out = open(output).read()
        # +N/-N gauge deltas accumulate instead of replacing.
        assert out in ("gauges.gd|100.000000|%d\n" % now, "gauges.gd|100.000000|%d\n" % (now - 1))

    def test_gauges_delta_neg(self, servers):
        "Tests adding gauges"
        server, _, output = servers
        server.sendall("gd:-50|g\n")
        server.sendall("gd:-50|g\n")
        wait_file(output)
        now = time.time()
        out = open(output).read()
        assert out in ("gauges.gd|-100.000000|%d\n" % now, "gauges.gd|-100.000000|%d\n" % (now - 1))

    def test_counters(self, servers):
        "Tests adding kv pairs"
        server, _, output = servers
        server.sendall("foobar:100|c\n")
        server.sendall("foobar:200|c\n")
        server.sendall("foobar:300|c\n")
        wait_file(output)
        now = time.time()
        out = open(output).read()
        # Counters report the sum over the flush interval.
        assert out in ("counts.foobar|600.000000|%d\n" % (now),
                       "counts.foobar|600.000000|%d\n" % (now - 1))

    def test_counters_sample(self, servers):
        "Tests adding kv pairs"
        server, _, output = servers
        server.sendall("foobar:100|c|@0.1\n")
        server.sendall("foobar:200|c|@0.1\n")
        server.sendall("foobar:300|c|@0.1\n")
        wait_file(output)
        now = time.time()
        out = open(output).read()
        # The @0.1 sample rate scales the observed sum by 10x.
        assert out in ("counts.foobar|6000.000000|%d\n" % (now),
                       "counts.foobar|6000.000000|%d\n" % (now - 1))
    def test_meters_alias(self, servers):
        "Tests adding timing data with the 'h' alias"
        server, _, output = servers
        msg = ""
        for x in xrange(100):
            msg += "val:%d|h\n" % x
        server.sendall(msg)
        wait_file(output)
        out = open(output).read()
        # Samples 0..99: sum=4950, sum of squares=328350, mean=49.5, etc.
        assert "timers.val.sum|4950" in out
        assert "timers.val.sum_sq|328350" in out
        assert "timers.val.mean|49.500000" in out
        assert "timers.val.lower|0.000000" in out
        assert "timers.val.upper|99.000000" in out
        assert "timers.val.count|100" in out
        assert "timers.val.stdev|29.011492" in out
        assert "timers.val.median|49.000000" in out
        assert "timers.val.p90|90.000000" in out
        assert "timers.val.p95|95.000000" in out
        assert "timers.val.p99|99.000000" in out
        assert "timers.val.rate|4950" in out
        assert "timers.val.sample_rate|100" in out

    def test_meters(self, servers):
        "Tests adding kv pairs"
        server, _, output = servers
        msg = ""
        for x in xrange(100):
            msg += "noobs:%d|ms\n" % x
        server.sendall(msg)
        wait_file(output)
        out = open(output).read()
        # Same distribution as above, via the canonical 'ms' timer type.
        assert "timers.noobs.sum|4950" in out
        assert "timers.noobs.sum_sq|328350" in out
        assert "timers.noobs.mean|49.500000" in out
        assert "timers.noobs.lower|0.000000" in out
        assert "timers.noobs.upper|99.000000" in out
        assert "timers.noobs.count|100" in out
        assert "timers.noobs.stdev|29.011492" in out
        assert "timers.noobs.median|49.000000" in out
        assert "timers.noobs.p90|90.000000" in out
        assert "timers.noobs.p95|95.000000" in out
        assert "timers.noobs.p99|99.000000" in out
        assert "timers.noobs.rate|4950" in out
        assert "timers.noobs.sample_rate|100" in out
    def test_histogram(self, servers):
        "Tests adding keys with histograms"
        server, _, output = servers
        msg = ""
        for x in xrange(100):
            msg += "has_hist.test:%d|ms\n" % x
        server.sendall(msg)
        wait_file(output)
        out = open(output).read()
        # The 'has_hist' prefix matches the [histogram1] config section
        # (min=10, max=90, width=10): 10 samples per bin.
        assert "timers.has_hist.test.histogram.bin_<10.00|10" in out
        assert "timers.has_hist.test.histogram.bin_10.00|10" in out
        assert "timers.has_hist.test.histogram.bin_20.00|10" in out
        assert "timers.has_hist.test.histogram.bin_30.00|10" in out
        assert "timers.has_hist.test.histogram.bin_40.00|10" in out
        assert "timers.has_hist.test.histogram.bin_50.00|10" in out
        assert "timers.has_hist.test.histogram.bin_60.00|10" in out
        assert "timers.has_hist.test.histogram.bin_70.00|10" in out
        assert "timers.has_hist.test.histogram.bin_80.00|10" in out
        assert "timers.has_hist.test.histogram.bin_>90.00|10" in out

    def test_sets(self, servers):
        "Tests adding kv pairs"
        server, _, output = servers
        server.sendall("zip:foo|s\n")
        server.sendall("zip:bar|s\n")
        server.sendall("zip:baz|s\n")
        wait_file(output)
        now = time.time()
        out = open(output).read()
        # Sets report the count of unique values seen.
        assert out in ("sets.zip|3|%d\n" % now, "sets.zip|3|%d\n" % (now - 1))
def test_double_parsing(self, servers):
"Tests string to double parsing"
server, _, output = servers
server.sendall("int1:1|c\n")
server.sendall("decimal1:1.0|c\n")
server.sendall("decimal2:2.3456789|c\n") |
MACBIO/GIS-Scripts | EnvDatabaseFields.py | Python | gpl-3.0 | 2,669 | 0.002623 | print "loading arcpy"
import arcpy
import os
inFolder = r"C:\temp\shapes"
intersectFolder = os.path.join(inFolder, "int")
gridFile = r"C:\Users\Jonah\Documents\ArcGIS\Default.gdb\grid"
arcpy.env.workspace = r"C:\Users\Jonah\Documents\ArcGIS\Default.gdb"
layerName = r"C:\Users\Jonah\Documents\ArcGIS\Default.gdb\gridLayer"
arcpy.MakeFeatureLayer_management(gridFile, layerName)
if not os.path.exists(intersectFolder):
os.makedirs(intersectFolder)
for f in os.listdir(inFolder):
if f.endswith(".shp"):
print "processing", f
inFile = os.path.join(inFolder, f)
intFile = os.path.join(intersectFolder, f)
if not os.path.exists(intFile):
try:
print "intersecting", f
arcpy.Intersect_analysis([inFile, gridFile], intFile, "ALL")
except BaseException as e:
print e
if "area" not in [field.baseName for field in arcpy.ListFields(intFile)]:
try:
print "adding area field to intersected shapefile"
arcpy.AddField_management(intFile, "area", "DOUBLE")
except BaseException as e:
print e
try:
arcpy.CalculateField_management(intFile | , "area", "!shape.area!" , "PYTHON_9.3")
except BaseException as e:
print e
oldfieldName = f.split(os.extsep)[0]
newfieldName = os.path.basename(gridFile).split(os.extsep)[0] + "." + f.split(os.extsep)[0]
fieldList = [f.baseName for f in arcpy.ListFields(gridFile)]
if oldfieldName not in fieldList:
try:
| print "adding shapefile name field to grid"
arcpy.AddField_management(layerName, oldfieldName, "DOUBLE")
except BaseException as e:
print e
try:
print "joining shapefile to grid"
arcpy.AddJoin_management(layerName, "ID", intFile, "ID")
except BaseException as e:
print e
try:
print "calculating proportional area"
gridFilename = os.path.basename(gridFile).split(os.extsep)[0]
arcpy.CalculateField_management(layerName, newfieldName, "[" + str(oldfieldName) + ".area]/[" + gridFilename + ".Shape_Area]", "VB")
except BaseException as e:
print e
try:
print "removing join"
arcpy.RemoveJoin_management(layerName, oldfieldName)
except BaseException as e:
print e
print ""
|
tiredpixel/pikka-bird-server-py | pikka_bird_server/migrations/versions/d6ebfd0a1b_create_services.py | Python | mit | 653 | 0.01072 | """create_services
Revision ID: d6ebfd0a1b
Revises: 37440ef063
Create Date: 2015-04-04 16:57:3 | 4.406021
"""
# revision identifiers, used by Alembic.
revision = 'd6ebfd0a1b'
down_revision = '37440ef063'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``services`` lookup table."""
    # Server-side default: current time in UTC (PostgreSQL).
    utc_now = sa.text("timezone('utc'::text, now())")
    op.create_table(
        'services',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('created_at', sa.DateTime, nullable=False,
                  server_default=utc_now),
        sa.Column('code', sa.String, nullable=False, index=True, unique=True))
def downgrade():
    # Reverse of upgrade(): drop the services table (indexes go with it).
    op.drop_table('services')
|
accraze/bitcoin | qa/rpc-tests/replace-by-fee.py | Python | mit | 21,993 | 0.001273 | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test replace by fee code
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import binascii
MAX_REPLACEMENT_LIMIT = 100
def satoshi_round(amount):
    """Truncate ``amount`` to 8 decimal places (1 satoshi resolution)."""
    one_satoshi = Decimal('0.00000001')
    return Decimal(amount).quantize(one_satoshi, rounding=ROUND_DOWN)
def txToHex(tx):
    """Serialize ``tx`` and return the raw bytes as a hex string."""
    raw = tx.serialize()
    return binascii.hexlify(raw).decode('utf-8')
def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
    """Create a txout with a given amount and scriptPubKey

    Mines coins as needed.

    confirmed - txouts created will be confirmed in the blockchain;
                unconfirmed otherwise.
    Returns the COutPoint of the created txout.
    """
    fee = 1*COIN
    # Mine until the wallet can fund amount + fee.
    while node.getbalance() < satoshi_round((amount + fee)/COIN):
        node.generate(100)

    new_addr = node.getnewaddress()
    txid = node.sendtoaddress(new_addr, satoshi_round((amount+fee)/COIN))
    tx1 = node.getrawtransaction(txid, 1)
    txid = int(txid, 16)
    i = None
    # Locate the vout paying our fresh address.
    for i, txout in enumerate(tx1['vout']):
        if txout['scriptPubKey']['addresses'] == [new_addr]:
            break
    assert i is not None

    # Spend it into a single txout with the requested script.
    tx2 = CTransaction()
    tx2.vin = [CTxIn(COutPoint(txid, i))]
    tx2.vout = [CTxOut(amount, scriptPubKey)]
    tx2.rehash()

    # BUGFIX: a stray bare hexlify expression (result discarded) was
    # removed here; serialize via the shared txToHex() helper instead.
    signed_tx = node.signrawtransaction(txToHex(tx2))
    txid = node.sendrawtransaction(signed_tx['hex'], True)

    # If requested, ensure txouts are confirmed.
    if confirmed:
        mempool_size = len(node.getrawmempool())
        while mempool_size > 0:
            node.generate(1)
            new_size = len(node.getrawmempool())
            # Error out if we have something stuck in the mempool, as this
            # would likely be a bug.
            assert(new_size < mempool_size)
            mempool_size = new_size

    return COutPoint(int(txid, 16), 0)
class ReplaceByFeeTest(BitcoinTestFramework):
    def setup_network(self):
        """Start one node with relay settings permissive enough for RBF."""
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, ["-maxorphantx=1000", "-debug",
                                                              "-relaypriority=0", "-whitelist=127.0.0.1",
                                                              "-limitancestorcount=50",
                                                              "-limitancestorsize=101",
                                                              "-limitdescendantcount=200",
                                                              "-limitdescendantsize=101"
                                                              ]))
        self.is_network_split = False

    def run_test(self):
        """Entry point: run every RBF scenario in sequence."""
        # Seed the wallet/chain before any scenario runs.
        make_utxo(self.nodes[0], 1*COIN)

        print "Running test simple doublespend..."
        self.test_simple_doublespend()

        print "Running test doublespend chain..."
        self.test_doublespend_chain()

        print "Running test doublespend tree..."
        self.test_doublespend_tree()

        print "Running test replacement feeperkb..."
        self.test_replacement_feeperkb()

        print "Running test spends of conflicting outputs..."
        self.test_spends_of_conflicting_outputs()

        print "Running test new unconfirmed inputs..."
        self.test_new_unconfirmed_inputs()

        print "Running test too many replacements..."
        self.test_too_many_replacements()

        print "Running test opt-in..."
        self.test_opt_in()

        print "Running test prioritised transactions..."
        self.test_prioritised_transactions()

        print "Passed\n"
    def test_simple_doublespend(self):
        """Simple doublespend"""
        tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)

        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)

        # Should fail because we haven't changed the fee
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(1*COIN, CScript([b'b']))]
        tx1b_hex = txToHex(tx1b)

        try:
            tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
        except JSONRPCException as exp:
            assert_equal(exp.error['code'], -26) # insufficient fee
        else:
            assert(False)

        # Extra 0.1 BTC fee
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(0.9*COIN, CScript([b'b']))]
        tx1b_hex = txToHex(tx1b)
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)

        # The higher-fee replacement must evict the original from the mempool.
        mempool = self.nodes[0].getrawmempool()
        assert (tx1a_txid not in mempool)
        assert (tx1b_txid in mempool)

        assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))
    def test_doublespend_chain(self):
        """Doublespend of a long chain"""
        initial_nValue = 50*COIN
        tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)

        # Build a chain of txs each paying 1 BTC of fee (40 BTC total).
        prevout = tx0_outpoint
        remaining_value = initial_nValue
        chain_txids = []
        while remaining_value > 10*COIN:
            remaining_value -= 1*COIN
            tx = CTransaction()
            tx.vin = [CTxIn(prevout, nSequence=0)]
            tx.vout = [CTxOut(remaining_value, CScript([1]))]
            tx_hex = txToHex(tx)
            txid = self.nodes[0].sendrawtransaction(tx_hex, True)
            chain_txids.append(txid)
            prevout = COutPoint(int(txid, 16), 0)

        # Whether the double-spend is allowed is evaluated by including all
        # child fees - 40 BTC - so this attempt is rejected.
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(initial_nValue - 30*COIN, CScript([1]))]
        dbl_tx_hex = txToHex(dbl_tx)

        try:
            self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
        except JSONRPCException as exp:
            assert_equal(exp.error['code'], -26) # insufficient fee
        else:
            assert(False) # transaction mistakenly accepted!

        # Accepted with sufficient fee
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(1*COIN, CScript([1]))]
        dbl_tx_hex = txToHex(dbl_tx)
        self.nodes[0].sendrawtransaction(dbl_tx_hex, True)

        # The whole descendant chain must be evicted with the parent.
        mempool = self.nodes[0].getrawmempool()
        for doublespent_txid in chain_txids:
            assert(doublespent_txid not in mempool)
def test_doublespend_tree(self):
"""Doublespend of a big tree of transactions"""
initial_nValue = 50*COIN
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.0001*COIN, _total_txs=None):
if _total_txs is None:
_total_txs = [0]
if _total_txs[0] >= max_txs:
return
txout_value = (initial_value - fee) // tree_width
if txout_value < fee:
return
vout = [CTxOut(txout_value, CScript([i+1]))
for i in range(tree_width)]
tx = CTransaction()
tx.vin = [CTxIn(prevout, nSequence=0)]
tx.vout = vout
tx_hex = txToHex(tx)
assert(len(tx.serialize()) < 100000)
txid = self.nodes[0].sendrawtransaction(tx_hex, True)
yield tx
_total_txs[0] += 1
txid = int(txid, 16)
for i, txout in enumerate(tx.vout):
for x in branch(COutPoint(txid, i), txout_value,
max_txs,
|
uilianries/conan-libusb | test_package/conanfile.py | Python | lgpl-2.1 | 829 | 0.002413 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from conans import ConanFile, CMake, tools, RunEnvironment
import os
class TestPackageConan(ConanFile):
    """Conan test package: build the consumer project and run its binary
    with the library path pointing at the freshly built package."""

    settings = "os", "compiler", "build_type", "arch"
    generators = "cmake"

    def build(self):
        """Configure and build the test consumer via CMake."""
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def test(self):
        """Run the built binary, prefixing the loader path per platform."""
        run_env = RunEnvironment(self)
        with tools.environment_append(run_env.vars):
            bin_path = os.path.join("bin", "test_package")
            if self.settings.os == "Windows":
                command = bin_path
            elif self.settings.os == "Macos":
                command = "DYLD_LIBRARY_PATH=%s %s" % (os.environ.get('DYLD_LIBRARY_PATH', ''), bin_path)
            else:
                command = "LD_LIBRARY_PATH=%s %s" % (os.environ.get('LD_LIBRARY_PATH', ''), bin_path)
            self.run(command)
|
Datateknologerna-vid-Abo-Akademi/date-website | members/tokens.py | Python | cc0-1.0 | 358 | 0 | import six
f | rom django.contrib.auth.tokens import PasswordResetTokenGenerator
class TokenGenerator(PasswordResetTokenGenerator):
    """Account-activation token generator.

    Hashes pk + timestamp + username, so a token is invalidated when the
    account changes (and by the generator's built-in timestamp expiry).
    """

    def _make_hash_value(self, user, timestamp):
        parts = (user.pk, timestamp, user.username)
        return "".join(six.text_type(part) for part in parts)


account_activation_token = TokenGenerator()
|
esistgut/django-content-toolkit | accounts/forms.py | Python | mit | 2,072 | 0.000483 | from django import forms
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import ugettext_lazy as _
from .models import User
class UserCreationForm(forms.ModelForm):
    """Registration form asking for the password twice for confirmation."""

    password1 = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
    password2 = forms.CharField(
        label=_("Password confirmation"), widget=forms.PasswordInput,
        help_text=_("Enter the same password as above, for verification.")
    )

    class Meta:
        model = User
        fields = ("email",)

    def clean_password2(self):
        """Reject mismatching password entries; return the confirmation."""
        pw1 = self.cleaned_data.get("password1")
        pw2 = self.cleaned_data.get("password2")
        mismatch = pw1 and pw2 and pw1 != pw2
        if mismatch:
            raise forms.ValidationError(
                _("The two password fields didn't match."),
                code='password_mismatch',
            )
        return pw2

    def save(self, commit=True):
        """Persist the new user with a properly hashed password."""
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user
class UserChangeForm(forms.ModelForm):
    """Admin form for editing an existing user; never exposes the raw
    password, only its read-only hash."""

    password = ReadOnlyPasswordHashField(
        label=_("Password"),
        help_text=_("Raw passwords are not stored, so there is no way to see "
                    "this user's password, but you can change the password "
                    "using <a href=\"password/\">this form</a>."))

    class Meta:
        model = User
        fields = '__all__'

    def __init__(self, *args, **kwargs):
        super(UserChangeForm, self).__init__(*args, **kwargs)
        # Avoid one query per permission row when rendering the widget.
        perms_field = self.fields.get('user_permissions', None)
        if perms_field is not None:
            perms_field.queryset = perms_field.queryset.select_related('content_type')

    def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # This is done here, rather than on the field, because the
        # field does not have access to the initial value
        return self.initial["password"]
|
Jusedawg/SickRage | tests/sickrage_tests/providers/torrent/parsing_tests.py | Python | gpl-3.0 | 9,387 | 0.002983 | # coding=utf-8
# This file is part of SickRage.
#
# URL: https://SickRage.GitHub.io
# Git: https://github.com/SickRage/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=fixme
"""
Test Provider Result Parsing
When recording new cassettes:
Set overwrite_cassettes = True on line 43
Delete the cassette yml file with the same base filename as this file in the cassettes dir next to this file
Be sure to adjust the self.search_strings so they return results. They must be identical to search strings generated by SickRage
"""
from __future__ import | print_function, unicode_literals
from functools import wraps
import re
import sys
import unittest
from vcr_unittest impo | rt VCRTestCase
# Have to do this before importing sickbeard
sys.path.insert(1, 'lib')
import sickbeard
sickbeard.CPU_PRESET = 'NORMAL'
import validators
# Set True (see module docstring) to allow VCR to re-record cassettes.
overwrite_cassettes = False

# Provider name -> list of test method names skipped for it (see magic_skip).
disabled_provider_tests = {
    # ???
    'Cpasbien': ['test_rss_search', 'test_episode_search', 'test_season_search'],
    # api_maintenance still
    'TorrentProject': ['test_rss_search', 'test_episode_search', 'test_season_search'],
    # Have to trick it into thinking is an anime search, and add string overrides
    'TokyoToshokan': ['test_rss_search', 'test_episode_search', 'test_season_search'],
    # 'Torrrentz': ['test_rss_search', 'test_episode_search', 'test_season_search'],
}

# Provider name -> per-mode replacement search strings, merged over the
# defaults built by TestCase.search_strings().
test_string_overrides = {
    'Cpasbien': {'Episode': ['The 100 S02E16'], 'Season': ['The 100 S02']},
    'NyaaTorrents': {'Episode': ['Fairy Tail S2'], 'Season': ['Fairy Tail S2']},
    'TokyoToshokan': {'Episode': ['Fairy Tail S2'], 'Season': ['Fairy Tail S2']},
    'HorribleSubs': {'Episode': ['Fairy Tail S2'], 'Season': ['Fairy Tail S2']},
}

# Magnet URI: 32/40-char btih infohash with optional dn/tr parameters.
# NOTE(review): the '(:?' groups look like typos for non-capturing '(?:' --
# the pattern still matches, but confirm before tightening it.
magnet_regex = re.compile(r'magnet:\?xt=urn:btih:\w{32,40}(:?&dn=[\w. %+-]+)*(:?&tr=(:?tcp|https?|udp)[\w%. +-]+)*')
class BaseParser(type):
class TestCase(VCRTestCase):
provider = None
        def __init__(self, test):
            """Initialize the test suite"""
            VCRTestCase.__init__(self, test)
            # Disable TLS verification and inject credentials before any
            # cassette-backed request is made.
            self.provider.session.verify = False
            self.provider.username = self.username
            self.provider.password = self.password
        @property
        def username(self):  # pylint: disable=no-self-use
            # TODO: Make this read usernames from somewhere
            # Empty string means anonymous access; subclasses may override.
            return ''

        @property
        def password(self):  # pylint: disable=no-self-use
            # TODO: Make this read passwords from somewhere
            return ''
def search_strings(self, mode):
_search_strings = {
'RSS': [''],
'Episode': ['Game of Thrones S05E08'],
'Season': ['Game of Thrones S05']
}
_search_strings.update(self.provider.cache.search_params)
_search_strings.update(test_string_overrides.get(self.provider.name, {}))
return {mode: _search_strings[mode]}
        def magic_skip(func):  # pylint:disable=no-self-argument
            # Wraps a test method so it is bypassed for providers listed in
            # disabled_provider_tests.
            @wraps(func)
            def magic(self, *args, **kwargs):
                # pylint:disable=no-member
                if func.func_name in disabled_provider_tests.get(self.provider.name, []):
                    print("skipped")
                    # NOTE(review): this returns the unittest.skip decorator
                    # object instead of raising SkipTest, so the test shows
                    # up as passed rather than skipped -- confirm intent.
                    return unittest.skip(str(self.provider.name))
                func(self, *args, **kwargs)
            return magic
def _get_vcr_kwargs(self):
"""Don't allow the suite to write to cassettes unless we say so"""
if overwrite_cassettes:
return {'record_mode': 'new_episodes'}
return {'record_mode': 'once'}
def _get_cassette_name(self):
"""Returns the filename to use for the cassette"""
return self.provider.get_id() + '.yaml'
def shortDescription(self):
if self._testMethodDoc:
return self._testMethodDoc.replace('the provider', self.provider.name)
return None
        @magic_skip
        def test_rss_search(self):
            """Check that the provider parses rss search results"""
            results = self.provider.search(self.search_strings('RSS'))
            # Only daily-search providers are required to return RSS results.
            if self.provider.enable_daily:
                self.assertTrue(self.cassette.requests)
                self.assertTrue(results, self.cassette.requests[-1].url)
            self.assertTrue(len(self.cassette))

        @magic_skip
        def test_episode_search(self):
            """Check that the provider parses episode search results"""
            results = self.provider.search(self.search_strings('Episode'))
            self.assertTrue(self.cassette.requests)
            self.assertTrue(results, self.cassette.requests[-1].url)
            self.assertTrue(len(self.cassette))

        @magic_skip
        def test_season_search(self):
            """Check that the provider parses season search results"""
            results = self.provider.search(self.search_strings('Season'))
            self.assertTrue(self.cassette.requests)
            self.assertTrue(results, self.cassette.requests[-1].url)
            self.assertTrue(len(self.cassette))

        @magic_skip
        def test_cache_update(self):
            """Check that the provider's cache parses rss search results"""
            self.provider.cache.updateCache()
def test_result_values(self):
"""Check that the provider returns results in proper format"""
results = self.provider.search(self.search_strings('Episode'))
for result in results:
self.assertIsInstance(result, dict)
self.assertEqual(sorted(result.keys()), ['hash', 'leechers', 'link', 'seeders', 'size', 'title'])
self.assertIsInstance(result['title'], unicode)
self.assertIsInstance(result['link'], unicode)
self.assertIsInstance(result['hash'], basestring)
self.assertIsInstance(result['seeders'], (int, long))
self.assertIsInstance(result['leechers'], (int, long))
self.assertIsInstance(result['size'], (int, long))
self.assertTrue(len(result['title']))
self.assertTrue(len(result['link']))
self.assertTrue(len(result['hash']) in (0, 32, 40))
self.assertTrue(result['seeders'] >= 0)
self.assertTrue(result['leechers'] >= 0)
self.assertTrue(result['size'] >= -1)
if result['link'].startswith('magnet'):
self.assertTrue(magnet_regex.match(result['link']))
else:
self.assertTrue(validators.url(result['link'], require_tld=False))
self.assertIsInstance(self.provider._get_size(result), (int, long)) # pylint: disable=protected-access
self.assertTrue(all(self.provider._get_title_and_url(result))) # pylint: disable=protected-access
self.assertTrue(self.provider._get_size(result)) # pylint: disable=protected-access
@unittest.skip('Not yet implemented')
def test_season_search_strings_format(self): # pylint: disable=no-self-use, unused-argument, unused-variable
"""Check format of the provider's season search strings"""
pass
@unittest.skip('Not yet implemented')
def test_episode_search_strings_format(self): # pylint: disable=no-self-use, unused-argument, unused-variable
"""Check format of the provider's season search strings"""
pass
def generate_ |
altenia/taskmator | taskmator/task/text.py | Python | mit | 4,755 | 0.002313 | __author__ = 'ysahn'
import logging
import json
import os
import glob
import collections
from mako.lookup import TemplateLookup
from mako.template import Template
from taskmator.task.core import Task
class TransformTask(Task):
"""
Class that transform a json into code using a template
Uses mako as template engine for transformation
"""
logger = logging.getLogger(__name__)
ATTR_TEMPLATE_DIR = u'template_dir'
ATTR_TEMPLATES = u'templates'
ATTR_SRC_DIR = u'src_dir'
ATTR_SRC_FILES = u'src_files'
ATTR_DEST_DIR = u'dest_dir'
ATTR_FILE_PREFIX = u'file_prefix'
ATTR_FILE_EXT = u'file_ext'
__VALID_ATTRS = [ATTR_TEMPLATE_DIR, ATTR_TEMPLATES, ATTR_SRC_DIR, ATTR_SRC_FILES,
ATTR_DEST_DIR, ATTR_FILE_PREFIX, ATTR_FILE_EXT]
def __init__(self, name, parent=None):
"""
Constructor
"""
super(TransformTask, self).__init__(name, parent)
self.template_dir = None
self.templates = collections.OrderedDict()
def setAttribute(self, attrKey, attrVal):
if (attrKey in self.__VALID_ATTRS):
self.attribs[attrKey] = attrVal
else:
super(TransformTask, self).setAttribute(attrKey, attrVal)
def init(self):
super(TransformTask, self).init()
template_dir = self._normalize_dir(self.getAttribute(self.ATTR_TEMPLATE_DIR, './'), './')
template_names = self.getAttribute(self.ATTR_TEMPLATES)
if not template_names:
raise ("Attribute '" + self.ATTR_TEMPLATES + "' is required")
if (isinstance(template_names, basestring)):
template_names = [template_names]
tpl_lookup = TemplateLookup(directories=[template_dir])
for template_name in template_names:
template_paths = glob.glob(template_dir + template_name + '.tpl')
for template_path in template_paths:
atemplate = Template(filename=template_path, lookup=tpl_lookup)
self.templates[template_path] = atemplate
def executeInternal(self, execution_context):
"""
@t | ype execution_context: ExecutionContext
| """
self.logger.info("Executing " + str(self))
src_dir = self._normalize_dir(self.getAttribute(self.ATTR_SRC_DIR, './'), './')
file_patterns = self.getAttribute(self.ATTR_SRC_FILES, '*.json')
file_patterns = file_patterns if file_patterns else '*.json'
# Convert to an array
if (isinstance(file_patterns, basestring)):
file_patterns = [file_patterns]
outputs = {}
for file_pattern in file_patterns:
file_paths = glob.glob(src_dir + file_pattern)
for file_path in file_paths:
model = self._load_model(file_path)
fname = self._get_filaname(file_path, False)
for tpl_path, tpl in self.templates.iteritems():
tpl_name = self._get_filaname(tpl_path, False)
outputs[fname + '.' + tpl_name] = self._transform(tpl, model, self.getParams())
# write to a file
dest_dir = self._normalize_dir(self.getAttribute(self.ATTR_DEST_DIR, './'), './')
file_ext = '.' + self.getAttribute(self.ATTR_FILE_EXT)
for name, output in outputs.iteritems():
self._write(output, dest_dir + name + file_ext)
return (Task.CODE_OK, outputs)
# Private methods
def _normalize_dir(self, dir, default):
dir = dir if dir else default
dir = dir if dir.startswith('/') else os.getcwd() + '/' + dir
return dir if dir.endswith('/') else dir + '/'
def _load_model(self, model_uri):
file = open(model_uri, "r")
file_content = file.read()
model = json.loads(file_content, object_pairs_hook=collections.OrderedDict)
return model
def _transform(self, thetemplate, model, params):
return thetemplate.render_unicode(model=model, params=params)
def _get_filaname(self, file_path, include_ext = True):
"""
Returns the filename
@param file_path: string The path
@param include_ext: boolean Whether or not to include extension
@return: string
"""
retval = file_path
last_sep_pos = file_path.rfind('/')
if (last_sep_pos > -1):
retval = file_path[last_sep_pos+1:]
if (not include_ext):
last_dot_pos = retval.rfind('.')
if (last_dot_pos > -1):
retval = retval[:last_dot_pos]
return retval
def _write(self, data, dest_path):
self._normalize_dir(dest_path, './')
with open(dest_path, "w") as text_file:
text_file.write(data)
|
CareerVillage/slack-moderation | src/moderations/serializers.py | Python | mit | 701 | 0 | from rest_framework import serializers
from .models import Mode | rationAction
class ModerationSerializer(serializers.ModelSerializer):
content_key = serializers.CharField | (write_only=True)
content = serializers.CharField(write_only=True)
content_author_id = serializers.CharField(write_only=True)
auto_approve = serializers.NullBooleanField(default=False)
auto_flag = serializers.NullBooleanField(default=False)
class Meta:
model = ModerationAction
fields = (
'content_key',
'content',
'content_author_id',
'action',
'action_author_id',
'auto_approve',
'auto_flag',
)
|
bukun/bkcase | script/mappad_script/script/place_file_by_sig.py | Python | mit | 563 | 0.001776 | # -*- coding:cp936 -*-
import os
import shutil
inws = r'D:\maplet_dvk\MapPicDir\raw\ÖйúÀúÊ·µØÍ¼¼¯_Ì·_8²á_dd006dd\ÖйúÀúÊ·µØÍ¼¼¯_·ÇÆ´½Ó\µÚ3²á\08Î÷½ú'
wfiles = os.listdir(inws) |
for wfile in wfiles:
infile = os.path.join(inws, wfile)
| if os.path.isfile(infile):
pass
else:
continue
sig_arr = wfile.split('_')
outws = os.path.join(inws, sig_arr[0])
if os.path.exists(outws):
pass
else:
os.mkdir(outws)
outfile = os.path.join(outws, wfile)
shutil.move(infile, outfile)
|
jvicu2001/alexis-bot | modules/simsimi.py | Python | mit | 3,655 | 0.001643 | from aiohttp import ClientSession, ContentTypeError
from bot import Command, categories, BotMentionEvent
class SimSimiException(Exception):
def __init__(self, msg=None, code=None):
super().__init__(msg)
self.code = code
class SimSimiCmd(Command):
__version__ = '1.0.1'
__author__ = 'makzk'
def __init__(self, bot):
super().__init__(bot)
self.name = 'simsimi'
self.aliases = ['s']
self.help = '$[simsimi-help]'
self.category = categories.FUN
self.user_delay = 5
self.allow_pm = False
self.mention_handler = True
self.enabled = False
self.default_config = {
'simsimi_apikey': '',
'simsimi_lang': 'es'
}
def on_loaded(self):
if not self.bot.config.get('simsimi_apikey', ''):
self.log.warn('No API keys added for SimSimi, you can add them to the simsimi_apikeys value on the config.')
self.enabled = True
async def handle(self, cmd):
first = cmd.args[0] if len(cmd.args) > 0 else ''
if not first:
return
if isinstance(cmd, BotMentionEvent) and (not cmd.starts_with or first == 'prefix'):
return
if not self.enabled:
await cmd.answer('$[simsimi-not-available]')
return
if cmd.text in ['off', 'on'] and cmd.owner:
await self.handle_toggle(cmd)
return
await self.handle_talk(cmd)
async def handle_toggle(self, cmd):
if not self.key:
await cmd.answer('$[simsimi-no-apikey]')
return
self.enabled = cmd.text == 'on'
await cmd.answer('ok')
async def handle_talk(self, cmd):
await cmd.typing()
try:
lang = cmd.lng('simsimi-lang') or self.lang
country = cmd.lng('simsimi-country') or None
message = cmd.no_tags()
self.log.debug('Received message: "%s"', message)
resp = await self.talk(cmd.channel, lang, country, message)
await cmd.answer(resp or '$[simsimi-no-answer]', withname=False)
except SimSimiException as e:
if e.code == 228:
await cmd.answer(':speech_balloon: $[simsimi-do-not-understand]', withname=False)
else:
await cmd.answer('$[simsimi-error]', locales={'error': str(e)})
except ContentTypeError as e:
await cmd.answer('⚠️ $[simsimi-cant-answer]')
self.log.exception(e)
@property
def key(self):
return self.bot.config['simsimi_apikey']
@property
def lang(self):
return self.bot.config['simsimi_lang']
_sessions = {}
api_url = 'https://wsapi.simsimi.com/190410/talk'
def get_session(self, channel=None):
channelid = 'global' if not channel else channel.id
if channelid not in self._sessions:
self._sessions[channelid] = ClientSession(headers={'x-api-key': self.key})
return self._sessions[channelid]
async def talk(self, channel, language, country, text):
session = self.get_session(channel)
data = {'lang': language, 'utext': text}
if country:
data['country'] = country if isinstance(country, list) else [country]
async with session.post(self.api_url, json=data) as r:
resp = await r.json()
if r.status != 200:
raise SimSimiException(resp.get('statusMessage', resp.get('message', 'Unknown error')), r.status)
| return ':speech_balloon: ' + resp['atext']
def load | _config(self):
self.on_loaded()
|
ovresko/erpnext | erpnext/selling/report/etude_des_prix_de_vente/etude_des_prix_de_vente.py | Python | gpl-3.0 | 16,610 | 0.045816 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, erpnext, json
from frappe import _, _dict
from erpnext.stock.get_item_details import get_item_details
from frappe.utils import getdate, cstr, flt, fmt_money
def execute(filters=None):
columns, data = [], []
if not filters.group and not filters.receipt and not filters.prix_traite and not filters.ref_fabricant and not filters.item_code and not filters.generation_v and not filters.marque_v and not filters.variant_of and not filters.modele_v and not filters.version and not filters.price_list and not filters.manufacturer:
#frappe.msgprint("Appliquer un filtre")
return columns, data
if filters.get('manufacturer'):
manufacturers = cstr(filters.get("manufacturer")).strip()
filters.manufacturer = [d.strip() for d in manufacturers.split(',') if d]
columns.append({
"fieldname": "item_code",
"label": _("Item Code"),
"width": 150
})
columns.append({
"fieldname": "info",
"label": "Info",
"width": 100
})
columns.append({
"fieldname": "item_name",
"label": _("Item Name"),
"width": 150
})
columns.append({
"fieldname": "uom",
"label": "Unite Mesure",
"width": 150
})
columns.append({
"fieldname": "fabricant",
"label": "Fabricant",
"width": 150
})
columns.append({
"fieldname": "ref_fabricant",
"label": "Ref Fabricant",
"width": 150
})
columns.append({
"fieldname": "perfection",
"label": "Perfection",
"width": 150
})
columns.append({
"fieldname": "receipt",
"label": "Recu d'achat",
"width": 150
})
columns.append({
"fieldname": "last_qty",
"label": "Derniere Qts Achetee",
"width": 250
})
columns.append({
"fieldname": "qts",
"label": "Qte En stock",
"width": 250
})
columns.append({
"fieldname": "qts_projete",
"label": "Qte Projete",
"width": 250
})
columns.append({
"fieldname": "last_purchase_rc",
"label": "Dernier Prix recu",
"width": 250
})
columns.append({
"fieldname": "last_purchase_devise",
"label": "Dernier Prix d'achat (Devise)",
"width": 250
})
columns.append({
"fieldname": "last_purchase_rate",
"label": "Dernier Prix d'achat (DZD)",
"width": 250
})
columns.append({
"fieldname": "taux_change",
"label": "Taux de change",
"width": 200
})
columns.append({
"fieldname": "charge",
"label": "Montant Charges",
"width": 200
})
columns.append({
"fieldname": "pond_valuation",
"label": "Taux valorisation Moyen HT",
"width": 250
})
columns.append({
"fieldname": "pond_valuation_ttc",
"label": "Taux valorisation Moyen TTC",
"width": 250
})
columns.append({
"fieldname": "last_valuation",
"label": "Derniere taux de valorisation HT",
"width": 250
})
columns.append({
"fieldname": "taux_valuation_ttc",
"label": "Taux TVA 19% (DZD)",
"width": 250
})
columns.append({
"fieldname": "last_valuation_ttc",
"label": "Derniere Taux de valorisation TTC",
"width": 250
})
columns.append({
"fieldname": "btn_info",
"label": "Info",
"width": 50
})
columns.append({
"fieldname": "last_qty",
"label": "Derniere Qts achetee",
"width": 130
})
columns.append({
"fieldname": "stock_info",
"label": "Qts Stock",
"width": 100
})
columns.append({
"fieldname": "manufacturer2",
"label": "Fabricant",
"width": 180
})
columns.append({
"fieldname": "prix_traite",
"label": "Etat Etude Prix",
"width": 120
})
columns.append({
"fieldname": "etid_traite",
"label": "Changer Etat",
"width": 120
})
#item_code
#item_name
#uom
#fabricant
#ref_fabricant
#perfection
#receipt
#last_qty
#qts
#qts_projete
#last_purchase_rate
#last_purchase_devise
#last_valuation
#benefice
price_lists = []
price_lists= frappe.get_all("Price List",filters={"selling":1,"buying":0,"enabled":1},fields=["name","currency"])
if price_lists:
for pl in price_lists:
columns.append({
"fieldname": pl.name,
"label": "%s (%s)" % (pl.name,pl.currency),
"width": 450
})
columns.append({
"fieldname": "all_prices",
"label": "Tous les prix",
"width": 280
})
columns.append({
"fieldname": "empty",
"label": "__",
"width": 250
})
mris = []
order_by_statement = "order by it.item_code"
#parent material_request_item - material_request - qty - variant_of - creation
items = frappe.db.sql(
"""
select
it.item_code,
it.prix_traite,
it.item_name,
it.stock_uom,
it.weight_per_unit,
it.item_group,
it.variant_of,
it.perfection,
it.weight_per_unit,
it.is_purchase_item,
it.variant_of,
it.has_variants,
it.manufacturer,
it.last_purchase_rate ,
it.manufacturer_part_no,
it.last_purchase_devise,
it.max_order_qty,
it.max_ordered_variante
from `tabItem` it
where it.disabled=0 and it.has_variants=0 {conditions}
{order_by_statement}
""".format(
conditions=get_conditions(filters),
order_by_statement=order_by_statement
),
filters, as_dict=1)
all_items = []
item_dc = {}
mitems=[]
mcomplements = []
models = []
_models= {item.variant_of for item in items if item.variant_of}
models_copy = []
models_copy.extend(_model | s)
for m in models_copy:
if m in models:
pass
else:
models.insert(len(models),m)
complements = frappe.get_all("Composant",filters={"parent":m,"parentfield":"articles"},fields=["parent","item"])
if complements:
parents = {i.item for i in complements}
if parents:
for t in parents:
_models.discard(t)
if t in models:
models.remove(t)
models.insert(len(models),t)
mcomplements.append(t)
if not models or len(models) <= 0:
frappe.msgprint("Auc | une resultat")
return columns, data
#models = list(set(models))
#models.sort()
for model in models:
_mitems = [item for item in items if item.variant_of == model]
origin_model = frappe.get_doc("Item",model)
mitems.append(origin_model)
mitems.extend(_mitems)
oids = {o.item_code for o in mitems if item.item_code}
others = frappe.get_all("Item",filters={"variant_of":model,"item_code":("not in",oids)},fields=[
"variant_of",
"stock_uom",
"prix_traite",
"perfection",
"is_purchase_item",
"weight_per_unit",
"variant_of",
"has_variants",
"item_name",
"item_code",
"manufacturer",
"last_purchase_rate" ,
"manufacturer_part_no",
"item_group",
"last_purchase_devise",
"max_order_qty",
"max_ordered_variante"])
mitems.extend(others)
#item_code
#item_name
#uom
#fabricant
#ref_fabricant
#perfection
#receipt
#last_qty
#qts
#qts_projete
#last_purchase_rate
#last_purchase_devise
#last_valuation
#benefice
for mri in mitems:
receipt = ''
#if hasattr(mri, 'parent') and mri.parent:
# receipt = mri.parent
global info
qts_max_achat = 0
last_qty = 0
qr_last_qty = frappe.db.sql("""select qty from `tabPurchase Receipt Item`
where item_code=%s and qty>0 and docstatus=1
order by creation desc limit 1""", (mri.item_code), as_dict=1)
if qr_last_qty:
last_qty = qr_last_qty[0].qty
last_valuation = 0
if mri.variant_of:
#variante
info = info_variante(mri.item_code)
qts_max_achat = mri.max_ordered_variante
elif mri.has_variants:
info = info_modele(mri.item_code)
qts_max_achat = mri.max_order_qty
if filters.get("with_qty") and (not info or info[0] <= 0):
continue
sqllast_qty = frappe.db.sql("""select incoming_rate,actual_qty,valuation_rate,voucher_type, voucher_no from `tabStock Ledger Entry`
where item_code=%s and (voucher_type = 'Stock Reconciliation' or voucher_type = 'Purchase Receipt')
order by posting_date desc, posting_time desc limit 1""", (mri.item_code), as_dict=1)
pondere = 0
if sqllast_qty:
receipt = "%s %s" % (sqllast_qty[0].voucher_type, sqllast_qty[0].voucher_no)
last_valuation = sqllast_qty[0].incoming_rate
pondere = sqllast_qty[0].valuation_rate
if last_valuation:
last_valuation = round(last_valuation)
if not pondere:
pondere = frappe.db.g |
lanyudhy/Halite-II | apiserver/apiserver/coordinator/storage.py | Python | mit | 4,017 | 0.000498 | import base64
import binascii
import io
import tempfile
import flask
import google.cloud.storage as gcloud_storage
import google.cloud.exceptions as gcloud_exceptions
from werkzeug.contrib.cache import FileSystemCache
from .. import config, model, util
from .blueprint import coordinator_api
# Cache the worker blob to avoid repeated requests to object storage
cache_dir = tempfile.TemporaryDirectory()
cache = FileSystemCache(cache_dir.name, default_timeout=60*5)
@coordinator_api.route("/download/worker", methods=["GET"])
def download_source_blob():
"""Retrieve the worker blob from object storage."""
cached_blob = cache.get(config.WORKER_ARTIFACT_KEY)
if cached_blob is None:
print("Getting from GCloud", config.WORKER_ARTIFACT_KEY)
# Retrieve from GCloud
try:
gcloud_blob = gcloud_storage.Blob(
config.WORKER_ARTIFACT_KEY,
model.get_deployed_artifacts_bucket(),
chunk_size=262144)
cached_blob = gcloud_blob.download_as_string()
cache.set(config.WORKER_ARTIFACT_KEY, cached_blob)
except gcloud_exceptions.NotFound:
raise util.APIError(404, message="Worker blob not found.")
if cached_blob is None:
raise util.APIError(404, message="Worker blob not found.")
print("Building buffer")
buffer = io.BytesIO()
buffer.write(cached_blob)
buffer.seek(0)
return flask.send_file(buffer, mimetype="application/gzip",
as_attachment=True,
attachment_filename="Halite.tgz")
@coordinator_api.route("/botFile", methods=["POST"])
def upload_bot():
"""Save a compiled bot to object storage."""
user_id = flask.request.form.get("user_id", None)
bot_id = flask.request.form.get("bot_id", None)
if "bot.zip" not in flask.request.files:
raise util.APIError(400, message="Please provide the bot file.")
uploaded_file = flask.request.files["bot.zip"]
# Save to GCloud
blob = gcloud_storage.Blob("{}_{}".format(user_id, bot_id),
model.get_bot_bucket(),
chunk_size=262144)
blob.upload_from_file(uploaded_file)
return util.response_success()
@coordinator_api.route("/botFile", methods=["GET"])
def download_bot():
"""Retrieve a compiled or uncompiled bot from object storage."""
user_id = flask.request | .values.get("user_id", None)
bot_id = flask.request.values.get("bot_id", None)
compile = flask.request.values.get("compile", False)
if compile:
bucket = model.get_compilation_bucket()
else:
bucket = model.get_bot_bucket()
# Retrieve from GCloud
t | ry:
botname = "{}_{}".format(user_id, bot_id)
blob = gcloud_storage.Blob(botname,
bucket, chunk_size=262144)
buffer = io.BytesIO()
blob.download_to_file(buffer)
buffer.seek(0)
return flask.send_file(buffer, mimetype="application/zip",
as_attachment=True,
attachment_filename=botname + ".zip")
except gcloud_exceptions.NotFound:
raise util.APIError(404, message="Bot not found.")
@coordinator_api.route("/botHash")
def hash_bot():
"""Get the MD5 hash of a compiled bot."""
user_id = flask.request.args.get("user_id", None)
bot_id = flask.request.args.get("bot_id", None)
compile = flask.request.args.get("compile", False)
if not user_id or not bot_id:
raise util.APIError(400, message="Please provide user and bot ID.")
if compile:
bucket = model.get_compilation_bucket()
else:
bucket = model.get_bot_bucket()
blob = bucket.get_blob("{}_{}".format(user_id, bot_id))
if blob is None:
raise util.APIError(400, message="Bot does not exist.")
return util.response_success({
"hash": binascii.hexlify(base64.b64decode(blob.md5_hash)).decode('utf-8'),
}) |
dougwig/acos-client | acos_client/v30/slb/hm.py | Python | apache-2.0 | 3,696 | 0 | # Copyright 2014, Jeff Buttars, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import acos_client.errors as acos_errors
import acos_client.v30.base as base
class HealthMonitor(base.BaseV30):
# Valid method objects
ICMP = 'icmp'
TCP = 'tcp'
HTTP = 'http'
HTTPS = 'https'
url_prefix = "/health/monitor/"
_method_objects = {
ICMP: {
"icmp": 1
},
HTTP: {
"http": 1,
"http-port": 80,
"http-expect": 1,
"http-response-code": "200",
"http-url": 1,
"url-type": "GET",
"u | rl-path": "/",
},
HTTPS: {
"https": 1,
"web-port": 443,
"https-expect": 1,
"https-response-code": "200",
"https-url": 1,
"url-type": "GET",
"url-path": "/",
"disable-sslv2hello": 0
} | ,
TCP: {
"method-tcp": 1,
"tcp-port": 80
},
}
def get(self, name, **kwargs):
return self._get(self.url_prefix + name, **kwargs)
def _set(self, action, name, mon_method, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, update=False,
**kwargs):
params = {
"monitor": {
"name": name,
"retry": int(max_retries),
"interval": int(interval),
"timeout": int(timeout),
"method": {
mon_method: self._method_objects[mon_method]
}
}
}
if method:
params['monitor']['method'][mon_method]['url-type'] = method
if url:
params['monitor']['method'][mon_method]['url-path'] = url
if expect_code:
k = "%s-response-code" % mon_method
params['monitor']['method'][mon_method][k] = str(expect_code)
if port:
if mon_method == self.HTTPS:
k = 'web-port'
else:
k = '%s-port' % mon_method
params['monitor']['method'][mon_method][k] = int(port)
if update:
action += name
self._post(action, params, **kwargs)
def create(self, name, mon_type, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, **kwargs):
try:
self.get(name)
except acos_errors.NotFound:
pass
else:
raise acos_errors.Exists()
self._set(self.url_prefix, name, mon_type, interval, timeout,
max_retries, method, url, expect_code, port, **kwargs)
def update(self, name, mon_type, interval, timeout, max_retries,
method=None, url=None, expect_code=None, port=None, **kwargs):
self.get(name) # We want a NotFound if it does not exist
self._set(self.url_prefix, name, mon_type, interval, timeout,
max_retries, method, url, expect_code, port, update=True,
**kwargs)
def delete(self, name):
self._delete(self.url_prefix + name)
|
clld/pycdstar | src/pycdstar/media.py | Python | apache-2.0 | 6,924 | 0.002022 | import os
import hashlib
from string import ascii_letters
import logging
from time import time, strftime
import subprocess
from tempfile import NamedTemporaryFile
import json
from mimetypes import guess_type
import pathlib
from unidecode import unidecode
import pycdstar
from pycdstar.resource import Bitstream
log = logging.getLogger(pycdstar.__name__)
def ensure_unicode(s):
if not isinstance(s, str): # pragma: no cover
s = s.decode('utf8')
return s
class File(object):
def __init__(self, path, temporary=False, name=None, type='original', mimetype=None):
path = pathlib.Path(path)
assert path.exists() and path.is_file()
self.path = path
self.temporary = temporary
self.bitstream_name = name or self.clean_name
self.bitstream_type = type
self._md5 = None
self.mimetype = mimetype or guess_type(self.path.name, strict=False)[0]
@property
def ext(self):
return self.path.suffix.lower()
@property
def clean_name(self):
valid_characters = ascii_letters + '._0123456789'
name = ensure_unicode(self.path.name)
res = ''.join([c if c in valid_characters else '_' for c in unidecode(name)])
assert Bitstream.NAME_PATTERN.match(res)
return res
@property
def md5(self):
if self._md5 is None:
self._md5 = hashlib.md5()
with self.path.open(mode="rb") as fp:
self._md5.update(fp.read())
self._md5 = self._md5.hexdigest()
return self._md5
@property
def size(self):
return self.path.stat().st_size
@staticmethod
def format_size(num):
suffix = 'B'
for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
@property
def size_h(self):
return self.format_size(self.size)
def add_bitstreams(self):
return []
def add_metadata(self):
return {}
def create_object(self, api, metadata=None):
"""
Create an object using the CDSTAR API, with the file content as bitstream.
:param api:
:return:
"""
metadata = {k: v for k, v in (metadata or {}).items()}
metadata.setdefault('creator', '{0.__name__} {0.__version__}'.format(pycdstar))
metadata.setdefault('path', '%s' % self.path)
metadata.update(self.add_metadata())
bitstream_specs = [self] + self.add_bitstreams()
obj = api.get_object()
res = {}
try:
obj.metadata = metadata
for file_ in bitstream_specs:
res[file_.bitstream_type] = file_.add_as_bitstream(obj)
except: # noqa: E722
obj.delete()
raise
return obj, metadata, res
def add_as_bitstream(self, obj):
start = time()
log.info('{0} uploading bitstream {1} for object {2} ({3})...'.format(
strftime('%H:%M:%S'), self.bitstream_name, obj.id, self.size_h))
obj.add_bitstream(
fname=str(self.path), name=self.bitstream_name, mimetype=self.mimetype)
log.info('... done in {0:.2f} secs'.format(time() - start))
if self.temporary and self.path.exists():
self.path.unlink()
return self.bitstream_name
class Audio(File):
"""
Audio file handling requires the `lame` command to convert files to mp3.
"""
def _convert(self):
with NamedTemporaryFile(delete=False, suffix='.mp3') as fp:
subprocess.check_call(['lame', '--preset', 'insane', str(self.path), fp.name])
return fp.name
def add_bitstreams(self):
| if self.mimetype == 'audio/mpeg':
# we only need an alias with correct name!
path = self.path
temporary = False
else:
path = self._convert()
temporary = True
return [File(path, name='web.mp3', type='web', temporary=temporary)]
class Image(File):
"""
Image file handling requires | ImageMagick's `convert` and `identify` commands to
create different resolutions of a file and determine its dimensions.
"""
resolutions = {
'thumbnail': '-thumbnail 103x103^ -gravity center -extent 103x103'.split(),
'web': '-resize 357x357'.split(),
}
def _convert(self, opts):
with NamedTemporaryFile(delete=False, suffix='.jpg') as fp:
subprocess.check_call(['convert', str(self.path)] + opts + [fp.name])
return fp.name
def _identify(self):
res = ensure_unicode(subprocess.check_output(['identify', str(self.path)]))
assert res.startswith(str(self.path))
dim = res.replace(str(self.path), '').strip().split()[1]
return dict(zip(['height', 'width'], map(int, dim.split('x'))))
def add_bitstreams(self):
return [
File(self._convert(opts), temporary=True, name=type_ + '.jpg', type=type_)
for type_, opts in self.resolutions.items()]
def add_metadata(self):
return self._identify()
class Video(File):
"""
Video file handling requires the `ffmpeg` command to convert files to mp4 and the
`ffprobe` command to determine the duration of a video.
"""
def __init__(self, *args, **kw):
File.__init__(self, *args, **kw)
self._props = None
def _ffprobe(self):
cmd = 'ffprobe -loglevel quiet -print_format json -show_streams'.split()
return json.loads(ensure_unicode(subprocess.check_output(cmd + [str(self.path)])))
@property
def duration(self):
if self._props is None:
self._props = self._ffprobe()
return float(self._props['streams'][0]['duration'])
def _ffmpeg(self, iopts, opts, suffix):
with NamedTemporaryFile(delete=False, suffix=suffix) as fp:
if os.path.exists(fp.name):
os.remove(fp.name)
subprocess.check_call(
['ffmpeg'] + iopts + ['-i', str(self.path)] + opts + [fp.name])
return fp.name
def add_bitstreams(self):
thumbnail_offset = '-{0}'.format(min([int(self.duration / 2), 20]))
res = [File(
self._ffmpeg(
['-itsoffset', thumbnail_offset],
['-vcodec', 'mjpeg', '-vframes', '1', '-an', '-f', 'rawvideo'],
'.jpg'),
temporary=True,
name='thumbnail.jpg',
type='thumbnail')]
if self.ext in ['.mov', '.qt', '.mod', '.avi']:
res.append(File(
self._ffmpeg([], '-c:v libx264'.split(), '.mp4'),
name=os.path.splitext(self.clean_name)[0] + '.mp4',
type='mp4'))
return res
def add_metadata(self):
return {'duration': self.duration}
|
Dawny33/Code | Hackerrank/101 Hack Sept/order.py | Python | gpl-3.0 | 346 | 0.020231 | T = int(input())
arr = []
dirr = {}
while(T):
T-=1
a,b = map(int, raw_input().split())
arr.append(a+b)
arr2 = sorted(arr)
fin = []
for i in range(len(arr2)):
for j in range(len(arr)):
if arr | [i] == arr2[j]:
fin.append(j+1)
#print arr |
#print arr2
print reduce(lambda x, y: str(x) + " "+ str(y), fin)
|
NateShoffner/PySnip | feature_server/scheduler.py | Python | gpl-3.0 | 2,339 | 0.002993 | # Copyright (c) Mathias Kaerlev 2012.
# This file is part of pyspades.
# pyspades is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# pyspades is distributed in the hope that | it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with pyspades. If not, see <http://www.gnu.org/licenses/>.
from twisted.internet import reactor
from twisted.internet.task import LoopingCall
try:
from weakref import | WeakSet
except ImportError:
# python 2.6 support (sigh)
from weakref import WeakKeyDictionary
class WeakSet(object):
def __init__(self):
self._dict = WeakKeyDictionary()
def add(self, value):
self._dict[value] = True
def remove(self, value):
del self._dict[value]
def __iter__(self):
for key in self._dict.keys():
yield key
def __contains__(self, other):
return other in self._dict
def __len__(self):
return len(self._dict)
class Scheduler(object):
def __init__(self, protocol):
self.protocol = protocol
self.calls = WeakSet()
self.loops = WeakSet()
def call_later(self, *arg, **kw):
call = reactor.callLater(*arg, **kw)
self.calls.add(call)
return call
def call_end(self, *arg, **kw):
call = self.protocol.call_end(*arg, **kw)
self.calls.add(call)
return call
def loop_call(self, delay, func, *arg, **kw):
loop = LoopingCall(func, *arg, **kw)
loop.start(delay, False)
self.loops.add(loop)
return loop
def reset(self):
for call in self.calls:
if call.active():
call.cancel()
for loop in self.loops:
if loop.running:
loop.stop()
self.calls = WeakSet()
self.loops = WeakSet() |
dorneanu/pyTCP2WS | lib/WebSocketServer.py | Python | bsd-3-clause | 3,326 | 0.000902 | #!/usr/bin/env python3
#-*- coding: utf-8 -*-
import tornado.web
import tornado.websocket
import tornado.httpserver
import tornado.ioloop
import logging
import json
from threading import Thread
from queue import Queue
# Handle WebSocket clients
clients = []
### Handler -------------------------------------------------------------------
class WebSocketHandler(tornado.websocket.WebSocketHandler):
""" Handle default WebSocket connections """
# Logging settings
logger = logging.getLogger("WebSocketHandler")
logger.setLevel(logging.INFO)
def open(self):
""" New connection has been established """
clients.append(self)
self.logger.info("New connection")
def on_message(self, message):
""" Data income event callback """
self.write_message(u"%s" % message)
def on_close(self):
""" Connection was closed """
clients.remove(self)
self.logger.info("Connection removed")
class IndexPageHandler(tornado.web.RequestHandler):
""" Default index page handler. Not implemented yet. """
def get(self):
pass
### Classes -------------------------------------------------------------------
class Application(tornado.web.Application):
def __init__(self):
# Add here several handlers
handlers = [
(r'/', IndexPageHandler),
(r'/websocket', WebSocketHandler)
]
# Application settings
settings = {
'template_path': 'templates'
}
# Call parents con | structor
| tornado.web.Application.__init__(self, handlers, **settings)
class HTTPServer():
""" Create tornado HTTP server serving our application """
def __init__(self, host, port, in_queue=Queue()):
# Settings
self.application = Application()
self.server = tornado.httpserver.HTTPServer(self.application)
self.host = host
self.port = port
self.in_queue = in_queue
# Listen to ..
self.server.listen(self.port, self.host)
# Logging settings
logging.basicConfig(level=logging.DEBUG)
self.logger = logging.getLogger("HTTPServer")
self.logger.setLevel(logging.INFO)
def start_server(self):
""" Start HTTP server """
self.logger.info("Starting HTTP server on port %d" % self.port)
http_server = Thread(target=tornado.ioloop.IOLoop.instance().start)
http_server.start()
def start_collector(self):
""" Start collector server """
self.logger.info("Start collector server")
collector_server = Thread(target=self.collect_data)
collector_server.start()
def collector_process_data(self, data):
""" Process incoming data and send it to all available clients """
for c in clients:
c.on_message(json.dumps(data))
def collect_data(self):
""" Wait for data in individual thread """
self.logger.info("Waiting for incoming data ...")
while True:
item = self.in_queue.get()
self.logger.info("Received data!")
self.collector_process_data(item)
def start(self):
""" Start server """
# Start HTTP server
self.start_server()
# Start data collector
self.start_collector()
|
exildev/AutoLavadox | maq_autolavadox/bin/pildriver.py | Python | mit | 15,553 | 0.000064 | #!/home/dark/Exile/Git/AutoLavadox/maq_autolavadox/bin/python
"""PILdriver, an image-processing calculator using PIL.
An instance of class PILDriver is essentially a software stack machine
(Polish-notation interpreter) for sequencing PIL image
transformations. The state of the instance is the interpreter stack.
The only method one will normally invoke after initialization is the
`execute' method. This takes an argument list of tokens, pushes them
onto the instance's stack, and then tries to clear the stack by
successive evaluation of PILdriver operators. Any part of the stack
not cleaned off persists and is part of the evaluation context for
the next call of the execute method.
PILDriver doesn't catch any exceptions, on the theory that these
are actually diagnostic information that should be interpreted by
the calling code.
When called as a script, the command-line arguments are passed to
a PILDriver instance. If there are no command-line arguments, the
module runs an interactive interpreter, each line of which is split into
space-separated tokens and passed to the execute method.
In the method descriptions below, a first line beginning with the string
`usage:' means this method can be invoked with the token that follows
it. Following <>-enclosed arguments describe how the method interprets
the entries on the stack. Each argument specification begins with a
type specification: either `int', `float', `string', or `image'.
All operations consume their arguments off the stack (use `dup' to
keep copies around). Use `verbose 1' to see the stack state displayed
before each operation.
Usage examples:
`show crop 0 0 200 300 open test.png' loads test.png, crops out a portion
of its upper-left-hand corner and displays the cropped portion.
`save rotated.png rotate 30 open test.tiff' loads test.tiff, rotates it
30 degrees, and saves the result as rotated.png (in PNG format).
"""
# by Eric S. Raymond <esr@thyrsus.com>
# $Id$
# TO DO:
# 1. Add PILFont capabilities, once that's documented.
# 2. Add PILDraw operations.
# 3. Add support for composing and decomposing multiple-image files.
#
from __future__ import print_function
from PIL import Image
class PILDriver(object):
verbose = 0
def do_verbose(self):
"""usage: verbose <int:num>
Set verbosity flag from top of stack.
"""
self.verbose = int(self.do_pop())
# The evaluation stack (internal only)
stack = [] # Stack of pending operations
def push(self, item):
"Push an argument onto the evaluation stack."
self.stack.insert(0, item)
def top(self):
"Return the top-of-stack element."
return self.stack[0]
# Stack manipulation (callable)
def do_clear(self):
"""usage: clear
Clear the stack.
"""
self.stack = []
def do_pop(self):
"""usage: pop
Discard the top element on the stack.
"""
return self.stack.pop(0)
def do_dup(self):
"""usage: dup
Duplicate the top-of-stack item.
"""
if hasattr(self, 'format'): # If it's an image, do a real copy
dup = self.stack[0].copy()
else:
dup = self.stack[0]
self.push(dup)
def do_swap(self):
"""usage: swap
Swap the top-of-stack item with the next one down.
"""
self.stack = [self.stack[1], self.stack[0]] + self.stack[2:]
# Image module functions (callable)
def do_new(self):
"""usage: new <int:xsize> <int:ysize> <int:color>:
Create and push a greyscale image of given size and color.
"""
xsize = int(self.do_pop())
ysize = int(self.do_pop())
color = int(self.do_pop())
self.push(Image.new("L", (xsize, ysize), color))
def do_open(self):
"""usage: open <string:filename>
Open the indicated image, read it, push the image on the stack.
"""
self.push(Image.open(self.do_pop()))
def do_blend(self):
"""usage: blend <image:pic1> <image:pic2> <float:alpha>
Replace two images and an alpha with the blended image.
"""
image1 = self.do_pop()
image2 = self.do_pop()
alpha = float(self.do_pop())
self.push(Image.blend(image1, image2, alpha))
def do_composite(self):
"""usage: composite <image:pic1> <image:pic2> <image:mask>
Replace two images and a mask with their composite.
"""
image1 = self.do_pop()
image2 = self.do_pop()
mask = self.do_pop()
self.push(Image.composite(image1, image2, mask))
def do_merge(self):
"""usage: merge <string:mode> <image:pic1>
[<image:pic2> [<image:pic3> [<image:pic4>]]]
Merge top-of stack images in a way described by the mode.
"""
mode = self.do_pop()
bandlist = []
for band in mode:
bandlist.append(self.do_pop())
self.push(Image.merge(mode, bandlist))
# Image class methods
def do_convert(self):
"""usage: convert <string:mode> <image:pic1>
Convert the top image to the given mode.
"""
mode = self.do_pop()
image = self.do_pop()
self.push(image.convert(mode))
def do_copy(self):
"""usage: copy <image:pic1>
Make and push a true copy of the top image.
"""
self.dup()
def do_crop(self):
"""usage: crop <int:left> <int:upper> <int:right> <int:lower>
<image:pic1>
Crop and push a rectangular region from the current image.
"""
left = int(self.do_pop())
upper = int(self.do_pop())
right = int(self.do_pop())
lower = int(self.do_pop())
image = self.do_pop()
self.push(image.crop((left, upper, right, lower)))
def do_draft(self):
"""usage: draft <string:mode> <int:xsize> <int:ysize>
Configure the loader for a given mode and size.
"""
mode = self.do_pop()
xsize = int(self.do_pop())
ysize = int(self.do_pop())
self.push(self.draft(mode, (xsize, ysize)))
def do_filter(self):
"""usage: filter <string:filtername> <image:pic1>
Process the top image with the given filter.
"""
from PIL import ImageFilter
imageFilter = getattr(ImageFilter, self.do_pop().upper())
image = self.do_pop()
self.push(image.filter(imageFilter))
def do_getbbox(self):
"""usage: getbbox
Push left, upper, right, and lower pixel coordinates of the top image.
"""
bounding_box = self.do_pop().getbbox()
self.push(bounding_box[3])
self.push(bounding_box[2])
self.push(bounding_box[1])
self.push(bounding_box[0])
def do_getextrema(self):
"""usage: extrema
Push minimum and maximum pixel values of the top image.
"""
extrema = self.do_pop().extrema()
self.push(extrema[1])
self.push(extrema[0])
def do_offset(self):
"""usage: offset <int:xoffset> <int:yoffset> <image:pic1>
Offset the pixels in the top image.
"""
xoff = int(self.do_pop())
yoff = int(self.do_pop())
image = self.do_pop()
self.push(image.offset(xoff, yoff))
def do_paste(self):
"""usage: paste <image:figure> <int:xoffset> <int:yoffset>
<image:ground>
Paste figure image into ground with upper left at given offsets.
"""
figure = self.do_pop()
xoff = int(self.do_pop())
yoff = int(self.do_pop())
ground = self.do_pop()
if figure.mode == "RGBA":
| ground.paste(figure, (xoff, yoff), figure)
else:
ground.paste(figure, (xoff, yoff))
self.push(ground)
def do_resize(self):
"""usage: re | size <int:xsize> <int:ysize> <image:pic1>
Resize the top image.
"""
ysize = int(self.do_pop())
xsize = int(self.do_pop())
image = self.do_pop()
self.push(image.resize( |
desec-io/desec-stack | api/manage.py | Python | mit | 248 | 0 | #!/usr/bin/ | env python
import sys
import os
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.settings")
fr | om django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
tms1337/polls-app | django_polls/urls.py | Python | mit | 820 | 0 | """django_polls URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Hom | e
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', adm | in.site.urls),
url(r'^polls/', include('polls.urls'))
]
|
jorgenschaefer/healthmonitor | healthmonitor/settings_devel_fast.py | Python | agpl-3.0 | 240 | 0 | # Settings for running unittests. These are optimized for s | peed.
from .settings_devel import * # noqa
COMPRESS_ENABLED = False
COMPRESS_PRECOMPILERS = []
MIGRATION_MODULES = {
"wei | ght": "healthmonitor.migrations_not_used_in_tests"
}
|
googleads/googleads-python-lib | examples/ad_manager/v202202/proposal_line_item_service/get_all_proposal_line_items.py | Python | apache-2.0 | 1,904 | 0.007353 | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all proposal line items.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
proposal_line_item_service = client.GetService(
'ProposalLineItemService', version='v202202')
# Create a statement to select proposal line items.
statement = ad_manager.StatementBuilder(version='v202202')
# Retrieve a small amount of proposal line items at a time, paging
# through until all proposal line items have been retrieved.
while True:
response = proposal_line_item_service.getProposalLineItemsByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
for proposal_line_item in response['results']:
# Print out some information for each proposal line item.
print('Proposal line item with ID "%d" and name "%s" was found.\n' %
(proposal_ | line_item['id'], proposal_line_item['name']))
statement.offset += statement.limit
else:
break
print('\nNumber of results found: %s' % response['totalResultSetSize'])
if __name__ == '__main__' | :
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
|
hdzierz/Kaka | scripts/load_length_frequency_data.py | Python | gpl-2.0 | 2,940 | 0.003061 | # -*- coding: utf-8 -*-
from api.connectors import *
from seafood.models import *
from api.imports import *
import time
import datetime
from django.utils.timezone import get_current_timezone, make_aware
def convert_date(dt):
return time.strptime(dt, "%d.%m.%Y")
def convert_date_time(dt, default=datetime.datetime.now(), fmt="%d %b %Y %H:%M:%S"):
#dt = dt.replace('a.m.', 'AM')
#dt = dt.replace('p.m.', 'PM')
tz = get_current_timezone()
if(dt):
dt = time.strptime(dt, fmt)
dt = datetime.datetime.fromtimestamp(time.mktime(dt))
return make_aware(dt, tz)
else:
return make_aware(default, tz)
def convert_boolean(val):
if val == 'Y':
return True
else:
return False
def convert_int(val):
try:
return int(val)
except Exception:
return None
class ImportFish(ImportOp):
ob_ct = 0
@staticmethod
def LoadFishDataOp(line, succ):
sp, created = Species.objects.get_or_create(name=line['Species'])
vessel, created = Vessel.objects.get_or_create(name=line['Vessel'])
trip, created = Trip.objects.get_or_create(
name='Trip_' + line['Voyage'],
vessel=vessel,
study = ImportFish.study,
datasource=ImportFish.ds,
)
tow, created = Tow.objects.get_or_create(
name='Tow_' + line['Tow'],
trip=trip,
study=ImportFish.study,
datasource=ImportFish.ds,
)
fob = LengthFrequencyOb()
fob.name = line['Vessel']
fob.recordeddate = datetime.datetime.now()
fob.species = sp
fob.trip = trip
fob.vessel = vessel
fob.datasource = ImportFish.ds
fob.study = ImportFish.study
fob.tow=tow
SaveKVs(fob, line)
fob.save()
ImportFish.ob_ct += 1
return True
@staticmethod
def CleanOp():
FishOb.objects.filter(datasource=ImportFish.ds).delete()
Trip.objects.filter(datasource=ImportFish.ds).delete()
Tow.objects.filter(datasource=ImportFish.ds).delete()
LengthFrequenc | yOb.objects.filter(datasource=ImportFish.ds).delete()
def load_data(fn):
conn = CsvConnector(fn, delimiter=',')
im = GenericImport(conn, ImportFish.study, ImportFish.ds)
im.load_op = Impo | rtFish.LoadFishDataOp
im.clean_op = ImportFish.CleanOp
im.Clean()
im.Load()
def init():
dt = datetime.datetime.now()
ds, created = DataSource.objects.get_or_create(
name='FishImp LengthFrequency',
supplieddate=dt
)
st = Study.get_or_create_from_name(
name='Fish Length Frequencies',
species_name='Fish',
study_group_name='Fish Trips',
study_area_name='Fish Studies')
ImportFish.study = st
ImportFish.ds = ds
def run():
init()
load_data('data/Compiled_length_frequencies_BBC.csv')
|
akretion/stock-logistics-workflow | stock_picking_manual_procurement_group/__init__.py | Python | agpl-3.0 | 149 | 0 | # -*- coding: utf-8 -*-
# Copyright 2016 OpenSynergy Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from | . import | models
|
johannv/pythonTestEasy | correction/Code.py | Python | mit | 5,151 | 0.003922 | # -*- coding: utf-8 -*-
'''
Created on 9 déc. 2012
@author: Vincent Bruneau, Johann Verbroucht
'''
import unicodedata
from Student import Student
from Teacher import Teacher
class Code(object):
'''
Pour réaliser ces exercices il n'y a pas besoin de modifier les autres
classes, il suffit d'écrire les fonctions nécessaires.
'''
'''Exercice 1:
Développez la fonction permettant de retourner le nombre d'élément d'une
liste.
'''
def get_list_size(self, mylist):
return len(mylist)
'''
Exercice 2:
Développez la fonction permettant de retourner la factoriel d'un nombre.
Exemple: 6! = 6*5*4*3*2*1 = 720
'''
def factoriel(self, number):
if number <= 1:
return 1
else:
return number * self.factoriel(number - 1)
'''
Exercice 3:
Développez la fonction permettant de retourner le plus grand nombre
d'une liste.
Si la liste est vide, la fonction renvoie 0.
'''
def get_max_in_list(self, mylist):
if len(mylist) == 0:
return 0
return max(mylist)
'''
Exer | cice 4:
Développez la fonction qui renvoie la liste triée par ordre croissant.
'''
def sort_list(self, mylist):
return sorted(mylist)
'''
Exercice 5:
| Développez la fonction qui renvoie une liste sans nombres impairs.
'''
def delete_uneven(self, mylist):
evenlist = list()
for element in mylist:
if element % 2 == 0:
evenlist.append(element)
return evenlist
'''
Exercice 6:
Développez la fonction permettant de retourner le nombre d'occurrences
d'une chaîne de caractères dans une autre.
Exemples:
get_occurrence('foo', 'foobar foo') retourne 2
get_occurrence('foo', 'foofoo foobar') retourne 3
'''
def get_occurrence(self, string1, string2):
return string2.count(string1)
'''
Exercice 7:
Développez la fonction permettant de créer un nouvel élève en remplissant
ses informations.
Il faut aussi créer un professeur et l'associer à un élève.
'''
def create_student(self, studentid, studentlastname, studentfirstname, teacherid, teacherlastname, teacherfirstname):
student = Student(studentid, studentlastname, studentfirstname)
teacher = Teacher(teacherid, teacherlastname, teacherfirstname)
student.teacher = teacher
return student
'''
Exercice 8:
Développez la fonction qui renvoie la moyenne de l'élève.
'''
def get_average(self, student):
result = 0
for mark in student.marklist:
result += mark.mark
return result / len(student.marklist)
'''
Exercice 9:
Développez la fonction qui renvoie la meilleure note de l'élève.
'''
def get_best_mark(self, student):
sorted_mark = sorted(student.marklist, key=lambda mark: mark.mark, reverse=True)
return sorted_mark[0]
'''
Exercice 10:
Développez la fonction qui renvoie la liste des notes par ordre croissant.
'''
def sort_mark_list(self, student):
return sorted(student.marklist, key=lambda mark: mark.mark)
'''
Exercice 11:
Un nombre de Kaprekar est un nombre qui lorsqu'il est élevé au carré,
peut être séparé en une partie gauche et une partie droite (non nulle)
telles que la somme donne le nombre initial.
Exemples:
703 est un nombre de Kaprekar car 703² = 494 209 et que 494 + 209 = 703.
4879 est un nombre de Kaprekar car 4879² = 23 804 641 et 238 + 04641 = 4879.
Développez la fonction qui permet de tester si un nombre est un nombre de
Kaprekar ou non.
Attention: 1 est considéré comme un nombre de Kaprekar,
2 et 3 ne le sont pas.
'''
def is_a_kaprekar_number(self, number):
if number == 1:
return True
squareString = str(number * number)
for i in range(1,len(squareString)):
if int(squareString[0:i]) + int(squareString[i:]) == number:
return True
return False
'''
Exercice 12:
Développez la fonction qui indique si un mot est un palindrome ou non. Un
palindrome est un mot ou une phrase dont l'ordre des lettres reste le
même qu'on le lise de gauche à droite ou de droite à gauche.
Attention: Ne pas tenir compte de la ponctuation, ni des accents.
Exemples:
Eh ! ça va la vache.
Kayak ...
'''
def is_a_palindrome(self, string):
normalizedString = self.normalize_string(string)
length = len(normalizedString)
return normalizedString[0:length / 2] == normalizedString[length: length / 2: -1]
'''
Cette fonction permet de supprimer les caractères accentués, les espaces
et la ponctuation d'une chaîne de caractère. Exemple:
"Il a arrêté une voiture." ==> "Ilaarreteunevoiture"
'''
def normalize_string(self, string):
return ''.join(c for c in unicodedata.normalize('NFKD', unicode(string)) if c.isalnum()).lower()
|
activitycentral/statistics-consolidation | test/test_cons.py | Python | gpl-3.0 | 337 | 0.002967 | #!/usr/bin/env python
import sugar_stats_consolidation
from sugar_stats_consolidation.db import *
from sugar_stats_consolidation. | rrd_files import *
from sugar_stats_consolidation.consolidation import *
db = DB_Stats('statistics', 'root', 'gustavo')
db.create();
con = Consolidation('/var/lib/suga | r-stats/rrd', db)
con.process_rrds()
|
miketrumpis/compsense_demo | csdemo/demos/compsense_demo.py | Python | bsd-3-clause | 5,118 | 0.007425 | #!/usr/bin/env python
from time import time
import numpy as np
from scipy.sparse.linalg import LinearOperator, cg
from csdemo.utils.bdct_linapprox_ordering import bdct_linapprox_ordering
from csdemo.utils.psnr import psnr
from csdemo.measurements.dct2_xforms import A_dct2, At_dct2
from csdemo.measurements.lpnoiselet_xforms import A_lpnlet, At_lpnlet
import csdemo.optimization.tvqc as tvqc
def _find_image(name):
import os.path as p
name, ext = p.splitext(name)
name += '.npy'
data_path = p.join(p.split(__file__)[0], p.join('imagedata', name))
a = np.load(data_path)
return a
def compare_recons(n_coefs, image_name='cameraman',
return_images=False, be_loud=False):
"""
Run demo as a function of the number of coefficients used in
reconstructing the image.
Returns
-------
DCT psnr, LPTV psnr, CS psnr
if return_images is True, then the 3 corresponding reconstructions
are returned following the 3-tuple of psnr measurements
"""
pic = _find_image(image_name)
n = pic.shape[0]
x = pic.flatten().astype('d')
N = len(x)
# for repeatable experiments
np.random.seed(1981)
# for linear approximation
lporder = bdct_linapprox_ordering(n, n)
# low pass the DCT to contain K1 coefficients
K1 = 1000
omega1 = lporder[:K1]
# "% for random projection, avoid mean" -- I think avoid sampling DCT at k=0
q = np.arange(N)
np.random.shuffle(q)
# K2 = number of auxiliary measurements
# (either noiselet or more dct2)
K2 = n_coefs
# --- Measurement functions ----------------------------------------------
omega2 = q[:K2]
# for DCT + noiselet approximation
Phi = lambda z: A_lpnlet(z, n, omega1, omega2)
Phit = lambda z: At_lpnlet(z, n, omega1, omega2)
# for linear and tvlp approximations
om_lin = lporder[:(K1+K2)]
Phi2 = lambda z: A_dct2(z, n, om_lin)
Phi2t = lambda z: At_dct2(z, n, om_lin)
# take measurements
y = Phi(x)
y2 = Phi2(x)
# linear DCT reconstruction
xlin = Phi2t(y2)
# optimal l2 solution for compressed sensing, use this
# image as a starting point for CS optimization
PPt = lambda z: Phi(Phit(z))
A = LinearOperator( (K1+K2, K1+K2), matvec=PPt, dtype=y.dtype )
y0, i = cg(A, y, tol=1e-8, maxiter=200)
if i != 0:
if i < 0:
raise ValueError('bad inputs to CG algorithm')
else:
print 'Warning, CG did not converge after', i, 'iterations'
x0 = Phit(y0)
# parameters for optimization
lb_tol = 918
## lb_tol | = 7
mu = 5
cg_tol = 1e-8
cg_maxiter = 800
# lowpass tv recovery
eps2 = 1e-3 * np.dot(y2,y2)**0.5
# make LinearOperators from Phi2, Phi2t
# Phi2 is (K1+K2, N)
A = LinearOperator( (K1+K2, N), matvec=Phi2, dtype=y2.dtype )
# Phi2t is (N, K1+K2)
At = LinearOperator( (N, K1+K2), matvec=Phi2t, dtype=y2.dtype )
print 'finding LPTV solution'
xlptv, tlptv = tvqc.logbarrier(
xlin, A, At, y2, eps2, lb_tol, mu, cg_tol, cg_maxiter, be_ | loud=be_loud
)
xlptv.shape = (n,n)
# CS recovery
eps = 1e-3 * np.dot(y,y)**0.5
A = LinearOperator( (K1+K2, N), matvec=Phi, dtype=y.dtype )
At = LinearOperator( (N, K1+K2), matvec=Phit, dtype=y.dtype )
xp, tp = tvqc.logbarrier(
x0, A, At, y, eps, lb_tol, mu, cg_tol, cg_maxiter, be_loud=be_loud
)
xp.shape = (n, n)
xlin.shape = (n,n)
dct_psnr = psnr(pic, xlin)
lptv_psnr = psnr(pic, xlptv)
cs_psnr = psnr(pic, xp)
r_tuple = (dct_psnr, lptv_psnr, cs_psnr)
if return_images:
r_tuple += (xlin, xlptv, xp)
return r_tuple
def compare_at(n_coefs_trials, be_loud=False, plot=False):
"""
Run a number of trials of the comparison demo, using the the given
sequence as the number of coefficients at each step
"""
dct_psnrs = []
lptv_psnrs = []
cs_psnrs = []
for nc in n_coefs_trials:
dct, lptv, cs = compare_recons(nc, be_loud=be_loud)
dct_psnrs.append(dct)
lptv_psnrs.append(lptv)
cs_psnrs.append(cs)
if plot:
import matplotlib.pyplot as pp
f = pp.figure()
ax = f.add_subplot(111)
n_coefs = np.array(n_coefs_trials)
ax.plot(n_coefs, dct, 'b')
ax.plot(n_coefs, dct, 'b.')
ax.plot(n_coefs, lptv, 'g')
ax.plot(n_coefs, lptv, 'g.')
ax.plot(n_coefs, cs, 'r')
ax.plot(n_coefs, cs, 'r.')
## x_min = n_coefs.min()
## x_max = n_coefs.max()
## x_width = x_max - x_min
## ax.set_xlim( (x_min - 0.05*width, x_max + 0.05*width) )
ax.set_xlabel('Number of Measurements')
ax.set_ylabel('PSNR')
pp.show()
return dct_psnrs, lptv_psnrs, cs_psnrs
def show_comparison(n_coefs):
dct, lptv, cs = compare_recons(n_coefs, be_loud=True)
print 'K = 1000 +', n_coefs, '=', 1000+n_coefs
print 'DCT PSNR = %5.2f'%dct
print 'LPTV PSNR = %5.2f'%lptv
print 'CS PSNR = %5.2f'%cs
if __name__=='__main__':
show_comparison(20000)
|
nishworks/Flask-starter | flask_app/api/base.py | Python | mit | 995 | 0.001005 | from __future__ import absolute_import
import decimal
import json
import logging
import flask
log = log | ging.getLogger(__name__)
def json_handler(obj):
""" Handles non-serializable objects """
if isinstance(obj, decimal.Decimal):
return float(obj)
try:
return str(obj)
except TypeError:
return obj.__dict__
def json_response(response, status_code, message=None, errors=None, headers=None):
""" Return a http json response """
response = {
| 'uri': flask.request.path,
'message': message,
'status': status_code,
'request-params': flask.g.request_args,
'request-method': flask.request.method,
'response': response,
'errors': errors
}
resp = flask.make_response(
json.dumps(response, default=json_handler),
status_code,
{'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*'})
resp.headers.extend(headers or {})
return resp
|
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_03_01/models/application_gateway_authentication_certificate.py | Python | mit | 1,860 | 0.001075 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
|
from .sub_resource import SubResource
cl | ass ApplicationGatewayAuthenticationCertificate(SubResource):
"""Authentication certificates of an application gateway.
:param id: Resource ID.
:type id: str
:param data: Certificate public data.
:type data: str
:param provisioning_state: Provisioning state of the authentication
certificate resource. Possible values are: 'Updating', 'Deleting', and
'Failed'.
:type provisioning_state: str
:param name: Name of the resource that is unique within a resource group.
This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'data': {'key': 'properties.data', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs):
super(ApplicationGatewayAuthenticationCertificate, self).__init__(**kwargs)
self.data = kwargs.get('data', None)
self.provisioning_state = kwargs.get('provisioning_state', None)
self.name = kwargs.get('name', None)
self.etag = kwargs.get('etag', None)
|
stan-dev/math | lib/boost_1.75.0/tools/build/src/build/property.py | Python | bsd-3-clause | 23,372 | 0.003637 | # Status: ported, except for tests.
# Base revision: 64070
#
# Copyright 2001, 2002, 2003 Dave Abrahams
# Copyright 2006 Rene Rivera
# Copyright 2002, 2003, 2004, 2005, 2006 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import re
import sys
from functools import total_ordering
from b2.util.utility import *
from b2.build import feature
from b2.util import sequence, qualify_jam_action, is_iterable_typed
import b2.util.set
from b2.manager import get_manager
__re_two_ampersands = re.compile ('&&')
__re_comma = re.compile (',')
__re_split_condition = re.compile ('(.*):(<.*)')
__re_split_conditional = re.compile (r'(.+):<(.+)')
__re_colon = re.compile (':')
__re_has_condition = re.compile (r':<')
__re_separate_condition_and_property = re.compile (r'(.*):(<.*)')
_not_applicable_feature='not-applicable-in-this-context'
feature.feature(_not_applicable_feature, [], ['free'])
__abbreviated_paths = False
class PropertyMeta(type):
"""
This class exists to implement the isinstance() and issubclass()
hooks for the Property class. Since we've introduce the concept of
a LazyProperty, isinstance(p, Property) will fail when p is a LazyProperty.
Implementing both __instancecheck__ and __subclasscheck__ will allow
LazyProperty instances to pass the isinstance() and issubclass check for
the Property class.
Additionally, the __call__ method intercepts the call to the Property
constructor to ensure that calling Property with the same arguments
will always return the same Property instance.
"""
_registry = {}
current_id = 1
def __call__(mcs, f, value, condition=None):
"""
This intercepts the call to the Property() constructor.
This exists so that the same arguments will always return the same Property
instance. This allows us to give each instance a unique ID.
"""
from b2.build.feature import Feature
if not isinstance(f, Feature):
f = feature.get(f)
if condition is None:
condition = []
key = (f, value) + tuple(sorted(condition))
if key not in mcs._registry:
instance = super(PropertyMeta, mcs).__call__(f, value, condition)
mcs._registry[key] = instance
return mcs._registry[key]
@staticmethod
def check(obj):
return (hasattr(obj, 'feature') and
hasattr(obj, 'value') and
hasattr(obj, 'condition'))
def __instancecheck__(self, instance):
return self.check(instance)
def __subclasscheck__(self, subclass):
return self.check(subclass)
@total_ordering
class Property(object):
    """A single (feature, value) pair, optionally guarded by a condition.

    Instances are interned by PropertyMeta: constructing Property with the
    same (feature, value, condition) arguments always returns the same
    object, and each distinct instance receives a unique integer ``id``
    used as a fast cache key (instead of string comparison) by callers.
    """
    __slots__ = ('feature', 'value', 'condition', '_to_raw', '_hash', 'id')
    # Python 2 style metaclass declaration; PropertyMeta interns instances.
    __metaclass__ = PropertyMeta
    def __init__(self, f, value, condition=None):
        # Only "free" features may carry ':' inside their value.
        assert(f.free or ':' not in value)
        if condition is None:
            condition = []
        self.feature = f
        self.value = value
        self.condition = condition
        # Hash is precomputed once; the instance is treated as immutable.
        self._hash = hash((self.feature, self.value) + tuple(sorted(self.condition)))
        self.id = PropertyMeta.current_id
        # increment the id counter.
        # this allows us to take a list of Property
        # instances and use their unique integer ID
        # to create a key for PropertySet caching. This is
        # much faster than string comparison.
        # NOTE(review): the counter increment is not thread-safe -- confirm
        # this module is only used single-threaded.
        PropertyMeta.current_id += 1
        condition_str = ''
        if condition:
            condition_str = ",".join(str(p) for p in self.condition) + ':'
        # Cached raw form: "[cond1,cond2:]<feature-name>value"
        self._to_raw = '{}<{}>{}'.format(condition_str, f.name, value)
    def to_raw(self):
        """Return the raw string form '[condition:]<feature>value'."""
        return self._to_raw
    def __str__(self):
        return self._to_raw
    def __hash__(self):
        return self._hash
    def __eq__(self, other):
        # Equality compares only the precomputed hashes. This is fast and
        # usually safe because instances are interned by the metaclass, but a
        # hash collision between different keys would falsely compare equal.
        # NOTE(review): confirm this trade-off is intended.
        return self._hash == other._hash
    def __lt__(self, other):
        # total_ordering derives the remaining comparisons from this + __eq__.
        return (self.feature.name, self.value) < (other.feature.name, other.value)
@total_ordering
class LazyProperty(object):
    """Stand-in for a Property whose feature is not (yet) known.

    Wraps a Property built on the sentinel ``_not_applicable_feature`` (the
    feature name is folded into the value), and transparently re-resolves the
    real feature on attribute access once it becomes available.
    """
    def __init__(self, feature_name, value, condition=None):
        if condition is None:
            condition = []
        # Placeholder Property on the sentinel feature until the real
        # feature can be resolved.
        self.__property = Property(
            feature.get(_not_applicable_feature), feature_name + value, condition=condition)
        self.__name = feature_name
        self.__value = value
        self.__condition = condition
        self.__feature = None
    def __getattr__(self, item):
        # Only called for attributes NOT found normally (the name-mangled
        # private attributes above are found directly). Each miss retries the
        # feature lookup and, on success, swaps in the real Property.
        if self.__feature is None:
            try:
                self.__feature = feature.get(self.__name)
                self.__property = Property(self.__feature, self.__value, self.__condition)
            except KeyError:
                # Feature still unknown; keep delegating to the placeholder.
                pass
        return getattr(self.__property, item)
    def __hash__(self):
        return hash(self.__property)
    def __str__(self):
        return self.__property._to_raw
    def __eq__(self, other):
        return self.__property == other
    def __lt__(self, other):
        # self.feature / self.value resolve through __getattr__ to the
        # wrapped Property's attributes.
        return (self.feature.name, self.value) < (other.feature.name, other.value)
def create_from_string(s, allow_condition=False, allow_missing_value=False):
    """Parse a raw property string into a Property (or LazyProperty).

    Accepted forms: '<feature>value', an implicit value ('gcc'), and --
    when allow_condition is True -- 'cond1,cond2:<feature>value'.

    :param s: raw property string.
    :param allow_condition: accept a leading 'cond1,cond2:' prefix.
    :param allow_missing_value: accept '<feature>' with no value.
    """
    assert isinstance(s, basestring)
    assert isinstance(allow_condition, bool)
    assert isinstance(allow_missing_value, bool)
    condition = []
    if __re_has_condition.search(s):
        if not allow_condition:
            raise BaseException("Conditional property is not allowed in this context")
        m = __re_separate_condition_and_property.match(s)
        condition = m.group(1)
        s = m.group(2)
    # FIXME: break dependency cycle
    from b2.manager import get_manager
    if condition:
        condition = [create_from_string(x) for x in condition.split(',')]
    feature_name = get_grist(s)
    if not feature_name:
        if feature.is_implicit_value(s):
            f = feature.implied_feature(s)
            value = s
            p = Property(f, value, condition=condition)
        else:
            raise get_manager().errors()("Invalid property '%s' -- unknown feature" % s)
    else:
        value = get_value(s)
        if not value and not allow_missing_value:
            get_manager().errors()("Invalid property '%s' -- no value specified" % s)
        if feature.valid(feature_name):
            p = Property(feature.get(feature_name), value, condition=condition)
        else:
            # In case feature name is not known, it is wrong to do a hard error.
            # Feature sets change depending on the toolset. So e.g.
            # <toolset-X:version> is an unknown feature when using toolset Y.
            #
            # Ideally we would like to ignore this value, but most of
            # Boost.Build code expects that we return a valid Property. For this
            # reason we use a sentinel <not-applicable-in-this-context> feature.
            #
            # The underlying cause for this problem is that python port Property
            # is more strict than its Jam counterpart and must always reference
            # a valid feature.
            p = LazyProperty(feature_name, value, condition=condition)
    return p
def create_from_strings(string_list, allow_condition=False):
    """Convert each raw property string in string_list into a Property."""
    assert is_iterable_typed(string_list, basestring)
    properties = []
    for raw in string_list:
        properties.append(create_from_string(raw, allow_condition))
    return properties
def reset():
    """Clear this module's state; used mainly by the test suite."""
    global __results
    __results = {}  # cache of results from as_path
reset ()
def set_abbreviated_paths(on=True):
    """Enable or disable abbreviated paths; the string 'off' counts as False."""
    global __abbreviated_paths
    # Jam may pass the literal string 'off'; everything else is truth-tested.
    __abbreviated_paths = bool(on) and on != 'off'
def get_abbreviated_paths():
    # True when enabled via set_abbreviated_paths() or when the
    # --abbreviated-paths command line flag is present.
    return __abbreviated_paths or '--abbreviated-paths' in sys.argv
def path_order (x, y):
""" Helper for as_path, below. Orders properties with the implicit ones
first, and within the two sections in alphabetical order of feature
name.
"""
if x == y:
return 0
xg = get_grist (x)
yg = get_grist (y) |
anthonynsimon/python-data-structures-algorithms | tests/test_parse_tree.py | Python | apache-2.0 | 686 | 0.001458 | import unittest
from lib.data_structures.trees.parse_tree import ParseTree
class TestParseTree(unittest.TestCase):
    """Unit tests for ParseTree: build expression trees and evaluate them."""

    def evaluate(self, expression, result):
        """Build a parse tree from *expression* and assert it evaluates to *result*."""
        parser = ParseTree()
        parse_tree = parser.build_parse_tree(expression)
        self.assertEqual(parser.evaluate(parse_tree), result)
        print(parse_tree)

    def testParseTree(self):
        self.evaluate("( ( 5 + ( 2 * ( 100 / 2 ) ) ) - 5 )", 100)
        self.evaluate("( 10 + 5 )", 15)
        self.evaluate("( 10 / 2 )", 5)
        # NOTE(review): this expression has an extra trailing ')' -- the
        # parser apparently tolerates it; confirm whether that is intended.
        self.evaluate("( 5 * ( 5 * ( 5 * 5 ) ) ) )", 625)
        self.evaluate("( 10 / ( 5 + ( 3 + 2 ) ) )", 1)
        self.evaluate("( 1 + ( 10 - ( 5 + ( 3 + 2 ) ) ) )", 1)
capitalone/cloud-custodian | tools/c7n_mailer/tests/test_slack.py | Python | apache-2.0 | 8,070 | 0.001859 | import unittest
import copy
import json
import os
from mock import patch, MagicMock
from common import RESOURCE_3, SQS_MESSAGE_5
from c7n_mailer.slack_delivery import SlackDelivery
from c7n_mailer.email_delivery import EmailDelivery
SLACK_TOKEN = "slack-token"
SLACK_POST_MESSAGE_API = "https://slack.com/api/chat.postMessage"
class TestSlackDelivery(unittest.TestCase):
    """Unit tests for SlackDelivery: recipient mapping and message dispatch."""

    def setUp(self):
        """Build a delivery config plus deep-copied message/resource fixtures."""
        self.config = {
            'slack_token': SLACK_TOKEN,
            'templates_folders': [
                os.path.abspath(os.path.dirname(__file__)),
                os.path.abspath('/'),
                os.path.join(os.path.abspath(os.path.dirname(__file__)), "test-templates/")
            ]
        }
        self.session = MagicMock()
        self.logger = MagicMock()
        self.email_delivery = EmailDelivery(self.config, self.session, self.logger)
        # Deep copies so each test can mutate the shared fixtures safely.
        self.message = copy.deepcopy(SQS_MESSAGE_5)
        self.resource = copy.deepcopy(RESOURCE_3)
        self.message['resources'] = [self.resource]
        self.target_channel = 'test-channel'

    def test_map_sending_to_channel(self):
        """Default action maps the message payload to the configured channel."""
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        result = slack.get_to_addrs_slack_messages_map(self.message)
        assert self.target_channel in result
        assert json.loads(result[self.target_channel])['channel'] == self.target_channel

    def test_map_sending_to_tag_channel_with_hash(self):
        """slack://tag/... uses the tag value verbatim when it starts with '#'."""
        self.target_channel = '#tag-channel'
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        message_destination = ['slack://tag/SlackChannel']
        self.resource['Tags'].append({"Key": "SlackChannel", "Value": self.target_channel})
        self.message['action']['to'] = message_destination
        self.message['policy']['actions'][1]['to'] = message_destination
        result = slack.get_to_addrs_slack_messages_map(self.message)
        assert self.target_channel in result
        assert json.loads(result[self.target_channel])['channel'] == self.target_channel
        self.logger.debug.assert_called_with("Generating message for specified Slack channel.")

    def test_map_sending_to_tag_channel_without_hash(self):
        """A tag channel without a leading '#' gets one prepended."""
        self.target_channel = 'tag-channel'
        channel_name = "#" + self.target_channel
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        message_destination = ['slack://tag/SlackChannel']
        self.resource['Tags'].append({"Key": "SlackChannel", "Value": self.target_channel})
        self.message['action']['to'] = message_destination
        self.message['policy']['actions'][1]['to'] = message_destination
        result = slack.get_to_addrs_slack_messages_map(self.message)
        assert channel_name in result
        assert json.loads(result[channel_name])['channel'] == channel_name
        self.logger.debug.assert_called_with("Generating message for specified Slack channel.")

    def test_map_sending_to_tag_channel_no_tag(self):
        """A missing SlackChannel tag yields an empty mapping."""
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        message_destination = ['slack://tag/SlackChannel']
        self.message['action']['to'] = message_destination
        self.message['policy']['actions'][1]['to'] = message_destination
        result = slack.get_to_addrs_slack_messages_map(self.message)
        assert result == {}
        self.logger.debug.assert_called_with("No SlackChannel tag found in resource.")

    def test_map_sending_to_webhook(self):
        """Webhook destinations produce payloads without a 'channel' key."""
        webhook = "https://hooks.slack.com/this-is-a-webhook"
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        message_destination = [webhook]
        self.message['action']['to'] = message_destination
        self.message['policy']['actions'][1]['to'] = message_destination
        result = slack.get_to_addrs_slack_messages_map(self.message)
        assert webhook in result
        assert 'channel' not in json.loads(result[webhook])

    @patch('c7n_mailer.slack_delivery.requests.post')
    def test_slack_handler(self, mock_post):
        """slack_handler logs one summary line per delivery."""
        mock_post.return_value.status_code = 200
        mock_post.return_value.json.return_value = {'ok': True}
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        result = slack.get_to_addrs_slack_messages_map(self.message)
        slack.slack_handler(self.message, result)
        self.logger.info.assert_called_with("Sending account:core-services-dev "
                                            "policy:ebs-mark-unattached-deletion ebs:1 slack:slack"
                                            "_default to test-channel")

    @patch('c7n_mailer.slack_delivery.requests.post')
    def test_send_slack_msg_webhook(self, mock_post):
        """send_slack_msg posts the raw payload straight to the webhook URL."""
        mock_post.return_value.status_code = 200
        mock_post.return_value.json.return_value = {'ok': True}
        webhook = "https://hooks.slack.com/this-is-a-webhook"
        message_destination = [webhook]
        self.message['action']['to'] = message_destination
        self.message['policy']['actions'][1]['to'] = message_destination
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        result = slack.get_to_addrs_slack_messages_map(self.message)
        slack.send_slack_msg(webhook, result[webhook])
        args, kwargs = mock_post.call_args
        assert webhook == kwargs['url']
        assert kwargs['data'] == result[webhook]

    @patch('c7n_mailer.slack_delivery.requests.post')
    def test_send_slack_msg(self, mock_post):
        """send_slack_msg posts channel messages to the chat.postMessage API."""
        mock_post.return_value.status_code = 200
        mock_post.return_value.json.return_value = {'ok': True}
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        result = slack.get_to_addrs_slack_messages_map(self.message)
        slack.send_slack_msg(self.target_channel, result[self.target_channel])
        args, kwargs = mock_post.call_args
        assert self.target_channel == json.loads(kwargs['data'])['channel']
        assert SLACK_POST_MESSAGE_API == kwargs['url']
        assert kwargs['data'] == result[self.target_channel]

    @patch('c7n_mailer.slack_delivery.requests.post')
    def test_send_slack_msg_retry_after(self, mock_post):
        """A 429 response with Retry-After is logged as rate limiting."""
        retry_after_delay = 1
        mock_post.return_value.status_code = 429
        mock_post.return_value.headers = {'Retry-After': retry_after_delay}
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        result = slack.get_to_addrs_slack_messages_map(self.message)
        slack.send_slack_msg(self.target_channel, result[self.target_channel])
        args, kwargs = mock_post.call_args
        self.logger.info.assert_called_with("Slack API rate limiting. Waiting %d seconds",
                                            retry_after_delay)

    @patch('c7n_mailer.slack_delivery.requests.post')
    def test_send_slack_msg_not_200_response(self, mock_post):
        """Non-200 responses are logged with status code and body."""
        mock_post.return_value.status_code = 404
        mock_post.return_value.text = "channel_not_found"
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        result = slack.get_to_addrs_slack_messages_map(self.message)
        slack.send_slack_msg(self.target_channel, result[self.target_channel])
        self.logger.info.assert_called_with('Error in sending Slack message status:%s response: %s',
                                            404, 'channel_not_found')

    @patch('c7n_mailer.slack_delivery.requests.post')
    def test_send_slack_msg_not_ok_response(self, mock_post):
        """A 200 response whose body reports ok=False is logged with the error."""
        mock_post.return_value.status_code = 200
        mock_post.return_value.json.return_value = {'ok': False, 'error': "failed"}
        slack = SlackDelivery(self.config, self.logger, self.email_delivery)
        result = slack.get_to_addrs_slack_messages_map(self.message)
        slack.send_slack_msg(self.target_channel, result[self.target_channel])
        self.logger.info.assert_called_with('Error in sending Slack message. Status:%s, '
                                            'response:%s', 200, 'failed')
|
bobintetley/asm3 | src/asm3/lookups.py | Python | gpl-3.0 | 50,935 | 0.010405 |
import asm3.configuration
import asm3.financial
import asm3.utils
from asm3.i18n import _
import re
# Look up tables map
# tablename : ( tablelabel, namefield, namelabel, descfield, hasspecies, haspfspecies, haspfbreed, hasapcolour, hasdefaultcost, hasunits, hassite, canadd, candelete, canretire,(foreignkeys) )
# tablename : ( tablelabel, namefield, namelabel, descfield, modifiers,(foreignkeys) )
# modifiers:
# add - add new records
# del - can delete
# ret - can retire a value
# species - has a SpeciesID column (breed)
# pubspec - has a PetFinderSpecies column (species)
# pubbreed - has a PetFinderBreed column (breed)
# pubcol - has an AdoptAPetColour column (basecolour)
# sched - has a RescheduleDays column (testtype, vaccinationtype)
# acc - has an AccountID column (costtype, donationtype)
# cost - has a DefaultCost column (citationtype, costtype, donationtype, licencetype)
# units - has Units column (internallocation)
# site - has SiteID column (internallocation)
# vat - has an IsVAT column (donationtype)
LOOKUP_TABLES = {
"lksaccounttype": (_("Account Types"), "AccountType", _("Type"), "", "", ("accounts.AccountType",)),
"lkanimalflags": (_("Animal Flags"), "Flag", _("Flag"), "", "add del", ""),
"animaltype": (_("Animal Types"), "AnimalType", _("Type"), "AnimalDescription", "add del ret", ("animal.AnimalTypeID",)),
"basecolour": (_("Colors"), "BaseColour", _("Color"), "BaseColourDescription", "add del ret pubcol", ("animal.BaseColourID", "animallost.BaseColourID", "animalfound.BaseColourID")),
"breed": (_("Breeds"), "BreedName", _("Breed"), "BreedDescription", "add del ret species pubbreed", ("animal.BreedID", "animal.Breed2ID", "animallost.BreedID", "animalfound.BreedID")),
"lkcoattype": (_("Coat Types"), "CoatType", _("Coat Type"), "", "add del", ("animal.CoatType",)),
"citationtype": (_("Citation Types"), "CitationName", _("Citation Type"), "CitationDescription", "add del ret cost", ("ownercitation.CitationTypeID",)),
"lksclinicstatus": (_("Clinic Statuses"), "Status", _("Status"), "", "", ("clinicappointment.Status",)),
"costtype": (_("Cost Types"), "CostTypeName", _("Cost Type"), "CostTypeDescription", "add del ret cost acc", ("animalcost.CostTypeID",)),
"deathreason": (_("Death Reasons"), "ReasonName", _("Reason"), "ReasonDescription", "add del ret", ("animal.PTSReasonID",)),
"diet": (_("Diets"), "DietName", _("Diet"), "DietDescription", "add del ret", ("animaldiet.DietID",)),
"donationpayment": (_("Payment Methods"), "PaymentName", _("Type"), "PaymentDescription", "add del ret", ("ownerdonation.DonationPaymentID",)),
"donationtype": (_("Payment Types"), "DonationName", _("Type"), "DonationDescription", "add del ret cost vat acc", ("ownerdonation.DonationTypeID", "accounts.DonationTypeID")),
"entryreason": (_("Entry Reasons"), "ReasonName", _("Reason"), "ReasonDescription", "add del ret", ("animal.EntryReasonID", "adoption.ReturnedReasonID") ),
"incidentcompleted":(_("Incident Completed Types"), "CompletedName", _("Completed Type"), "CompletedDescription", "add del ret", ("animalcontrol.IncidentCompletedID",)),
"incidenttype": (_("Incident Types"), "IncidentName", _("Type"), "IncidentDescription", "add del ret", ("animalcontrol.IncidentTypeID",)),
"internallocation": (_("Internal Locations"), "LocationName", _("Location"), "LocationDescription", "add del ret units site", ("animal.ShelterLocation",)),
"jurisdiction": (_("Jurisdictions"), "JurisdictionName", _("Jurisdiction"), "JurisdictionDescription", "add del ret", ("animalcontrol.JurisdictionID","owner.JurisdictionID")),
"licencetype": (_("License Types"), "LicenceTypeName", _("Type"), "LicenceTypeDescription", "add del ret cost", ("ownerlicence.LicenceTypeID",)),
"logtype": (_("Log Types"), "LogTypeName", _("Type"), "LogTypeDescription", "add del ret", ("log.LogTypeID",)),
"lksmovementtype": (_("Movement Types"), "MovementType", _("Type"), "", "", ("adoption.MovementType", "animal.ActiveMovementType",)),
"lkownerflags": (_("Person Flags"), "Flag", _("Flag"), "", "add del", ""),
"lksrotatype": (_("Rota Types"), "RotaType", _("Type"), "", "", ("ownerrota.RotaTypeID",)),
"lksex": (_("Sexes"), "Sex", _("Sex"), "", "", ("animal.Sex", "animallost.Sex", "animalfound.Sex")),
"lksize": (_("Sizes"), "Size", _("Size"), "", "", ("animal.Size",)),
"lksyesno": (_("Yes/No"), "Name", _("Yes/No"), "", "", ("animal.Neutered",)),
"lksynun": (_("Yes/No/Unknown"), "Name", _("Yes/No/Unknown"), "", "", ("animal.IsHouseTrained",)),
"lksynunk": (_("Good with kids"), "Name", _("Good with kids"), "", "", ("animal.IsGoodWithChildren",)),
"lksposneg": (_("Positive/Negative"), "Name", _("Positive/Negative"), "", "", ("animal.CombiTestResult",)),
"pickuplocation": (_("Pickup Locations"), "LocationName", _("Location"), "LocationDescription", "add del ret", ("animal.PickupLocationID",)),
"reservationstatus": (_("Reservation Statuses"), "StatusName", _("Status"), "StatusDescription", "add del ret", ("adoption.ReservationStatusID",)),
"site": (_("Sites"), "SiteName", _("Site"), "", "add del", ("users.SiteID","internallocation.SiteID")),
    "species": (_("Species"), "SpeciesName", _("Species"), "SpeciesDescription", "add del ret pubspec", ("animal.SpeciesID", "onlineformfield.SpeciesID", "animallost.AnimalTypeID", "animalfound.AnimalTypeID")),
"stocklocation": (_("Stock Locations"), "LocationName", _("Location"), "LocationDescription", "add del ret", ("stocklevel.StockLocationID",)),
"stockusagetype": (_("Stock Usage Type"), "UsageTypeName", _("Usage Type"), "UsageTypeDescription", "add del ret", ("stockusage.StockUsageTypeID",)),
"lkurgency": (_("Urgencies"), "Urgency", _("Urgency"), "", "", ("animalwaitinglist.Urgency",)),
"testtype": (_("Test Types"), "TestName", _("Type"), "TestDescription", "add del ret cost sched", ("animaltest.TestTypeID",)),
"testresult": (_("Test Results"), "ResultName", _("Result"), "ResultDescription", "add del ret", ("animaltest.TestResultID",)),
"lkstransportstatus": (_("Transport Statuses"), "Name", _("Status"), "", "", ("animaltransport.Status",)),
"transporttype": (_("Transport Types"), "TransportTypeName", _("Type"), "TransportTypeDescription", "add del ret", ("animaltransport.TransportTypeID",)),
"traptype": (_("Equipment Loan Types"), "TrapTypeName", _("Type"), "TrapTypeDescription", "add del ret cost", ("ownertraploan.TrapTypeID",)),
"vaccinationtype": (_("Vaccination Types"), "VaccinationType", _("Type"), "VaccinationDescription", "add del ret cost sched", ("animalvaccination.VaccinationID",)),
"voucher": (_("Voucher Types"), "VoucherName", _("Type"), "VoucherDescription", "add del ret cost", ("ownervoucher.VoucherID",)),
"lkworktype": (_("Work Types"), "WorkType", _("Type"), "", "add del ret", ("ownerrota.WorkTypeID",))
}
# Indexes into the value tuples of LOOKUP_TABLES above.
LOOKUP_TABLELABEL = 0
LOOKUP_NAMEFIELD = 1
LOOKUP_NAMELABEL = 2
LOOKUP_DESCFIELD = 3
LOOKUP_MODIFIERS = 4
LOOKUP_FOREIGNKEYS = 5
# Database of microchip manufacturer prefixes. locales is a space separated list of
# locales the pattern is valid for (blank is all locales)
# This list is evaluated in order, so entries with more specificity (ie. a locale and longer pattern)
# should be placed first as the first match is returned.
MICROCHIP_MANUFACTURERS = [
{ "length": 16, "regex": r"^AVID", "name": "AVID", "locales": "" },
{ "length": 14, "regex": r"^TR", "name": "AKC Reunite", "locales": "" },
{ "length": 9, "regex": r"^\d+$", "name": "AVID", "locales": "" },
{ "length": 11, "regex": r"^\d{3}\*\d{3}\*\d{3}", "name": "AVID", "locales": "" },
{ "length": 11, "regex": r"^\d{3}\-\d{3}\-\d{3}", "name": "AVID", "locales": "" },
{ "length": 10, "regex": r"^0A1", "name": "24PetWatch", "locales": "" },
{ "length": 10, "regex": r"^0A0", "name": "Microchip ID", "locales": "" },
{ "length": |
Clarity-89/clarityv2 | src/clarityv2/crm/migrations/0008_auto_20171108_2318.py | Python | mit | 1,542 | 0.002594 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-08 21:18
from __future__ import unicode_literals
from decimal import Decimal
from django.db import migrations, models
import django_countries.fields
class Migration(migrations.Migration):
    """Rename client.vat to vat_number, replace project.tax_rate with a
    choice-based 'vat' field, and update country/language field defaults.
    (Auto-generated by Django 1.11.6; do not hand-edit operations.)
    """

    dependencies = [
        ('crm', '0007_auto_20161219_1914'),
    ]

    operations = [
        migrations.RenameField(
            model_name='client',
            old_name='vat',
            new_name='vat_number',
        ),
        migrations.RemoveField(
            model_name='project',
            name='tax_rate',
        ),
        migrations.AddField(
            model_name='project',
            name='vat',
            field=models.DecimalField(choices=[(Decimal('0.00'), 'no vat'), (Decimal('0.20'), 'standard vat rate')], decimal_places=2, default=Decimal('0.00'), max_digits=4, verbose_name='tax rate'),
        ),
        migrations.AlterField(
            model_name='client',
            name='country',
            field=django_countries.fields.CountryField(default='BE', max_length=2, verbose_name='Country'),
        ),
        migrations.AlterField(
            model_name='client',
            name='language',
            field=models.CharField(choices=[('en', 'English')], default='nl', max_length=10, verbose_name='language'),
        ),
        migrations.AlterField(
            model_name='contact',
            name='country',
            field=django_countries.fields.CountryField(default='BE', max_length=2, verbose_name='Country'),
        ),
    ]
|
emtwo/redash_client | samples/ActivityStreamExperimentDashboard.py | Python | mpl-2.0 | 1,209 | 0.01158 | import os
from templates import event_rate
from redash_client import RedashClient
from constants import VizType, ChartType, VizWidth
class ActivityStreamExperimentDashboard(object):
    """Builds and publishes a Redash dashboard of per-event
    experiment-vs-control rate graphs for one experiment."""

    DEFAULT_EVENTS = ["CLICK", "SEARCH", "BLOCK", "DELETE"]
    DATA_SOURCE_ID = 5

    def __init__(self, api_key, dash_name, exp_id, start_date=None, end_date=None):
        """Create and immediately publish a new dashboard named *dash_name*.

        :param api_key: Redash API key.
        :param exp_id: experiment identifier used in the generated queries.
        :param start_date: optional lower bound passed to the query template.
        :param end_date: optional upper bound.
            NOTE(review): stored but unused in this class -- confirm intent.
        """
        self._api_key = api_key
        self._experiment_id = exp_id
        self._start_date = start_date
        self._end_date = end_date
        self.redash = RedashClient(api_key)
        self._dash_id = self.redash.new_dashboard(dash_name)
        self.redash.publish_dashboard(self._dash_id)

    def add_event_graphs(self, additional_events=()):
        """Add one line graph per event in DEFAULT_EVENTS plus *additional_events*.

        :param additional_events: extra event names to graph (any iterable;
            defaults to none).
        """
        # list() both operands: avoids the shared mutable-default-argument
        # pitfall and accepts any iterable, while staying backward compatible
        # with callers that pass a list.
        required_events = list(self.DEFAULT_EVENTS) + list(additional_events)

        for event in required_events:
            query_name = "Experiment vs. Control {0} Rate".format(event)
            query_string, fields = event_rate(event, self._start_date, self._experiment_id)
            query_id = self.redash.new_query(query_name, query_string, self.DATA_SOURCE_ID)
            viz_id = self.redash.new_visualization(query_id, ChartType.LINE, {fields[0]: "x", fields[1]: "y", fields[2]: "series"})
            self.redash.append_viz_to_dash(self._dash_id, viz_id, VizWidth.WIDE)
|
etherkit/OpenBeacon2 | macos/venv/lib/python3.8/site-packages/cmd2/table_creator.py | Python | gpl-3.0 | 39,315 | 0.003772 | # coding=utf-8
"""
cmd2 table creation API
This API is built upon two core classes: Column and TableCreator
The general use case is to inherit from TableCreator to create a table class with custom formatting options.
There are already implemented and ready-to-use examples of this below TableCreator's code.
"""
import copy
import functools
import io
from collections import deque
from enum import Enum
from typing import Any, Optional, Sequence, Tuple, Union
from wcwidth import wcwidth
from . import ansi, constants, utils
# This is needed for compatibility with early versions of Python 3.5 prior to 3.5.4
try:
    from typing import Deque
except ImportError:  # pragma: no cover
    import typing

    # The following copied from the implementation of Deque in Python 3.5.4
    # noinspection PyProtectedMember, PyUnresolvedReferences
    class Deque(deque, typing.MutableSequence[typing.T]):
        """Backport of typing.Deque for Python < 3.5.4 (generic alias only)."""
        __slots__ = ()
        __extra__ = deque

        def __new__(cls, *args, **kwds):
            # Deque is only a typing alias; instantiating it directly is an
            # error -- subscripted subclasses create a plain deque instead.
            if typing._geqv(cls, Deque):
                raise TypeError('Type Deque cannot be instantiated; use deque() instead')
            return typing._generic_new(deque, cls, *args, **kwds)
# Constants
EMPTY = ''
SPACE = ' '
class HorizontalAlignment(Enum):
    """Horizontal alignment of text in a cell (used by Column defaults)."""
    LEFT = 1
    CENTER = 2
    RIGHT = 3
class VerticalAlignment(Enum):
    """Vertical alignment of text in a cell (used by Column defaults)."""
    TOP = 1
    MIDDLE = 2
    BOTTOM = 3
class Column:
    """Table column configuration"""

    def __init__(self, header: str, *, width: Optional[int] = None,
                 header_horiz_align: HorizontalAlignment = HorizontalAlignment.LEFT,
                 header_vert_align: VerticalAlignment = VerticalAlignment.BOTTOM,
                 data_horiz_align: HorizontalAlignment = HorizontalAlignment.LEFT,
                 data_vert_align: VerticalAlignment = VerticalAlignment.TOP,
                 max_data_lines: Union[int, float] = constants.INFINITY) -> None:
        """
        Column initializer

        :param header: label for column header
        :param width: display width of column. This does not account for any borders or padding which
                      may be added (e.g pre_line, inter_cell, and post_line). Header and data text wrap within
                      this width using word-based wrapping (defaults to width of header or 1 if header is blank)
        :param header_horiz_align: horizontal alignment of header cells (defaults to left)
        :param header_vert_align: vertical alignment of header cells (defaults to bottom)
        :param data_horiz_align: horizontal alignment of data cells (defaults to left)
        :param data_vert_align: vertical alignment of data cells (defaults to top)
        :param max_data_lines: maximum lines allowed in a data cell. If line count exceeds this, then the final
                               line displayed will be truncated with an ellipsis. (defaults to INFINITY)
        :raises: ValueError if width is less than 1
        :raises: ValueError if max_data_lines is less than 1
        """
        self.header = header

        if width is None:
            # Use the width of the widest line in the header or 1 if the header has no width
            line_widths = [ansi.style_aware_wcswidth(line) for line in self.header.splitlines()]
            line_widths.append(1)
            self.width = max(line_widths)
        elif width < 1:
            raise ValueError("Column width cannot be less than 1")
        else:
            self.width = width

        self.header_horiz_align = header_horiz_align
        self.header_vert_align = header_vert_align
        self.data_horiz_align = data_horiz_align
        self.data_vert_align = data_vert_align

        if max_data_lines < 1:
            raise ValueError("Max data lines cannot be less than 1")
        self.max_data_lines = max_data_lines
class TableCreator:
"""
Base table creation class. This class handles ANSI style sequences and characters with display widths greater than 1
when performing width calculations. It was designed with the ability to build tables one row at a time. This helps
when you have large data sets that you don't want to hold in memory or when you receive portions of the data set
incrementally.
TableCreator has one public method: generate_row()
This function and the Column class provide all features needed to build tables with headers, borders, colors,
horizontal and vertical alignment, and wrapped text. However, it's generally easier to inherit from this class and
implement a more granular API rather than use TableCreator directly. There are ready-to-use examples of this
defined after this class.
"""
    def __init__(self, cols: Sequence[Column], *, tab_width: int = 4) -> None:
        """
        TableCreator initializer

        :param cols: column definitions for this table
        :param tab_width: all tabs will be replaced with this many spaces. If a row's fill_char is a tab,
                          then it will be converted to one space.
        """
        # Shallow copy so later mutation of the caller's sequence does not
        # change this table's column list.
        self.cols = copy.copy(cols)
        self.tab_width = tab_width
    @staticmethod
    def _wrap_long_word(word: str, max_width: int, max_lines: Union[int, float], is_last_word: bool) -> Tuple[str, int, int]:
        """
        Used by _wrap_text() to wrap a long word over multiple lines

        :param word: word being wrapped
        :param max_width: maximum display width of a line
        :param max_lines: maximum lines to wrap before ending the last line displayed with an ellipsis
        :param is_last_word: True if this is the last word of the total text being wrapped
        :return: Tuple(wrapped text, lines used, display width of last line)
        """
        # Map of character index -> ANSI style sequence starting at that index
        styles = utils.get_styles_in_text(word)
        wrapped_buf = io.StringIO()

        # How many lines we've used
        total_lines = 1

        # Display width of the current line we are building
        cur_line_width = 0

        char_index = 0
        while char_index < len(word):
            # We've reached the last line. Let truncate_line do the rest.
            if total_lines == max_lines:
                # If this isn't the last word, but it's gonna fill the final line, then force truncate_line
                # to place an ellipsis at the end of it by making the word too wide.
                remaining_word = word[char_index:]
                if not is_last_word and ansi.style_aware_wcswidth(remaining_word) == max_width:
                    remaining_word += "EXTRA"

                truncated_line = utils.truncate_line(remaining_word, max_width)
                cur_line_width = ansi.style_aware_wcswidth(truncated_line)
                wrapped_buf.write(truncated_line)
                break

            # Check if we're at a style sequence. These don't count toward display width.
            if char_index in styles:
                wrapped_buf.write(styles[char_index])
                char_index += len(styles[char_index])
                continue

            cur_char = word[char_index]
            cur_char_width = wcwidth(cur_char)

            if cur_char_width > max_width:
                # We have a case where the character is wider than max_width. This can happen if max_width
                # is 1 and the text contains wide characters (e.g. East Asian). Replace it with an ellipsis.
                cur_char = constants.HORIZONTAL_ELLIPSIS
                cur_char_width = wcwidth(cur_char)

            if cur_line_width + cur_char_width > max_width:
                # Adding this char will exceed the max_width. Start a new line.
                wrapped_buf.write('\n')
                total_lines += 1
                cur_line_width = 0
                continue

            # Add this character and move to the next one
            cur_line_width += cur_char_width
            wrapped_buf.write(cur_char)
            char_index += 1

        return wrapped_buf.getvalue(), total_lines, cur_line_width
@staticmethod
def _wrap_text(text: str, max_width: int, max_lines: Union[int, float]) -> str:
"""
|
JuezUN/INGInious | inginious/frontend/plugins/lti_register/pages/constants.py | Python | agpl-3.0 | 265 | 0.003774 | _use_minified = True
def set_us | e_m | inified(use_minified):
""" Define if use minified files """
global _use_minified
_use_minified = use_minified
def use_minified():
""" return a boolean to define if use minified files """
return _use_minified |
yongshengwang/builthue | desktop/libs/liboozie/src/liboozie/conf.py | Python | apache-2.0 | 2,552 | 0.009013 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from django.utils.translation import ugettext as _, ugettext_lazy as _t
from desktop.lib.conf import Config, coerce_bool, validate_path
OOZIE_URL = Config(
key='oozie_url',
help=_t('URL of Oozie server. This is required for job submission. Empty value disables the config check.'),
default='http://localhost:11000/oozie',
type=str)
SECURITY_ENABLED = Config(
key="security_enabled",
help=_t("Whether Oozie requires client to perform Kerberos authentication."),
default=False,
type=coerce_bool)
REMOTE_DEPLOYMENT_DIR = Config(
key="remote_deployement_dir",
default="/user/hue/oozie/deployments",
help=_t("Location on HDFS where the workflows/coordinators are deployed when submitted by a non-owner."))
def get_oozie_status(user):
  """Return the Oozie server status string for `user`, or 'down' when the
  server cannot be contacted."""
  from liboozie.oozie_api import get_oozie

  status = 'down'

  try:
    if 'test' not in sys.argv: # Avoid tests hanging
      status = str(get_oozie(user).get_oozie_status())
  except Exception:
    # Best-effort health check: any failure reports 'down'. Unlike the
    # previous bare `except:`, KeyboardInterrupt/SystemExit now propagate.
    pass

  return status
def config_validator(user):
  """
  config_validator() -> [ (config_variable, error_message) ]

  Called by core check_config() view.
  """
  from hadoop.cluster import get_all_hdfs

  res = []

  if OOZIE_URL.get():
    # An empty OOZIE_URL disables the server check entirely.
    status = get_oozie_status(user)
    if 'NORMAL' not in status:
      res.append((status, _('The Oozie server is not available')))

  class ConfigMock:
    # Minimal duck-typed stand-in for a desktop Config object, shaped to
    # what validate_path() calls below.
    def __init__(self, value): self.value = value
    def get(self): return self.value
    def get_fully_qualifying_key(self): return self.value

  # Verify the Oozie share lib path exists on every configured HDFS cluster.
  for cluster in get_all_hdfs().values():
    res.extend(validate_path(ConfigMock('/user/oozie/share/lib'), is_dir=True, fs=cluster,
                             message=_('Oozie Share Lib not installed in default location.')))

  return res
|
gst/amqpy | setup.py | Python | mit | 1,664 | 0 | #!/usr/bin/env python3
import sys
import os

from setuptools import setup, find_packages

import amqpy

# amqpy uses Python-3-only syntax and stdlib features.
if sys.version_info < (3, 2):
    raise Exception('amqpy requires Python 3.2 or higher')

name = 'amqpy'
description = 'an AMQP 0.9.1 client library for Python >= 3.2.0'
keywords = ['amqp', 'rabbitmq', 'qpid']
classifiers = [
    'Development Status :: 4 - Beta',
    'Programming Language :: Python',
    'Programming Language :: Python :: Implementation :: CPython',
    'Programming Language :: Python :: Implementation :: PyPy',
    'Programming Language :: Python :: 3 :: Only',
    'Programming Language :: Python :: 3.2',
    'Programming Language :: Python :: 3.3',
    'Programming Language :: Python :: 3.4',
    'License :: OSI Approved :: MIT License',
    'Intended Audience :: Developers',
    'Topic :: Internet',
    'Topic :: Software Development :: Libraries :: Python Modules',
    'Topic :: System :: Networking'
]
package_data = {
    '': ['*.rst', '*.ini', 'AUTHORS', 'LICENSE'],
}


def long_description():
    """Return the README contents if present, else the short description."""
    if os.path.exists('README.rst'):
        with open('README.rst') as f:
            return f.read()
    else:
        return description


setup(
    name=name,
    description=description,
    long_description=long_description(),
    version=amqpy.__version__,
    author=amqpy.__author__,
    author_email=amqpy.__contact__,
    maintainer=amqpy.__maintainer__,
    url=amqpy.__homepage__,
    platforms=['any'],
    # BUG FIX: was 'LGPL', which contradicted the MIT trove classifier
    # above and the package's LICENSE metadata.
    license='MIT',
    packages=find_packages(exclude=['ez_setup', 'tests', 'tests.*']),
    package_data=package_data,
    tests_require=['pytest>=2.6'],
    classifiers=classifiers,
    keywords=keywords
)
|
Lindennerd/exercicios_luizpaulo_leandro | exercicios/ex_4.py | Python | apache-2.0 | 263 | 0.015385 | # coding=utf-8
#[3,2,4] >> 3² + 2² + 4² = 9 + 4 + 16 = 29
# NOTE(review): this lambda shadows the built-in pow(); a clearer name
# such as `square` would avoid confusion.
pow = lambda x: x**2
def exer(lista):
    """Return the sum of the squares of the numbers in *lista*.

    e.g. exer([3, 2, 4]) == 9 + 4 + 16 == 29; an empty list yields 0.
    """
    # Square inline rather than relying on the module-level `pow` lambda,
    # which shadows the built-in pow() and made this function fragile.
    return sum(x ** 2 for x in lista)
def f(a):
    """Return a closure `g` that prints *a* followed by its own four
    arguments — a small demonstration of closures capturing a variable."""
    def g(b, c, d, e):
        print(a, b, c, d, e)
    return g
|
GunnerJnr/_CodeInstitute | Stream-2/Back-End-Development/18.Using-Python-with-MySQL-Part-Three-Intro/3.How-to-Build-an-Update-SQL-String/database/mysql.py | Python | mit | 7,289 | 0.001921 | import MySQLdb as _mysql
from collections import namedtuple
import re
# Only needs to compile one time so we put it here
float_match = re.compile(r'[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?$').match
def is_number(string):
return bool(float_match(string))
class MySQLDatabase(object):
"""
This is the driver class that we will use
for connecting to our database. In here we'll
create a constructor (__init__) that will connect
to the database once the driver class is instantiated
and a destructor method that will close the database
connection once the driver object is destroyed.
"""
def __init__(self, database_name, username,
password, host='localhost'):
"""
Here we'll try to connect to the database
using the variables that we passed through
and if the connection fails we'll print out the error
"""
try:
self.db = _mysql.connect(db=database_name, host=host, user=username, passwd=password)
self.database_name = database_name
print "Connected to MySQL!"
except _mysql.Error, e:
print e
def __del__(self):
"""
Here we'll do a check to see if `self.db` is present.
This will only be the case if the connection was
successfully made in the initialiser.
Inside that condition we'll close the connection
"""
if hasattr(self, 'db'):
self.db.close()
print "MySQL Connection Closed"
def get_available_tables(self):
"""
This method will allow us to see what
tables are available to us when we're
running our queries
"""
cursor = self.db.cursor()
cursor.execute("SHOW TABLES;")
self.tables = cursor.fetchall()
cursor.close()
return self.tables
def convert_to_named_tuples(self, cursor):
results = None
names = " ".join(d[0] for d in cursor.description)
klass = namedtuple('Results', names)
try:
results = map(klass._make, cursor.fetchall())
except _mysql.ProgrammingError, e:
print e
return results
def get_columns_for_table(self, table_name):
"""
This method will enable us to interact
with our database to find what columns
are currently in a specific table
"""
cursor = self.db.cursor()
cursor.execute("SHOW COLUMNS FROM `%s`" % table_name)
self.columns = cursor.fetchall()
cursor.close()
return self.columns
def select(self, table, columns=None, named_tuples=False, **kwargs):
"""
We'll create our `select` method in order
to make it simpler for extracting data from
the database.
select(table_name, [list_of_column_names])
"""
sql_str = "SELECT "
# add columns or just use the wildcard
if not columns:
sql_str += " * "
else:
for column in columns:
sql_str += "%s, " % column
sql_str = sql_str[:-2] # remove the last comma!
# add the to the SELECT query
sql_str += " FROM `%s`.`%s`" % (self.database_name, table)
# if there's a JOIN clause attached
if kwargs.has_key('join'):
sql_str += " JOIN %s " % kwargs.get('join')
# if there's a WHERE clause attached
if kwargs.has_key('where'):
sql_str += " WHERE %s " % kwargs.get('where')
# if there's a LIMIT clause attached
if kwargs.has_key('limit'):
sql_str += " LIMIT %s " % kwargs.get('limit')
# Finalise out SQL string
sql_str += ";"
cursor = self.db.cursor()
cursor.execute(sql_str)
if named_tuples:
results = self.convert_to_named_tuples(cursor)
else:
results = cursor.fetchall()
cursor.close()
return results
def delete(self, table, **wheres):
"""
This function will allow us to delete data from a given tables
based on wether or not a WHERE clause is present or not
"""
sql_str = "DELETE FROM `%s`.`%s`" % (self.database_name, table)
if wheres is not None:
first_ | where_clause = True
for where, term in wheres.iteritems():
if first_where_clause:
# This is the first WHERE clause
sql_str += " WHERE `%s`.`%s` %s" % (table, where, term)
first_where_clause = False
else:
# this is the second (additional) WHERE clause so we use AND
sql_str += " AND `%s`.`%s` %s" % (table, where, term)
sql_str += ";"
curs | or = self.db.cursor()
cursor.execute(sql_str)
self.db.commit()
cursor.close()
# Only needs to compile one time so we put it here
float_match = re.compile(r'[-+]?\d*\.?\d+(?:[eE][-+]?\d+)?$').match
def is_number(string):
return bool(float_match(string))
def insert(self, table, **column_names):
"""
Insert function
Example usages:-
db.insert('people', first_name='Ringo',
second_name='Starr', DOB=STR_TO_DATE('01-01-1999', '%d-%m-%Y'))
"""
sql_str = "INSERT INTO `%s`.`%s` " % (self.database_name, table)
if column_names is not None:
columns = "("
values = "("
for arg, value in column_names.iteritems():
columns += "`%s`, " % arg
# Check how we should add this to the columns string
if is_number(value) or arg == 'DOB':
# It's a number or date so we don't add the ''
values += "%s, " % value
else:
# It's a string so we add the ''
values += "5S, " % value
columns = columns[:-2] # Strip off the spare ',' from the end
values = values[:-2] # Same here too
columns += ") VALUES" # Add the connecting keyword and brace
values += ");" # Add the brace and like terminator
sql_str += "%s %s" % (columns, values)
cursor = self.db.cursor()
cursor.execute(sql_str)
self.db.commit()
cursor.close()
def update(self, table, where=None, **column_values):
sql_str = "UPDATE `%s`.`%s` SET " % (self.database_name, table)
if column_values is not None:
for column_name, value in column_names.iteritems():
sql_str += "`%s`=" % column_name
# check how we should add this to the column string
if is_number(value):
# it's a number so we don't add ''
sql_str += "%s, " % value
else:
# it's a date or string so add the ''
sql_str += "'%s', " % value
sql_str = sql_str[:-2] # strip off the last , and space character
if where:
sql_str += " WHERE %s" % where
cusrsor = self.db.cursor()
cursor.execute(sql_str)
self.db.commit()
cursor.close()
|
MechanisM/djangodash2011 | test_project/test_app/urls.py | Python | bsd-3-clause | 1,313 | 0.003808 | from django.conf.urls.defaults import *
from staste.charts.views import PieChart, TimeserieChart, LatestCountAndAverageChart
from staste.middleware import response_time_metrica
from .views import IndexView
from .metrics import gender_age_me | trica
urlpatterns = patterns('',
url(r'^$', IndexView.as_view(), name="index"),
url(r'^pie/$',
PieChart.as_view(metrica=gender_age_metrica,
axis_keyword='gender'),
name='gender_pie'),
url(r'^timeline/$',
TimeserieChart.as_view(metrica=gender_age_metrica),
name='gender_timeline' | ),
url(r'^requests/pie/$',
PieChart.as_view(metrica=response_time_metrica,
axis_keyword='view'),
name='requests_pie'),
url(r'^requests/$',
LatestCountAndAverageChart.as_view(metrica=response_time_metrica,
title='Requests count and average response time'),
name='requests_timeserie')
)
|
bandarji/lekhan | python/reddit/palindrome.py | Python | apache-2.0 | 332 | 0.003012 | # https://www.reddit.com/r/learnpython/co | mments/82ucgu/calling_an_input_inside_a_def_function/
def main():
while True:
word = raw_input('Enter a word: ')
if word == '-1':
bre | ak
not_ = '' if word[:] == word[::-1] else ' not'
print "Word '%s' is%s a palindrome" % (word, not_)
main()
|
data-exp-lab/girder_ythub | plugin_tests/notebook_test.py | Python | bsd-3-clause | 8,231 | 0.000121 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import mock
from tests import base
from girder.models.model_base import ValidationExce | ption
def setUp | Module():
base.enabledPlugins.append('ythub')
base.startServer()
def tearDownModule():
    # Stop the server started in setUpModule().
    base.stopServer()
class FakeAsyncResult(object):
    """Minimal stand-in for a Celery AsyncResult used by the mocked worker."""

    def __init__(self):
        # Mimic the attribute the code under test reads from a real result.
        self.task_id = 'fake_id'

    def get(self):
        """Return the canned service descriptor for the first fake node."""
        return {
            'nodeId': '123456',
            'volumeId': 'blah_volume',
            'serviceId': 'tmp-blah',
            'urlPath': '?token=foo',
        }
class FakeAsyncResult2(object):
    """Stand-in AsyncResult describing the second fake node."""

    def __init__(self):
        # Mimic the attribute the code under test reads from a real result.
        self.task_id = 'fake_id'

    def get(self):
        """Return the canned service descriptor for node 654321."""
        return {
            'nodeId': '654321',
            'volumeId': 'foobar_volume',
            'serviceId': 'tmp-foobar',
            'urlPath': '?token=blah',
        }
class FakeAsyncResult3(object):
    """Stand-in AsyncResult describing the third fake node."""

    def __init__(self):
        # Mimic the attribute the code under test reads from a real result.
        self.task_id = 'fake_id'

    def get(self):
        """Return the canned service descriptor for node 162534."""
        return {
            'nodeId': '162534',
            'volumeId': 'foobaz_volume',
            'serviceId': 'tmp-foobaz',
            'urlPath': '?token=ragl',
        }
class NotebookTestCase(base.TestCase):
    def _getUser(self, userDict):
        # Create the user, or — if it already exists from a previous run —
        # authenticate with basic auth and reuse it.
        try:
            user = self.model('user').createUser(**userDict)
        except ValidationException:
            resp = self.request(
                path='/user/authentication', method='GET',
                basicAuth='{login}:{password}'.format(**userDict))
            self.assertStatusOk(resp)
            user = resp.json['user']
        return user
    def setUp(self):
        # Import after the plugin is loaded and publish module-wide so the
        # rest of the module can reference the plugin's settings constants.
        global PluginSettings
        from girder.plugins.ythub.constants import PluginSettings
        # Point the plugin at a dummy tmpnb URL; no request ever reaches it
        # because the tests mock out Celery and urllib.
        self.model('setting').set(
            PluginSettings.TMPNB_URL, "http://tmpnb.null")
        users = ({
            'email': 'root@dev.null',
            'login': 'admin',
            'firstName': 'Root',
            'lastName': 'van Klompf',
            'password': 'secret'
        }, {
            'email': 'joe@dev.null',
            'login': 'joeregular',
            'firstName': 'Joe',
            'lastName': 'Regular',
            'password': 'secret'
        })
        # First user created becomes the site admin.
        self.admin, self.user = [self._getUser(user) for user in users]
    def testNotebooks(self):
        """End-to-end exercise of the /notebook REST endpoints with the
        container backend (Celery + HTTP) fully mocked out."""
        # Grab the default user folders
        resp = self.request(
            path='/folder', method='GET', user=self.user, params={
                'parentType': 'user',
                'parentId': self.user['_id'],
                'sort': 'name',
                'sortdir': 1
            })
        privateFolder = resp.json[0]
        publicFolder = resp.json[1]

        example_frontend = {
            'imageName': 'xarthisius/ythub',
            'command': './perform_magic',
            'memLimit': '2048m',
            'port': 12345,
            'user': 'user',
            'targetMount': '/blah',
            'urlPath': '?token={token}',
            'description': 'foo',
            'cpuShares': None,
            'public': True,
        }
        # Actually create a new frontend (private)
        resp = self.request(
            path='/frontend', method='POST', params=example_frontend,
            user=self.admin)
        self.assertStatus(resp, 200)
        frontend = resp.json

        # Create a notebook; the fake results are consumed in order by the
        # mocked Celery send_task.
        with mock.patch('celery.Celery') as celeryMock:
            with mock.patch('urllib.request.urlopen') as urllibMock:
                instance = celeryMock.return_value
                instance.send_task.side_effect = [
                    FakeAsyncResult(), FakeAsyncResult(),
                    FakeAsyncResult2(), FakeAsyncResult2(),
                    FakeAsyncResult3(), FakeAsyncResult3(),
                    FakeAsyncResult(), FakeAsyncResult()
                ]
                req = urllibMock.return_value
                req.fetch.return_value = {}
                params = {
                    'frontendId': str(frontend['_id']),
                    'folderId': str(privateFolder['_id'])
                }
                resp = self.request(
                    '/notebook', method='POST',
                    user=self.user, params=params)
                self.assertStatus(resp, 200)
                notebook = resp.json

        # The created notebook must carry the data from FakeAsyncResult.
        self.assertEqual(notebook['serviceInfo']['nodeId'], '123456')
        self.assertEqual(notebook['serviceInfo']['volumeId'], 'blah_volume')
        self.assertEqual(notebook['serviceInfo']['serviceId'], 'tmp-blah')
        self.assertEqual(notebook['url'], 'http://tmp-blah.tmpnb.null/?token=foo')
        self.assertEqual(notebook['frontendId'], str(frontend['_id']))
        self.assertEqual(notebook['folderId'], str(privateFolder['_id']))
        self.assertEqual(notebook['creatorId'], str(self.user['_id']))

        with mock.patch('celery.Celery') as celeryMock:
            with mock.patch('urllib.request.urlopen') as urllibMock:
                params = {
                    'frontendId': str(frontend['_id']),
                    'folderId': str(privateFolder['_id'])
                }
                # Return exisiting
                resp = self.request(
                    path='/notebook', method='POST', user=self.user,
                    params=params)
                self.assertStatus(resp, 200)
                self.assertEqual(resp.json['_id'], notebook['_id'])
                # Create 2nd user's nb
                params['folderId'] = str(publicFolder['_id'])
                resp = self.request(
                    path='/notebook', method='POST', user=self.user,
                    params=params)
                self.assertStatus(resp, 200)
                other_notebook = resp.json
                # Create admin nb
                params['folderId'] = str(publicFolder['_id'])
                resp = self.request(
                    path='/notebook', method='POST', user=self.admin,
                    params=params)
                self.assertStatus(resp, 200)
                admin_notebook = resp.json

        # By default user can list only his/her notebooks
        resp = self.request(
            path='/notebook', method='GET', user=self.user)
        self.assertStatus(resp, 200)
        self.assertEqual([_['_id'] for _ in resp.json],
                         [other_notebook['_id'], notebook['_id']])

        # Filter by folder
        resp = self.request(
            path='/notebook', method='GET', user=self.admin,
            params={'folderId': publicFolder['_id']})
        self.assertStatus(resp, 200)
        self.assertEqual([_['_id'] for _ in resp.json],
                         [admin_notebook['_id'], other_notebook['_id']])

        # Filter by folder and user
        resp = self.request(
            path='/notebook', method='GET', user=self.admin,
            params={'folderId': publicFolder['_id'],
                    'userId': self.user['_id']})
        self.assertStatus(resp, 200)
        self.assertEqual(resp.json[0]['_id'], other_notebook['_id'])

        # Get notebook by Id
        # Unauthenticated access is rejected.
        resp = self.request(
            path='/notebook/{_id}'.format(**notebook), method='GET')
        self.assertStatus(resp, 401)
        # A regular user cannot read another user's notebook.
        resp = self.request(
            path='/notebook/{_id}'.format(**admin_notebook), method='GET',
            user=self.user)
        self.assertStatus(resp, 403)
        resp = self.request(
            path='/notebook/{_id}'.format(**notebook), method='GET',
            user=self.admin)
        self.assertStatus(resp, 200)
        self.assertEqual(resp.json['_id'], notebook['_id'])

        with mock.patch('celery.Celery') as celeryMock:
            # A regular user cannot delete another user's notebook.
            resp = self.request(
                path='/notebook/{_id}'.format(**admin_notebook),
                method='DELETE', user=self.user)
            self.assertStatus(resp, 403)
            resp = self.request(
                path='/notebook/{_id}'.format(**notebook), method='DELETE',
                user=self.admin)
            self.assertStatus(resp, 200)

        # Check if notebook is gone
        resp = self.request(
            path='/notebook/{_id}'.format(**notebook), method='GET',
            user=self.admin)
        self.assertStatus(resp, 400)
def tearDown(self):
self.model('user').remove(self.u |
saurabh6790/omn-app | stock/doctype/stock_reconciliation/stock_reconciliation.py | Python | agpl-3.0 | 11,140 | 0.035458 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
import webnotes.defaults
import json
from webnotes import msgprint, _
from webnotes.utils import cstr, flt, cint
from stock.stock_ledger import update_entries_after
from controllers.stock_controller import StockController
class DocType(StockController):
	def setup(self):
		# Column layout expected in the uploaded reconciliation template.
		self.head_row = ["Item Code", "Warehouse", "Quantity", "Valuation Rate"]
		self.entries = []
	def validate(self):
		# Runs before save: check the uploaded data and the expense account.
		self.validate_data()
		self.validate_expense_account()
	def on_submit(self):
		# Posting: write the stock ledger entries first, then the GL entries.
		self.insert_stock_ledger_entries()
		self.make_gl_entries()
	def on_cancel(self):
		# Reverse the submission: remove/repost SLEs and cancel GL entries.
		self.delete_and_repost_sle()
		self.make_cancel_gl_entries()
	def validate_data(self):
		"""Validate the uploaded reconciliation JSON: head row present, no
		duplicate item/warehouse pairs, valid items, non-negative qty/rate.
		All failures are collected and reported together."""
		if not self.doc.reconciliation_json:
			return
		data = json.loads(self.doc.reconciliation_json)
		# strip out extra columns (if any)
		data = [row[:4] for row in data]
		if self.head_row not in data:
			msgprint(_("""Wrong Template: Unable to find head row."""),
				raise_exception=1)
		# remove the help part and save the json
		head_row_no = 0
		if data.index(self.head_row) != 0:
			head_row_no = data.index(self.head_row)
			data = data[head_row_no:]
		self.doc.reconciliation_json = json.dumps(data)
		def _get_msg(row_num, msg):
			# +2 converts the 0-based data index back to the spreadsheet row.
			return _("Row # ") + ("%d: " % (row_num+head_row_no+2)) + _(msg)
		self.validation_messages = []
		item_warehouse_combinations = []
		# validate no of rows
		rows = data[1:]
		if len(rows) > 100:
			msgprint(_("""Sorry! We can only allow upto 100 rows for Stock Reconciliation."""),
				raise_exception=True)
		for row_num, row in enumerate(rows):
			# find duplicates
			if [row[0], row[1]] in item_warehouse_combinations:
				self.validation_messages.append(_get_msg(row_num, "Duplicate entry"))
			else:
				item_warehouse_combinations.append([row[0], row[1]])
			self.validate_item(row[0], row_num+head_row_no+2)
			# note: warehouse will be validated through link validation
			# if both not specified
			if row[2] == "" and row[3] == "":
				self.validation_messages.append(_get_msg(row_num,
					"Please specify either Quantity or Valuation Rate or both"))
			# do not allow negative quantity
			if flt(row[2]) < 0:
				self.validation_messages.append(_get_msg(row_num,
					"Negative Quantity is not allowed"))
			# do not allow negative valuation
			if flt(row[3]) < 0:
				self.validation_messages.append(_get_msg(row_num,
					"Negative Valuation Rate is not allowed"))
		# throw all validation messages
		if self.validation_messages:
			for msg in self.validation_messages:
				msgprint(msg)
			raise webnotes.ValidationError
	def validate_item(self, item_code, row_num):
		"""Validate one item: it must exist, be a stock item, not be
		serialized, not be end-of-life and not be cancelled. Any failure is
		appended to self.validation_messages rather than raised directly."""
		from stock.utils import validate_end_of_life, validate_is_stock_item, \
			validate_cancelled_item
		# using try except to catch all validation msgs and display together
		try:
			item = webnotes.doc("Item", item_code)
			# end of life and stock item
			validate_end_of_life(item_code, item.end_of_life, verbose=0)
			validate_is_stock_item(item_code, item.is_stock_item, verbose=0)
			# item should not be serialized
			if item.has_serial_no == "Yes":
				raise webnotes.ValidationError, (_("Serialized Item: '") + item_code +
					_("""' can not be managed using Stock Reconciliation.\
					You can add/delete Serial No directly, \
					to modify stock of this item."""))
			# docstatus should be < 2
			validate_cancelled_item(item_code, item.docstatus, verbose=0)
		except Exception, e:
			self.validation_messages.append(_("Row # ") + ("%d: " % (row_num)) + cstr(e))
def insert_stock_ledger_entries(self):
""" find difference between current and expected entries
and create stock ledger entries based on the difference"""
from stock.utils import get_valuation_method
from stock.stock_ledger import get_previous_sle
row_template = ["item_code", "warehouse", "qty", "valuation_rate"]
if not self.doc.reconciliation_json:
msgprint(_("""Stock Reconciliation file not uploaded"""), raise_exception=1)
data = json.loads(self.doc.reco | nciliation_json)
for row_num, row in enumerate(data[data.index(self.head_row)+1:]):
row = webno | tes._dict(zip(row_template, row))
row["row_num"] = row_num
previous_sle = get_previous_sle({
"item_code": row.item_code,
"warehouse": row.warehouse,
"posting_date": self.doc.posting_date,
"posting_time": self.doc.posting_time
})
# check valuation rate mandatory
if row.qty != "" and not row.valuation_rate and \
flt(previous_sle.get("qty_after_transaction")) <= 0:
webnotes.msgprint(_("As existing qty for item: ") + row.item_code +
_(" at warehouse: ") + row.warehouse +
_(" is less than equals to zero in the system, \
valuation rate is mandatory for this item"), raise_exception=1)
change_in_qty = row.qty != "" and \
(flt(row.qty) - flt(previous_sle.get("qty_after_transaction")))
change_in_rate = row.valuation_rate != "" and \
(flt(row.valuation_rate) - flt(previous_sle.get("valuation_rate")))
if get_valuation_method(row.item_code) == "Moving Average":
self.sle_for_moving_avg(row, previous_sle, change_in_qty, change_in_rate)
else:
self.sle_for_fifo(row, previous_sle, change_in_qty, change_in_rate)
	def sle_for_moving_avg(self, row, previous_sle, change_in_qty, change_in_rate):
		"""Insert Stock Ledger Entries for Moving Average valuation"""
		def _get_incoming_rate(qty, valuation_rate, previous_qty, previous_valuation_rate):
			# Back-calculate the incoming rate that moves the running average
			# from previous_valuation_rate to valuation_rate for this change.
			if previous_valuation_rate == 0:
				return flt(valuation_rate)
			else:
				if valuation_rate == "":
					valuation_rate = previous_valuation_rate
				return (qty * valuation_rate - previous_qty * previous_valuation_rate) \
					/ flt(qty - previous_qty)
		if change_in_qty:
			# if change in qty, irrespective of change in rate
			incoming_rate = _get_incoming_rate(flt(row.qty), flt(row.valuation_rate),
				flt(previous_sle.get("qty_after_transaction")),
				flt(previous_sle.get("valuation_rate")))
			row["voucher_detail_no"] = "Row: " + cstr(row.row_num) + "/Actual Entry"
			self.insert_entries({"actual_qty": change_in_qty, "incoming_rate": incoming_rate}, row)
		elif change_in_rate and flt(previous_sle.get("qty_after_transaction")) > 0:
			# if no change in qty, but change in rate
			# and positive actual stock before this reconciliation
			incoming_rate = _get_incoming_rate(
				flt(previous_sle.get("qty_after_transaction"))+1, flt(row.valuation_rate),
				flt(previous_sle.get("qty_after_transaction")),
				flt(previous_sle.get("valuation_rate")))
			# +1 entry
			row["voucher_detail_no"] = "Row: " + cstr(row.row_num) + "/Valuation Adjustment +1"
			self.insert_entries({"actual_qty": 1, "incoming_rate": incoming_rate}, row)
			# -1 entry
			row["voucher_detail_no"] = "Row: " + cstr(row.row_num) + "/Valuation Adjustment -1"
			self.insert_entries({"actual_qty": -1}, row)
def sle_for_fifo(self, row, previous_sle, change_in_qty, change_in_rate):
"""Insert Stock Ledger Entries for FIFO valuation"""
previous_stock_queue = json.loads(previous_sle.get("stock_queue") or "[]")
previous_stock_qty = sum((batch[0] for batch in previous_stock_queue))
previous_stock_value = sum((batch[0] * batch[1] for batch in \
previous_stock_queue))
def _insert_entries():
if previous_stock_queue != [[row.qty, row.valuation_rate]]:
# make entry as per attachment
if row.qty:
row["voucher_detail_no"] = "Row: " + cstr(row.row_num) + "/Actual Entry"
self.insert_entries({"actual_qty": row.qty,
"incoming_rate": flt(row.valuation_rate)}, row)
# Make reverse entry
if previous_stock_qty:
row["voucher_detail_no"] = "Row: " + cstr(row.row_num) + "/Reverse Entry"
self.insert_entries({"actual_qty": -1 * previous_stock_qty,
"incoming_rate": previous_stock_qty < 0 and
flt(row.valuation_rate) or 0}, row)
if change_in_qty:
if row.valuation_rate == "":
# dont want change in valu |
severb/flowy | flowy/swf/worker.py | Python | mit | 14,853 | 0.000269 | import os
import socket
import venusian
from botocore.exceptions import ClientError
from flowy.swf.client import SWFClient, IDENTITY_SIZE
from flowy.swf.decision import SWFActivityDecision
from flowy.swf.decision import SWFWorkflowDecision
from flowy.swf.history import SWFExecutionHistory
from flowy.utils import logger
from flowy.utils import setup_default_logger
from flowy.worker import Worker
__all__ = ['SWFWorkflowWorker', 'SWFActivityWorker']
class SWFWorker(Worker):
    """Common base for SWF workflow and activity workers.

    Adds SWF-specific (name, version) task keys and a list of callbacks
    used to register task configurations remotely in Amazon SWF.
    """

    def __init__(self):
        super(SWFWorker, self).__init__()
        self.remote_reg_callbacks = []

    def __call__(self, name, version, input_data, decision, *extra_args):
        # SWF identifies tasks by a (name, version) pair.
        return super(SWFWorker, self).__call__(
            (str(name), str(version)), input_data, decision, *extra_args)

    def register_remote(self, swf_client, domain):
        """Register or check compatibility of all configs in Amazon SWF."""
        for remote_reg_callback in self.remote_reg_callbacks:
            # Raises if there are registration problems
            remote_reg_callback(swf_client, domain)

    def register(self, config, func, version, name=None):
        super(SWFWorker, self).register(config, func, (name, version))

    def add_remote_reg_callback(self, callback):
        # FIX: restored the attribute name corrupted mid-identifier
        # (remote_reg_callbacks).
        self.remote_reg_callbacks.append(callback)

    def make_scanner(self):
        # Venusian scanner wired to this worker's registration hooks.
        return venusian.Scanner(
            register_task=self.register_task,
            add_remote_reg_callback=self.add_remote_reg_callback)
class SWFWorkflowWorker(SWFWorker):
    # Venusian category scanned for workflow registrations.
    categories = ['swf_workflow']

    # Be explicit about what arguments are expected
    def __call__(self, name, version, input_data, decision, execution_history):
        super(SWFWorkflowWorker, self).__call__(
            name, version, input_data, decision,  # needed for worker logic
            decision, execution_history)  # extra_args passed to proxies

    def break_loop(self):
        """Used to exit the loop in tests. Return True to break."""
        return False

    def run_forever(self, domain, task_list,
                    swf_client=None,
                    setup_log=True,
                    register_remote=True,
                    identity=None):
        """Starts an endless single threaded/single process worker loop.
        The worker polls endlessly for new decisions from the specified domain
        and task list and runs them.
        If register_remote is set, all registered workflows are registered
        remotely.
        An identity can be set to track this worker in the SWF console,
        otherwise a default identity is generated from this machine domain and
        process pid.
        If setup_log is set, a default configuration for the logger is loaded.
        A custom SWF client can be passed in swf_client, otherwise a default
        client is used.
        """
        if setup_log:
            setup_default_logger()
        identity = default_identity() if identity is None else identity
        swf_client = SWFClient() if swf_client is None else swf_client
        if register_remote:
            self.register_remote(swf_client, domain)
        try:
            while 1:
                if self.break_loop():
                    break
                name, version, input_data, exec_history, decision = poll_decision(
                    swf_client, domain, task_list, identity)
                self(name, version, input_data, decision, exec_history)
        except KeyboardInterrupt:
            # Ctrl-C is the expected way to stop the worker.
            pass
class SWFActivityWorker(SWFWorker):
    # Venusian category scanned for activity registrations.
    categories = ['swf_activity']

    # Be explicit about what arguments are expected
    def __call__(self, name, version, input_data, decision):
        # No extra arguments are used
        super(SWFActivityWorker, self).__call__(
            name, version, input_data, decision,  # needed for worker logic
            decision.heartbeat)  # extra_args

    def break_loop(self):
        """Used to exit the loop in tests. Return True to break."""
        return False

    def run_forever(self, domain, task_list,
                    swf_client=None,
                    setup_log=True,
                    register_remote=True,
                    identity=None):
        """Same as SWFWorkflowWorker.run_forever but for activities."""
        if setup_log:
            setup_default_logger()
        identity = default_identity() if identity is None else identity
        swf_client = SWFClient() if swf_client is None else swf_client
        if register_remote:
            self.register_remote(swf_client, domain)
        try:
            while 1:
                if self.break_loop():
                    break
                swf_response = {}
                # Keep polling until a real task (with a taskToken) arrives;
                # empty responses and transient client errors are retried.
                while not swf_response.get('taskToken'):
                    try:
                        swf_response = swf_client.poll_for_activity_task(
                            domain, task_list, identity=identity)
                    except ClientError:
                        # add a delay before retrying?
                        logger.exception('Error while polling for activities:')
                at = swf_response['activityType']
                decision = SWFActivityDecision(swf_client, swf_response['taskToken'])
                self(at['name'], at['version'], swf_response['input'], decision)
        except KeyboardInterrupt:
            # Ctrl-C is the expected way to stop the worker.
            pass
def default_identity():
    """Generate a local identity string for this process."""
    full_identity = '%s-%s' % (socket.getfqdn(), os.getpid())
    # Truncate from the front: the trailing part (host + pid) is the most
    # distinctive and must fit in SWF's identity size limit.
    return full_identity[-IDENTITY_SIZE:]
def poll_decision(swf_client, domain, task_list, identity=None):
    """Poll a decision and create a SWFWorkflowContext structure.
    :type swf_client: :class:`SWFClient`
    :param swf_client: an implementation or duck typing of :class:`SWFClient`
    :param domain: the domain containing the task list to poll
    :param task_list: the task list from which to poll decision
    :param identity: an identity str of the request maker
    :rtype: tuple
    :returns: a tuple consisting of (name, version, input_data,
        :class:`SWFExecutionHistory`, :class:`SWFWorkflowDecision`)
    """
    first_page = poll_first_page(swf_client, domain, task_list, identity)
    token = first_page['taskToken']
    all_events = events(swf_client, domain, task_list, first_page, identity)
    # Sometimes the first event is on the second page,
    # and the first page is empty
    first_event = next(all_events)
    assert first_event['eventType'] == 'WorkflowExecutionStarted'
    # All workflow metadata lives on the WorkflowExecutionStarted event.
    wesea = 'workflowExecutionStartedEventAttributes'
    assert first_event[wesea]['taskList']['name'] == task_list
    task_duration = first_event[wesea]['taskStartToCloseTimeout']
    workflow_duration = first_event[wesea]['executionStartToCloseTimeout']
    tags = first_event[wesea].get('tagList', None)
    child_policy = first_event[wesea]['childPolicy']
    name = first_event[wesea]['workflowType']['name']
    version = first_event[wesea]['workflowType']['version']
    input_data = first_event[wesea]['input']
    try:
        running, timedout, results, errors, order = load_events(all_events)
    except _PaginationError:
        # There's nothing better to do than to retry
        return poll_decision(swf_client, domain, task_list, identity)
    execution_history = SWFExecutionHistory(running, timedout, results, errors, order)
    decision = SWFWorkflowDecision(swf_client, token, name, version, task_list,
                                   task_duration, workflow_duration, tags,
                                   child_policy)
    return name, version, input_data, execution_history, decision
def poll_first_page(swf_client, domain, task_list, identity=None):
"""Return the response from loading the first page. In case of errors,
empty responses or whatnot retry until a valid response.
:type swf_client: :class:`SWFClient`
:param swf_client: an implementation or duck typing of :class:`SWFClient`
:param domain: the domain containing the task list to poll
:param task_list: the task list from which to poll for events
:param identity: an identity str of the request maker
:rtyp |
roramirez/qpanel | qpanel/asterisk.py | Python | mit | 4,629 | 0 | # -*- coding: utf-8 -*-
#
# Class Qpanel for Asterisk
#
# Copyright (C) 2015-2020 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from __future__ import absolute_import
from Asterisk.Manager import Manager, ActionFailed, PermissionDenied
class ConnectionErrorAMI(Exception):
    '''
    Raised when an action is requested but there is no usable connection
    to the Asterisk Manager Interface (AMI).
    '''
    _error = 'Not Connected'  # default description, kept for introspection
    pass
class AsteriskAMI:
def __init__(self, host, port, user, password):
'''
Initialise a class for Asterisk
'''
self.host = host
self.port = int(port)
self.password = password
self.user = user
self.is_connected = False
self.connection = self.connect_ami()
def connect_ami(self):
try:
manager = Manager((self.host, self.port),
self.user, self.password)
return manager
except BaseException:
return None
def queueStatus(self):
return self.getQueues()
def getQueues(self):
if self.connection is None:
raise ConnectionErrorAMI(
"Failed to connect to server at '{}:{}' for user {}\n"
'Please check that Asterisk running and accepting AMI '
'connections.'.format(self.host, self.port, self.user))
cmd = self.connection.QueueStatus()
return cmd
def spy(self, channel, where_listen, option=None):
'''Generate a Originate event by Manager to used Spy Application
Parameters
----------
channel: str
| channel to create Originate action tu use ChanSpy
where_listen: str
channel where listen the spy action.
option: str
other option to add for execute distinct options.
whisper: w
barge: B
other string to add ChanSpy Command
The option is concatenate to ',q'
Returns
-------
| originate result command : Dictionary
if case the fail return return {'Response': 'failed',
'Message': str(msg)}
'''
options = ',q'
if option:
options = options + option
try:
# create a originate call for Spy a exten
return self.connection.Originate(where_listen,
application='ChanSpy',
data=channel + options,
async_param='yes')
except ActionFailed as msg:
return {'Response': 'failed', 'Message': str(msg)}
except PermissionDenied as msg:
return {'Response': 'failed', 'Message': 'Permission Denied'}
def hangup(self, channel):
'''Hangup Channel
Parameters
----------
channel: str
channel to hangup
Returns
-------
hangup result action : Dictionary
if case the fail return return {'Response': 'failed',
'Message': str(msg)}
'''
try:
# hangup channels
return self.connection.Hangup(channel)
except ActionFailed as msg:
return {'Response': 'failed', 'Message': str(msg)}
except PermissionDenied as msg:
return {'Response': 'failed', 'Message': 'Permission Denied'}
def reset_stats(self, queue):
'Reset stats for <queue>.'
id = self.connection._write_action('QueueReset', {'Queue': queue})
return self.connection._translate_response(
self.connection.read_response(id))
def isConnected(self):
    """Return True when an AMI connection object is currently held."""
    return bool(self.connection)
def remove_from_queue(self, agent, queue):
    """Remove *agent* from *queue*.

    :param agent: agent or interface to remove
    :param queue: name of the queue the agent is removed from
    :returns: the QueueRemove action result, or a dictionary of the form
        {'Response': 'failed', 'Message': <reason>} on failure
    """
    try:
        return self.connection.QueueRemove(queue, agent)
    except ActionFailed as msg:
        return {'Response': 'failed', 'Message': str(msg)}
    except PermissionDenied:
        return {'Response': 'failed', 'Message': 'Permission Denied'}
|
TomNeyland/err | errbot/backends/tox.py | Python | gpl-3.0 | 16,422 | 0.002131 | import codecs
import logging
import sys
from time import sleep
import os
from os.path import exists, join
import io
from errbot.backends import base
from errbot.errBot import ErrBot
from errbot.backends.base import Message, Identifier, Presence, Stream, MUCRoom
from errbot.backends.base import ONLINE, OFFLINE, AWAY, DND
from errbot.backends.base import build_message
from errbot.backends.base import STREAM_TRANSFER_IN_PROGRESS
from threading import Thread
log = logging.getLogger(__name__)
try:
from pytox import Tox, OperationFailedError
except ImportError:
log.exception("Could not start the tox")
log.fatal("""
If you intend to use the Tox backend please install tox:
pip install PyTox
""")
sys.exit(-1)
# Backend notes
#
# TOX mapping to Err Identifier :
# TOX client_id as hash string -> node

# Largest payload Tox accepts for a single text message.
TOX_MAX_MESS_LENGTH = 1368

# Reply sent when a non-admin tries an admin-only operation.
NOT_ADMIN = "You are not recognized as an administrator of this bot"

# Map Tox user statuses onto Err presence constants.
TOX_TO_ERR_STATUS = {
    Tox.USERSTATUS_NONE: ONLINE,
    Tox.USERSTATUS_AWAY: AWAY,
    Tox.USERSTATUS_BUSY: DND,
}

# Map Tox group roster changes onto Err presence constants
# (a plain name change carries no presence information -> None).
TOX_GROUP_TO_ERR_STATUS = {
    Tox.CHAT_CHANGE_PEER_ADD: ONLINE,
    Tox.CHAT_CHANGE_PEER_DEL: AWAY,
    Tox.CHAT_CHANGE_PEER_NAME: None,
}
class ToxStreamer(io.BufferedRWPair):
    """Duplex in-memory stream backed by an OS pipe.

    Tox file-transfer callbacks write raw chunks into the writable end
    (``self.w``) while consumers read them back from the readable end
    (``self.r``).
    """

    def __init__(self):
        read_fd, write_fd = os.pipe()
        self.r = io.open(read_fd, 'rb')
        self.w = io.open(write_fd, 'wb')
        super(ToxStreamer, self).__init__(self.r, self.w)
class ToxConnection(Tox):
def __init__(self, backend, name):
    """Create the Tox endpoint for *backend*, restoring saved state if any."""
    super(ToxConnection, self).__init__()
    self.backend = backend
    # In-flight file transfers, keyed by (friend_number, file_number).
    self.incoming_streams = {}
    self.outgoing_streams = {}
    state_file = join(backend.bot_config.BOT_DATA_DIR, 'tox.state')
    if exists(state_file):
        # Resume with the previously persisted Tox identity and friend list.
        self.load_from_file(state_file)
    self.set_name(name)
    self.rooms = set()  # keep track of joined room
    log.info('TOX: ID %s' % self.get_address())
def connect(self, bootstrap_servers):
    # Bootstrap into the Tox DHT; bootstrap_servers is unpacked into the
    # (address, port, public_key) arguments expected by pytox.
    log.info('TOX: connecting...')
    self.bootstrap_from_address(*bootstrap_servers)
def friend_to_idd(self, friend_number):
    """Build an Err Identifier for the given Tox friend number."""
    client_id = self.get_client_id(friend_number)
    return Identifier(node=client_id)
def idd_to_friend(self, identifier, autoinvite=True, autoinvite_message='I am just a bot.'):
    """
    Returns the Tox friend number from the roster.

    :exception ValueError if the identifier is not a Tox one.
    :param identifier: an err Identifier
    :param autoinvite: set to True if you want to invite this identifier if it is not in your roster.
    :param autoinvite_message: message attached to the friend request on autoinvite.
    :return: the tox friend number from the roster, None if it could not be found.
    """
    if len(identifier.node) > 76 or len(identifier.node) < 64:
        # Fix: the original raised "%s is not a valid..." without ever
        # interpolating the offending value.
        raise ValueError("%s is not a valid Tox Identifier." % identifier.node)
    try:
        # Fix: single lookup (the original called get_friend_id twice,
        # discarding the first result).
        return self.get_friend_id(identifier.node)
    except OperationFailedError:
        if autoinvite:
            return self.add_friend(identifier.node, autoinvite_message)
        return None
def on_friend_request(self, friend_pk, message):
    # Auto-accept every incoming friend request without confirmation.
    log.info('TOX: Friend request from %s: %s' % (friend_pk, message))
    self.add_friend_norequest(friend_pk)
def on_group_invite(self, friend_number, type_, data):
    """Join a text groupchat when invited by a bot admin.

    ``type_ == 1`` denotes an audio groupchat, which this backend does
    not support; *data* is the opaque invite cookie passed back to
    ``join_groupchat``.
    """
    data_hex = codecs.encode(data, 'hex_codec')
    log.info('TOX: Group invite [type %s] from %s : %s' % (type_, self.get_name(friend_number), data_hex))
    if type_ == 1:
        super().send_message(friend_number, "Err tox backend doesn't support audio groupchat yet.")
        return
    # Only recognized admins may pull the bot into group chats.
    if not self.backend.is_admin(friend_number):
        super().send_message(friend_number, NOT_ADMIN)
        return
    try:
        groupnumber = self.join_groupchat(friend_number, data)
        # pytox returns a non-negative group number on success.
        if groupnumber >= 0:
            self.rooms.add(TOXMUCRoom(self, groupnumber))
        else:
            log.error("Error joining room %s", data_hex)
    except OperationFailedError:
        log.exception("Error joining room %s", data_hex)
def on_friend_message(self, friend_number, message):
    """Forward an incoming 1:1 Tox message into the Err callback chain."""
    sender = self.friend_to_idd(friend_number)
    log.debug('TOX: %s: %s' % (sender, message))
    msg = Message(message)
    msg.frm = sender
    msg.to = self.backend.jid
    self.backend.callback_message(msg)
def on_group_namelist_change(self, group_number, friend_group_number, change):
    """Translate a group roster change into an Err presence callback."""
    log.debug("TOX: user %s changed state in group %s" % (friend_group_number, group_number))
    newstatus = TOX_GROUP_TO_ERR_STATUS[change]
    if not newstatus:
        # Pure name changes map to None: nothing presence-worthy to report.
        return
    chatroom = Identifier(node=str(group_number), resource=str(friend_group_number))
    nick = self.group_peername(group_number, friend_group_number)
    self.backend.callback_presence(
        Presence(nick=nick, status=newstatus, chatroom=chatroom))
def on_user_status(self, friend_number, kind):
    """Relay a friend's Tox status change as an Err presence update."""
    log.debug("TOX: user %s changed state", friend_number)
    who = self.friend_to_idd(friend_number)
    self.backend.callback_presence(
        Presence(identifier=who, status=TOX_TO_ERR_STATUS[kind]))
def on_status_message(self, friend_number, message):
    # A friend updated their status text; surface it as a presence message.
    pres = Presence(identifier=self.friend_to_idd(friend_number),
                    message=message)
    self.backend.callback_presence(pres)
def on_connection_status(self, friend_number, status):
    """Report a friend going online/offline to Err."""
    log.debug("TOX: user %s changed connection status", friend_number)
    if status:
        presence_state = ONLINE
    else:
        presence_state = OFFLINE
    self.backend.callback_presence(
        Presence(identifier=self.friend_to_idd(friend_number),
                 status=presence_state))
def on_group_message(self, group_number, friend_group_number, message):
    """Deliver a groupchat line to Err as a 'groupchat' Message."""
    log.debug('TOX: Group-%i User-%i: %s' % (group_number, friend_group_number, message))
    msg = Message(message, type_='groupchat')
    msg.to = self.backend.jid
    msg.frm = Identifier(node=str(group_number), resource=str(friend_group_number))
    log.debug('TOX: callback with type = %s' % msg.type)
    self.backend.callback_message(msg)
# File transfers
def on_file_send_request(self, friend_number, file_number, file_size, filename):
    """Accept an incoming file transfer and expose it to Err as a Stream.

    (Repairs the garbled ``file_num | ber`` identifiers from the dump.)
    """
    log.debug("TOX: incoming file transfer %s : %s", friend_number, filename)
    # make a pipe on which we will be able to write from tox
    pipe = ToxStreamer()
    # make the original stream with all the info
    stream = Stream(self.friend_to_idd(friend_number), pipe, filename, file_size)
    # store it for tracking purposes
    self.incoming_streams[(friend_number, file_number)] = (pipe, stream)
    # callback err so it will duplicate the stream and send it to all the plugins
    self.backend.callback_stream(stream)
    # always say ok, and kill it later if finally we don't want it
    self.file_send_control(friend_number, 1, file_number, Tox.FILECONTROL_ACCEPT)
def on_file_data(self, friend_number, file_number, data):
    """Append a received chunk to the pipe backing this transfer."""
    log.debug("TOX: file data received : %s, size : %d", friend_number, len(data))
    pipe = self.incoming_streams[(friend_number, file_number)][0]
    pipe.write(data)
def on_file_control(self, friend_number, receive_send, file_number, control_type, data):
log.debug("TOX: file control received : %s, type : %d", friend_number, control_type)
if receive_send == 0:
pipe, stream = self.incoming_streams[(friend_number, file_number)]
if control_type == Tox.FILECONTROL_KILL:
stream.error("Other party killed the transfer")
pipe.w.close()
elif control_type == Tox.FILECONTROL_FINISHED:
log.debug("Other party signal the end of transfer on %s:%s" % (friend_number, file_number))
pipe.flush()
pipe.w.close()
log.debug("Receive file control %s", control_type)
else:
stream = self.outgoing_streams[(friend_number, file_number)]
if control_type == Tox.FILECONTROL_ACCEPT:
log.debug("TOX: file accepted by rem |
hanleilei/note | python/vir_manager/utils/mail_utils.py | Python | cc0-1.0 | 2,926 | 0.000733 | import smtplib
from email.mime.text import MIMEText
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
def send_message(message):
    """Convenience shortcut: mail *message* to the configured report users.

    :param message: body text of the error mail
    """
    mailer = SendMail()
    mailer.send_mail(settings.REPORT_USER, 'Error info', message)
class SendMail(object):
    """Thin SMTP wrapper around the host/credentials from Django settings."""

    def __init__(self):
        self.mail_host = settings.MAIL_HOST
        self.mail_host_user = settings.MAIL_HOST_USER
        self.mail_host_pwd = settings.MAIL_HOST_PWD
        self.smtp = smtplib.SMTP()
        self.smtp_login()

    def smtp_login(self):
        """Connect and authenticate against the configured SMTP host."""
        self.smtp.connect(self.mail_host)
        self.smtp.login(self.mail_host_user, self.mail_host_pwd)

    def send_file_mail(self, receiver_list, subject, file_info, file_name):
        """Send *file_info* as an attachment named *file_name*.

        Fix: the original referenced an undefined ``msg`` and never
        imported MIMEApplication, so it crashed on every call.

        :param receiver_list: list of recipient addresses
        :param subject: mail subject
        :param file_info: raw bytes of the attachment
        :param file_name: filename shown to the recipient
        """
        # Local imports keep the module import section untouched.
        from email.mime.application import MIMEApplication
        from email.mime.multipart import MIMEMultipart
        msg = MIMEMultipart()
        part = MIMEApplication(file_info)
        part.add_header('Content-Disposition',
                        'attachment', filename=file_name)
        msg.attach(part)
        sender = self.mail_host_user
        msg['Subject'] = subject
        msg['From'] = sender
        msg['To'] = ";".join(receiver_list)
        self.smtp.sendmail(sender, receiver_list, msg.as_string())

    def send_mail(self, receiver_list, subject, context, mail_type="plain"):
        """Send a text mail.

        :param receiver_list: list of recipient addresses
        :param subject: mail subject
        :param context: body text
        :param mail_type: MIME subtype, e.g. "plain" or "html"
        """
        sender = self.mail_host_user
        msg = MIMEText(context, mail_type)
        msg['Subject'] = subject
        msg['From'] = sender
        msg['To'] = ";".join(receiver_list)
        self.smtp.sendmail(sender, receiver_list, msg.as_string())

    def close(self):
        """Close the SMTP connection."""
        self.smtp.close()
class MailHandler(object):
    """Send warning/error notification mails via Django's mail API."""

    def __init__(self):
        pass

    def send_mail_message(self, to_user, msg, error=0):
        """Mail *msg* to the comma-separated usernames in *to_user*.

        :param to_user: comma-separated usernames (expanded to company addresses)
        :param msg: HTML-escaped-safe body text
        :param error: truthy for an error mail, falsy for a warning
        :returns: 0 on success, 1 on failure
        """
        subject = settings.MSUBMAIL
        if error:
            text_content = 'Virtual Manager Error'
        else:
            text_content = 'Virtual Manager Warning'
        from_email = settings.FMAIL
        try:
            to = [str(user) + "@hujiang.com" for user in to_user.split(',')]
            print(to)
            content_msg = EmailMultiAlternatives(
                subject, text_content, from_email, to)
            html_content = u'<b>' + msg + '</b>'
            content_msg.attach_alternative(html_content, 'text/html')
            content_msg.send()
            return 0
        except Exception:
            # Fix: was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt.
            return 1
|
dubzzz/py-mymoney | www/scripts/generate_db.py | Python | mit | 2,908 | 0.004814 | #!/usr/bin/python
# This script has to generate the sqlite database
#
# Requirements (import from):
# - sqlite3
#
# Syntax:
# ./generate_db.py
import sqlite3
import hashlib
import getpass
import sys
| from os import path, urandom
SCRIPT_PATH = path.dirname(__file__)
DEFAULT_DB = path.join(SCRIPT_PATH, "../mymoney.db")
def generate_tables(db=DEFAULT_DB):
    """Create the mymoney sqlite schema if it does not exist yet.

    Safe to call repeatedly: every statement uses CREATE TABLE IF NOT EXISTS.

    :param db: path of the sqlite database file to initialise
    """
    schema = (
        # Hierarchical categories; each node may reference a parent node.
        '''CREATE TABLE IF NOT EXISTS node (
            id INTEGER PRIMARY KEY,
            parent_id INTEGER,
            title TEXT NOT NULL,
            FOREIGN KEY(parent_id) REFERENCES node(id))''',
        # One row per expense entry.
        '''CREATE TABLE IF NOT EXISTS expense (
            id INTEGER PRIMARY KEY,
            title TEXT NOT NULL,
            date INTEGER NOT NULL,
            price REAL NOT NULL)''',
        # Many-to-many link between expenses and category nodes.
        '''CREATE TABLE IF NOT EXISTS node_expense (
            expense_id INTEGER NOT NULL,
            node_id INTEGER NOT NULL,
            visible BOOLEAN NOT NULL,
            PRIMARY KEY(expense_id, node_id),
            FOREIGN KEY(expense_id) REFERENCES expense(id),
            FOREIGN KEY(node_id) REFERENCES node(id))''',
        # Credentials: salted sha1 hashes.
        '''CREATE TABLE IF NOT EXISTS users (
            id INTEGER PRIMARY KEY,
            username TEXT NOT NULL,
            password TEXT NOT NULL,
            salt TEXT NOT NULL)''',
    )
    connection = sqlite3.connect(db)
    with connection:
        cursor = connection.cursor()
        for statement in schema:
            cursor.execute(statement)
        # Commit the changes
        connection.commit()
if __name__ == '__main__':
    generate_tables(DEFAULT_DB)
    # input() vs raw_input() differs between Python 2 and 3.
    if sys.version_info < (3, 0):
        username = raw_input("Username: ")
    else:
        username = input("Username: ")
    ask_password = True
    while ask_password:
        ask_password = False
        password = getpass.getpass("Enter your password: ")
        cpassword = getpass.getpass("Enter your password (confirmation): ")
        if password != cpassword:
            ask_password = True
            print("Passwords differ")
    conn = sqlite3.connect(DEFAULT_DB)
    with conn:
        c = conn.cursor()
        # Fix: bytes.encode('hex') is Python 2 only; hexlify works on 2 and 3.
        from binascii import hexlify
        salt = hexlify(urandom(16)).decode('ascii')
        h = hashlib.sha1()
        # Fix: hashlib requires bytes on Python 3.
        h.update((salt + password).encode('utf-8'))
        hashvalue = h.hexdigest()
        c.execute('''DELETE FROM users WHERE username=?''', (username,))
        c.execute('''INSERT INTO users (username, password, salt)
                     VALUES (?, ?, ?)''', (username, hashvalue, salt))
|
goldsborough/euler | 18.py | Python | mit | 1,380 | 0.000725 | #!/usr/bin/env python
# -*- coding: utf-8 -8-
"""
By starting at the top of the triangle below and moving to adjacent numbers
on the row below, the maximum total from top to bottom is 23.
3
7 4
2 4 6
8 5 9 3
That is, 3 + 7 + 4 + 9 = 23.
Find the maximum total from top to bottom of the triangle below:
"""
# Triangle data for Project Euler problem 18 (row 3 repaired: the dump
# had garbled it to "| 17 47 82").
triangle = """
75
95 64
17 47 82
18 35 87 10
20 04 82 47 65
19 01 23 75 03 34
88 02 77 73 07 63 67
99 65 04 28 06 16 70 92
41 41 26 56 83 40 80 70 33
41 48 72 33 47 32 37 16 94 29
53 71 44 65 25 43 91 52 97 51 14
70 11 33 28 77 73 17 78 39 68 17 57
91 71 52 38 17 14 91 43 58 50 27 29 48
63 66 04 68 89 53 67 30 73 16 69 87 40 31
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
"""
def largest_triangle_sum(values, row=0, column=0, sums=None):
    """Return the maximum top-to-bottom path sum in a triangle.

    :param values: triangle as a list of rows of ints
    :param row: row index to start from (defaults to the apex)
    :param column: column index to start from
    :param sums: internal memoization cache; leave as None
    """
    # Fix: the original used a mutable default ``sums={}``, so the cache
    # leaked between top-level calls and returned stale results for a
    # different triangle. Create a fresh cache per top-level call.
    if sums is None:
        sums = {}
    if (row, column) in sums:
        return sums[row, column]
    s = values[row][column]
    if row + 1 < len(values):
        left = largest_triangle_sum(values, row + 1, column, sums)
        right = largest_triangle_sum(values, row + 1, column + 1, sums)
        s += max(left, right)
    sums[row, column] = s
    return s
def main():
    """Parse the triangle literal and print its best path total."""
    rows = [[int(token) for token in line.split()]
            for line in triangle.split('\n') if line]
    print(largest_triangle_sum(rows))
if __name__ == '__main__':
main()
|
LLNL/spack | var/spack/repos/builtin/packages/py-pulp/package.py | Python | lgpl-2.1 | 768 | 0.001302 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPulp(PythonPackage):
    """PuLP is an LP modeler written in Python. PuLP can generate MPS or LP
    files and call GLPK, COIN-OR CLP/CBC, CPLEX, GUROBI, MOSEK, XPRESS, CHOCO,
    MIPCL, SCIP to solve linear problems."""

    # Repaired the garbled ``ho | mepage`` attribute from the dump.
    homepage = "https://github.com/coin-or/pulp"
    pypi = "PuLP/PuLP-2.6.0.tar.gz"

    maintainers = ['marcusboden']

    # 64-hex-digit checksum is a sha256; make the keyword explicit.
    version('2.6.0', sha256='4b4f7e1e954453e1b233720be23aea2f10ff068a835ac10c090a93d8e2eb2e8d')

    depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
|
wpjesus/codematch | ietf/message/resources.py | Python | bsd-3-clause | 1,927 | 0.008822 | # Autogenerated by the mkresources management command 2014-11-13 23:53
from tastypie.resources import ModelResource
from tastypie.fields import ToOneField, ToManyField
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from ietf import api
from ietf.message.models import * # pyflakes:ignore
from ietf.person.resources import PersonResource
from ietf.group.resources import GroupResource
from ietf.doc.resources import DocumentResource
class MessageResource(ModelResource):
    """Tastypie REST resource exposing Message with filterable fields."""
    by = ToOneField(PersonResource, 'by')
    related_groups = ToManyField(GroupResource, 'related_groups', null=True)
    related_docs = ToManyField(DocumentResource, 'related_docs', null=True)

    class Meta:
        queryset = Message.objects.all()
        serializer = api.Serializer()
        #resource_name = 'message'
        filtering = {
            "id": ALL,
            "time": ALL,
            "subject": ALL,
            "frm": ALL,
            "to": ALL,
            "cc": ALL,
            "bcc": ALL,
            "reply_to": ALL,
            "body": ALL,
            "content_type": ALL,
            "by": ALL_WITH_RELATIONS,
            "related_groups": ALL_WITH_RELATIONS,
            "related_docs": ALL_WITH_RELATIONS,
        }
# Repaired the garbled ``api.mess | age`` call from the dump.
api.message.register(MessageResource())
from ietf.person.resources import PersonResource
class SendQueueResource(ModelResource):
    """Tastypie REST resource for SendQueue entries."""
    by = ToOneField(PersonResource, 'by')
    message = ToOneField(MessageResource, 'message')

    class Meta:
        queryset = SendQueue.objects.all()
        # Repaired the garbled ``api.Seriali | zer()`` from the dump.
        serializer = api.Serializer()
        #resource_name = 'sendqueue'
        filtering = {
            "id": ALL,
            "time": ALL,
            "send_at": ALL,
            "sent_at": ALL,
            "note": ALL,
            "by": ALL_WITH_RELATIONS,
            "message": ALL_WITH_RELATIONS,
        }
api.message.register(SendQueueResource())
|
angelapper/edx-platform | common/djangoapps/third_party_auth/tasks.py | Python | agpl-3.0 | 9,113 | 0.004391 | # -*- coding: utf-8 -*-
"""
Code to manage fetching and storing the metadata of IdPs.
"""
import datetime
import logging
import dateutil.parser
import pytz
import requests
from celery.task import task
from lxml import etree
from onelogin.saml2.utils import OneLogin_Saml2_Utils
from requests import exceptions
from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig, SAMLProviderData
log = logging.getLogger(__name__)
SAML_XML_NS = 'urn:oasis:names:tc:SAML:2.0:metadata' # The SAML Metadata XML namespace
class MetadataParseError(Exception):
""" An error occurred while parsing the SAML metadata from an IdP """
pass
@task(name='third_party_auth.fetch_saml_metadata')
def fetch_saml_metadata():
    """
    Fetch and store/update the metadata of all IdPs

    This task should be run on a daily basis.
    It's OK to run this whether or not SAML is enabled.

    Return value:
        tuple(num_skipped, num_attempted, num_updated, num_failed, failure_messages)
        num_total: Total number of providers found in the database
        num_skipped: Number of providers skipped for various reasons (see L52)
        num_attempted: Number of providers whose metadata was fetched
        num_updated: Number of providers that are either new or whose metadata has changed
        num_failed: Number of providers that could not be updated
        failure_messages: List of error messages for the providers that could not be updated
    """
    # First make a list of all the metadata XML URLs:
    saml_providers = SAMLProviderConfig.key_values('idp_slug', flat=True)
    num_total = len(saml_providers)
    num_skipped = 0
    url_map = {}
    for idp_slug in saml_providers:
        config = SAMLProviderConfig.current(idp_slug)
        # Skip SAML provider configurations which do not qualify for fetching
        if any([
            not config.enabled,
            not config.automatic_refresh_enabled,
            not SAMLConfiguration.is_enabled(config.site)
        ]):
            num_skipped += 1
            continue

        url = config.metadata_source
        if url not in url_map:
            url_map[url] = []
        # De-duplicate entity IDs per metadata URL so each is parsed once.
        if config.entity_id not in url_map[url]:
            url_map[url].append(config.entity_id)
    # Now attempt to fetch the metadata for the remaining SAML providers:
    num_attempted = len(url_map)
    num_updated = 0
    failure_messages = []  # We return the length of this array for num_failed
    for url, entity_ids in url_map.items():
        try:
            log.info("Fetching %s", url)
            if not url.lower().startswith('https'):
                log.warning("This SAML metadata URL is not secure! It should use HTTPS. (%s)", url)
            response = requests.get(url, verify=True)  # May raise HTTPError or SSLError or ConnectionError
            response.raise_for_status()  # May raise an HTTPError

            try:
                parser = etree.XMLParser(remove_comments=True)
                xml = etree.fromstring(response.content, parser)
            except etree.XMLSyntaxError:
                # Re-raised so the dedicated XMLSyntaxError handler below
                # records the lxml error log instead of a generic message.
                raise
            # TODO: Can use OneLogin_Saml2_Utils to validate signed XML if anyone is using that

            for entity_id in entity_ids:
                log.info(u"Processing IdP with entityID %s", entity_id)
                public_key, sso_url, expires_at = _parse_metadata_xml(xml, entity_id)
                # _update_data persists the parsed values; True means a new
                # SAMLProviderData record was created.
                changed = _update_data(entity_id, public_key, sso_url, expires_at)
                if changed:
                    log.info(u"→ Created new record for SAMLProviderData")
                    num_updated += 1
                else:
                    log.info(u"→ Updated existing SAMLProviderData. Nothing has changed.")
        except (exceptions.SSLError, exceptions.HTTPError, exceptions.RequestException, MetadataParseError) as error:
            # Catch and process exception in case of errors during fetching and processing saml metadata.
            # Here is a description of each exception.
            # SSLError is raised in case of errors caused by SSL (e.g. SSL cer verification failure etc.)
            # HTTPError is raised in case of unexpected status code (e.g. 500 error etc.)
            # RequestException is the base exception for any request related error that "requests" lib raises.
            # MetadataParseError is raised if there is error in the fetched meta data (e.g. missing @entityID etc.)
            log.exception(error.message)
            failure_messages.append(
                "{error_type}: {error_message}\nMetadata Source: {url}\nEntity IDs: \n{entity_ids}.".format(
                    error_type=type(error).__name__,
                    error_message=error.message,
                    url=url,
                    entity_ids="\n".join(
                        ["\t{}: {}".format(count, item) for count, item in enumerate(entity_ids, start=1)],
                    )
                )
            )
        except etree.XMLSyntaxError as error:
            log.exception(error.message)
            failure_messages.append(
                "XMLSyntaxError: {error_message}\nMetadata Source: {url}\nEntity IDs: \n{entity_ids}.".format(
                    error_message=str(error.error_log),
                    url=url,
                    entity_ids="\n".join(
                        ["\t{}: {}".format(count, item) for count, item in enumerate(entity_ids, start=1)],
                    )
                )
            )

    # Return counts for total, skipped, attempted, updated, and failed, along with any failure messages
    return num_total, num_skipped, num_attempted, num_updated, len(failure_messages), failure_messages
def _parse_metadata_xml(xml, entity_id):
"""
Given an XML document containing SAML 2.0 metadata, parse it and return a tuple of
(public_key, sso_url, expires_at) for the specified entityID.
Raises MetadataParseError if anything is wrong.
"""
if xml.tag == etree.QName(SAML_XML_NS, 'EntityDescriptor'):
entity_desc = xml
else:
if xml.tag != etree.QName(SAML_XML_NS, 'EntitiesDescriptor'):
raise MetadataParseError("Expected root element to be <EntitiesDescriptor>, not {}".format(xml.tag))
entity_desc = xml.find(
".//{}[@entityID='{}']".format(etree.QName(SAML_XML_NS, 'EntityDescriptor'), entity_id)
)
if not entity_desc:
raise MetadataParseError("Can't find EntityDescriptor for entityID {}".format(entity_id))
expires_at = None
if "validUntil" in xml.attrib:
expires_at = dateutil.parser.parse(xml.attrib["validUntil"])
if "cacheDuration" in xml.attrib:
cache_expires = OneLogin_Saml2_Utils.parse_duration(xml.attrib["cacheDuration"])
cache_expires = datetime.datetime.fromtimestamp(cache_expires, tz=pytz.utc)
if expires_at is None or cache_expires < expires_at:
expires_at = cache_expires
sso_desc = entity_desc.find(etree.QName(SAML_XML_NS, "IDPSSODescriptor"))
if not sso_desc:
raise MetadataParseError("IDPSSODescriptor missing")
if 'urn:oasis:names:tc:SAML:2.0:protocol' not in sso_desc.get("protocolSupportEnumeration"):
raise MetadataParseError("This IdP does not support SAML 2.0")
# Now we just need to get the public_key and sso_url
public_key = sso_desc.findtext("./{}//{}".format(
etree.QName(SAML_XML_NS, "KeyDescriptor"), "{http://www.w3.org/2000/09/xmldsig#}X509Certificate"
))
if not public_key:
raise MetadataParseError("Public Key missing. Expected an <X509Certificate>")
public_key = public_key.replace(" ", "")
binding_elements = sso_desc.iterfind("./{}".format(etree.QName(SAML_XML_NS, "SingleSignOnService")))
sso_bindings = {element.get('Binding'): element.get('Location') for element in binding_elements}
try:
# The only binding supported by python-saml and python-social-auth is HTTP-Redirect:
sso_url = sso_bindings['ur | n:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect']
except KeyError:
raise MetadataParseError("Unable to find SSO URL with HTTP-Redirect binding.")
return public_key, | ss |
jzbontar/mc-cnn | samples/load_bin.py | Python | bsd-2-clause | 249 | 0 | import numpy as np
# Memory-map the raw float32 volumes dumped by the stereo matcher.
# NOTE(review): shapes assume a 370x1226 image with 70 disparity
# hypotheses -- confirm against the writer side. (Garbled ``np.memm | ap``
# and ``37 | 0`` repaired from the dump.)
left = np.memmap('../left.bin', dtype=np.float32, shape=(1, 70, 370, 1226))
right = np.memmap('../right.bin', dtype=np.float32, shape=(1, 70, 370, 1226))
disp = np.memmap('../disp.bin', dtype=np.float32, shape=(1, 1, 370, 1226))
|
tchellomello/home-assistant | tests/components/met/test_weather.py | Python | apache-2.0 | 2,271 | 0.000881 | """Test Met weather entity."""
from homeassistant.components.met import DOMAIN
from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN
async def test_tracking_home(hass, mock_weather):
    """Test we track home."""
    await hass.config_entries.flow.async_init("met", context={"source": "onboarding"})
    await hass.async_block_till_done()
    assert len(hass.states.async_entity_ids("weather")) == 1
    assert len(mock_weather.mock_calls) == 4

    # Test the hourly sensor is disabled by default
    registry = await hass.helpers.entity_registry.async_get_registry()

    state = hass.states.get("weather.test_home_hourly")
    assert state is None

    entry = registry.async_get("weather.test_home_hourly")
    assert entry
    assert entry.disabled
    assert entry.disabled_by == "integration"

    # Test we track config
    await hass.config.async_update(latitude=10, longitude=20)
    await hass.async_block_till_done()
    # Repaired garbled ``mock_w | eather`` identifier from the dump.
    assert len(mock_weather.mock_calls) == 8

    entry = hass.config_entries.async_entries()[0]
    await hass.config_entries.async_remove(entry.entry_id)
    assert len(hass.states.async_entity_ids("weather")) == 0
async def test_not_tracking_home(hass, mock_weather):
    """Test when we not track home."""

    # Pre-create registry entry for disabled by default hourly weather
    registry = await hass.helpers.entity_registry.async_get_registry()
    registry.async_get_or_create(
        WEATHER_DOMAIN,
        DOMAIN,
        "10-20-hourly",
        suggested_object_id="somewhere_hourly",
        disabled_by=None,
    )

    await hass.config_entries.flow.async_init(
        "met",
        context={"source": "user"},
        data={"name": "Somewhere", "latitude": 10, "longitude": 20, "elevation": 0},
    )
    await hass.async_block_till_done()
    # Two entities: the regular weather entity plus the pre-enabled hourly one.
    assert len(hass.states.async_entity_ids("weather")) == 2
    assert len(mock_weather.mock_calls) == 4

    # Test we do not track config
    await hass.config.async_update(latitude=10, longitude=20)
    await hass.async_block_till_done()
    # Call count unchanged: config updates must not trigger a refetch here.
    assert len(mock_weather.mock_calls) == 4

    entry = hass.config_entries.async_entries()[0]
    await hass.config_entries.async_remove(entry.entry_id)
    assert len(hass.states.async_entity_ids("weather")) == 0
|
streamlio/heron | heron/executor/tests/python/heron_executor_unittest.py | Python | apache-2.0 | 14,316 | 0.008103 | # Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''heron executor unittest'''
import os
import socket
import unittest2 as unittest
from heron.executor.src.python.heron_executor import ProcessInfo
from heron.executor.src.python.heron_executor import HeronExecutor
from heron.proto.packing_plan_pb2 import PackingPlan
# pylint: disable=unused-argument
# pylint: disable=missing-docstring
def get_test_heron_internal_yaml():
    """Locate test_heron_internals.yaml relative to this test module.

    ``__file__`` is expected to live five path components below the heron
    checkout root (heron/executor/tests/python/<this file>), so the root
    is recovered by dropping those trailing components.
    """
    components = __file__.split('/')
    heron_dir = '/'.join(components[:-5])
    return os.path.join(heron_dir, 'heron/config/src/yaml/conf/test/test_heron_internals.yaml')
# Config path embedded in every expected command string below.
INTERNAL_CONF_PATH = get_test_heron_internal_yaml()
# Hostname embedded in the expected tmaster/stmgr command strings.
HOSTNAME = socket.gethostname()
class MockPOpen(object):
    """Stand-in for subprocess.Popen that hands out deterministic pids."""

    next_pid = 0

    def __init__(self):
        # Consume the current counter value as this fake process' pid.
        self.pid = MockPOpen.next_pid
        MockPOpen.next_pid = MockPOpen.next_pid + 1

    @staticmethod
    def set_next_pid(next_pid):
        """Reset the pid counter so expected process lists line up."""
        MockPOpen.next_pid = next_pid
class MockExecutor(HeronExecutor):
    """HeronExecutor double that records commands instead of spawning them."""

    def __init__(self, args):
        # Must exist before the base __init__ can trigger process runs.
        self.processes = []
        super(MockExecutor, self).__init__(args, None)

    # pylint: disable=no-self-use
    def _load_logging_dir(self, heron_internals_config_file):
        """Avoid touching the real filesystem for log directories."""
        return "fake_dir"

    def _run_process(self, name, cmd, env=None):
        """Record the would-be process and hand back a fake Popen."""
        fake_popen = MockPOpen()
        self.processes.append(ProcessInfo(fake_popen, name, cmd))
        return fake_popen

    def _get_jvm_version(self):
        """Pretend a Java 8 JVM is installed."""
        return "1.8.y.x"
class HeronExecutorTest(unittest.TestCase):
"""Unittest for Heron Executor"""
# NOTE: class-body helper (no self) evaluated while building the expected
# command fixtures below; returns the exact heron-shell command line.
def get_expected_shell_command(container_id):
    return 'heron_shell_binary --port=shell-port ' \
           '--log_file_prefix=fake_dir/heron-shell-%s.log ' \
           '--secret=topid' % container_id
def build_packing_plan(self, instance_distribution):
    """Build a PackingPlan proto from a mapping of
    {container_id: [(component_name, task_id, component_index), ...]}."""
    packing_plan = PackingPlan()
    for container_id, instances in instance_distribution.items():
        container_plan = packing_plan.container_plans.add()
        container_plan.id = int(container_id)
        for component_name, global_task_id, component_index in instances:
            instance_plan = container_plan.instance_plans.add()
            instance_plan.component_name = component_name
            instance_plan.task_id = int(global_task_id)
            instance_plan.component_index = int(component_index)
    return packing_plan
# pylint: disable=no-self-argument
# Exact metrics manager JVM command line expected for a given container.
def get_expected_metricsmgr_command(container_id):
    return "heron_java_home/bin/java -Xmx1024M -XX:+PrintCommandLineFlags -verbosegc " \
           "-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:+PrintGCCause " \
           "-XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=100M " \
           "-XX:+PrintPromotionFailure -XX:+PrintTenuringDistribution -XX:+PrintHeapAtGC " \
           "-XX:+HeapDumpOnOutOfMemoryError -XX:+UseConcMarkSweepGC -XX:+PrintCommandLineFlags " \
           "-Xloggc:log-files/gc.metricsmgr.log -Djava.net.preferIPv4Stack=true " \
           "-cp metricsmgr_classpath com.twitter.heron.metricsmgr.MetricsManager metricsmgr-%d " \
           "metricsmgr_port topname topid %s " \
           "metrics_sinks_config_file" % (container_id, INTERNAL_CONF_PATH)
# Exact metrics cache manager JVM command line (container 0 only).
def get_expected_metricscachemgr_command():
    return "heron_java_home/bin/java -Xmx1024M -XX:+PrintCommandLineFlags -verbosegc " \
           "-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:+PrintGCCause " \
           "-XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=100M " \
           "-XX:+PrintPromotionFailure -XX:+PrintTenuringDistribution -XX:+PrintHeapAtGC " \
           "-XX:+HeapDumpOnOutOfMemoryError -XX:+UseConcMarkSweepGC -XX:+PrintCommandLineFlags " \
           "-Xloggc:log-files/gc.metricscache.log -Djava.net.preferIPv4Stack=true " \
           "-cp metricscachemgr_classpath com.twitter.heron.metricscachemgr.MetricsCacheManager " \
           "--metricscache_id metricscache-0 --master_port metricscachemgr_masterport " \
           "--stats_port metricscachemgr_statsport --topology_name topname --topology_id topid " \
           "--system_config_file %s --sink_config_file metrics_sinks_config_file " \
           "--cluster cluster --role role --environment environ --verbose" % (INTERNAL_CONF_PATH)
# Exact HeronInstance JVM command line for one instance of a component.
def get_expected_instance_command(component_name, instance_id, container_id):
    instance_name = "container_%d_%s_%d" % (container_id, component_name, instance_id)
    return "heron_java_home/bin/java -Xmx320M -Xms320M -Xmn160M -XX:MaxMetaspaceSize=128M " \
           "-XX:MetaspaceSize=128M -XX:ReservedCodeCacheSize=64M -XX:+CMSScavengeBeforeRemark " \
           "-XX:TargetSurvivorRatio=90 -XX:+PrintCommandLineFlags -verbosegc -XX:+PrintGCDetails " \
           "-XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -XX:+PrintGCCause " \
           "-XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=5 -XX:GCLogFileSize=100M " \
           "-XX:+PrintPromotionFailure -XX:+PrintTenuringDistribution -XX:+PrintHeapAtGC " \
           "-XX:+HeapDumpOnOutOfMemoryError -XX:+UseConcMarkSweepGC -XX:ParallelGCThreads=4 " \
           "-Xloggc:log-files/gc.%s.log -XX:+HeapDumpOnOutOfMemoryError " \
           "-Djava.net.preferIPv4Stack=true -cp instance_classpath:classpath " \
           "com.twitter.heron.instance.HeronInstance topname topid %s %s %d 0 stmgr-%d " \
           "master_port metricsmgr_port %s" \
           % (instance_name, instance_name, component_name, instance_id,
              container_id, INTERNAL_CONF_PATH)
# Expected processes for container 0 (tmaster container); pids start at 37.
MockPOpen.set_next_pid(37)
expected_processes_container_0 = [
    ProcessInfo(MockPOpen(), 'heron-shell-0', get_expected_shell_command(0)),
    ProcessInfo(MockPOpen(), 'metricsmgr-0', get_expected_metricsmgr_command(0)),
    ProcessInfo(MockPOpen(), 'heron-tmaster',
                'tmaster_binary %s master_port '
                'tmaster_controller_port tmaster_stats_port '
                'topname topid zknode zkroot '
                '%s metrics_sinks_config_file metricsmgr_port '
                'ckptmgr-port' % (HOSTNAME, INTERNAL_CONF_PATH )),
    ProcessInfo(MockPOpen(), 'heron-metricscache', get_expected_metricscachemgr_command()),
]
# Expected processes for container 1 (repaired the garbled ``Mo | ckPOpen``
# and ``MockPO | pen`` identifiers from the dump); pids restart at 37.
MockPOpen.set_next_pid(37)
expected_processes_container_1 = [
    ProcessInfo(MockPOpen(), 'stmgr-1',
                'stmgr_binary topname topid topdefnfile zknode zkroot stmgr-1 '
                'container_1_word_3,container_1_exclaim1_2,container_1_exclaim1_1 %s master_port '
                'metricsmgr_port shell-port %s ckptmgr-port ckptmgr-1'
                % (HOSTNAME, INTERNAL_CONF_PATH)),
    ProcessInfo(MockPOpen(), 'container_1_word_3', get_expected_instance_command('word', 3, 1)),
    ProcessInfo(MockPOpen(), 'container_1_exclaim1_1',
                get_expected_instance_command('exclaim1', 1, 1)),
    ProcessInfo(MockPOpen(), 'container_1_exclaim1_2',
                get_expected_instance_command('exclaim1', 2, 1)),
    ProcessInfo(MockPOpen(), 'heron-shell-1', get_expected_shell_command(1)),
    ProcessInfo(MockPOpen(), 'metricsmgr-1', get_expected_metricsmgr_command(1)),
]
MockPOpen.set_next_pid(37)
expected_processes_container_7 = [
ProcessInfo(MockPOpen(), 'container_7_word_11', get_expected_instance_command('word', 11, 7)),
ProcessInfo(MockPOpen(), 'container_7_exclaim1_210',
|
LCOGT/valhalla | valhalla/accounts/migrations/0005_profile_terms_accepted.py | Python | gpl-3.0 | 406 | 0 | # Generated by Django 2.1.3 on 2019-02-22 22:41
from django.db import mig | rations, mode | ls
class Migration(migrations.Migration):
dependencies = [
('accounts', '0004_auto_20170418_0219'),
]
operations = [
migrations.AddField(
model_name='profile',
name='terms_accepted',
field=models.DateTimeField(blank=True, null=True),
),
]
|
carlegbert/wholenote | fnote/blueprints/page/__init__.py | Python | mit | 45 | 0 | from fnote. | blueprints.page.views import pa | ge
|
tonk/ansible | lib/ansible/playbook/collectionsearch.py | Python | gpl-3.0 | 2,597 | 0.00308 | # Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.temp | late import is_template, Environment
from ansi | ble.utils.display import Display
display = Display()
def _ensure_default_collection(collection_list=None):
default_collection = AnsibleCollectionLoader().default_collection
# Will be None when used as the default
if collection_list is None:
collection_list = []
# FIXME: exclude role tasks?
if default_collection and default_collection not in collection_list:
collection_list.insert(0, default_collection)
# if there's something in the list, ensure that builtin or legacy is always there too
if collection_list and 'ansible.builtin' not in collection_list and 'ansible.legacy' not in collection_list:
collection_list.append('ansible.legacy')
return collection_list
class CollectionSearch:
# this needs to be populated before we can resolve tasks/roles/etc
_collections = FieldAttribute(isa='list', listof=string_types, priority=100, default=_ensure_default_collection,
always_post_validate=True, static=True)
def _load_collections(self, attr, ds):
# We are always a mixin with Base, so we can validate this untemplated
# field early on to guarantee we are dealing with a list.
ds = self.get_validated_value('collections', self._collections, ds, None)
# this will only be called if someone specified a value; call the shared value
_ensure_default_collection(collection_list=ds)
if not ds: # don't return an empty collection list, just return None
return None
# This duplicates static attr checking logic from post_validate()
# because if the user attempts to template a collection name, it may
# error before it ever gets to the post_validate() warning (e.g. trying
# to import a role from the collection).
env = Environment()
for collection_name in ds:
if is_template(collection_name, env):
display.warning('"collections" is not templatable, but we found: %s, '
'it will not be templated and will be used "as is".' % (collection_name))
return ds
|
ceball/param | tests/API1/testdefaults.py | Python | bsd-3-clause | 1,315 | 0.005323 | """
Do all subclasses of Parameter supply a valid default?
"""
from param.parameterized import add_metaclass
from param import concrete_descendents, Parameter
# import all parameter types
from param import * # noqa
from para | m import ClassSelector
from . import API1TestCase
positional_args = {
ClassSelector: (object,)
}
skip = []
try:
import numpy # noqa
except ImportError:
skip.append('Array')
try:
import pandas # noqa
except ImportError:
skip.append('D | ataFrame')
skip.append('Series')
class TestDefaultsMetaclass(type):
def __new__(mcs, name, bases, dict_):
def test_skip(*args,**kw):
from nose.exc import SkipTest
raise SkipTest
def add_test(p):
def test(self):
# instantiate parameter with no default (but supply
# any required args)
p(*positional_args.get(p,tuple()))
return test
for p_name, p_type in concrete_descendents(Parameter).items():
dict_["test_default_of_%s"%p_name] = add_test(p_type) if p_name not in skip else test_skip
return type.__new__(mcs, name, bases, dict_)
@add_metaclass(TestDefaultsMetaclass)
class TestDefaults(API1TestCase):
pass
if __name__ == "__main__":
import nose
nose.runmodule()
|
Kortemme-Lab/klab | klab/scripting.py | Python | mit | 2,249 | 0.00578 | #!/usr/bin/env python2
import os, shutil, glob
from functools import wraps
def print_warning(message, *args, **kwargs):
from . import colortext
if args or kwargs: message = message.format(*args, **kwargs)
colortext.write(message + '\n', color='red')
def print_error_and_die(message, *args, **kwargs):
aborting = "Aborting..."
if not message.endswith('\n'):
aborting = ' ' + aborting
print_warning(message + aborting, *args, **kwargs)
raise SystemExit(1)
class catch_and_print_errors:
def __enter__(self):
pass
def __exit__(self, exc_type, exc_value, traceback):
if exc_type == KeyboardInterrupt:
print()
return True
if getattr(exc_value, 'no_stack_trace', False):
print_warning(str(exc_value))
return True
def __call__(self, function):
| @wraps(function)
def wrapper(*args, **kwargs):
with self:
return function(*args, **kwargs)
return wrapper
def use_path_completion():
import readline
readline.set_completer_delims(' \t\n;')
readline.parse_and_bind("tab: complete")
readline.set_completer(path_completer)
def path_completer(text, state):
globs = glob.glob(os.path.expand | user(text) + '*') + [None]
add_slash = lambda x: x + '/' if os.path.isdir(x) else x
return add_slash(globs[state])
def clear_directory(directory):
if os.path.exists(directory): shutil.rmtree(directory)
os.makedirs(directory)
def relative_symlink(target, link_name):
"""Make a symlink to target using the shortest possible relative path."""
link_name = os.path.abspath(link_name)
abs_target = os.path.abspath(target)
rel_target = os.path.relpath(target, os.path.dirname(link_name))
if os.path.exists(link_name):
os.remove(link_name)
os.symlink(rel_target, link_name)
# Bread'n'butter shell commands.
def mkdir(newdir):
if os.path.isdir(newdir):
pass
elif os.path.isfile(newdir):
raise OSError("a file with the same name as the desired " \
"dir, '%s', already exists." % newdir)
else:
os.makedirs(newdir)
def touch(path):
with open(path, 'w'):
pass
|
hnakamur/my-fabfiles | fabfile/common/lib/template.py | Python | mit | 3,217 | 0.003108 | from datetime import datetime
import hashlib
import os
from StringIO import StringIO
from fabric.api import env, hide, put, settings
from fabric.contrib import files
from fabric.utils import apply_lcwd
from fabfile.common.lib.operations import run_or_sudo
from fabfile.common.lib import file
FABRIC_MANAGED_DEFAULT_FORMAT = "Fabric managed: %(dest)s copied from %(src)s"
def ensure_template(filename, destination, context=None, template_dir=None, use_sudo=False, backup=True, mirror_local_mode=False, mode=None, fabric_managed_format=FABRIC_MANAGED_DEFAULT_FORMAT):
run_fn = run_or_sudo(use_sudo)
# Normalize destination to be an actual filename, due to using StringIO
with settings(hide('everything'), warn_only=True):
if run_fn('test -d %s' % _expand_path(destination)).succeeded:
sep = "" if destination.endswith('/') else "/"
destination += sep + os.path.basename(filename)
context = modify_context(filename, destination, context=context,
fabric_managed_format=fabric_managed_format)
text = render_as_text(filename, context=context, template_dir=template_dir)
local_sum = hashlib.sha256(text).hexdigest()
remote_sum = file.calc_sha256sum(destination, use_sudo=use_sudo)
if remote_sum == local_sum:
return False
# Use mode kwarg to implement mirror_local_mode, again due to using
# StringIO
if mirror_local_mode and mode is None:
mode = os.stat(filename).st_mode
# To prevent put() from trying to do this
# logic itself
mirror_local_mode = False
# Back up original file
if backup and exists(destination):
run_fn("cp %s{,.bak}" % _expand_path(destination))
# | Upload the file.
put(
local_path=StringIO(text),
remote_path=destination,
use_sudo=use_sudo,
mirror_local_mode=mirror_local_mode,
mode=mode
)
return True
def modify_context(filename, destination, context=None, fabric_managed_format=FABRIC_MANAGED_DEFAULT_FORMAT):
if context is None:
context | = {}
context['fabric_managed'] = format_fabric_managed(filename, destination,
fabric_managed_format)
return context
def format_fabric_managed(filename, destination,
fabric_managed_format=FABRIC_MANAGED_DEFAULT_FORMAT):
return fabric_managed_format % {
'dest': destination,
'src': filename
}
def render_as_text(filename, context=None, template_dir=None):
try:
template_dir = template_dir or os.getcwd()
template_dir = apply_lcwd(template_dir, env)
from jinja2 import Environment, FileSystemLoader
jenv = Environment(loader=FileSystemLoader(template_dir))
text = jenv.get_template(filename).render(**context or {})
# Force to a byte representation of Unicode, or str()ification
# within Paramiko's SFTP machinery may cause decode issues for
# truly non-ASCII characters.
text = text.encode('utf-8')
return text
except ImportError:
import traceback
tb = traceback.format_exc()
abort(tb + "\nUnable to import Jinja2 -- see above.")
def _expand_path(path):
return '"$(echo %s)"' % path
|
flackr/quickopen | src/prelaunch_client_test.py | Python | apache-2.0 | 1,291 | 0.005422 | # Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import prelaunch_client
import unittest
class TestPrelaunchClient(unittest.TestCase):
def test_is_prel | aunch_client(self):
self.assertEquals(False, prelaunch_client.is_prelaunc | h_client([""]))
self.assertEquals(False, prelaunch_client.is_prelaunch_client(["", "search", "--wait"]))
self.assertEquals(False, prelaunch_client.is_prelaunch_client(["", "prelaunch", "--wait"]))
self.assertEquals(True, prelaunch_client.is_prelaunch_client(["", "prelaunch"]))
self.assertEquals(True, prelaunch_client.is_prelaunch_client(["", "prelaunch", "search"]))
self.assertEquals(True, prelaunch_client.is_prelaunch_client(['', '--host=localhost', '--port=12345', '--no_auto_start', 'prelaunch', 'add']))
|
okfn/jsontableschema-py | examples/table_sql.py | Python | mit | 998 | 0.004008 | # pip install sqlalchemy tableschema-sql
import sqlalchemy as sa
from tableschema import Table
# Create SQL database
db = sa.create_engine('sqlite://')
# Data from WEB, schema from MEMORY
SOURCE = 'https://raw.githubusercontent.com/frictionlessdata/tableschema-py/master/data/data_infer.csv'
SCHEMA | = {'fields': [{'name': 'id', 'type': 'integer'}, {'name': 'age', 'type': 'integer'}, {'name': 'name', 'type': 'string'}] }
# Open from WEB | save to SQL database
table = Table(SOURCE, schema=SCHEMA)
table.save('articles', backend='sql', engine=db)
# Open from SQL save to DRIVE
table = Table('articles', backend='sql', engine=db)
table.schema.save('tmp/articles.json')
table.save('tmp/articles.csv')
# Open from DRIVE print to CONSOLE
table = Table('tmp/articles.csv', schema='tmp/articles.json')
print(table.read(keyed=True))
# Will print
# [{'id': 1, 'age': 39, 'name': 'Paul'}, {'id': 2, 'age': 23, 'name': 'Jimmy'}, {'id': 3, 'age': 36, 'name': 'Jane'}, {'id': 4, 'age': 28, 'name': 'Judy'}]
|
rymis/mrimpy | test/estest.py | Python | gpl-3.0 | 289 | 0.034602 | #!/usr/bin/env python
# EventServer test
import eserver
import sys
class EchoProtocol( | eserver.Protocol):
def processData(self, data):
self.send(data)
def main(argv):
S = eserver.EventServer( ('localhost', 9999), EchoPr | otocol )
S.start()
if __name__ == '__main__':
main(sys.argv)
|
occrp/id-backend | api_v3/models/attachment.py | Python | mit | 887 | 0 | from django.conf import settings
from django.db import models
from .ticket import Ticket
class Attachment(models.Model):
"""Ticket attachment model."""
ticket = models.ForeignKey(
Ticket, blank=False, related_name='attachments', db_index=True,
on_delete=models.DO_NOTHING)
user = models.ForeignKey(
settings.AUTH_USER_MODEL, blank=False | , db_index=True,
on_delete=models.DO_NOTHING)
upload = models.FileField(upload_to='attachments/%Y/%m/%d', max_length=255)
created_at = models.DateTimeField(auto_now_add=True)
@classmethod
def filter_by_user(cls, user, queryset=None):
"""Returns any user accessible attachments.
Ones he has access to through the tickets.
"""
if queryset is None:
queryset = cls.objects
return queryset.filter(ticket__in=Ticket.filt | er_by_user(user))
|
madgik/exareme | Exareme-Docker/src/exareme/exareme-tools/madis/src/lib/colorama/ansitowin32.py | Python | mit | 6,285 | 0.000318 | import re
import sys
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .win32 import windll
from .winterm import WinTerm, WinColor, WinStyle
if windll is not None:
winterm = WinTerm()
def is_a_tty(stream):
return hasattr(stream, 'isatty') and stream.isatty()
class StreamWrapper(object):
'''
Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()', which is delegated to our
Converter instance.
'''
def __init__(self, wrapped, converter):
# double-underscore everything to prevent clashes with names of
# attributes on the wrapped stream object.
self.__wrapped = wrapped
self.__convertor = converter
def __getattr__(self, name):
return getattr(self.__wrapped, name)
def write(self, text):
self.__convertor.write(text)
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
ANSI_RE = re.compile('\033\[((?:\d|;)*)([a-zA-Z])')
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
# The wrapped stream (normally sys.stdout or sys.stderr)
self.wrapped = wrapped
# should we reset colors to defaults after every .write()
self.autoreset = autoreset
# create the proxy wrapping our output stream
self.stream = StreamWrapper(wrapped, self)
on_windows = sys.platform.startswith('win')
# should we strip ANSI sequences from our output?
if strip is None:
strip = on_windows
self.strip = strip
# should we should convert ANSI sequences into win32 calls?
if convert is None:
convert = on_windows and is_a_tty(wrapped)
self.convert = convert
# dict of ansi codes to win32 functions and parameters
self.win32_calls = self.get_win32_calls()
# are we wrapping stderr?
self.on_stderr = self.wrapped is sys.stderr
def should_wrap(self):
'''
True if this class is actually needed. If false, then the output
stream will not be affected, nor will win32 calls be issued, so
wrapping stdout is not actually required. This will generally be
False on non-Windows platforms, unless optional functionality like
autoreset has been requested using kwargs to init()
'''
return self.convert or self.strip or self.autoreset
def get_win32_calls(self):
if self.convert and winterm:
return {
AnsiStyle.RESET_ALL: (winterm.reset_all,),
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
AnsiFore.RED: (winterm.fore, WinColor.RED),
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
AnsiFore.WHITE: (win | term.fore, WinColor.GREY),
AnsiFore.RESET: (winterm.fore,),
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
AnsiBack.RED: (winterm.back, WinColor.RED),
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
AnsiBack.CYAN: (win | term.back, WinColor.CYAN),
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
AnsiBack.RESET: (winterm.back,),
}
def write(self, text):
if self.strip or self.convert:
self.write_and_convert(text)
else:
self.wrapped.write(text)
self.wrapped.flush()
if self.autoreset:
self.reset_all()
def reset_all(self):
if self.convert:
self.call_win32('m', (0,))
elif is_a_tty(self.wrapped):
self.wrapped.write(Style.RESET_ALL)
def write_and_convert(self, text):
'''
Write the given text to our wrapped stream, stripping any ANSI
sequences from the text, and optionally converting them into win32
calls.
'''
cursor = 0
for match in self.ANSI_RE.finditer(text):
start, end = match.span()
self.write_plain_text(text, cursor, start)
self.convert_ansi(*match.groups())
cursor = end
self.write_plain_text(text, cursor, len(text))
def write_plain_text(self, text, start, end):
if start < end:
self.wrapped.write(text[start:end])
self.wrapped.flush()
def convert_ansi(self, paramstring, command):
if self.convert:
params = self.extract_params(paramstring)
self.call_win32(command, params)
def extract_params(self, paramstring):
def split(paramstring):
for p in paramstring.split(';'):
if p != '':
yield int(p)
return tuple(split(paramstring))
def call_win32(self, command, params):
if params == []:
params = [0]
if command == 'm':
for param in params:
if param in self.win32_calls:
func_args = self.win32_calls[param]
func = func_args[0]
args = func_args[1:]
kwargs = dict(on_stderr=self.on_stderr)
func(*args, **kwargs)
elif command in ('H', 'f'): # set cursor position
func = winterm.set_cursor_position
func(params, on_stderr=self.on_stderr)
elif command in ('J'):
func = winterm.erase_data
func(params, on_stderr=self.on_stderr)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.