Dataset schema — one record per source file. Each record below lists these fields in order, separated by `|` lines:

- repo_name: string (length 5–92)
- path: string (length 4–221)
- copies: string (19 distinct values)
- size: string (length 4–6)
- content: string (length 766–896k)
- license: string (15 distinct values)
- hash: int64 (min -9,223,277,421,539,062,000, max 9,223,102,107B)
- line_mean: float64 (6.51–99.9)
- line_max: int64 (32–997)
- alpha_frac: float64 (0.25–0.96)
- autogenerated: bool (1 class)
- ratio: float64 (1.5–13.6)
- config_test: bool (2 classes)
- has_no_keywords: bool (2 classes)
- few_assignments: bool (1 class)
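The numeric columns (line_mean, line_max, alpha_frac, ratio) and the boolean flags are the usual handles for filtering a corpus like this. A minimal sketch of such a filter, assuming the table has been exported to a Parquet file (the file name here is hypothetical):

```python
import pandas as pd

# Hypothetical export of this table; point this at your own copy.
df = pd.read_parquet("code_files.parquet")

# Keep human-written, non-test files with moderate line lengths.
mask = (~df["autogenerated"]) & (~df["config_test"]) & (df["line_max"] <= 120)
for _, row in df[mask].head().iterrows():
    print(row["repo_name"], row["path"], row["license"])
```
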
mercycorps/tola-activity
|
htdocs/activitydb/migrations/0037_auto_20151028_1631.py
|
1
|
1470
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('activitydb', '0036_auto_20151028_1519'),
]
operations = [
migrations.AlterField(
model_name='siteprofile',
name='avg_landholding_size',
field=models.DecimalField(decimal_places=14, max_digits=25, blank=True, help_text='In hectares/jeribs', null=True, verbose_name='Average Landholding Size'),
),
migrations.AlterField(
model_name='siteprofile',
name='populations_owning_land',
field=models.IntegerField(help_text='(%)', null=True, verbose_name='Households Owning Land', blank=True),
),
migrations.AlterField(
model_name='siteprofile',
name='literacy_rate',
field=models.IntegerField(help_text='%', null=True, verbose_name='Literacy Rate (%)', blank=True),
),
migrations.AlterField(
model_name='siteprofile',
name='literate_females',
field=models.IntegerField(help_text='%', null=True, verbose_name='% of Literate Females', blank=True),
),
migrations.AlterField(
model_name='siteprofile',
name='literate_males',
field=models.IntegerField(help_text='%', null=True, verbose_name='% of Literate Males', blank=True),
),
]
|
gpl-2.0
| -7,011,314,465,358,522,000
| 35.75
| 168
| 0.6
| false
| 3.972973
| false
| false
| false
|
srajag/contrail-controller
|
src/config/device-manager/device_manager/db.py
|
1
|
14129
|
#
# Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
#
"""
This file contains implementation of data model for physical router
configuration manager
"""
from vnc_api.common.exceptions import NoIdError
from physical_router_config import PhysicalRouterConfig
from sandesh.dm_introspect import ttypes as sandesh
from cfgm_common.vnc_db import DBBase
import copy
class BgpRouterDM(DBBase):
_dict = {}
obj_type = 'bgp_router'
def __init__(self, uuid, obj_dict=None):
self.uuid = uuid
self.bgp_routers = {}
self.physical_router = None
self.update(obj_dict)
# end __init__
def update(self, obj=None):
if obj is None:
obj = self.read_obj(self.uuid)
self.name = obj['fq_name'][-1]
self.params = obj['bgp_router_parameters']
self.update_single_ref('physical_router', obj)
new_peers = {}
for ref in obj.get('bgp_router_refs', []):
new_peers[ref['uuid']] = ref['attr']
        for peer_id in set(self.bgp_routers.keys()) - set(new_peers.keys()):
            peer = BgpRouterDM.get(peer_id)
            if peer is not None and self.uuid in peer.bgp_routers:
                del peer.bgp_routers[self.uuid]
for peer_id, attrs in new_peers.items():
peer = BgpRouterDM.get(peer_id)
if peer:
peer.bgp_routers[self.uuid] = attrs
self.bgp_routers = new_peers
def sandesh_build(self):
return sandesh.BgpRouter(name=self.name, uuid=self.uuid,
peers=self.bgp_routers,
physical_router=self.physical_router)
@classmethod
def sandesh_request(cls, req):
# Return the list of BGP routers
resp = sandesh.BgpRouterListResp(bgp_routers=[])
if req.name_or_uuid is None:
            for router in cls.values():
                sandesh_router = router.sandesh_build()
                # append a single BgpRouter object (extend would try to iterate it)
                resp.bgp_routers.append(sandesh_router)
        else:
            router = cls.find_by_name_or_uuid(req.name_or_uuid)
            if router:
                sandesh_router = router.sandesh_build()
                resp.bgp_routers.append(sandesh_router)
resp.response(req.context())
# end class BgpRouterDM
class PhysicalRouterDM(DBBase):
_dict = {}
obj_type = 'physical_router'
def __init__(self, uuid, obj_dict=None):
self.uuid = uuid
self.virtual_networks = set()
self.bgp_router = None
self.update(obj_dict)
self.config_manager = PhysicalRouterConfig(
self.management_ip, self.user_credentials, self._logger)
# end __init__
def update(self, obj=None):
if obj is None:
obj = self.read_obj(self.uuid)
self.management_ip = obj.get('physical_router_management_ip')
self.vendor = obj.get('physical_router_vendor_name')
self.user_credentials = obj.get('physical_router_user_credentials')
self.update_single_ref('bgp_router', obj)
self.update_multiple_refs('virtual_network', obj)
self.physical_interfaces = set([pi['uuid'] for pi in
obj.get('physical_interfaces', [])])
self.logical_interfaces = set([li['uuid'] for li in
obj.get('logical_interfaces', [])])
# end update
@classmethod
def delete(cls, uuid):
if uuid not in cls._dict:
return
obj = cls._dict[uuid]
obj.config_manager.delete_bgp_config()
obj.update_single_ref('bgp_router', {})
obj.update_multiple_refs('virtual_network', {})
del cls._dict[uuid]
# end delete
def push_config(self):
self.config_manager.reset_bgp_config()
bgp_router = BgpRouterDM.get(self.bgp_router)
if bgp_router:
for peer_uuid, params in bgp_router.bgp_routers.items():
peer = BgpRouterDM.get(peer_uuid)
if peer is None:
continue
                # eBGP when the autonomous systems differ
                external = (bgp_router.params['autonomous_system'] !=
                            peer.params['autonomous_system'])
self.config_manager.add_bgp_peer(peer.params['address'],
params, external)
self.config_manager.set_bgp_config(bgp_router.params)
vn_dict = {}
for vn_id in self.virtual_networks:
vn_dict[vn_id] = []
li_set = self.logical_interfaces
for pi_uuid in self.physical_interfaces:
pi = PhysicalInterfaceDM.get(pi_uuid)
if pi is None:
continue
li_set |= pi.logical_interfaces
for li_uuid in li_set:
li = LogicalInterfaceDM.get(li_uuid)
if li is None:
continue
vmi_id = li.virtual_machine_interface
vmi = VirtualMachineInterfaceDM.get(vmi_id)
if vmi is None:
continue
vn_id = vmi.virtual_network
if vn_id in vn_dict:
vn_dict[vn_id].append(li.name)
else:
vn_dict[vn_id] = [li.name]
for vn_id, interfaces in vn_dict.items():
vn_obj = VirtualNetworkDM.get(vn_id)
if vn_obj is None:
continue
for ri_id in vn_obj.routing_instances:
# Find the primary RI by matching the name
ri_obj = RoutingInstanceDM.get(ri_id)
if ri_obj is None:
continue
if ri_obj.fq_name[-1] == vn_obj.fq_name[-1]:
vrf_name = ':'.join(vn_obj.fq_name)
export_set = copy.copy(ri_obj.export_targets)
import_set = copy.copy(ri_obj.import_targets)
for ri2_id in ri_obj.routing_instances:
ri2 = RoutingInstanceDM.get(ri2_id)
if ri2 is None:
continue
import_set |= ri2.export_targets
export_set |= ri2.import_targets
self.config_manager.add_routing_instance(vrf_name,
import_set,
export_set,
vn_obj.prefixes,
vn_obj.gateways,
vn_obj.router_external,
interfaces,
vn_obj.vxlan_vni)
break
self.config_manager.send_bgp_config()
# end push_config
# end PhysicalRouterDM
class PhysicalInterfaceDM(DBBase):
_dict = {}
obj_type = 'physical_interface'
def __init__(self, uuid, obj_dict=None):
self.uuid = uuid
self.update(obj_dict)
pr = PhysicalRouterDM.get(self.physical_router)
if pr:
pr.physical_interfaces.add(self.uuid)
# end __init__
def update(self, obj=None):
if obj is None:
obj = self.read_obj(self.uuid)
self.physical_router = self.get_parent_uuid(obj)
self.logical_interfaces = set([li['uuid'] for li in
obj.get('logical_interfaces', [])])
# end update
@classmethod
def delete(cls, uuid):
if uuid not in cls._dict:
return
obj = cls._dict[uuid]
pr = PhysicalRouterDM.get(obj.physical_router)
if pr:
pr.physical_interfaces.discard(obj.uuid)
del cls._dict[uuid]
# end delete
# end PhysicalInterfaceDM
class LogicalInterfaceDM(DBBase):
_dict = {}
obj_type = 'logical_interface'
def __init__(self, uuid, obj_dict=None):
self.uuid = uuid
self.virtual_machine_interface = None
self.update(obj_dict)
        parent = None
        if self.physical_interface:
            parent = PhysicalInterfaceDM.get(self.physical_interface)
        elif self.physical_router:
            parent = PhysicalRouterDM.get(self.physical_router)
        if parent:
            parent.logical_interfaces.add(self.uuid)
# end __init__
def update(self, obj=None):
if obj is None:
obj = self.read_obj(self.uuid)
if obj['parent_type'] == 'physical-router':
self.physical_router = self.get_parent_uuid(obj)
self.physical_interface = None
else:
self.physical_interface = self.get_parent_uuid(obj)
self.physical_router = None
self.update_single_ref('virtual_machine_interface', obj)
self.name = obj['fq_name'][-1]
# end update
@classmethod
def delete(cls, uuid):
if uuid not in cls._dict:
return
obj = cls._dict[uuid]
        parent = None
        if obj.physical_interface:
            parent = PhysicalInterfaceDM.get(obj.physical_interface)
        elif obj.physical_router:
            # a logical interface parented by a router lives on PhysicalRouterDM
            parent = PhysicalRouterDM.get(obj.physical_router)
if parent:
parent.logical_interfaces.discard(obj.uuid)
obj.update_single_ref('virtual_machine_interface', {})
del cls._dict[uuid]
# end delete
# end LogicalInterfaceDM
class VirtualMachineInterfaceDM(DBBase):
_dict = {}
obj_type = 'virtual_machine_interface'
def __init__(self, uuid, obj_dict=None):
self.uuid = uuid
self.virtual_network = None
self.logical_interface = None
self.update(obj_dict)
# end __init__
def update(self, obj=None):
if obj is None:
obj = self.read_obj(self.uuid)
self.update_single_ref('logical_interface', obj)
self.update_single_ref('virtual_network', obj)
# end update
@classmethod
def delete(cls, uuid):
if uuid not in cls._dict:
return
obj = cls._dict[uuid]
obj.update_single_ref('logical_interface', {})
obj.update_single_ref('virtual_network', {})
del cls._dict[uuid]
# end delete
# end VirtualMachineInterfaceDM
class VirtualNetworkDM(DBBase):
_dict = {}
obj_type = 'virtual_network'
def __init__(self, uuid, obj_dict=None):
self.uuid = uuid
self.physical_routers = set()
self.router_external = False
self.vxlan_configured = False
self.vxlan_vni = None
self.gateways = None
self.update(obj_dict)
# end __init__
def update(self, obj=None):
if obj is None:
obj = self.read_obj(self.uuid)
self.update_multiple_refs('physical_router', obj)
self.fq_name = obj['fq_name']
try:
self.router_external = obj['router_external']
except KeyError:
self.router_external = False
try:
prop = obj['virtual_network_properties']
if prop['vxlan_network_identifier'] is not None:
self.vxlan_configured = True
self.vxlan_vni = prop['vxlan_network_identifier']
        except (KeyError, TypeError):
self.vxlan_configured = False
self.vxlan_vni = None
self.routing_instances = set([ri['uuid'] for ri in
obj.get('routing_instances', [])])
self.virtual_machine_interfaces = set(
[vmi['uuid'] for vmi in
obj.get('virtual_machine_interface_back_refs', [])])
self.prefixes = set()
self.gateways = set()
for ipam_ref in obj.get('network_ipam_refs', []):
for subnet in ipam_ref['attr'].get('ipam_subnets', []):
self.prefixes.add('%s/%d' % (subnet['subnet']['ip_prefix'],
subnet['subnet']['ip_prefix_len'])
)
self.gateways.add(subnet['default_gateway'])
# end update
@classmethod
def delete(cls, uuid):
if uuid not in cls._dict:
return
obj = cls._dict[uuid]
obj.update_multiple_refs('physical_router', {})
del cls._dict[uuid]
# end delete
# end VirtualNetworkDM
class RoutingInstanceDM(DBBase):
_dict = {}
obj_type = 'routing_instance'
def __init__(self, uuid, obj_dict=None):
self.uuid = uuid
self.virtual_network = None
self.import_targets = set()
self.export_targets = set()
self.routing_instances = set()
self.update(obj_dict)
vn = VirtualNetworkDM.get(self.virtual_network)
if vn:
vn.routing_instances.add(self.uuid)
# end __init__
def update(self, obj=None):
if obj is None:
obj = self.read_obj(self.uuid)
self.fq_name = obj['fq_name']
self.virtual_network = self.get_parent_uuid(obj)
self.import_targets = set()
self.export_targets = set()
for rt_ref in obj.get('route_target_refs', []):
rt_name = rt_ref['to'][0]
            exim = (rt_ref.get('attr') or {}).get('import_export')
if exim == 'export':
self.export_targets.add(rt_name)
elif exim == 'import':
self.import_targets.add(rt_name)
else:
self.import_targets.add(rt_name)
self.export_targets.add(rt_name)
self.update_multiple_refs('routing_instance', obj)
# end update
@classmethod
def delete(cls, uuid):
if uuid not in cls._dict:
return
obj = cls._dict[uuid]
vn = VirtualNetworkDM.get(obj.virtual_network)
if vn:
vn.routing_instances.discard(obj.uuid)
del cls._dict[uuid]
# end delete
# end RoutingInstanceDM
DBBase._OBJ_TYPE_MAP = {
'bgp_router': BgpRouterDM,
'physical_router': PhysicalRouterDM,
'physical_interface': PhysicalInterfaceDM,
'logical_interface': LogicalInterfaceDM,
'virtual_machine_interface': VirtualMachineInterfaceDM,
'virtual_network': VirtualNetworkDM,
'routing_instance': RoutingInstanceDM,
}
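# Usage sketch (illustrative, not from the original file): the registry above
# lets a generic change handler dispatch on an object type string, using only
# methods already defined in this module; the dispatcher itself is assumed:
#
#     cls = DBBase._OBJ_TYPE_MAP.get('virtual_network')
#     obj = cls.get(some_uuid)   # cached lookup
#     if obj is not None:
#         obj.update()           # re-read from the API server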
|
apache-2.0
| -7,415,153,349,394,149,000
| 33.972772
| 84
| 0.544978
| false
| 3.946648
| true
| false
| false
|
globocom/database-as-a-service
|
dbaas/drivers/tests/test_driver_pymongo.py
|
1
|
9370
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
from mock import patch, MagicMock
from drivers import DriverFactory
from physical.tests import factory as factory_physical
from logical.tests import factory as factory_logical
from logical.models import Database
from drivers.mongodb import MongoDB, MongoDBReplicaSet
from drivers.tests.base import (BaseMongoDriverTestCase, FakeDriverClient,
BaseSingleInstanceUpdateSizesTest,
BaseHAInstanceUpdateSizesTest)
from physical.models import Instance
@patch('drivers.mongodb.MongoDB.pymongo', new=FakeDriverClient)
@patch('physical.models.DiskOffering.size_bytes',
new=MagicMock(return_value=90))
class MongoSingleUpdateSizesTestCase(
BaseSingleInstanceUpdateSizesTest,
BaseMongoDriverTestCase):
pass
@patch('drivers.mongodb.MongoDB.pymongo', new=FakeDriverClient)
@patch('physical.models.DiskOffering.size_bytes',
new=MagicMock(return_value=90))
class MongoReplicaSetUpdateSizesTestCase(
BaseMongoDriverTestCase,
BaseHAInstanceUpdateSizesTest):
driver_class = MongoDBReplicaSet
secondary_instance_quantity = 2
secondary_instance_type = Instance.MONGODB_ARBITER
class MongoUsedAndTotalTestCase(BaseMongoDriverTestCase):
"""
    Tests MongoDB total and used size reporting
"""
def test_masters_single_instance(self):
"""
        Validates the total and used sizes reported for a single instance
"""
self.instance.total_size_in_bytes = 105
self.instance.used_size_in_bytes = 55
self.instance.save()
self.assertEqual(self.driver.masters_total_size_in_bytes, 105)
expected_total_size_in_gb = 105 * self.GB_FACTOR
self.assertEqual(
self.driver.get_master_instance_total_size_in_gb(),
expected_total_size_in_gb
)
self.assertEqual(self.driver.masters_used_size_in_bytes, 55)
def test_masters_replicaset_instance(self):
"""
        Validates the total and used sizes reported for a replica set
"""
self.driver = MongoDBReplicaSet(databaseinfra=self.databaseinfra)
self.driver.check_instance_is_master = MagicMock(
side_effect=self.instance_helper.check_instance_is_master
)
self.instance_helper.create_instances_by_quant(
infra=self.databaseinfra, base_address='131',
instance_type=self.instance_type,
total_size_in_bytes=35, used_size_in_bytes=10
)
self.instance.total_size_in_bytes = 35
self.instance.used_size_in_bytes = 10
self.instance.save()
self.assertEqual(self.driver.masters_total_size_in_bytes, 35)
expected_total_size_in_gb = 35 * self.GB_FACTOR
self.assertEqual(
self.driver.get_master_instance_total_size_in_gb(),
expected_total_size_in_gb
)
self.assertEqual(self.driver.masters_used_size_in_bytes, 10)
class MongoDBEngineTestCase(BaseMongoDriverTestCase):
"""
Tests MongoDB Engine
"""
def test_mongodb_app_installed(self):
self.assertTrue(DriverFactory.is_driver_available("mongodb_single"))
self.assertTrue(
DriverFactory.is_driver_available("mongodb_replica_set")
)
# test mongo methods
def test_instantiate_mongodb_using_engine_factory(self):
self.assertEqual(MongoDB, type(self.driver))
self.assertEqual(self.databaseinfra, self.driver.databaseinfra)
def test_connection_string(self):
self.assertEqual(
"mongodb://<user>:<password>@{}".format(
self.instance_endpoint),
self.driver.get_connection()
)
def test_get_user(self):
self.assertEqual(self.databaseinfra.user, self.driver.get_user())
def test_get_password(self):
self.assertEqual(
self.databaseinfra.password, self.driver.get_password())
def test_get_default_port(self):
self.assertEqual(27017, self.driver.default_port)
@patch.object(MongoDB, 'get_replica_name')
def test_connection_string_when_in_replica_set(self, get_replica_name):
self.instance = factory_physical.InstanceFactory(
databaseinfra=self.databaseinfra, address='127.0.0.2', port=27018)
get_replica_name.return_value = 'my_repl'
expected_conn = ("mongodb://<user>:<password>"
"@{},127.0.0.2:27018"
"?replicaSet=my_repl").format(self.instance_endpoint)
self.assertEqual(expected_conn, self.driver.get_connection())
def test_connection_with_database(self):
self.database = factory_logical.DatabaseFactory(
name="my_db_url_name", databaseinfra=self.databaseinfra)
expected_conn = ("mongodb://<user>:<password>"
"@{}/my_db_url_name").format(self.instance_endpoint)
self.assertEqual(
expected_conn,
self.driver.get_connection(database=self.database)
)
@patch.object(MongoDB, 'get_replica_name')
def test_connection_with_database_and_replica(self, get_replica_name):
self.instance = factory_physical.InstanceFactory(
databaseinfra=self.databaseinfra, address='127.0.0.2', port=27018)
get_replica_name.return_value = 'my_repl'
self.database = factory_logical.DatabaseFactory(
name="my_db_url_name", databaseinfra=self.databaseinfra)
expected_conn = ("mongodb://<user>:<password>"
"@{},127.0.0.2:27018/my_db_url_name"
"?replicaSet=my_repl").format(self.instance_endpoint)
self.assertEqual(
expected_conn,
self.driver.get_connection(database=self.database)
)
class ManageDatabaseMongoDBTestCase(BaseMongoDriverTestCase):
""" Test case to managing database in mongodb engine """
def setUp(self):
super(ManageDatabaseMongoDBTestCase, self).setUp()
self.database = factory_logical.DatabaseFactory(
databaseinfra=self.databaseinfra)
self.instance.address = os.getenv('TESTS_MONGODB_HOST', '127.0.0.1')
self.instance.save()
# ensure database is dropped
self.driver_client.drop_database(self.database.name)
def tearDown(self):
if not Database.objects.filter(databaseinfra_id=self.databaseinfra.id):
self.database.delete()
super(ManageDatabaseMongoDBTestCase, self).tearDown()
def test_mongodb_create_database(self):
self.assertFalse(
self.database.name in self.driver_client.database_names())
self.driver.create_database(self.database)
self.assertTrue(
self.database.name in self.driver_client.database_names())
def test_mongodb_remove_database(self):
self.driver.create_database(self.database)
self.assertTrue(
self.database.name in self.driver_client.database_names())
self.driver.remove_database(self.database)
self.assertFalse(
self.database.name in self.driver_client.database_names())
class ManageCredentialsMongoDBTestCase(BaseMongoDriverTestCase):
""" Test case to managing credentials in mongodb engine """
def setUp(self):
super(ManageCredentialsMongoDBTestCase, self).setUp()
self.database = factory_logical.DatabaseFactory(
databaseinfra=self.databaseinfra)
self.credential = factory_logical.CredentialFactory(
database=self.database)
self.instance.address = os.getenv('TESTS_MONGODB_HOST', '127.0.0.1')
# self.instance.address = '127.0.0.1'
self.instance.save()
self.driver.create_database(self.database)
def tearDown(self):
self.driver.remove_database(self.database)
self.credential.delete()
self.database.delete()
super(ManageCredentialsMongoDBTestCase, self).tearDown()
def __find_user__(self, credential):
        v = self.driver_client.server_info()['version']
        # compare numerically; plain string comparison misorders e.g. '2.10'
        if tuple(map(int, v.split('.')[:2])) < (2, 6):
return getattr(
self.driver_client,
credential.database.name
).system.users.find_one({"user": credential.user})
else:
return getattr(
self.driver_client,
"admin"
).system.users.find_one(
{"user": credential.user, "db": credential.database.name}
)
def test_mongodb_create_credential(self):
self.assertIsNone(
self.__find_user__(self.credential),
"User %s already exists. Invalid test" % self.credential
)
self.driver.create_user(self.credential)
user = self.__find_user__(self.credential)
self.assertIsNotNone(user)
        self.assertEqual(self.credential.user, user['user'])
self.driver.remove_user(self.credential)
def test_mongodb_remove_credential(self):
self.driver.create_user(self.credential)
self.assertIsNotNone(
self.__find_user__(self.credential),
"Error creating user %s. Invalid test" % self.credential
)
self.driver.remove_user(self.credential)
self.assertIsNone(self.__find_user__(self.credential))
|
bsd-3-clause
| -8,187,081,293,294,255,000
| 36.48
| 79
| 0.649733
| false
| 4.056277
| true
| false
| false
|
Dzess/ALFIRT
|
alfirt.runner/src/readers/tests/TagReaderX3DUnitTests.py
|
1
|
4002
|
'''
Created on Jun 9, 2011
@author: Piotr
'''
import unittest
import os
from readers.TagReaderX3D import TagReaderX3D
class TagReaderX3DUnitTests(unittest.TestCase):
def setUp(self):
# Setting up the X3D string with ALFIRT namespace tags
x3dString = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE X3D PUBLIC "ISO//Web3D//DTD X3D 3.2//EN" "http://www.web3d.org/specifications/x3d-3.2.dtd">
<X3D profile="Interchange"
version="3.2"
xmlns:xsd="http://www.w3.org/2001/XMLSchema-instance"
xmlns:alfirt="ALFIRT"
xsd:noNamespaceSchemaLocation=" http://www.web3d.org/specifications/x3d-3.2.xsd ">
<Scene>
<Viewpoint description='Rear View' orientation='0 1 0 3.14159' position='0 0 -10'/>
<Shape alfirt:anchor_translate="0 1 2" alfirt:anchor_rotate="0.4 0.2 0.3">
<IndexedFaceSet coordIndex="0 1 2">
<Coordinate point="0 0 0 1 0 0 0.5 1 0"/>
</IndexedFaceSet>
</Shape>
</Scene>
</X3D>
"""
# Creating file
self.fileName = "test_file_name"
        with open(self.fileName, 'w') as fileStream:
            fileStream.write(x3dString)
def tearDown(self):
# Removing file after test
os.remove(self.fileName)
def test_reading_none_results_in_exception(self):
x3dReader = TagReaderX3D()
with self.assertRaises(ValueError):
x3dReader.readScene(None)
with self.assertRaises(ValueError):
x3dReader.readScene("some no existing file")
def test_reading_file_with_no_anchor_results_in_exception(self):
'''
The anchor is required for the polar transformations around the object.
'''
# Setting up the X3D string with ALFIRT namespace tags
x3dString = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE X3D PUBLIC "ISO//Web3D//DTD X3D 3.2//EN" "http://www.web3d.org/specifications/x3d-3.2.dtd">
<X3D profile="Interchange"
version="3.2"
xmlns:xsd="http://www.w3.org/2001/XMLSchema-instance"
xmlns:alfirt="ALFIRT"
xsd:noNamespaceSchemaLocation=" http://www.web3d.org/specifications/x3d-3.2.xsd ">
<Scene>
<Viewpoint description='Rear View' orientation='0 1 0 3.14159' position='0 0 -10'/>
<Shape>
<IndexedFaceSet coordIndex="0 1 2">
<Coordinate point="0 0 0 1 0 0 0.5 1 0"/>
</IndexedFaceSet>
</Shape>
</Scene>
</X3D>
"""
# Write this file into the data
fileName = "test_file_without_anchor"
        with open(fileName, 'w') as fileStream:
            fileStream.write(x3dString)
# Get reader
x3dReader = TagReaderX3D()
try:
x3dReader.readScene(fileName)
except RuntimeError:
return
finally:
os.remove(fileName)
self.fail("The exception should have been thrown")
def test_reading_file_with_alfirt_tags(self):
'''
Checks if the elements passed in X3D string are correct.
'''
x3dReader = TagReaderX3D()
results = x3dReader.readScene(self.fileName)
# assert the values
translateCamera = results.camera.translate
rotateCamera = results.camera.rotate
translateAnchor = results.anchor.translate
rotateAnchor = results.anchor.rotate
self.assertEqual(translateAnchor, [0.0, 1.0, 2.0], 'Translate of the anchor should be 0 1 2')
        self.assertEqual(rotateAnchor, [0.4, 0.2, 0.3], "Rotate of the anchor should be 0.4 0.2 0.3")
self.assertEqual(translateCamera, [0.0, -10, 0], "The position of the camera should be 0 0 -10")
self.assertEqual(rotateCamera, [1.5707963705062866, 1.7340079025429667e-13, 3.1415903568267822], "The rotation of the camera should be 0 1 0 3.14")
#===============================================================================
# Test runner
#===============================================================================
if __name__ == '__main__':
unittest.main(verbosity=2)
|
mit
| 5,748,462,383,747,537,000
| 32.07438
| 155
| 0.615942
| false
| 3.417592
| true
| false
| false
|
Russell-IO/ansible
|
lib/ansible/plugins/action/gather_facts.py
|
1
|
1434
|
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from collections import MutableMapping
from ansible import constants as C
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=None):
''' handler for package operations '''
self._supports_check_mode = True
self._supports_async = True
result = super(ActionModule, self).run(tmp, task_vars)
result['ansible_facts'] = {}
for fact_module in C.config.get_config_value('FACTS_MODULES', variables=task_vars):
mod_args = task_vars.get('ansible_facts_modules', {}).get(fact_module, {})
if isinstance(mod_args, MutableMapping):
mod_args.update(self._task.args.copy())
else:
mod_args = self._task.args.copy()
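            # Example (names illustrative): with ansible_facts_modules =
            # {'ohai': {'plugin_path': '/x'}} and task args
            # {'gather_subset': ['!all']}, the 'ohai' run gets
            # {'plugin_path': '/x', 'gather_subset': ['!all']} and then loses
            # 'gather_subset' below, since only 'setup' accepts that option.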
            if fact_module != 'setup':
                # pop, not del: the key may be absent from the merged args
                mod_args.pop('gather_subset', None)
self._display.vvvv("Running %s" % fact_module)
result.update(self._execute_module(module_name=fact_module, module_args=mod_args, task_vars=task_vars, wrap_async=self._task.async_val))
# tell executor facts were gathered
result['ansible_facts']['_ansible_facts_gathered'] = True
return result
|
gpl-3.0
| -5,338,959,078,966,732,000
| 33.97561
| 148
| 0.640167
| false
| 3.875676
| false
| false
| false
|
SCPR/firetracker
|
calfire_tracker/utilities.py
|
1
|
3930
|
from django.conf import settings
from django.db import models
from django.utils.encoding import smart_str
from django.utils import timezone
from django.template.defaultfilters import slugify
from geopy import geocoders
import pytz
import time
import datetime
import requests
import logging
logger = logging.getLogger("firetracker")
def search_assethost_for_image(kpcc_image_token, **kwargs):
''' model save function to query kpcc image api given an asset_host_id '''
if kwargs['image_id'] is not None:
url_prefix = 'https://a.scpr.org/api/assets/'
url_suffix = '.json?auth_token='
search_url = '%s%s%s%s' % (url_prefix, kwargs['image_id'], url_suffix, kpcc_image_token)
kpcc_query_api = requests.get(search_url, verify=False, headers={"From": "ckeller@scpr.org","User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.53 Safari/525.19"})
kpcc_image_asset = kpcc_query_api.json()
try:
kpcc_image_data = {'asset_url_link': kpcc_image_asset['urls']['full'], 'asset_photo_credit': kpcc_image_asset['owner'], 'asset_host_image_id': kwargs['image_id']}
        except (KeyError, TypeError):
kpcc_image_data = {'asset_url_link': None, 'asset_photo_credit': None, 'asset_host_image_id': None}
else:
kpcc_image_data = {'asset_url_link': None, 'asset_photo_credit': None, 'asset_host_image_id': None}
return kpcc_image_data
def fill_air_quality_data(location_latitude, location_longitude):
try:
air_quality_url = 'http://www.airnowapi.org/aq/observation/latLong/current/?format=application/json&latitude=%s&longitude=%s&distance=30&API_KEY=AABE5F75-6C5A-47C2-AB74-2D138C9055B2' % (location_latitude, location_longitude)
air_quality_query = requests.get(air_quality_url, headers= {"User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.53 Safari/525.19"})
air_quality_json = air_quality_query.json()
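        # The AirNow payload is a list of observations, one per parameter,
        # shaped roughly like (values invented for illustration):
        #   [{"ParameterName": "O3", "AQI": 42}, {"ParameterName": "PM2.5", "AQI": 55}]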
if len(air_quality_json) == 0:
air_quality_rating = None
air_quality_parameter = None
        elif len(air_quality_json) >= 1:
            air_quality_rating = None
            air_quality_parameter = None
            for data in air_quality_json:
                # ignore other parameters; keep the last PM2.5/O3 reading
                if data["ParameterName"] == "PM2.5":
                    air_quality_rating = data["AQI"]
                    air_quality_parameter = "Fine particles (PM2.5)"
                elif data["ParameterName"] == "O3":
                    air_quality_rating = data["AQI"]
                    air_quality_parameter = "Ozone (O3)"
else:
air_quality_rating = None
air_quality_parameter = None
except:
air_quality_rating = None
air_quality_parameter = None
print "exception for %s, %s\n" % (location_latitude, location_longitude)
return {"air_quality_rating": air_quality_rating, "air_quality_parameter": air_quality_parameter}
def fill_geocode_data(computed_location):
if computed_location is not None:
try:
g = geocoders.GoogleV3()
address = smart_str(computed_location)
computed_location, (location_latitude, location_longitude) = g.geocode(address)
geolocation_data = {
'computed_location': str(computed_location),
'location_latitude': location_latitude,
'location_longitude': location_longitude,
'location_geocode_error': False,
}
except (UnboundLocalError, ValueError,geocoders.google.GQueryError):
geolocation_data = {
'computed_location': str(computed_location),
'location_latitude': None,
'location_longitude': None,
'location_geocode_error': True,
}
    else:
        # avoid returning an unbound local when no location was supplied
        geolocation_data = {
            'computed_location': None,
            'location_latitude': None,
            'location_longitude': None,
            'location_geocode_error': True,
        }
    return geolocation_data
|
gpl-2.0
| -4,657,212,522,448,050,000
| 48.125
| 236
| 0.622901
| false
| 3.575978
| false
| false
| false
|
tinyms/ArchiveX
|
tinyms/controller/setting.py
|
1
|
3046
|
__author__ = 'tinyms'
#coding=UTF8
from sqlalchemy import func
from tinyms.core.common import Utils
from tinyms.core.web import IAuthRequest
from tinyms.core.entity import Account
from tinyms.core.orm import SessionFactory
from tinyms.core.annotation import ObjectPool, route, setting, api
from tinyms.core.setting import UserSettingHelper, AppSettingHelper
@route("/workbench/setting")
class SettingPage(IAuthRequest):
def get(self, *args, **kwargs):
return self.render("workbench/setting.html", items=ObjectPool.setting)
@api("tinyms.core.setting")
class SettingApi():
def load(self):
usr = self.request.current_user
level_u = UserSettingHelper(usr)
level_u_ = level_u.load()
level_s = AppSettingHelper.load()
level_all = dict(level_u_, **level_s)
return level_all
def save(self):
kv = self.request.wrap_params_to_dict()
level_user = dict()
level_system = dict()
for k in kv:
if k.startswith("u_"):
level_user[k] = kv[k]
elif k.startswith("s_"):
level_system[k] = kv[k]
AppSettingHelper.set(level_system)
u = UserSettingHelper("%s" % self.request.current_user)
u.set(level_user)
        # allow the user to make further data changes after the settings are saved
items = ObjectPool.setting
for k in items.keys():
obj = items[k].cls()
if hasattr(obj, "save"):
msg = obj.save(kv, self.request)
if msg:
return msg
AppSettingHelper.reload()
return "success"
@setting("tinyms_core_setting_sys", "workbench/sys_setting_page.html", "基本", "tinyms.entity.setting.system")
class SystemSetting():
def save(self, kv, http_req):
return ""
def form_submit_javascript(self, http_req):
pass
def form_fill_javascript(self, http_req):
pass
@setting("tinyms_core_setting_user", "workbench/user_setting_page.html", "个人", "tinyms.entity.setting.user")
class UserSetting():
def save(self, kv, http_req):
_usr_old_pwd = kv.get("_usr_old_pwd")
_usr_new_pwd = kv.get("_usr_new_pwd")
_usr_new_repwd = kv.get("_usr_new_repwd")
if _usr_old_pwd and _usr_new_pwd:
if _usr_new_pwd == _usr_new_repwd:
usr_id = http_req.current_user
sf = SessionFactory.new()
num = sf.query(func.count(Account.id)).filter(Account.id == usr_id) \
.filter(Account.login_pwd == Utils.md5(_usr_old_pwd)).scalar()
if num > 0:
a = sf.query(Account).get(usr_id)
a.login_pwd = Utils.md5(_usr_new_pwd)
sf.commit()
return ""
else:
return "PasswordError"
else:
return "PasswordNotSame"
def form_submit_javascript(self, req):
pass
def form_fill_javascript(self, req):
pass
|
bsd-3-clause
| -2,948,846,204,067,711,000
| 30.925532
| 108
| 0.576
| false
| 3.516999
| false
| false
| false
|
jcnix/shade
|
social/auth.py
|
1
|
2295
|
from django.contrib import auth
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, render_to_response
from django.template import RequestContext
import forms as myforms
def login(request):
if not request.user.is_authenticated():
form = myforms.LoginForm()
if request.method == 'POST':
form = myforms.LoginForm(request.POST)
if form.is_valid():
e = form.cleaned_data['email']
p = form.cleaned_data['password']
user = auth.authenticate(username=e, password=p)
if user is not None:
auth.login(request, user)
return HttpResponseRedirect('/dashboard/')
else:
form._errors['email'] = [u'Unable to authenticate']
return render(request, 'registration/login.html', {'form': form})
return render(request, 'registration/login.html', {'form': form})
else:
return HttpResponseRedirect('/')
def logout(request):
auth.logout(request)
return HttpResponseRedirect('/login/')
def register(request):
if not request.user.is_authenticated():
if request.method == 'POST':
form = myforms.RegisterForm(request.POST)
if form.is_valid():
e = form.cleaned_data['email']
p = form.cleaned_data['password']
fn = form.cleaned_data['first_name']
ln = form.cleaned_data['last_name']
user = User.objects.create_user(
username=e,
email=e,
password=p
)
user.first_name = fn
user.last_name= ln
user.save()
return HttpResponseRedirect('/')
else:
return render_to_response('register.html', {'form': form},
context_instance=RequestContext(request))
else:
form = myforms.RegisterForm()
return render_to_response('register.html', {'form': form},
context_instance=RequestContext(request))
else:
return HttpResponseRedirect('/')
|
gpl-3.0
| -197,508,900,974,582,820
| 37.898305
| 85
| 0.554684
| false
| 4.862288
| false
| false
| false
|
pierrelux/mathnotes
|
mathnotes/views/auth.py
|
1
|
1936
|
from flask_oauthlib.client import OAuth
from mathnotes.models import db, ZoteroAuthorization
from flask import Flask, redirect, url_for, render_template, jsonify, Response, current_app, Blueprint, request, flash
from flask.ext.login import login_required, current_user
oauth = OAuth()
auth = Blueprint('auth', __name__, url_prefix='/auth')
zotero = oauth.remote_app(
'zotero',
base_url='https://api.zotero.org',
request_token_url='https://www.zotero.org/oauth/request',
access_token_url='https://www.zotero.org/oauth/access',
authorize_url='https://www.zotero.org/oauth/authorize',
app_key='ZOTERO'
)
@zotero.tokengetter
def get_zotero_token():
auth=current_user.authorizations.first()
if auth is not None:
return auth.oauth_token, auth.oauth_secret
return None
@auth.route('/oauth/zotero')
@login_required
def zotero_auth():
callback_url = url_for('auth.zotero_authorized', next=request.args.get('next'))
return zotero.authorize(callback=callback_url or request.referrer or None)
@auth.route('/oauth/zotero/authorized')
@login_required
@zotero.authorized_handler
def zotero_authorized(resp):
if resp is not None:
auth = ZoteroAuthorization(oauth_token=resp['oauth_token'],
oauth_secret=resp['oauth_token_secret'],
userID=resp['userID'],
username=resp['username'],
user_id=current_user.id)
db.session.add(auth)
db.session.commit()
else:
flash("Remote authentication to Zotero failed")
return redirect(request.args.get("next") or url_for("frontend.index"))
@auth.route('/oauth/zotero/disconnect')
@login_required
def zotero_disconnect():
auth=current_user.authorizations.first()
db.session.delete(auth)
db.session.commit()
return redirect(request.args.get("next") or url_for("frontend.index"))
|
bsd-3-clause
| 237,288,060,501,950,180
| 32.964912
| 111
| 0.668388
| false
| 3.482014
| false
| false
| false
|
malaterre/dicom-private-dicts
|
re/pms/dump1.py
|
1
|
3032
|
#!/usr/bin/env python
""" dump 1 """
import sys, json
from struct import *
array=[]
def doit(f):
chunk = f.read(0x2)
l0 = unpack('>H', chunk)
assert l0[0] == 50
chunk = f.read(l0[0])
s = unpack('>%ds' % l0[0], chunk)
chunk = f.read(0x1)
l2 = unpack('>B', chunk)
#assert l2[0] == 0
chunk = f.read(0x2)
l1 = unpack('>H', chunk)
#print l1[0] # wotsit ?
#print l0[0],s[0].decode('utf-16'),l1[0],l2[0]
#print l0[0],s[0].decode('utf-16'),l1[0]+l2[0]
#print s[0].decode('utf-16'),l1[0]
el = {}
el['name'] = s[0].decode('utf-16')
el['index'] = l1[0]+l2[0]
array.append( el )
def extract_name(i,f):
chunk = f.read(0x1)
o = unpack('>B', chunk)
assert o[0] == 1
chunk = f.read(0x1)
l0 = unpack('>B', chunk)
chunk = f.read(l0[0])
s = unpack('>%ds' % l0[0], chunk)
#print s[0]
array[i]['value']=s[0]
array[i]['len']=l0[0]
def isnull(instr):
for c in instr:
assert ord(c) == 0
def extract_dad_file(i,f):
print f.tell()
corr = 1 # old (orig file)
corr = 0 # already aligned ???
    assert (f.tell() - corr) % 8 == 0 # 8-byte alignment
# read length:
chunk = f.read(0x4)
z = unpack('<I', chunk)
fl = z[0]
chunk = f.read(fl)
with open("output_%03d.dad" % i, "wb") as binfile:
binfile.write( chunk )
# trailing stuff handling:
pad = (f.tell() - corr) % 8
if pad != 0:
chunk = f.read(8 - pad)
isnull(chunk) # no digital trash, must be an in-memory representation
# the interesting stuff lies in:
# $ dd if=PmsDView.DMP of=dummy2.exe skip=104921721 count=1802240 bs=1
# as a side note we also have:
# $ dd if=PmsDView.DMP of=dummy3.exe skip=106723961 count=1802240 bs=1
# $ md5sum dummy2.exe dummy3.exe
# 6a58cd8dc039b2cfbeb4529b4fd13106 dummy2.exe
# 6a58cd8dc039b2cfbeb4529b4fd13106 dummy3.exe
if __name__ == "__main__":
filename = sys.argv[1]
with open(filename,'rb') as f:
# MZ starts at 0x640FA79
#f.seek( 104932524 ) # 0x64124ac # orig file
f.seek( 0x12F86F3 ) # new
# file type 1:
#print "start:", f.tell()
chunk = f.read(0x2)
d = unpack('>H', chunk)
assert d[0] == 120 # number of elements (x2)?
chunk = f.read(0x2)
d = unpack('>H', chunk)
print d # wotsit ?
assert d[0] == 0x0f00
for i in range(0,60):
doit(f)
chunk = f.read(0x1)
z = unpack('>B', chunk)
assert z[0] == 0
#print (f.tell() - 1) % 4
for i in range(0,60):
extract_name(i,f)
#print "end:", f.tell()
# file type dad/dotd:
chunk = f.read(5)
for i in range(0,153):
# i > 153 is junk...
extract_dad_file(i,f)
print format(f.tell(), '08x')
chunk = f.read(2000)
# Some .NET stuff (BSJB)
# The initials correspond to Brian Harry, Susan Radke-Sproull, Jason
# Zander, and Bill Evans who were part of the team in 1998 that worked on
# the CLR.
with open("general_metadata_header.bin" , "wb") as binfile:
binfile.write( chunk )
#print array
#print json.dumps(array, sort_keys=True, indent=4)
|
bsd-3-clause
| -5,247,748,825,108,255,000
| 26.315315
| 77
| 0.581464
| false
| 2.539363
| false
| false
| false
|
parksandwildlife/wastd
|
taxonomy/migrations/0003_auto_20181022_1156.py
|
1
|
3205
|
# Generated by Django 2.0.8 on 2018-10-22 03:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('taxonomy', '0002_auto_20180926_1147'),
]
operations = [
migrations.AlterField(
model_name='hbvfamily',
name='class_name',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='Class'),
),
migrations.AlterField(
model_name='hbvfamily',
name='division_name',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='Division'),
),
migrations.AlterField(
model_name='hbvfamily',
name='family_name',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='Family Name'),
),
migrations.AlterField(
model_name='hbvfamily',
name='kingdom_name',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='Kingdom'),
),
migrations.AlterField(
model_name='hbvfamily',
name='order_name',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='Order Name'),
),
migrations.AlterField(
model_name='hbvfamily',
name='supra_code',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='HBV Suprafamily Group Code'),
),
migrations.AlterField(
model_name='hbvgroup',
name='class_id',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='HBV Suprafamily Group Code'),
),
migrations.AlterField(
model_name='hbvparent',
name='class_id',
field=models.CharField(blank=True, help_text='', max_length=100, null=True, verbose_name='WACensus ClassID'),
),
migrations.AlterField(
model_name='hbvspecies',
name='consv_code',
field=models.CharField(blank=True, help_text='', max_length=100, null=True, verbose_name='Conservation Code'),
),
migrations.AlterField(
model_name='hbvspecies',
name='naturalised',
field=models.CharField(blank=True, help_text='', max_length=100, null=True, verbose_name='Naturalised'),
),
migrations.AlterField(
model_name='hbvspecies',
name='ranking',
field=models.CharField(blank=True, help_text='', max_length=100, null=True, verbose_name='Ranking'),
),
migrations.AlterField(
model_name='hbvvernacular',
name='name',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='Name'),
),
migrations.AlterField(
model_name='hbvvernacular',
name='vernacular',
field=models.CharField(blank=True, help_text='', max_length=1000, null=True, verbose_name='Vernacular Name'),
),
]
|
mit
| 3,855,156,913,284,721,700
| 40.089744
| 132
| 0.582527
| false
| 3.991283
| false
| false
| false
|
jamespcole/home-assistant
|
homeassistant/components/openuv/binary_sensor.py
|
1
|
3751
|
"""Support for OpenUV binary sensors."""
import logging
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util.dt import as_local, parse_datetime, utcnow
from . import (
BINARY_SENSORS, DATA_OPENUV_CLIENT, DATA_PROTECTION_WINDOW, DOMAIN,
TOPIC_UPDATE, TYPE_PROTECTION_WINDOW, OpenUvEntity)
_LOGGER = logging.getLogger(__name__)
ATTR_PROTECTION_WINDOW_ENDING_TIME = 'end_time'
ATTR_PROTECTION_WINDOW_ENDING_UV = 'end_uv'
ATTR_PROTECTION_WINDOW_STARTING_TIME = 'start_time'
ATTR_PROTECTION_WINDOW_STARTING_UV = 'start_uv'
DEPENDENCIES = ['openuv']
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up an OpenUV sensor based on existing config."""
pass
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up an OpenUV sensor based on a config entry."""
openuv = hass.data[DOMAIN][DATA_OPENUV_CLIENT][entry.entry_id]
binary_sensors = []
for sensor_type in openuv.binary_sensor_conditions:
name, icon = BINARY_SENSORS[sensor_type]
binary_sensors.append(
OpenUvBinarySensor(
openuv, sensor_type, name, icon, entry.entry_id))
async_add_entities(binary_sensors, True)
class OpenUvBinarySensor(OpenUvEntity, BinarySensorDevice):
"""Define a binary sensor for OpenUV."""
def __init__(self, openuv, sensor_type, name, icon, entry_id):
"""Initialize the sensor."""
super().__init__(openuv)
self._async_unsub_dispatcher_connect = None
self._entry_id = entry_id
self._icon = icon
self._latitude = openuv.client.latitude
self._longitude = openuv.client.longitude
self._name = name
self._sensor_type = sensor_type
self._state = None
@property
def icon(self):
"""Return the icon."""
return self._icon
@property
def is_on(self):
"""Return the status of the sensor."""
return self._state
@property
def should_poll(self):
"""Disable polling."""
return False
@property
def unique_id(self) -> str:
"""Return a unique, HASS-friendly identifier for this entity."""
return '{0}_{1}_{2}'.format(
self._latitude, self._longitude, self._sensor_type)
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def update():
"""Update the state."""
self.async_schedule_update_ha_state(True)
self._async_unsub_dispatcher_connect = async_dispatcher_connect(
self.hass, TOPIC_UPDATE, update)
async def async_will_remove_from_hass(self):
"""Disconnect dispatcher listener when removed."""
if self._async_unsub_dispatcher_connect:
self._async_unsub_dispatcher_connect()
async def async_update(self):
"""Update the state."""
data = self.openuv.data[DATA_PROTECTION_WINDOW]
if not data:
return
if self._sensor_type == TYPE_PROTECTION_WINDOW:
self._state = parse_datetime(
data['from_time']) <= utcnow() <= parse_datetime(
data['to_time'])
self._attrs.update({
ATTR_PROTECTION_WINDOW_ENDING_TIME:
as_local(parse_datetime(data['to_time'])),
ATTR_PROTECTION_WINDOW_ENDING_UV: data['to_uv'],
ATTR_PROTECTION_WINDOW_STARTING_UV: data['from_uv'],
ATTR_PROTECTION_WINDOW_STARTING_TIME:
as_local(parse_datetime(data['from_time'])),
})
|
apache-2.0
| 5,144,571,123,839,647,000
| 32.491071
| 72
| 0.625966
| false
| 3.952582
| false
| false
| false
|
gokmen/Rasta
|
rasta_lib/model.py
|
1
|
1631
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
''' Rasta RST Editor
2010 - Gökmen Göksel <gokmeng:gmail.com> '''
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
from PyQt4.QtCore import Qt
from PyQt4.QtCore import QVariant
from PyQt4.QtCore import QAbstractTableModel
# i18n Support
import gettext
_ = gettext.translation('rasta', fallback=True).ugettext
class LogTableModel(QAbstractTableModel):
''' Log table model for showing the logs in a proper way '''
def __init__(self, logs, parent=None, *args):
QAbstractTableModel.__init__(self, parent, *args)
self.arraydata = logs
self.headerdata = [_('Line'), _('Message')]
def rowCount(self, parent):
''' Return number of logs '''
return len(self.arraydata)
def columnCount(self, parent):
''' It always returns 2 for now: Line and Message '''
return len(self.headerdata)
def data(self, index, role):
''' Return data for given index and role '''
if not index.isValid():
return QVariant()
elif role != Qt.DisplayRole:
return QVariant()
return QVariant(self.arraydata[index.row()][index.column()])
def headerData(self, col, orientation, role):
''' Return Header data for given column '''
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
return QVariant(self.headerdata[col])
return QVariant()
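# Usage sketch (illustrative, not part of the original module): plug the
# model into a QTableView, assuming logs are (line, message) pairs.
if __name__ == '__main__':
    import sys
    from PyQt4.QtGui import QApplication, QTableView
    app = QApplication(sys.argv)
    view = QTableView()
    view.setModel(LogTableModel([(1, 'warning'), (7, 'error')]))
    view.show()
    sys.exit(app.exec_())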
|
gpl-2.0
| 9,176,064,816,522,145,000
| 32.9375
| 79
| 0.654389
| false
| 4.022222
| false
| false
| false
|
mdavoodi/konkourse-python
|
messages/views.py
|
1
|
2496
|
# Create your views here.
from django.http import HttpResponse
from django.template import loader
from django.template.context import Context
from django.template import RequestContext
from django.shortcuts import redirect
from django.shortcuts import render
from conversation.models import ConvoWall, ConversationPost
def messages(request):
if request.user.is_authenticated():
first_name = request.user.first_name
last_name = request.user.last_name
username = request.user.username
messageWall = request.user.get_profile().messages
messages = ConversationPost.objects.filter(wall=request.user.get_profile().messages)
variables_for_template = {
'first_name': first_name,
'last_name': last_name,
'username': username,
'messages': messages,
'messageWall': messageWall,
}
        return render(request, 'website/messages.html', variables_for_template,
                      context_instance=RequestContext(request))
    return redirect('/login/')
def messages_compose(request):
if request.user.is_authenticated():
first_name = request.user.first_name
last_name = request.user.last_name
username = request.user.username
messageWall = request.user.get_profile().messages
messages = ConversationPost.objects.filter(wall=request.user.get_profile().messages)
variables_for_template = {
'first_name': first_name,
'last_name': last_name,
'username': username,
'messages': messages,
'messageWall': messageWall,
}
        return render(request, 'website/messages_compose.html', variables_for_template,
                      context_instance=RequestContext(request))
    return redirect('/login/')
def messages_view(request):
if request.user.is_authenticated():
first_name = request.user.first_name
last_name = request.user.last_name
username = request.user.username
messageWall = request.user.get_profile().messages
messages = ConversationPost.objects.filter(wall=request.user.get_profile().messages)
variables_for_template = {
'first_name': first_name,
'last_name': last_name,
'username': username,
'messages': messages,
'messageWall': messageWall,
}
        return render(request, 'website/messages_view.html', variables_for_template,
                      context_instance=RequestContext(request))
    return redirect('/login/')
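# Possible refactor (sketch, not in the original app): the three views build
# an identical template context, which a small helper would deduplicate:
#
# def _profile_context(request):
#     profile = request.user.get_profile()
#     return {
#         'first_name': request.user.first_name,
#         'last_name': request.user.last_name,
#         'username': request.user.username,
#         'messages': ConversationPost.objects.filter(wall=profile.messages),
#         'messageWall': profile.messages,
#     }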
|
mit
| -7,466,661,102,449,948,000
| 37.4
| 92
| 0.648638
| false
| 4.441281
| false
| false
| false
|
jelly/calibre
|
src/calibre/gui2/tweak_book/ui.py
|
1
|
39312
|
#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import os
from functools import partial
from itertools import product
from future_builtins import map
from PyQt5.Qt import (
QDockWidget, Qt, QLabel, QIcon, QAction, QApplication, QWidget, QEvent,
QVBoxLayout, QStackedWidget, QTabWidget, QImage, QPixmap, pyqtSignal,
QMenu, QHBoxLayout, QTimer, QUrl, QSize)
from calibre import prints
from calibre.constants import __appname__, get_version, isosx, DEBUG
from calibre.gui2 import elided_text, open_url
from calibre.gui2.dbus_export.widgets import factory
from calibre.gui2.keyboard import Manager as KeyboardManager
from calibre.gui2.main_window import MainWindow
from calibre.gui2.throbber import ThrobbingButton
from calibre.gui2.tweak_book import (
current_container, tprefs, actions, capitalize, toolbar_actions, editors, update_mark_text_action)
from calibre.gui2.tweak_book.file_list import FileListWidget
from calibre.gui2.tweak_book.job import BlockingJob
from calibre.gui2.tweak_book.boss import Boss
from calibre.gui2.tweak_book.undo import CheckpointView
from calibre.gui2.tweak_book.preview import Preview
from calibre.gui2.tweak_book.plugin import create_plugin_actions
from calibre.gui2.tweak_book.search import SearchPanel
from calibre.gui2.tweak_book.check import Check
from calibre.gui2.tweak_book.check_links import CheckExternalLinks
from calibre.gui2.tweak_book.spell import SpellCheck
from calibre.gui2.tweak_book.search import SavedSearches
from calibre.gui2.tweak_book.text_search import TextSearch
from calibre.gui2.tweak_book.toc import TOCViewer
from calibre.gui2.tweak_book.char_select import CharSelect
from calibre.gui2.tweak_book.live_css import LiveCSS
from calibre.gui2.tweak_book.reports import Reports
from calibre.gui2.tweak_book.manage_fonts import ManageFonts
from calibre.gui2.tweak_book.function_replace import DebugOutput
from calibre.gui2.tweak_book.editor.widget import register_text_editor_actions
from calibre.gui2.tweak_book.editor.insert_resource import InsertImage
from calibre.utils.icu import character_name, sort_key
from calibre.utils.localization import localize_user_manual_link
def open_donate():
open_url(QUrl('https://calibre-ebook.com/donate'))
class Central(QStackedWidget): # {{{
' The central widget, hosts the editors '
current_editor_changed = pyqtSignal()
close_requested = pyqtSignal(object)
def __init__(self, parent=None):
QStackedWidget.__init__(self, parent)
self.welcome = w = QLabel('<p>'+_(
'Double click a file in the left panel to start editing'
' it.'))
self.addWidget(w)
w.setWordWrap(True)
w.setAlignment(Qt.AlignTop | Qt.AlignHCenter)
self.container = c = QWidget(self)
self.addWidget(c)
l = c.l = QVBoxLayout(c)
c.setLayout(l)
l.setContentsMargins(0, 0, 0, 0)
self.editor_tabs = t = QTabWidget(c)
l.addWidget(t)
t.setDocumentMode(True)
t.setTabsClosable(True)
t.setMovable(True)
pal = self.palette()
if pal.color(pal.WindowText).lightness() > 128:
i = QImage(I('modified.png'))
i.invertPixels()
self.modified_icon = QIcon(QPixmap.fromImage(i))
else:
self.modified_icon = QIcon(I('modified.png'))
self.editor_tabs.currentChanged.connect(self.current_editor_changed)
self.editor_tabs.tabCloseRequested.connect(self._close_requested)
self.search_panel = SearchPanel(self)
l.addWidget(self.search_panel)
self.restore_state()
self.editor_tabs.tabBar().installEventFilter(self)
def _close_requested(self, index):
editor = self.editor_tabs.widget(index)
self.close_requested.emit(editor)
def add_editor(self, name, editor):
fname = name.rpartition('/')[2]
index = self.editor_tabs.addTab(editor, fname)
self.editor_tabs.setTabToolTip(index, _('Full path:') + ' ' + name)
editor.modification_state_changed.connect(self.editor_modified)
@property
def tab_order(self):
ans = []
rmap = {v:k for k, v in editors.iteritems()}
for i in xrange(self.editor_tabs.count()):
name = rmap.get(self.editor_tabs.widget(i))
if name is not None:
ans.append(name)
return ans
def rename_editor(self, editor, name):
for i in xrange(self.editor_tabs.count()):
if self.editor_tabs.widget(i) is editor:
fname = name.rpartition('/')[2]
self.editor_tabs.setTabText(i, fname)
self.editor_tabs.setTabToolTip(i, _('Full path:') + ' ' + name)
def show_editor(self, editor):
self.setCurrentIndex(1)
self.editor_tabs.setCurrentWidget(editor)
def close_editor(self, editor):
for i in xrange(self.editor_tabs.count()):
if self.editor_tabs.widget(i) is editor:
self.editor_tabs.removeTab(i)
if self.editor_tabs.count() == 0:
self.setCurrentIndex(0)
return True
return False
def editor_modified(self, *args):
tb = self.editor_tabs.tabBar()
for i in xrange(self.editor_tabs.count()):
editor = self.editor_tabs.widget(i)
modified = getattr(editor, 'is_modified', False)
tb.setTabIcon(i, self.modified_icon if modified else QIcon())
def close_current_editor(self):
ed = self.current_editor
if ed is not None:
self.close_requested.emit(ed)
def close_all_but_current_editor(self):
self.close_all_but(self.current_editor)
def close_all_but(self, ed):
close = []
if ed is not None:
for i in xrange(self.editor_tabs.count()):
q = self.editor_tabs.widget(i)
if q is not None and q is not ed:
close.append(q)
for q in close:
self.close_requested.emit(q)
@property
def current_editor(self):
return self.editor_tabs.currentWidget()
def save_state(self):
tprefs.set('search-panel-visible', self.search_panel.isVisible())
self.search_panel.save_state()
for ed in editors.itervalues():
ed.save_state()
if self.current_editor is not None:
self.current_editor.save_state() # Ensure the current editor saves it state last
def restore_state(self):
self.search_panel.setVisible(tprefs.get('search-panel-visible', False))
self.search_panel.restore_state()
def show_find(self):
self.search_panel.show_panel()
def pre_fill_search(self, text):
self.search_panel.pre_fill(text)
def eventFilter(self, obj, event):
base = super(Central, self)
if obj is not self.editor_tabs.tabBar() or event.type() != QEvent.MouseButtonPress or event.button() not in (Qt.RightButton, Qt.MidButton):
return base.eventFilter(obj, event)
index = self.editor_tabs.tabBar().tabAt(event.pos())
if index < 0:
return base.eventFilter(obj, event)
if event.button() == Qt.MidButton:
self._close_requested(index)
ed = self.editor_tabs.widget(index)
if ed is not None:
menu = QMenu(self)
menu.addAction(actions['close-current-tab'].icon(), _('Close tab'), partial(self.close_requested.emit, ed))
menu.addSeparator()
menu.addAction(actions['close-all-but-current-tab'].icon(), _('Close other tabs'), partial(self.close_all_but, ed))
menu.exec_(self.editor_tabs.tabBar().mapToGlobal(event.pos()))
return True
# }}}
class CursorPositionWidget(QWidget): # {{{
def __init__(self, parent):
QWidget.__init__(self, parent)
self.l = QHBoxLayout(self)
self.setLayout(self.l)
self.la = QLabel('')
self.l.addWidget(self.la)
self.l.setContentsMargins(0, 0, 0, 0)
f = self.la.font()
f.setBold(False)
self.la.setFont(f)
def update_position(self, line=None, col=None, character=None):
if line is None:
self.la.setText('')
else:
try:
name = character_name(character) if character and tprefs['editor_show_char_under_cursor'] else None
except Exception:
name = None
text = _('Line: {0} : {1}').format(line, col)
if not name:
name = {'\t':'TAB'}.get(character, None)
if name and tprefs['editor_show_char_under_cursor']:
text = name + ' : ' + text
self.la.setText(text)
# }}}
class Main(MainWindow):
APP_NAME = _('Edit book')
STATE_VERSION = 0
def __init__(self, opts, notify=None):
MainWindow.__init__(self, opts, disable_automatic_gc=True)
self.setWindowTitle(self.APP_NAME)
self.boss = Boss(self, notify=notify)
self.setWindowIcon(QIcon(I('tweak.png')))
self.opts = opts
self.path_to_ebook = None
self.container = None
self.current_metadata = None
self.blocking_job = BlockingJob(self)
self.keyboard = KeyboardManager(self, config_name='shortcuts/tweak_book')
self.central = Central(self)
self.setCentralWidget(self.central)
self.check_book = Check(self)
self.spell_check = SpellCheck(parent=self)
self.toc_view = TOCViewer(self)
self.text_search = TextSearch(self)
self.saved_searches = SavedSearches(self)
self.image_browser = InsertImage(self, for_browsing=True)
self.reports = Reports(self)
self.check_external_links = CheckExternalLinks(self)
self.insert_char = CharSelect(self)
self.manage_fonts = ManageFonts(self)
self.sr_debug_output = DebugOutput(self)
self.create_actions()
self.create_toolbars()
self.create_docks()
self.create_menubar()
self.status_bar = self.statusBar()
self.status_bar.addPermanentWidget(self.boss.save_manager.status_widget)
self.cursor_position_widget = CursorPositionWidget(self)
self.status_bar.addPermanentWidget(self.cursor_position_widget)
self.status_bar_default_msg = la = QLabel(' ' + _('{0} {1} created by {2}').format(__appname__, get_version(), 'Kovid Goyal'))
la.base_template = unicode(la.text())
self.status_bar.addWidget(la)
f = self.status_bar.font()
f.setBold(True)
self.status_bar.setFont(f)
self.boss(self)
g = QApplication.instance().desktop().availableGeometry(self)
self.resize(g.width()-50, g.height()-50)
self.restore_state()
self.apply_settings()
def apply_settings(self):
self.keyboard.finalize()
self.setDockNestingEnabled(tprefs['nestable_dock_widgets'])
for v, h in product(('top', 'bottom'), ('left', 'right')):
p = 'dock_%s_%s' % (v, h)
pref = tprefs[p] or tprefs.defaults[p]
area = getattr(Qt, '%sDockWidgetArea' % capitalize({'vertical':h, 'horizontal':v}[pref]))
self.setCorner(getattr(Qt, '%s%sCorner' % tuple(map(capitalize, (v, h)))), area)
self.preview.apply_settings()
self.live_css.apply_theme()
for bar in (self.global_bar, self.tools_bar, self.plugins_bar):
bar.setIconSize(QSize(tprefs['toolbar_icon_size'], tprefs['toolbar_icon_size']))
def show_status_message(self, msg, timeout=5):
self.status_bar.showMessage(msg, int(timeout*1000))
def elided_text(self, text, width=300):
return elided_text(text, font=self.font(), width=width)
@property
def editor_tabs(self):
return self.central.editor_tabs
def create_actions(self):
group = _('Global actions')
def reg(icon, text, target, sid, keys, description, toolbar_allowed=False):
if not isinstance(icon, QIcon):
icon = QIcon(I(icon))
ac = actions[sid] = QAction(icon, text, self) if icon else QAction(text, self)
ac.setObjectName('action-' + sid)
if toolbar_allowed:
toolbar_actions[sid] = ac
if target is not None:
ac.triggered.connect(target)
if isinstance(keys, type('')):
keys = (keys,)
self.keyboard.register_shortcut(
sid, unicode(ac.text()).replace('&', ''), default_keys=keys, description=description, action=ac, group=group)
self.addAction(ac)
return ac
def treg(icon, text, target, sid, keys, description):
return reg(icon, text, target, sid, keys, description, toolbar_allowed=icon is not None)
self.action_new_file = treg('document-new.png', _('&New file (images/fonts/HTML/etc.)'), self.boss.add_file,
'new-file', (), _('Create a new file in the current book'))
self.action_import_files = treg('document-import.png', _('&Import files into book'), self.boss.add_files, 'new-files', (), _('Import files into book'))
self.action_open_book = treg('document_open.png', _('&Open book'), self.boss.open_book, 'open-book', 'Ctrl+O', _('Open a new book'))
self.action_open_book_folder = treg('mimetypes/dir.png', _('Open &folder (unzipped EPUB) as book'), partial(self.boss.open_book, open_folder=True),
'open-folder-as-book', (), _('Open a folder (unzipped EPUB) as a book'))
# Qt does not generate shortcut overrides for cmd+arrow on os x which
# means these shortcuts interfere with editing
self.action_global_undo = treg('back.png', _('&Revert to before'), self.boss.do_global_undo, 'global-undo', () if isosx else 'Ctrl+Left',
_('Revert book to before the last action (Undo)'))
self.action_global_redo = treg('forward.png', _('&Revert to after'), self.boss.do_global_redo, 'global-redo', () if isosx else 'Ctrl+Right',
_('Revert book state to after the next action (Redo)'))
self.action_save = treg('save.png', _('&Save'), self.boss.save_book, 'save-book', 'Ctrl+S', _('Save book'))
self.action_save.setEnabled(False)
        self.action_save_copy = treg('save.png', _('Save a &copy'), self.boss.save_copy, 'save-copy', 'Ctrl+Alt+S', _('Save a copy of the book'))
self.action_quit = treg('window-close.png', _('&Quit'), self.boss.quit, 'quit', 'Ctrl+Q', _('Quit'))
self.action_preferences = treg('config.png', _('&Preferences'), self.boss.preferences, 'preferences', 'Ctrl+P', _('Preferences'))
self.action_new_book = treg('plus.png', _('Create &new, empty book'), self.boss.new_book, 'new-book', (), _('Create a new, empty book'))
self.action_import_book = treg('add_book.png', _('&Import an HTML or DOCX file as a new book'),
self.boss.import_book, 'import-book', (), _('Import an HTML or DOCX file as a new book'))
self.action_quick_edit = treg('modified.png', _('&Quick open a file to edit'), self.boss.quick_open, 'quick-open', ('Ctrl+T'), _(
'Quickly open a file from the book to edit it'))
# Editor actions
group = _('Editor actions')
self.action_editor_undo = reg('edit-undo.png', _('&Undo'), self.boss.do_editor_undo, 'editor-undo', 'Ctrl+Z',
_('Undo typing'))
self.action_editor_redo = reg('edit-redo.png', _('R&edo'), self.boss.do_editor_redo, 'editor-redo', 'Ctrl+Y',
_('Redo typing'))
self.action_editor_cut = reg('edit-cut.png', _('Cut &text'), self.boss.do_editor_cut, 'editor-cut', ('Ctrl+X', 'Shift+Delete', ),
_('Cut text'))
self.action_editor_copy = reg('edit-copy.png', _('&Copy to clipboard'), self.boss.do_editor_copy, 'editor-copy', ('Ctrl+C', 'Ctrl+Insert'),
_('Copy to clipboard'))
self.action_editor_paste = reg('edit-paste.png', _('P&aste from clipboard'), self.boss.do_editor_paste, 'editor-paste', ('Ctrl+V', 'Shift+Insert', ),
_('Paste from clipboard'))
self.action_editor_cut.setEnabled(False)
self.action_editor_copy.setEnabled(False)
self.action_editor_undo.setEnabled(False)
self.action_editor_redo.setEnabled(False)
# Tool actions
group = _('Tools')
self.action_toc = treg('toc.png', _('&Edit Table of Contents'), self.boss.edit_toc, 'edit-toc', (), _('Edit Table of Contents'))
self.action_inline_toc = treg('chapters.png', _('&Insert inline Table of Contents'),
self.boss.insert_inline_toc, 'insert-inline-toc', (), _('Insert inline Table of Contents'))
self.action_fix_html_current = reg('html-fix.png', _('&Fix HTML'), partial(self.boss.fix_html, True), 'fix-html-current', (),
_('Fix HTML in the current file'))
self.action_fix_html_all = treg('html-fix.png', _('&Fix HTML - all files'), partial(self.boss.fix_html, False), 'fix-html-all', (),
_('Fix HTML in all files'))
self.action_pretty_current = reg('beautify.png', _('&Beautify current file'), partial(self.boss.pretty_print, True), 'pretty-current', (),
_('Beautify current file'))
self.action_pretty_all = treg('beautify.png', _('&Beautify all files'), partial(self.boss.pretty_print, False), 'pretty-all', (),
_('Beautify all files'))
self.action_insert_char = treg('character-set.png', _('&Insert special character'), self.boss.insert_character, 'insert-character', (),
_('Insert special character'))
self.action_rationalize_folders = treg('mimetypes/dir.png', _('&Arrange into folders'), self.boss.rationalize_folders, 'rationalize-folders', (),
_('Arrange into folders'))
self.action_set_semantics = treg('tags.png', _('Set &semantics'), self.boss.set_semantics, 'set-semantics', (),
_('Set semantics'))
self.action_filter_css = treg('filter.png', _('&Filter style information'), self.boss.filter_css, 'filter-css', (),
_('Filter style information'))
self.action_manage_fonts = treg('font.png', _('&Manage fonts'), self.boss.manage_fonts, 'manage-fonts', (), _('Manage fonts in the book'))
self.action_add_cover = treg('default_cover.png', _('Add &cover'), self.boss.add_cover, 'add-cover', (), _('Add a cover to the book'))
self.action_reports = treg(
'reports.png', _('&Reports'), self.boss.show_reports, 'show-reports', ('Ctrl+Shift+R',), _('Show a report on various aspects of the book'))
self.action_check_external_links = treg('insert-link.png', _('Check &external links'), self.boss.check_external_links, 'check-external-links', (), _(
'Check external links in the book'))
self.action_compress_images = treg('compress-image.png', _('C&ompress images losslessly'), self.boss.compress_images, 'compress-images', (), _(
'Compress images losslessly'))
self.action_transform_styles = treg('wizard.png', _('Transform &styles'), self.boss.transform_styles, 'transform-styles', (), _(
'Transform styles used in the book'))
self.action_get_ext_resources = treg('download-metadata.png', _('Download external &resources'),
self.boss.get_external_resources, 'get-external-resources', (), _(
                'Download external resources in the book (images/stylesheets/etc. that are not included in the book)'))
def ereg(icon, text, target, sid, keys, description):
return reg(icon, text, partial(self.boss.editor_action, target), sid, keys, description)
register_text_editor_actions(ereg, self.palette())
# Polish actions
group = _('Polish book')
self.action_subset_fonts = treg(
'subset-fonts.png', _('&Subset embedded fonts'), partial(
self.boss.polish, 'subset', _('Subset fonts')), 'subset-fonts', (), _('Subset embedded fonts'))
self.action_embed_fonts = treg(
'embed-fonts.png', _('&Embed referenced fonts'), partial(
self.boss.polish, 'embed', _('Embed fonts')), 'embed-fonts', (), _('Embed referenced fonts'))
self.action_smarten_punctuation = treg(
'smarten-punctuation.png', _('&Smarten punctuation (works best for English)'), partial(
self.boss.polish, 'smarten_punctuation', _('Smarten punctuation')),
'smarten-punctuation', (), _('Smarten punctuation'))
self.action_remove_unused_css = treg(
'edit-clear.png', _('Remove &unused CSS rules'), partial(
self.boss.polish, 'remove_unused_css', _('Remove unused CSS rules')), 'remove-unused-css', (), _('Remove unused CSS rules'))
# Preview actions
group = _('Preview')
self.action_auto_reload_preview = reg('auto-reload.png', _('Auto reload preview'), None, 'auto-reload-preview', (), _('Auto reload preview'))
self.action_auto_sync_preview = reg('sync-right.png', _('Sync preview position to editor position'), None, 'sync-preview-to-editor', (), _(
'Sync preview position to editor position'))
self.action_reload_preview = reg('view-refresh.png', _('Refresh preview'), None, 'reload-preview', ('F5',), _('Refresh preview'))
self.action_split_in_preview = reg('document-split.png', _('Split this file'), None, 'split-in-preview', (), _(
'Split file in the preview panel'))
self.action_find_next_preview = reg('arrow-down.png', _('Find next'), None, 'find-next-preview', (), _('Find next in preview'))
self.action_find_prev_preview = reg('arrow-up.png', _('Find previous'), None, 'find-prev-preview', (), _('Find previous in preview'))
# Search actions
group = _('Search')
self.action_find = treg('search.png', _('&Find/replace'), self.boss.show_find, 'find-replace', ('Ctrl+F',), _('Show the Find/replace panel'))
def sreg(name, text, action, overrides={}, keys=(), description=None, icon=None):
return reg(icon, text, partial(self.boss.search_action_triggered, action, overrides), name, keys, description or text.replace('&', ''))
self.action_find_next = sreg('find-next', _('Find &next'),
'find', {'direction':'down'}, ('F3', 'Ctrl+G'), _('Find next match'))
self.action_find_previous = sreg('find-previous', _('Find &previous'),
'find', {'direction':'up'}, ('Shift+F3', 'Shift+Ctrl+G'), _('Find previous match'))
self.action_replace = sreg('replace', _('&Replace'),
'replace', keys=('Ctrl+R'), description=_('Replace current match'))
self.action_replace_next = sreg('replace-next', _('&Replace and find next'),
'replace-find', {'direction':'down'}, ('Ctrl+]'), _('Replace current match and find next'))
self.action_replace_previous = sreg('replace-previous', _('R&eplace and find previous'),
'replace-find', {'direction':'up'}, ('Ctrl+['), _('Replace current match and find previous'))
self.action_replace_all = sreg('replace-all', _('Replace &all'),
'replace-all', keys=('Ctrl+A'), description=_('Replace all matches'))
self.action_count = sreg('count-matches', _('&Count all'),
'count', keys=('Ctrl+N'), description=_('Count number of matches'))
self.action_mark = reg(None, _('&Mark selected text'), self.boss.mark_selected_text, 'mark-selected-text', ('Ctrl+Shift+M',),
_('Mark selected text or unmark already marked text'))
self.action_mark.default_text = self.action_mark.text()
self.action_go_to_line = reg(None, _('Go to &line'), self.boss.go_to_line_number, 'go-to-line-number', ('Ctrl+.',), _('Go to line number'))
self.action_saved_searches = treg('folder_saved_search.png', _('Sa&ved searches'),
self.boss.saved_searches, 'saved-searches', (), _('Show the saved searches dialog'))
self.action_text_search = treg('view.png', _('&Search ignoring HTML markup'),
self.boss.show_text_search, 'text-search', (), _('Show the text search panel'))
# Check Book actions
group = _('Check book')
self.action_check_book = treg('debug.png', _('&Check book'), self.boss.check_requested, 'check-book', ('F7'), _('Check book for errors'))
self.action_spell_check_book = treg('spell-check.png', _('Check &spelling'), self.boss.spell_check_requested, 'spell-check-book', ('Alt+F7'), _(
'Check book for spelling errors'))
self.action_check_book_next = reg('forward.png', _('&Next error'), partial(
self.check_book.next_error, delta=1), 'check-book-next', ('Ctrl+F7'), _('Show next error'))
self.action_check_book_previous = reg('back.png', _('&Previous error'), partial(
self.check_book.next_error, delta=-1), 'check-book-previous', ('Ctrl+Shift+F7'), _('Show previous error'))
self.action_spell_check_next = reg('forward.png', _('&Next spelling mistake'),
self.boss.next_spell_error, 'spell-next', ('F8'), _('Go to next spelling mistake'))
# Miscellaneous actions
group = _('Miscellaneous')
self.action_create_checkpoint = treg(
'marked.png', _('&Create checkpoint'), self.boss.create_checkpoint, 'create-checkpoint', (), _(
'Create a checkpoint with the current state of the book'))
self.action_close_current_tab = reg(
'window-close.png', _('&Close current tab'), self.central.close_current_editor, 'close-current-tab', 'Ctrl+W', _(
'Close the currently open tab'))
self.action_close_all_but_current_tab = reg(
'edit-clear.png', _('&Close other tabs'), self.central.close_all_but_current_editor, 'close-all-but-current-tab', 'Ctrl+Alt+W', _(
'Close all tabs except the current tab'))
self.action_help = treg(
'help.png', _('User &Manual'), lambda : open_url(QUrl(localize_user_manual_link(
'https://manual.calibre-ebook.com/edit.html'))), 'user-manual', 'F1', _(
'Show User Manual'))
self.action_browse_images = treg(
'view-image.png', _('&Browse images in book'), self.boss.browse_images, 'browse-images', (), _(
                'Browse images in the book visually'))
self.action_multiple_split = treg(
'document-split.png', _('&Split at multiple locations'), self.boss.multisplit, 'multisplit', (), _(
'Split HTML file at multiple locations'))
self.action_compare_book = treg('diff.png', _('Compare to &another book'), self.boss.compare_book, 'compare-book', (), _(
'Compare to another book'))
self.action_manage_snippets = treg(
'snippets.png', _('Manage &Snippets'), self.boss.manage_snippets, 'manage-snippets', (), _(
'Manage user created snippets'))
self.plugin_menu_actions = []
create_plugin_actions(actions, toolbar_actions, self.plugin_menu_actions)
def create_menubar(self):
if isosx:
p, q = self.create_application_menubar()
q.triggered.connect(self.action_quit.trigger)
p.triggered.connect(self.action_preferences.trigger)
f = factory(app_id='com.calibre-ebook.EditBook-%d' % os.getpid())
b = f.create_window_menubar(self)
f = b.addMenu(_('&File'))
f.addAction(self.action_new_file)
f.addAction(self.action_import_files)
f.addSeparator()
f.addAction(self.action_open_book)
f.addAction(self.action_new_book)
f.addAction(self.action_import_book)
f.addAction(self.action_open_book_folder)
self.recent_books_menu = f.addMenu(_('&Recently opened books'))
self.update_recent_books()
f.addSeparator()
f.addAction(self.action_save)
f.addAction(self.action_save_copy)
f.addSeparator()
f.addAction(self.action_compare_book)
f.addAction(self.action_quit)
e = b.addMenu(_('&Edit'))
e.addAction(self.action_global_undo)
e.addAction(self.action_global_redo)
e.addAction(self.action_create_checkpoint)
e.addSeparator()
e.addAction(self.action_editor_undo)
e.addAction(self.action_editor_redo)
e.addSeparator()
e.addAction(self.action_editor_cut)
e.addAction(self.action_editor_copy)
e.addAction(self.action_editor_paste)
e.addAction(self.action_insert_char)
e.addSeparator()
e.addAction(self.action_quick_edit)
e.addAction(self.action_preferences)
e = b.addMenu(_('&Tools'))
tm = e.addMenu(_('Table of Contents'))
tm.addAction(self.action_toc)
tm.addAction(self.action_inline_toc)
e.addAction(self.action_manage_fonts)
e.addAction(self.action_embed_fonts)
e.addAction(self.action_subset_fonts)
e.addAction(self.action_compress_images)
e.addAction(self.action_smarten_punctuation)
e.addAction(self.action_remove_unused_css)
e.addAction(self.action_transform_styles)
e.addAction(self.action_fix_html_all)
e.addAction(self.action_pretty_all)
e.addAction(self.action_rationalize_folders)
e.addAction(self.action_add_cover)
e.addAction(self.action_set_semantics)
e.addAction(self.action_filter_css)
e.addAction(self.action_spell_check_book)
er = e.addMenu(_('External &links'))
er.addAction(self.action_check_external_links)
er.addAction(self.action_get_ext_resources)
e.addAction(self.action_check_book)
e.addAction(self.action_reports)
e = b.addMenu(_('&View'))
t = e.addMenu(_('Tool&bars'))
e.addSeparator()
for name in sorted(actions, key=lambda x:sort_key(actions[x].text())):
ac = actions[name]
if name.endswith('-dock'):
e.addAction(ac)
elif name.endswith('-bar'):
t.addAction(ac)
e.addAction(self.action_browse_images)
e.addSeparator()
e.addAction(self.action_close_current_tab)
e.addAction(self.action_close_all_but_current_tab)
e = b.addMenu(_('&Search'))
a = e.addAction
a(self.action_find)
e.addSeparator()
a(self.action_find_next)
a(self.action_find_previous)
e.addSeparator()
a(self.action_replace)
a(self.action_replace_next)
a(self.action_replace_previous)
a(self.action_replace_all)
e.addSeparator()
a(self.action_count)
e.addSeparator()
a(self.action_mark)
e.addSeparator()
a(self.action_go_to_line)
e.addSeparator()
a(self.action_saved_searches)
e.aboutToShow.connect(self.search_menu_about_to_show)
e.addSeparator()
a(self.action_text_search)
if self.plugin_menu_actions:
e = b.addMenu(_('&Plugins'))
for ac in sorted(self.plugin_menu_actions, key=lambda x:sort_key(unicode(x.text()))):
e.addAction(ac)
e = b.addMenu(_('&Help'))
a = e.addAction
a(self.action_help)
a(QIcon(I('donate.png')), _('&Donate to support calibre development'), open_donate)
a(self.action_preferences)
def search_menu_about_to_show(self):
ed = self.central.current_editor
update_mark_text_action(ed)
def update_recent_books(self):
m = self.recent_books_menu
m.clear()
books = tprefs.get('recent-books', [])
for path in books:
m.addAction(self.elided_text(path, width=500), partial(self.boss.open_book, path=path))
def create_toolbars(self):
def create(text, name):
name += '-bar'
b = self.addToolBar(text)
b.setObjectName(name) # Needed for saveState
actions[name] = b.toggleViewAction()
b.setIconSize(QSize(tprefs['toolbar_icon_size'], tprefs['toolbar_icon_size']))
return b
self.global_bar = create(_('Book tool bar'), 'global')
self.tools_bar = create(_('Tools tool bar'), 'tools')
self.plugins_bar = create(_('Plugins tool bar'), 'plugins')
self.populate_toolbars(animate=True)
def populate_toolbars(self, animate=False):
self.global_bar.clear(), self.tools_bar.clear(), self.plugins_bar.clear()
def add(bar, ac):
if ac is None:
bar.addSeparator()
elif ac == 'donate':
self.donate_button = b = ThrobbingButton(self)
b.clicked.connect(open_donate)
b.setAutoRaise(True)
b.setToolTip(_('Donate to support calibre development'))
if animate:
QTimer.singleShot(10, b.start_animation)
bar.addWidget(b)
else:
try:
bar.addAction(actions[ac])
except KeyError:
if DEBUG:
prints('Unknown action for toolbar %r: %r' % (unicode(bar.objectName()), ac))
for x in tprefs['global_book_toolbar']:
add(self.global_bar, x)
for x in tprefs['global_tools_toolbar']:
add(self.tools_bar, x)
for x in tprefs['global_plugins_toolbar']:
add(self.plugins_bar, x)
self.plugins_bar.setVisible(bool(tprefs['global_plugins_toolbar']))
def create_docks(self):
def create(name, oname):
oname += '-dock'
d = QDockWidget(name, self)
d.setObjectName(oname) # Needed for saveState
ac = d.toggleViewAction()
desc = _('Toggle %s') % name.replace('&', '')
self.keyboard.register_shortcut(
oname, desc, description=desc, action=ac, group=_('Windows'))
actions[oname] = ac
setattr(self, oname.replace('-', '_'), d)
return d
d = create(_('File browser'), 'files-browser')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea)
self.file_list = FileListWidget(d)
d.setWidget(self.file_list)
self.addDockWidget(Qt.LeftDockWidgetArea, d)
d = create(_('File preview'), 'preview')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea)
self.preview = Preview(d)
d.setWidget(self.preview)
self.addDockWidget(Qt.RightDockWidgetArea, d)
d = create(_('Live CSS'), 'live-css')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea | Qt.BottomDockWidgetArea | Qt.TopDockWidgetArea)
self.live_css = LiveCSS(self.preview, parent=d)
d.setWidget(self.live_css)
self.addDockWidget(Qt.RightDockWidgetArea, d)
d.close() # Hidden by default
d = create(_('Check book'), 'check-book')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea | Qt.BottomDockWidgetArea | Qt.TopDockWidgetArea)
d.setWidget(self.check_book)
self.addDockWidget(Qt.TopDockWidgetArea, d)
d.close() # By default the check window is closed
d = create(_('Inspector'), 'inspector')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea | Qt.BottomDockWidgetArea | Qt.TopDockWidgetArea)
d.setWidget(self.preview.inspector)
self.preview.inspector.setParent(d)
self.addDockWidget(Qt.BottomDockWidgetArea, d)
d.close() # By default the inspector window is closed
d.setFeatures(d.DockWidgetClosable | d.DockWidgetMovable) # QWebInspector does not work in a floating dock
d = create(_('Table of Contents'), 'toc-viewer')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea | Qt.BottomDockWidgetArea | Qt.TopDockWidgetArea)
d.setWidget(self.toc_view)
self.addDockWidget(Qt.LeftDockWidgetArea, d)
d.close() # Hidden by default
d = create(_('Text search'), 'text-search')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea | Qt.BottomDockWidgetArea | Qt.TopDockWidgetArea)
d.setWidget(self.text_search)
self.addDockWidget(Qt.LeftDockWidgetArea, d)
d.close() # Hidden by default
d = create(_('Checkpoints'), 'checkpoints')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea | Qt.BottomDockWidgetArea | Qt.TopDockWidgetArea)
self.checkpoints = CheckpointView(self.boss.global_undo, parent=d)
d.setWidget(self.checkpoints)
self.addDockWidget(Qt.LeftDockWidgetArea, d)
d.close() # Hidden by default
d = create(_('Saved searches'), 'saved-searches')
d.setAllowedAreas(Qt.LeftDockWidgetArea | Qt.RightDockWidgetArea | Qt.BottomDockWidgetArea | Qt.TopDockWidgetArea)
d.setWidget(self.saved_searches)
self.addDockWidget(Qt.LeftDockWidgetArea, d)
d.close() # Hidden by default
def resizeEvent(self, ev):
self.blocking_job.resize(ev.size())
return super(Main, self).resizeEvent(ev)
def update_window_title(self):
fname = os.path.basename(current_container().path_to_ebook)
        self.setWindowTitle(self.current_metadata.title + ' [%s] :: %s :: %s' % (current_container().book_type.upper(), fname, self.APP_NAME))
def closeEvent(self, e):
if self.boss.quit():
e.accept()
else:
e.ignore()
def save_state(self):
tprefs.set('main_window_geometry', bytearray(self.saveGeometry()))
tprefs.set('main_window_state', bytearray(self.saveState(self.STATE_VERSION)))
self.central.save_state()
self.saved_searches.save_state()
self.check_book.save_state()
self.text_search.save_state()
def restore_state(self):
geom = tprefs.get('main_window_geometry', None)
if geom is not None:
self.restoreGeometry(geom)
state = tprefs.get('main_window_state', None)
if state is not None:
self.restoreState(state, self.STATE_VERSION)
self.central.restore_state()
self.saved_searches.restore_state()
def contextMenuEvent(self, ev):
ev.ignore()
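# Hypothetical launch sketch (editorial, not part of this file; the
# Application class and opts object are assumed from calibre's own entry
# point, which normally constructs this window):
#
#   app = Application([])   # calibre's QApplication subclass
#   gui = Main(opts)
#   gui.show()
#   app.exec_()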
|
gpl-3.0
| 1,148,555,098,009,233,500
| 49.142857
| 159
| 0.604777
| false
| 3.720965
| false
| false
| false
|
andrewlewis/camera-id
|
test_characteristic.py
|
1
|
1197
|
#!/usr/bin/env python
from make_characteristic import get_noise_from_file
import cPickle
import glob
import numpy
import sys
from PIL import Image, ImageOps
TILE_OVERLAP = 8
if len(sys.argv) != 3:
print "Usage:\n\t%s noise_file_name path_with_png_files" % (sys.argv[0],)
sys.exit(0)
noise_file_name = sys.argv[1]
image_path_name = sys.argv[2]
# Load the camera noise.
camera_noise = numpy.loadtxt(noise_file_name, dtype=numpy.float)
camera_noise_average = numpy.average(camera_noise)
camera_noise -= camera_noise_average
camera_noise_norm = numpy.sqrt(numpy.sum(camera_noise * camera_noise))
file_list = glob.glob(image_path_name + '/*.png')
print "Processing %d images" % (len(file_list),)
for f in file_list:
# Get this image's noise.
image_noise = get_noise_from_file(f)[1]
image_noise_average = numpy.average(image_noise)
image_noise -= image_noise_average
image_noise_norm = numpy.sqrt(numpy.sum(image_noise * image_noise))
# Calculate the correlation between the two signals.
print "Dot product %s is: %s" % (f,
numpy.sum(camera_noise * image_noise) /
(camera_noise_norm * image_noise_norm))
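# Editorial sketch (not part of the original script): the value printed above
# is the normalized cross-correlation (Pearson correlation) of the two
# zero-mean noise residuals, so it is 1.0 for identical signals and near 0 for
# unrelated ones. A self-contained illustration with synthetic data:
def _correlation_demo():
    # Hypothetical helper, for illustration only.
    a = numpy.random.randn(4096)            # reference "camera noise"
    b = a + 0.5 * numpy.random.randn(4096)  # noisy observation of the same signal
    a = a - numpy.average(a)
    b = b - numpy.average(b)
    # Same formula as the loop above: dot product over the product of norms.
    return numpy.sum(a * b) / (
        numpy.sqrt(numpy.sum(a * a)) * numpy.sqrt(numpy.sum(b * b)))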
|
mit
| 6,780,480,121,013,508,000
| 29.692308
| 76
| 0.675856
| false
| 3.133508
| false
| false
| false
|
unt-libraries/django-nomination
|
nomination/migrations/0004_auto_20190927_1904.py
|
1
|
7862
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.24 on 2019-09-27 19:04
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('nomination', '0003_project_archive_url'),
]
operations = [
migrations.AlterField(
model_name='metadata',
name='name',
field=models.SlugField(help_text='Assign a name for the metadata field (letters, numbers, underscores, and hyphens are permissible).'),
),
migrations.AlterField(
model_name='metadata',
name='value_sets',
field=models.ManyToManyField(blank=True, help_text='In addition to values manually assigned, values in selected pre-defined sets will also be available to metadata fields.', to='nomination.ValueSet', verbose_name='metadata value sets'),
),
migrations.AlterField(
model_name='metadata',
name='values',
field=models.ManyToManyField(blank=True, help_text='Allowed value for metadata field.', through='nomination.Metadata_Values', to='nomination.Value', verbose_name='values'),
),
migrations.AlterField(
model_name='metadata_values',
name='value_order',
field=models.PositiveIntegerField(default=1, help_text='Change the ordering of the value fields, ordered lowest to highest'),
),
migrations.AlterField(
model_name='nominator',
name='nominator_email',
field=models.CharField(help_text='An email address for identifying your nominations in the system.', max_length=100),
),
migrations.AlterField(
model_name='nominator',
name='nominator_institution',
field=models.CharField(help_text='Your institutional affiliation.', max_length=100),
),
migrations.AlterField(
model_name='nominator',
name='nominator_name',
field=models.CharField(help_text='Your name.', max_length=100),
),
migrations.AlterField(
model_name='project',
name='admin_email',
field=models.CharField(help_text='Email address of project administrator.', max_length=80),
),
migrations.AlterField(
model_name='project',
name='admin_name',
field=models.CharField(help_text='Name of project administrator.', max_length=80),
),
migrations.AlterField(
model_name='project',
name='archive_url',
field=models.URLField(blank=True, help_text='Base URL for accessing site archives.', null=True),
),
migrations.AlterField(
model_name='project',
name='nomination_end',
field=models.DateTimeField(help_text='Date to stop accepting URL nominations.'),
),
migrations.AlterField(
model_name='project',
name='nomination_start',
field=models.DateTimeField(help_text='Date to start accepting URL nominations.'),
),
migrations.AlterField(
model_name='project',
name='project_description',
field=models.TextField(help_text='Description of project.'),
),
migrations.AlterField(
model_name='project',
name='project_end',
field=models.DateTimeField(help_text='Ending date for project.'),
),
migrations.AlterField(
model_name='project',
name='project_name',
field=models.CharField(help_text='Name given to nomination project.', max_length=250),
),
migrations.AlterField(
model_name='project',
name='project_slug',
field=models.CharField(help_text='Up to 25 character identifier for the project (used in URLS, etc.).', max_length=25, unique=True),
),
migrations.AlterField(
model_name='project',
name='project_start',
field=models.DateTimeField(help_text='Starting date for project.'),
),
migrations.AlterField(
model_name='project',
name='project_url',
field=models.CharField(help_text='Project affiliated URL.', max_length=255),
),
migrations.AlterField(
model_name='project_metadata',
name='description',
field=models.CharField(help_text='Used as a descriptive title for the metadata field on Web forms.', max_length=255),
),
migrations.AlterField(
model_name='project_metadata',
name='form_type',
field=models.CharField(choices=[('checkbox', 'checkbox'), ('date', 'date'), ('radio', 'radio button'), ('select', 'menu-select multiple values'), ('selectsingle', 'menu-select single value'), ('text', 'text input'), ('textarea', 'text area')], help_text='Type of HTML form element that should represent the field.', max_length=30),
),
migrations.AlterField(
model_name='project_metadata',
name='help',
field=models.CharField(blank=True, help_text='String used on Web forms to prompt users for accurate data.', max_length=255),
),
migrations.AlterField(
model_name='project_metadata',
name='metadata_order',
field=models.PositiveIntegerField(default=1, help_text='Change the ordering of the metadata fields, ordered lowest to highest'),
),
migrations.AlterField(
model_name='project_metadata',
name='required',
field=models.BooleanField(help_text='Are users required to submit data for this field when nominating a URL?'),
),
migrations.AlterField(
model_name='url',
name='attribute',
field=models.CharField(help_text='A property of the URL you wish to describe.', max_length=255),
),
migrations.AlterField(
model_name='url',
name='entity',
field=models.CharField(help_text='The URL to nominate for capture.', max_length=300),
),
migrations.AlterField(
model_name='url',
name='url_project',
field=models.ForeignKey(help_text='The project for which you want to add a URL.', on_delete=django.db.models.deletion.CASCADE, to='nomination.Project'),
),
migrations.AlterField(
model_name='url',
name='value',
field=models.CharField(help_text='The value of the associated attribute.', max_length=255),
),
migrations.AlterField(
model_name='value',
name='key',
field=models.CharField(help_text='Up to 35 character identifier for the metadata field.', max_length=35, unique=True),
),
migrations.AlterField(
model_name='value',
name='value',
field=models.CharField(help_text='Permitted value for associated metadata field.', max_length=255),
),
migrations.AlterField(
model_name='valueset',
name='name',
field=models.CharField(help_text='Name given to value set.', max_length=75, unique=True),
),
migrations.AlterField(
model_name='valueset',
name='values',
field=models.ManyToManyField(through='nomination.Valueset_Values', to='nomination.Value', verbose_name='values'),
),
migrations.AlterField(
model_name='valueset_values',
name='value_order',
field=models.PositiveIntegerField(default=1, help_text='Change the ordering of the value fields, ordered lowest to highest'),
),
]
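# Editorial note (not emitted by makemigrations): the AlterField operations
# above appear to change only help_text and similar Python-level options,
# which live in code rather than in the database, so applying this migration
# normally issues no schema-changing SQL. A migration like this would
# typically be regenerated with ``python manage.py makemigrations nomination``.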
|
bsd-3-clause
| 2,851,599,570,489,149,000
| 43.670455
| 343
| 0.598067
| false
| 4.627428
| false
| false
| false
|
medunigraz/outpost
|
src/outpost/django/kages/api.py
|
1
|
1284
|
import logging
import ldap
from rest_framework import (
exceptions,
permissions,
viewsets,
)
from rest_framework.response import Response
from . import models
from .conf import settings
logger = logging.getLogger(__name__)
class TranslateViewSet(viewsets.ViewSet):
permission_classes = (
permissions.IsAuthenticated,
)
def list(self, request):
return Response()
def retrieve(self, request, pk=None):
if not pk:
return Response(False)
try:
conn = ldap.initialize(settings.AUTH_LDAP_SERVER_URI)
conn.simple_bind_s(
settings.AUTH_LDAP_BIND_DN,
settings.AUTH_LDAP_BIND_PASSWORD
)
result = conn.search_s(
settings.AUTH_LDAP_USER_SEARCH.base_dn,
settings.AUTH_LDAP_USER_SEARCH.scope,
settings.KAGES_PERS_ID_FILTER.format(id=int(pk)),
settings.KAGES_PERS_FIELDS
)
found = len(result) == 1
except Exception as e:
logger.warn(
f'LDAP query failed when matching KAGes ID: {e}'
)
found = False
logger.debug(f'Matched KAGes ID: {found}')
return Response({'exists': found})
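# Hypothetical wiring sketch (editorial, not part of this module): exposed
# through a DRF router, ``GET /translate/<id>/`` would answer
# ``{"exists": true}`` when exactly one LDAP entry matches
# KAGES_PERS_ID_FILTER for that id.
#
# from rest_framework import routers
# router = routers.DefaultRouter()
# router.register(r'translate', TranslateViewSet, basename='translate')
# urlpatterns = router.urls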
|
bsd-2-clause
| -7,632,317,316,049,999,000
| 26.319149
| 65
| 0.576324
| false
| 4.196078
| false
| false
| false
|
jscn/django
|
django/contrib/contenttypes/models.py
|
1
|
7319
|
from __future__ import unicode_literals
from django.apps import apps
from django.db import models
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
class ContentTypeManager(models.Manager):
use_in_migrations = True
def __init__(self, *args, **kwargs):
super(ContentTypeManager, self).__init__(*args, **kwargs)
# Cache shared by all the get_for_* methods to speed up
# ContentType retrieval.
self._cache = {}
def get_by_natural_key(self, app_label, model):
try:
ct = self._cache[self.db][(app_label, model)]
except KeyError:
ct = self.get(app_label=app_label, model=model)
self._add_to_cache(self.db, ct)
return ct
def _get_opts(self, model, for_concrete_model):
if for_concrete_model:
model = model._meta.concrete_model
return model._meta
def _get_from_cache(self, opts):
key = (opts.app_label, opts.model_name)
return self._cache[self.db][key]
def get_for_model(self, model, for_concrete_model=True):
"""
Returns the ContentType object for a given model, creating the
ContentType if necessary. Lookups are cached so that subsequent lookups
for the same model don't hit the database.
"""
opts = self._get_opts(model, for_concrete_model)
try:
return self._get_from_cache(opts)
except KeyError:
pass
# The ContentType entry was not found in the cache, therefore we
# proceed to load or create it.
try:
try:
# We start with get() and not get_or_create() in order to use
# the db_for_read (see #20401).
ct = self.get(app_label=opts.app_label, model=opts.model_name)
except self.model.DoesNotExist:
# Not found in the database; we proceed to create it. This time we
# use get_or_create to take care of any race conditions.
ct, created = self.get_or_create(
app_label=opts.app_label,
model=opts.model_name,
)
except (OperationalError, ProgrammingError, IntegrityError):
# It's possible to migrate a single app before contenttypes,
# as it's not a required initial dependency (it's contrib!)
# Have a nice error for this.
raise RuntimeError(
"Error creating new content types. Please make sure contenttypes "
"is migrated before trying to migrate apps individually."
)
self._add_to_cache(self.db, ct)
return ct
def get_for_models(self, *models, **kwargs):
"""
Given *models, returns a dictionary mapping {model: content_type}.
"""
for_concrete_models = kwargs.pop('for_concrete_models', True)
# Final results
results = {}
# models that aren't already in the cache
needed_app_labels = set()
needed_models = set()
needed_opts = set()
for model in models:
opts = self._get_opts(model, for_concrete_models)
try:
ct = self._get_from_cache(opts)
except KeyError:
needed_app_labels.add(opts.app_label)
needed_models.add(opts.model_name)
needed_opts.add(opts)
else:
results[model] = ct
if needed_opts:
cts = self.filter(
app_label__in=needed_app_labels,
model__in=needed_models
)
for ct in cts:
model = ct.model_class()
if model._meta in needed_opts:
results[model] = ct
needed_opts.remove(model._meta)
self._add_to_cache(self.db, ct)
for opts in needed_opts:
# These weren't in the cache, or the DB, create them.
ct = self.create(
app_label=opts.app_label,
model=opts.model_name,
)
self._add_to_cache(self.db, ct)
results[ct.model_class()] = ct
return results
def get_for_id(self, id):
"""
Lookup a ContentType by ID. Uses the same shared cache as get_for_model
        (though ContentTypes are obviously not created on-the-fly by get_for_id).
"""
try:
ct = self._cache[self.db][id]
except KeyError:
# This could raise a DoesNotExist; that's correct behavior and will
# make sure that only correct ctypes get stored in the cache dict.
ct = self.get(pk=id)
self._add_to_cache(self.db, ct)
return ct
def clear_cache(self):
"""
Clear out the content-type cache. This needs to happen during database
flushes to prevent caching of "stale" content type IDs (see
django.contrib.contenttypes.management.update_contenttypes for where
this gets called).
"""
self._cache.clear()
def _add_to_cache(self, using, ct):
"""Insert a ContentType into the cache."""
# Note it's possible for ContentType objects to be stale; model_class() will return None.
# Hence, there is no reliance on model._meta.app_label here, just using the model fields instead.
key = (ct.app_label, ct.model)
self._cache.setdefault(using, {})[key] = ct
self._cache.setdefault(using, {})[ct.id] = ct
@python_2_unicode_compatible
class ContentType(models.Model):
app_label = models.CharField(max_length=100)
model = models.CharField(_('python model class name'), max_length=100)
objects = ContentTypeManager()
class Meta:
verbose_name = _('content type')
verbose_name_plural = _('content types')
db_table = 'django_content_type'
unique_together = (('app_label', 'model'),)
def __str__(self):
return self.name
@property
def name(self):
model = self.model_class()
if not model:
return self.model
return force_text(model._meta.verbose_name)
def model_class(self):
"Returns the Python model class for this type of content."
try:
return apps.get_model(self.app_label, self.model)
except LookupError:
return None
def get_object_for_this_type(self, **kwargs):
"""
Returns an object of this type for the keyword arguments given.
Basically, this is a proxy around this object_type's get_object() model
method. The ObjectNotExist exception, if thrown, will not be caught,
so code that calls this method should catch it.
"""
return self.model_class()._base_manager.using(self._state.db).get(**kwargs)
def get_all_objects_for_this_type(self, **kwargs):
"""
Returns all objects of this type for the keyword arguments given.
"""
return self.model_class()._base_manager.using(self._state.db).filter(**kwargs)
def natural_key(self):
return (self.app_label, self.model)
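# Typical usage sketch (editorial, illustrative only; SomeModel stands in for
# any installed model class):
#
#   ct = ContentType.objects.get_for_model(SomeModel)  # cached per database alias
#   obj = ct.get_object_for_this_type(pk=1)            # via the model's base manager
#   ContentType.objects.clear_cache()                  # e.g. after a database flush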
|
bsd-3-clause
| 8,636,261,011,933,846,000
| 37.119792
| 105
| 0.585872
| false
| 4.179897
| false
| false
| false
|
Acidburn0zzz/archiso-gui
|
releng/root-image/usr/share/cnchi/src/misc.py
|
1
|
28903
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2012 Canonical Ltd.
# Copyright (c) 2013 Antergos
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from collections import namedtuple
import contextlib
import grp
import os
import pwd
import re
import shutil
import subprocess
import syslog
import socket
import osextras
def copytree(src, dst, symlinks=False, ignore=None):
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if os.path.isdir(s):
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
def utf8(s, errors="strict"):
"""Decode a string as UTF-8 if it isn't already Unicode."""
if isinstance(s, str):
return s
else:
return str(s, "utf-8", errors)
def is_swap(device):
try:
with open('/proc/swaps') as fp:
for line in fp:
if line.startswith(device + ' '):
return True
except Exception:
pass
return False
_dropped_privileges = 0
def set_groups_for_uid(uid):
if uid == os.geteuid() or uid == os.getuid():
return
user = pwd.getpwuid(uid).pw_name
try:
os.setgroups([g.gr_gid for g in grp.getgrall() if user in g.gr_mem])
except OSError:
import traceback
for line in traceback.format_exc().split('\n'):
syslog.syslog(syslog.LOG_ERR, line)
def drop_all_privileges():
# gconf needs both the UID and effective UID set.
global _dropped_privileges
uid = os.environ.get('SUDO_UID')
gid = os.environ.get('SUDO_GID')
if uid is not None:
uid = int(uid)
set_groups_for_uid(uid)
if gid is not None:
gid = int(gid)
os.setregid(gid, gid)
if uid is not None:
uid = int(uid)
os.setreuid(uid, uid)
os.environ['HOME'] = pwd.getpwuid(uid).pw_dir
os.environ['LOGNAME'] = pwd.getpwuid(uid).pw_name
_dropped_privileges = None
def drop_privileges():
global _dropped_privileges
assert _dropped_privileges is not None
if _dropped_privileges == 0:
uid = os.environ.get('SUDO_UID')
gid = os.environ.get('SUDO_GID')
if uid is not None:
uid = int(uid)
set_groups_for_uid(uid)
if gid is not None:
gid = int(gid)
os.setegid(gid)
if uid is not None:
os.seteuid(uid)
_dropped_privileges += 1
def regain_privileges():
global _dropped_privileges
assert _dropped_privileges is not None
_dropped_privileges -= 1
if _dropped_privileges == 0:
os.seteuid(0)
os.setegid(0)
os.setgroups([])
def drop_privileges_save():
"""Drop the real UID/GID as well, and hide them in saved IDs."""
# At the moment, we only know how to handle this when effective
# privileges were already dropped.
assert _dropped_privileges is not None and _dropped_privileges > 0
uid = os.environ.get('SUDO_UID')
gid = os.environ.get('SUDO_GID')
if uid is not None:
uid = int(uid)
set_groups_for_uid(uid)
if gid is not None:
gid = int(gid)
os.setresgid(gid, gid, 0)
if uid is not None:
os.setresuid(uid, uid, 0)
def regain_privileges_save():
"""Recover our real UID/GID after calling drop_privileges_save."""
assert _dropped_privileges is not None and _dropped_privileges > 0
os.setresuid(0, 0, 0)
os.setresgid(0, 0, 0)
os.setgroups([])
@contextlib.contextmanager
def raised_privileges():
"""As regain_privileges/drop_privileges, but in context manager style."""
regain_privileges()
try:
yield
finally:
drop_privileges()
def raise_privileges(func):
"""As raised_privileges, but as a function decorator."""
from functools import wraps
@wraps(func)
def helper(*args, **kwargs):
with raised_privileges():
return func(*args, **kwargs)
return helper
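# Usage sketch for raise_privileges (editorial; read_protected is a
# hypothetical function, for illustration only): the wrapped call runs with
# root privileges regained and drops them again when it returns.
#
# @raise_privileges
# def read_protected(path):
#     with open(path) as fp:
#         return fp.read()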
@raise_privileges
def grub_options():
""" Generates a list of suitable targets for grub-installer
@return empty list or a list of ['/dev/sda1','Ubuntu Hardy 8.04'] """
from ubiquity.parted_server import PartedServer
l = []
try:
oslist = {}
subp = subprocess.Popen(
['os-prober'], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True)
result = subp.communicate()[0].splitlines()
for res in result:
res = res.split(':')
oslist[res[0]] = res[1]
p = PartedServer()
for disk in p.disks():
p.select_disk(disk)
with open(p.device_entry('model')) as fp:
mod = fp.readline()
with open(p.device_entry('device')) as fp:
dev = fp.readline()
with open(p.device_entry('size')) as fp:
size = fp.readline()
if dev and mod:
if size.isdigit():
size = format_size(int(size))
l.append([dev, '%s (%s)' % (mod, size)])
else:
l.append([dev, mod])
for part in p.partitions():
ostype = ''
if part[4] == 'linux-swap':
continue
if part[4] == 'free':
continue
if os.path.exists(p.part_entry(part[1], 'format')):
# Don't bother looking for an OS type.
pass
elif part[5] in oslist.keys():
ostype = oslist[part[5]]
l.append([part[5], ostype])
except:
import traceback
for line in traceback.format_exc().split('\n'):
syslog.syslog(syslog.LOG_ERR, line)
return l
@raise_privileges
def boot_device():
from ubiquity.parted_server import PartedServer
boot = None
root = None
try:
p = PartedServer()
for disk in p.disks():
p.select_disk(disk)
for part in p.partitions():
part = part[1]
if p.has_part_entry(part, 'mountpoint'):
mp = p.readline_part_entry(part, 'mountpoint')
if mp == '/boot':
boot = disk.replace('=', '/')
elif mp == '/':
root = disk.replace('=', '/')
except Exception:
import traceback
for line in traceback.format_exc().split('\n'):
syslog.syslog(syslog.LOG_ERR, line)
if boot:
return boot
return root
def is_removable(device):
if device is None:
return None
device = os.path.realpath(device)
devpath = None
is_partition = False
removable_bus = False
subp = subprocess.Popen(['udevadm', 'info', '-q', 'property',
'-n', device],
stdout=subprocess.PIPE, universal_newlines=True)
for line in subp.communicate()[0].splitlines():
line = line.strip()
if line.startswith('DEVPATH='):
devpath = line[8:]
elif line == 'DEVTYPE=partition':
is_partition = True
elif line == 'ID_BUS=usb' or line == 'ID_BUS=ieee1394':
removable_bus = True
if devpath is not None:
if is_partition:
devpath = os.path.dirname(devpath)
is_removable = removable_bus
try:
with open('/sys%s/removable' % devpath) as removable:
if removable.readline().strip() != '0':
is_removable = True
except IOError:
pass
if is_removable:
try:
subp = subprocess.Popen(['udevadm', 'info', '-q', 'name',
'-p', devpath],
stdout=subprocess.PIPE,
universal_newlines=True)
return ('/dev/%s' %
subp.communicate()[0].splitlines()[0].strip())
except Exception:
pass
return None
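# For example (editorial, illustrative device names): for a partition on a USB
# stick, is_removable walks up to the containing disk's sysfs node and returns
# something like '/dev/sdb'; for a fixed internal disk it returns None.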
def mount_info(path):
"""Return filesystem name, type, and ro/rw for a given mountpoint."""
fsname = ''
fstype = ''
writable = ''
with open('/proc/mounts') as fp:
for line in fp:
line = line.split()
if line[1] == path:
fsname = line[0]
fstype = line[2]
writable = line[3].split(',')[0]
return fsname, fstype, writable
def udevadm_info(args):
fullargs = ['udevadm', 'info', '-q', 'property']
fullargs.extend(args)
udevadm = {}
subp = subprocess.Popen(
fullargs, stdout=subprocess.PIPE, universal_newlines=True)
for line in subp.communicate()[0].splitlines():
line = line.strip()
if '=' not in line:
continue
name, value = line.split('=', 1)
udevadm[name] = value
return udevadm
def partition_to_disk(partition):
"""Convert a partition device to its disk device, if any."""
udevadm_part = udevadm_info(['-n', partition])
if ('DEVPATH' not in udevadm_part or
udevadm_part.get('DEVTYPE') != 'partition'):
return partition
disk_syspath = '/sys%s' % udevadm_part['DEVPATH'].rsplit('/', 1)[0]
udevadm_disk = udevadm_info(['-p', disk_syspath])
return udevadm_disk.get('DEVNAME', partition)
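# e.g. partition_to_disk('/dev/sda1') -> '/dev/sda' (editorial, illustrative);
# a device that is not a partition is returned unchanged.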
def is_boot_device_removable(boot=None):
if boot:
return is_removable(boot)
else:
return is_removable(boot_device())
def cdrom_mount_info():
"""Return mount information for /cdrom.
This is the same as mount_info, except that the partition is converted to
its containing disk, and we don't care whether the mount point is
writable.
"""
cdsrc, cdfs, _ = mount_info('/cdrom')
cdsrc = partition_to_disk(cdsrc)
return cdsrc, cdfs
@raise_privileges
def grub_device_map():
"""Return the contents of the default GRUB device map."""
subp = subprocess.Popen(['grub-mkdevicemap', '--no-floppy', '-m', '-'],
stdout=subprocess.PIPE, universal_newlines=True)
return subp.communicate()[0].splitlines()
def grub_default(boot=None):
"""Return the default GRUB installation target."""
# Much of this is intentionally duplicated from grub-installer, so that
# we can show the user what device GRUB will be installed to before
# grub-installer is run. Pursuant to that, we intentionally run this in
# the installer root as /target might not yet be available.
bootremovable = is_boot_device_removable(boot=boot)
if bootremovable is not None:
return bootremovable
devices = grub_device_map()
target = None
if devices:
try:
target = os.path.realpath(devices[0].split('\t')[1])
except (IndexError, OSError):
pass
# last resort
if target is None:
target = '(hd0)'
cdsrc, cdfs = cdrom_mount_info()
try:
# The target is usually under /dev/disk/by-id/, so string equality
# is insufficient.
same = os.path.samefile(cdsrc, target)
except OSError:
same = False
if ((same or target == '(hd0)') and
((cdfs and cdfs != 'iso9660') or is_removable(cdsrc))):
# Installing from removable media other than a CD. Make sure that
# we don't accidentally install GRUB to it.
boot = boot_device()
try:
if boot:
target = boot
else:
# Try the next disk along (which can't also be the CD source).
target = os.path.realpath(devices[1].split('\t')[1])
target = re.sub(r'(/dev/(cciss|ida)/c[0-9]d[0-9]|/dev/[a-z]+).*',
r'\1', target)
except (IndexError, OSError):
pass
return target
_os_prober_oslist = {}
_os_prober_osvers = {}
_os_prober_called = False
def find_in_os_prober(device, with_version=False):
"""Look for the device name in the output of os-prober.
Return the friendly name of the device, or the empty string on error.
"""
try:
oslist, osvers = os_prober()
if device in oslist:
ret = oslist[device]
elif is_swap(device):
ret = 'swap'
else:
syslog.syslog('Device %s not found in os-prober output' % device)
ret = ''
ret = utf8(ret, errors='replace')
ver = utf8(osvers.get(device, ''), errors='replace')
if with_version:
return ret, ver
else:
return ret
except (KeyboardInterrupt, SystemExit):
pass
except:
import traceback
syslog.syslog(syslog.LOG_ERR, "Error in find_in_os_prober:")
for line in traceback.format_exc().split('\n'):
syslog.syslog(syslog.LOG_ERR, line)
return ''
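# For example (editorial, illustrative values): find_in_os_prober('/dev/sda3')
# might return 'Windows 10', and find_in_os_prober('/dev/sda5',
# with_version=True) might return ('Ubuntu', '16.04'); os-prober itself runs
# only once, after which results are served from the module-level cache.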
@raise_privileges
def os_prober():
global _os_prober_oslist
global _os_prober_osvers
global _os_prober_called
if not _os_prober_called:
_os_prober_called = True
subp = subprocess.Popen(
['os-prober'], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
universal_newlines=True)
result = subp.communicate()[0].splitlines()
for res in result:
res = res.split(':')
if res[2] == 'Ubuntu':
version = [v for v in re.findall('[0-9.]*', res[1]) if v][0]
# Get rid of the superfluous (development version) (11.04)
text = re.sub('\s*\(.*\).*', '', res[1])
_os_prober_oslist[res[0]] = text
_os_prober_osvers[res[0]] = version
else:
# Get rid of the bootloader indication. It's not relevant here.
_os_prober_oslist[res[0]] = res[1].replace(' (loader)', '')
return _os_prober_oslist, _os_prober_osvers
@raise_privileges
def remove_os_prober_cache():
osextras.unlink_force('/var/lib/ubiquity/os-prober-cache')
shutil.rmtree('/var/lib/ubiquity/linux-boot-prober-cache',
ignore_errors=True)
def windows_startup_folder(mount_path):
locations = [
# Windows 8
'ProgramData/Microsoft/Windows/Start Menu/Programs/StartUp',
# Windows 7
'ProgramData/Microsoft/Windows/Start Menu/Programs/Startup',
# Windows XP
'Documents and Settings/All Users/Start Menu/Programs/Startup',
# Windows NT
'Winnt/Profiles/All Users/Start Menu/Programs/Startup',
]
for location in locations:
path = os.path.join(mount_path, location)
if os.path.exists(path):
return path
return ''
ReleaseInfo = namedtuple('ReleaseInfo', 'name, version')
def get_release():
if get_release.release_info is None:
try:
with open('/cdrom/.disk/info') as fp:
line = fp.readline()
if line:
line = line.split()
if line[2] == 'LTS':
line[1] += ' LTS'
get_release.release_info = ReleaseInfo(
name=line[0], version=line[1])
except:
syslog.syslog(syslog.LOG_ERR, 'Unable to determine the release.')
if not get_release.release_info:
get_release.release_info = ReleaseInfo(name='Ubuntu', version='')
return get_release.release_info
get_release.release_info = None
def get_release_name():
import warnings
warnings.warn('get_release_name() is deprecated, '
'use get_release().name instead.',
category=DeprecationWarning)
if not get_release_name.release_name:
try:
with open('/cdrom/.disk/info') as fp:
line = fp.readline()
if line:
line = line.split()
if line[2] == 'LTS':
get_release_name.release_name = ' '.join(line[:3])
else:
get_release_name.release_name = ' '.join(line[:2])
except:
syslog.syslog(
syslog.LOG_ERR,
"Unable to determine the distribution name from "
"/cdrom/.disk/info")
if not get_release_name.release_name:
get_release_name.release_name = 'Ubuntu'
return get_release_name.release_name
get_release_name.release_name = ''
@raise_privileges
def get_install_medium():
if not get_install_medium.medium:
try:
if os.access('/cdrom', os.W_OK):
get_install_medium.medium = 'USB'
else:
get_install_medium.medium = 'CD'
except:
syslog.syslog(
syslog.LOG_ERR, "Unable to determine install medium.")
get_install_medium.medium = 'CD'
return get_install_medium.medium
get_install_medium.medium = ''
def execute(*args):
"""runs args* in shell mode. Output status is taken."""
log_args = ['log-output', '-t', 'ubiquity']
log_args.extend(args)
try:
status = subprocess.call(log_args)
except IOError as e:
syslog.syslog(syslog.LOG_ERR, ' '.join(log_args))
syslog.syslog(syslog.LOG_ERR,
"OS error(%s): %s" % (e.errno, e.strerror))
return False
else:
if status != 0:
syslog.syslog(syslog.LOG_ERR, ' '.join(log_args))
return False
syslog.syslog(' '.join(log_args))
return True
@raise_privileges
def execute_root(*args):
return execute(*args)
def format_size(size):
"""Format a partition size."""
if size < 1000:
unit = 'B'
factor = 1
elif size < 1000 * 1000:
unit = 'kB'
factor = 1000
elif size < 1000 * 1000 * 1000:
unit = 'MB'
factor = 1000 * 1000
elif size < 1000 * 1000 * 1000 * 1000:
unit = 'GB'
factor = 1000 * 1000 * 1000
else:
unit = 'TB'
factor = 1000 * 1000 * 1000 * 1000
return '%.1f %s' % (float(size) / factor, unit)
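# Examples for format_size (editorial) - note the decimal (SI) units rather
# than binary ones:
#   format_size(512)            -> '512.0 B'
#   format_size(3 * 1000 ** 2)  -> '3.0 MB'
#   format_size(2 * 1000 ** 4)  -> '2.0 TB'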
def debconf_escape(text):
escaped = text.replace('\\', '\\\\').replace('\n', '\\n')
return re.sub(r'(\s)', r'\\\1', escaped)
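# e.g. debconf_escape('a b\nc') == 'a\\ b\\nc' (editorial): backslashes are
# doubled, newlines become the two characters '\' and 'n', and any remaining
# whitespace is backslash-escaped.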
def create_bool(text):
if text == 'true':
return True
elif text == 'false':
return False
else:
return text
@raise_privileges
def dmimodel():
model = ''
kwargs = {}
if os.geteuid() != 0:
# Silence annoying warnings during the test suite.
kwargs['stderr'] = open('/dev/null', 'w')
try:
proc = subprocess.Popen(
['dmidecode', '--string', 'system-manufacturer'],
stdout=subprocess.PIPE, universal_newlines=True, **kwargs)
manufacturer = proc.communicate()[0]
if not manufacturer:
return
manufacturer = manufacturer.lower()
if 'to be filled' in manufacturer:
# Don't bother with products in development.
return
if 'bochs' in manufacturer or 'vmware' in manufacturer:
model = 'virtual machine'
# VirtualBox sets an appropriate system-product-name.
else:
if 'lenovo' in manufacturer or 'ibm' in manufacturer:
key = 'system-version'
else:
key = 'system-product-name'
proc = subprocess.Popen(['dmidecode', '--string', key],
stdout=subprocess.PIPE,
universal_newlines=True)
model = proc.communicate()[0]
if 'apple' in manufacturer:
# MacBook4,1 - strip the 4,1
model = re.sub('[^a-zA-Z\s]', '', model)
# Replace each gap of non-alphanumeric characters with a dash.
# Ensure the resulting string does not begin or end with a dash.
model = re.sub('[^a-zA-Z0-9]+', '-', model).rstrip('-').lstrip('-')
if model.lower() == 'not-available':
return
except Exception:
syslog.syslog(syslog.LOG_ERR, 'Unable to determine the model from DMI')
finally:
if 'stderr' in kwargs:
kwargs['stderr'].close()
return model
def set_indicator_keymaps(lang):
import xml.etree.cElementTree as ElementTree
from gi.repository import Xkl, GdkX11
# GdkX11.x11_get_default_xdisplay() segfaults if Gtk hasn't been
# imported; possibly finer-grained than this, but anything using this
# will already have imported Gtk anyway ...
from gi.repository import Gtk
from ubiquity import gsettings
# pacify pyflakes
Gtk
gsettings_key = ['org.gnome.libgnomekbd.keyboard', 'layouts']
lang = lang.split('_')[0]
variants = []
    # Map inspired by that of gfxboot-theme-ubuntu, which is itself
    # based on console-setup's. This one has been restricted to
    # language => keyboard layout rather than locale => keyboard layout,
    # as we don't actually know the exact locale.
default_keymap = {
'ar': 'ara',
'bs': 'ba',
'de': 'de',
'el': 'gr',
'en': 'us',
'eo': 'epo',
'fr': 'fr_oss',
'gu': 'in_guj',
'hi': 'in',
'hr': 'hr',
'hy': 'am',
'ka': 'ge',
'kn': 'in_kan',
'lo': 'la',
'ml': 'in_mal',
'pa': 'in_guru',
'sr': 'rs',
'sv': 'se',
'ta': 'in_tam',
'te': 'in_tel',
'zh': 'cn',
}
def item_str(s):
'''Convert a zero-terminated byte array to a proper str'''
i = s.find(b'\x00')
return s[:i].decode()
def process_variant(*args):
if hasattr(args[2], 'name'):
variants.append(
'%s\t%s' % (item_str(args[1].name), item_str(args[2].name)))
else:
variants.append(item_str(args[1].name))
def restrict_list(variants):
new_variants = []
        # Start by looking for an explicit default layout in the keymap
if lang in default_keymap:
if default_keymap[lang] in variants:
variants.remove(default_keymap[lang])
new_variants.append(default_keymap[lang])
else:
tab_keymap = default_keymap[lang].replace('_', '\t')
if tab_keymap in variants:
variants.remove(tab_keymap)
new_variants.append(tab_keymap)
# Prioritize the layout matching the language (if any)
if lang in variants:
variants.remove(lang)
new_variants.append(lang)
# Uniquify our list (just in case)
variants = list(set(variants))
if len(variants) > 4:
# We have a problem, X only supports 4
            # Add as many entries as we can that are layouts without a variant
country_variants = sorted(
entry for entry in variants if '\t' not in entry)
for entry in country_variants[:4 - len(new_variants)]:
new_variants.append(entry)
variants.remove(entry)
if len(new_variants) < 4:
# We can add some more
simple_variants = sorted(
entry for entry in variants if '_' not in entry)
for entry in simple_variants[:4 - len(new_variants)]:
new_variants.append(entry)
variants.remove(entry)
if len(new_variants) < 4:
# Now just add anything left
for entry in variants[:4 - len(new_variants)]:
new_variants.append(entry)
variants.remove(entry)
else:
new_variants += list(variants)
# gsettings doesn't understand utf8
new_variants = [str(variant) for variant in new_variants]
return new_variants
def call_setxkbmap(variants):
kb_layouts = []
kb_variants = []
for entry in variants:
fields = entry.split('\t')
if len(fields) > 1:
kb_layouts.append(fields[0])
kb_variants.append(fields[1])
else:
kb_layouts.append(fields[0])
kb_variants.append("")
execute(
"setxkbmap", "-layout", ",".join(kb_layouts),
"-variant", ",".join(kb_variants))
iso_639_3 = ElementTree.parse('/usr/share/xml/iso-codes/iso_639_3.xml')
nodes = [element for element in iso_639_3.findall('iso_639_3_entry')
if element.get('part1_code') == lang]
display = GdkX11.x11_get_default_xdisplay()
engine = Xkl.Engine.get_instance(display)
if nodes:
configreg = Xkl.ConfigRegistry.get_instance(engine)
configreg.load(False)
# Apparently part2_code doesn't always work (fails with French)
for prop in ('part2_code', 'id', 'part1_code'):
code = nodes[0].get(prop)
if code is not None:
configreg.foreach_language_variant(code, process_variant, None)
if variants:
restricted_variants = restrict_list(variants)
call_setxkbmap(restricted_variants)
gsettings.set_list(
gsettings_key[0], gsettings_key[1],
restricted_variants)
break
else:
# Use the system default if no other keymaps can be determined.
gsettings.set_list(gsettings_key[0], gsettings_key[1], [])
engine.lock_group(0)
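# Worked example (illustrative, not part of the original source): for
# lang='fr' and variants ['fr', 'fr\toss', 'be', 'ca', 'ch', 'us', 'latam'],
# restrict_list() first promotes the explicit default 'fr\toss' and the bare
# 'fr', then, because more than four candidates remain, fills the remaining
# two slots with sorted variant-free layouts: ['fr\toss', 'fr', 'be', 'ca'].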
NM = 'org.freedesktop.NetworkManager'
NM_STATE_CONNECTED_GLOBAL = 70
def get_prop(obj, iface, prop):
import dbus
try:
return obj.Get(iface, prop, dbus_interface=dbus.PROPERTIES_IFACE)
except dbus.DBusException as e:
if e.get_dbus_name() == 'org.freedesktop.DBus.Error.UnknownMethod':
return None
else:
raise
def is_wireless_enabled():
import dbus
bus = dbus.SystemBus()
manager = bus.get_object(NM, '/org/freedesktop/NetworkManager')
return get_prop(manager, NM, 'WirelessEnabled')
def has_connection():
import dbus
bus = dbus.SystemBus()
manager = bus.get_object(NM, '/org/freedesktop/NetworkManager')
    state = get_prop(manager, NM, 'State')
return state == NM_STATE_CONNECTED_GLOBAL
def add_connection_watch(func):
import dbus
def connection_cb(state):
func(state == NM_STATE_CONNECTED_GLOBAL)
bus = dbus.SystemBus()
bus.add_signal_receiver(connection_cb, 'StateChanged', NM, NM)
try:
func(has_connection())
except dbus.DBusException:
# We can't talk to NM, so no idea. Wild guess: we're connected
# using ssh with X forwarding, and are therefore connected. This
# allows us to proceed with a minimum of complaint.
func(True)
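# Hedged usage sketch (not part of the original module): the NetworkManager
# helpers above are typically combined like this, with a UI callback reacting
# to connectivity changes.
#
#     def on_connection_changed(connected):
#         print('online' if connected else 'offline')
#
#     add_connection_watch(on_connection_changed)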
def install_size():
if min_install_size:
return min_install_size
# Fallback size to 5 GB
size = 5 * 1024 * 1024 * 1024
# Maximal size to 8 GB
max_size = 8 * 1024 * 1024 * 1024
try:
with open('/cdrom/casper/filesystem.size') as fp:
size = int(fp.readline())
except IOError:
pass
# TODO substitute into the template for the state box.
min_disk_size = size * 2 # fudge factor
# Set minimum size to 8GB if current minimum size is larger
# than 8GB and we still have an extra 20% of free space
if min_disk_size > max_size and size * 1.2 < max_size:
min_disk_size = max_size
return min_disk_size
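# Worked example (illustrative): a filesystem.size of 4.5 GiB doubles to a
# 9 GiB minimum; that exceeds the 8 GiB cap while 4.5 GiB * 1.2 still fits
# under it, so the minimum is clamped back down to 8 GiB.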
min_install_size = None
# vim:ai:et:sts=4:tw=80:sw=4:
def get_network():
    intip = False
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(("antergos.com", 1234))
    except OSError:
        s.close()
        return ""
    myip = s.getsockname()[0]
    s.close()
    spip = myip.split(".")
    # Check for the RFC 1918 private address ranges.
    if spip[0] == '192':
        if spip[1] == '168':
            intip = True
    elif spip[0] == '10':
        intip = True
    elif spip[0] == '172':
        if 15 < int(spip[1]) < 32:
            intip = True
    if intip:
        ipran = '.'.join(spip[:-1]) + ".0/24"
    else:
        ipran = '.'.join(spip)
    return ipran
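# Illustrative examples (not part of the original source): a private address
# such as 10.0.3.7 yields the scan range '10.0.3.0/24', while a public
# address such as 8.8.4.4 is returned as-is ('8.8.4.4').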
|
gpl-3.0
| -3,050,432,571,631,182,000
| 30.484749
| 79
| 0.558593
| false
| 3.836342
| false
| false
| false
|
mosbasik/fluidspaces
|
src/fluidspaces/i3_commands.py
|
1
|
1215
|
import subprocess
class i3Commands(object):
@staticmethod
def send_to_wp(i3_name):
'''Send the currently focused window/container to the named workspace'''
subprocess.Popen(['i3-msg', 'move container to workspace', i3_name], stdout=subprocess.PIPE)
@staticmethod
def go_to_wp(i3_name):
'''Go to the named workspace'''
subprocess.Popen(['i3-msg', 'workspace', i3_name], stdout=subprocess.PIPE)
@staticmethod
def get_wps_str():
'''Query i3 for current workspaces and return stdout as a string'''
completed_proc = subprocess.run(['i3-msg', '-t', 'get_workspaces'], stdout=subprocess.PIPE)
stdout = completed_proc.stdout.decode('utf-8')
return stdout
@staticmethod
def rename_wp(old_i3_name, new_i3_name):
subprocess.run([
'i3-msg',
'rename workspace',
'"{}"'.format(old_i3_name),
'to',
'"{}"'.format(new_i3_name),
], stdout=subprocess.PIPE)
@staticmethod
def rename_wps(old_i3_names, new_i3_names):
for old_i3_name, new_i3_name in zip(old_i3_names, new_i3_names):
i3Commands.rename_wp(old_i3_name, new_i3_name)
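# Hedged usage sketch (not part of the module; '1:web' is a made-up workspace
# name): move the focused container to a workspace, then follow it there.
#
#     i3Commands.send_to_wp('1:web')
#     i3Commands.go_to_wp('1:web')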
|
mit
| -4,274,740,972,160,197,000
| 32.75
| 100
| 0.604938
| false
| 3.441926
| false
| false
| false
|
gigglearrows/anniesbot
|
pajbot/models/timer.py
|
1
|
6625
|
import json
import logging
from pajbot.models.db import DBManager, Base
from pajbot.models.action import ActionParser
from pajbot.tbutil import find
from sqlalchemy import orm
from sqlalchemy import Column, Integer, String, Boolean
from sqlalchemy.dialects.mysql import TEXT
log = logging.getLogger('pajbot')
class Timer(Base):
__tablename__ = 'tb_timer'
id = Column(Integer, primary_key=True)
name = Column(String(256), nullable=False)
action_json = Column('action', TEXT, nullable=False)
interval_online = Column(Integer, nullable=False)
interval_offline = Column(Integer, nullable=False)
enabled = Column(Boolean, nullable=False, default=True)
def __init__(self, **options):
self.id = None
self.name = '??'
self.action_json = '{}'
self.interval_online = 5
self.interval_offline = 30
self.enabled = True
self.refresh_tts()
self.set(**options)
def set(self, **options):
self.name = options.get('name', self.name)
log.debug(options)
if 'action' in options:
log.info('new action!')
self.action_json = json.dumps(options['action'])
self.action = ActionParser.parse(self.action_json)
self.interval_online = options.get('interval_online', self.interval_online)
self.interval_offline = options.get('interval_offline', self.interval_offline)
self.enabled = options.get('enabled', self.enabled)
@orm.reconstructor
def init_on_load(self):
self.action = ActionParser.parse(self.action_json)
self.refresh_tts()
def refresh_tts(self):
self.time_to_send_online = self.interval_online
self.time_to_send_offline = self.interval_offline
def refresh_action(self):
self.action = ActionParser.parse(self.action_json)
def run(self, bot):
self.action.run(bot, source=None, message=None)
class TimerManager:
    def __init__(self, bot):
        self.bot = bot
        if self.bot:
            self.bot.execute_every(60, self.tick)
            self.bot.socket_manager.add_handler('timer.update', self.on_timer_update)
            self.bot.socket_manager.add_handler('timer.remove', self.on_timer_remove)
def on_timer_update(self, data, conn):
try:
timer_id = int(data['timer_id'])
except (KeyError, ValueError):
            log.warning('No timer ID found in on_timer_update')
return False
updated_timer = find(lambda timer: timer.id == timer_id, self.timers)
if updated_timer:
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
db_session.add(updated_timer)
db_session.refresh(updated_timer)
updated_timer.refresh_action()
db_session.expunge(updated_timer)
else:
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
updated_timer = db_session.query(Timer).filter_by(id=timer_id).one_or_none()
# Add the updated timer to the timer lists if required
if updated_timer:
if updated_timer not in self.timers:
self.timers.append(updated_timer)
if updated_timer not in self.online_timers and updated_timer.interval_online > 0:
self.online_timers.append(updated_timer)
updated_timer.refresh_tts()
if updated_timer not in self.offline_timers and updated_timer.interval_offline > 0:
self.offline_timers.append(updated_timer)
updated_timer.refresh_tts()
        # Iterate over copies: removing from a list while iterating over it
        # would skip the element following each removal.
        for timer in self.online_timers[:]:
            if timer.enabled is False or timer.interval_online <= 0:
                self.online_timers.remove(timer)
        for timer in self.offline_timers[:]:
            if timer.enabled is False or timer.interval_offline <= 0:
                self.offline_timers.remove(timer)
def on_timer_remove(self, data, conn):
try:
timer_id = int(data['timer_id'])
except (KeyError, ValueError):
            log.warning('No timer ID found in on_timer_remove')
return False
removed_timer = find(lambda timer: timer.id == timer_id, self.timers)
if removed_timer:
if removed_timer in self.timers:
self.timers.remove(removed_timer)
if removed_timer in self.online_timers:
self.online_timers.remove(removed_timer)
if removed_timer in self.offline_timers:
self.offline_timers.remove(removed_timer)
def tick(self):
if self.bot.is_online:
for timer in self.online_timers:
timer.time_to_send_online -= 1
timer = find(lambda timer: timer.time_to_send_online <= 0, self.online_timers)
if timer:
timer.run(self.bot)
timer.time_to_send_online = timer.interval_online
self.online_timers.remove(timer)
self.online_timers.append(timer)
else:
for timer in self.offline_timers:
timer.time_to_send_offline -= 1
timer = find(lambda timer: timer.time_to_send_offline <= 0, self.offline_timers)
if timer:
timer.run(self.bot)
timer.time_to_send_offline = timer.interval_offline
self.offline_timers.remove(timer)
self.offline_timers.append(timer)
def redistribute_timers(self):
for x in range(0, len(self.offline_timers)):
timer = self.offline_timers[x]
timer.time_to_send_offline = timer.interval_offline * ((x + 1) / len(self.offline_timers))
for x in range(0, len(self.online_timers)):
timer = self.online_timers[x]
timer.time_to_send_online = timer.interval_online * ((x + 1) / len(self.online_timers))
def load(self):
self.timers = []
with DBManager.create_session_scope(expire_on_commit=False) as db_session:
self.timers = db_session.query(Timer).order_by(Timer.interval_online, Timer.interval_offline, Timer.name).all()
db_session.expunge_all()
self.online_timers = [timer for timer in self.timers if timer.interval_online > 0 and timer.enabled]
self.offline_timers = [timer for timer in self.timers if timer.interval_offline > 0 and timer.enabled]
self.redistribute_timers()
log.info('Loaded {} timers ({} online/{} offline)'.format(len(self.timers), len(self.online_timers), len(self.offline_timers)))
return self
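# Hedged usage sketch (not part of the module; 'bot' stands in for a pajbot
# Bot instance providing execute_every() and socket_manager):
#
#     manager = TimerManager(bot).load()
#     # the constructor already registered manager.tick with
#     # bot.execute_every(60, ...), so due timers now run once a minute.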
|
mit
| -3,706,695,500,516,823,600
| 38.201183
| 135
| 0.616453
| false
| 3.892479
| false
| false
| false
|
white-lab/pyproteome
|
brainrnaseq/__init__.py
|
1
|
2373
|
from . import cache, mapping, enrichments
CELL_TYPE_COLS = {
'Homo sapiens': {
'Astrocyte': [
'8yo',
'13yo', '16yo', '21yo.1', '22yo.1', '35yo', '47yo', '51yo', '53yo',
'60yo', '63yo - 1', '63yo - 2',
],
'Neuron': [
'25yo',
],
'OPC': [
'22yoGC', '63yoGC - 1',
'63yo GC - 2', '47yoO4', '63yoO4',
],
'New Oligodendrocytes': [
'22yoGC', '63yoGC - 1',
'63yo GC - 2', '47yoO4', '63yoO4',
],
'Myelinating Oligodendrocytes': [
'22yoGC', '63yoGC - 1',
'63yo GC - 2', '47yoO4', '63yoO4',
],
'Microglia': [
'45yo', '51yo.1', '63yo',
],
'Endothelia': [
'13yo.1',
'47yo.1',
],
},
'Mus musculus': {
'Astrocyte': [
# 'FACS - p69',
# 'FACS p70',
'1 month',
'4 months',
'7 months',
'9 months',
],
'Neuron': [
'Neuron 3',
'Neuron 4',
],
'OPC': [
'Oligodendrocyte precursor cell 3',
'Oligodendrocyte precursor cell 4',
],
'New Oligodendrocytes': [
'Newly formed oligodendrocyte 3',
'Newly formed oligodendrocyte 4',
],
'Myelinating Oligodendrocytes': [
'Myelinating oligodendrocyte 4',
'Myelinating oligodenrocyte 5',
],
'Microglia': [
'Microglia 1',
'Microglia 2',
],
'Endothelia': [
'Endo 1',
'Endo 2',
],
},
}
CELL_TYPES = [
'Astrocyte',
'Endothelia',
'Microglia',
'Myelinating Oligodendrocytes',
'Neuron',
'New Oligodendrocytes',
'OPC',
]
DEFAULT_CELL_TYPES = [
i
for i in CELL_TYPES
if i not in ['OPC', 'New Oligodendrocytes']
]
CELL_COLORS = {
'Astrocyte': '#bfee90',
'Endothelia': '#ff9b90',
'Microglia': '#5bd3ff',
'Myelinating Oligodendrocytes': '#ff39ff',
'Neuron': '#ffc467',
'New Oligodendrocytes': 'lightpurple',
'OPC': 'darkpurple',
}
__all__ = [
'cache',
'mapping',
'enrichments',
'CELL_TYPE_COLS',
'CELL_TYPES',
'DEFAULT_CELL_TYPES',
'CELL_COLORS',
]
|
bsd-2-clause
| 7,507,329,908,707,769,000
| 21.6
| 79
| 0.435735
| false
| 2.926017
| false
| false
| false
|
synthesio/infra-ovh-ansible-module
|
plugins/modules/dedicated_server_install.py
|
1
|
2843
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.basic import AnsibleModule
DOCUMENTATION = '''
---
module: dedicated_server_install
short_description: Install a new dedicated server
description:
- Install a new dedicated server
author: Synthesio SRE Team
requirements:
- ovh >= 0.5.0
options:
service_name:
required: true
description: Ovh name of the server
hostname:
required: true
description: Name of the new dedicated server
template:
required: true
description: template to use to spawn the server
'''
EXAMPLES = '''
synthesio.ovh.dedicated_server_install:
service_name: "ns12345.ip-1-2-3.eu"
hostname: "server01.example.net"
template: "debian10_64"
delegate_to: localhost
'''
RETURN = ''' # '''
from ansible_collections.synthesio.ovh.plugins.module_utils.ovh import ovh_api_connect, ovh_argument_spec
try:
from ovh.exceptions import APIError
HAS_OVH = True
except ImportError:
HAS_OVH = False
def run_module():
module_args = ovh_argument_spec()
module_args.update(dict(
service_name=dict(required=True),
hostname=dict(required=True),
template=dict(required=True)
))
module = AnsibleModule(
argument_spec=module_args,
supports_check_mode=True
)
client = ovh_api_connect(module)
service_name = module.params['service_name']
hostname = module.params['hostname']
template = module.params['template']
if module.check_mode:
module.exit_json(msg="Installation in progress on {} as {} with template {} - (dry run mode)".format(service_name, hostname, template),
changed=True)
try:
compatible_templates = client.get(
'/dedicated/server/%s/install/compatibleTemplates' % service_name
)
if template not in compatible_templates["ovh"] and template not in compatible_templates["personal"]:
module.fail_json(msg="{} doesn't exist in compatibles templates".format(template))
except APIError as api_error:
        module.fail_json(msg="Failed to call OVH API: {0}".format(api_error))
details = {"details":
{"language": "en",
"customHostname": hostname}
}
try:
client.post(
'/dedicated/server/%s/install/start' % service_name, **details, templateName=template)
module.exit_json(msg="Installation in progress on {} as {} with template {}!".format(service_name, hostname, template), changed=True)
except APIError as api_error:
module.fail_json(msg="Failed to call OVH API: {0}".format(api_error))
def main():
run_module()
if __name__ == '__main__':
main()
|
mit
| 5,904,091,304,778,387,000
| 27.148515
| 143
| 0.646148
| false
| 3.841892
| false
| false
| false
|
certik/sfepy
|
tests/test_parsing.py
|
1
|
3180
|
from sfepy.base.testing import TestCommon
##
# 16.07.2007, c
class Test( TestCommon ):
##
# 16.07.2007, c
def from_conf( conf, options ):
return Test( conf = conf, options = options )
from_conf = staticmethod( from_conf )
##
# c: 16.07.2007, r: 08.07.2008
def test_parse_equations( self ):
from sfepy.fem.parseEq import create_bnf
test_strs = [
"""- d_volume.i1.Omega( uc )""",
"""2 * dw_term.i1.Omega( uc ) = - 3.0 * dw_term2.i1.Omega2( uc )""",
"""d_term1.Y( fluid, u, w, Nu, dcf, mode )
+ d_term2.Omega( u, w, Nu, dcf, mode )
- d_another_term.Elsewhere( w, p, Nu, dcf, mode )
= - dw_rhs.Y3.a( u, q, Nu, dcf, mode )""",
"""no_args() = 0""",
"""+ something( a, b, c ) = + something_else( c, a, d[-1] )""",
"""term_.a.a( u )""",
"""term.i1.Omega( v, du/dt ) + term2.i2.Gamma( v, dphi/dt)"""
]
n_fail = 0
term_descs = []
for test_str in test_strs:
term_descs[:] = []
try:
bnf = create_bnf( term_descs, {} )
bnf.parseString( test_str )
except:
self.report( 'failed: %s' % test_str )
if self.options.debug:
raise
n_fail += 1
for td in term_descs:
print td
self.report( '%d failure(s)' % n_fail )
if n_fail:
raise AssertionError
return True
##
# c: 16.07.2007, r: 14.07.2008
def test_parse_regions( self ):
from sfepy.fem.parseReg import create_bnf, _test_strs
test_strs = ['nodes of surface -n r.Omega',
'r.Y_2 +n copy r.Y_1',
'nodes in (y <= 0.00001) & (x < 0.11)',
'nodes in ((y <= 0.00001) & (x < 0.11))',
'nodes in (((y <= 0.00001) & (x < 0.11)))',
'nodes in (((0.00001 < y) & (x < 0.11)))',
'all -n nodes in (y == 0.00001)',
'all -n nodes of surface',
'all -e r.DOmega_100',
'r.Y_1 -n nodes of surface *e r.Z_8 *n nodes in (y > 0)',
'nodes of surface +n nodes by pokus( x, y, z )',
'elements of group 6 +e nodes by fn2_3c( x )',
"""r.Y_1 *n (r.Y_2 +e (nodes in (y > 0) *n r.Y_32))
-n nodes of surface -e r.Y_5""",
'nodes by noargs()',
'nodes by extraargs( x, y, z, abc,3 )',
'node in r.Gamma_3',
'node 10',
'elements by afun( domain )']
stack = []
bnf = create_bnf( stack )
n_fail = 0
for test_str in test_strs:
stack[:] = []
try:
out = bnf.parseString( test_str )
except:
self.report( 'failed: %s' % test_str )
n_fail += 1
self.report( '%d failures' % n_fail )
if n_fail:
raise AssertionError
return True
|
bsd-3-clause
| -26,389,724,490,425,080
| 33.193548
| 80
| 0.416981
| false
| 3.336831
| true
| false
| false
|
waidyanatha/pingsam
|
visualize.py
|
1
|
8668
|
import numpy as np
import datetime as dtm
from dateutil import rrule
import pandas as pd
import csv
import matplotlib.pylab as plt
import sys, os
#lets first create the csv file
#
#change this to actual csv file name
pingfile="weeklylogs.csv"
#paramters @plotinterval = 10 minutes
plotinterval = 10
#csv file columns
col_seq=0
col_pingtime=1
col_domain=2
col_state=3
#
########## FUNCTION TO SYNTHESEIZE MISSING DATA POINTS ##########
#
def synth_data(synthdf, interval):
#create a temporary dataframe to hold the syntheseized data
tmpdf = pd.DataFrame(columns=['seqnum', 'pingdatetime', 'domain', 'statenow'])
#first check we have a none empty dataframe
if not synthdf.empty:
#pick the originating TS data point
        synthdf = synthdf.sort_values(by='pingdatetime')
#check if first timestamp starts at 00:00:00; if not add a dumy record
startseqnum = synthdf.index[0]
startpingdt = synthdf.iloc[0]['pingdatetime']
startdomain = synthdf.iloc[0]['domain']
startstate = synthdf.iloc[0]['statenow']
#loop through each TS data point to synthetically add new TS points
#to fill the gap between two consecutive data points
for i, row in synthdf.iterrows():
#initiate the synthesiezed data point to the origin
nextdatapoint = 0
pingdt_plus_interval = startpingdt
#stepwise loop to add syntheseized points from relative origin to the next TS data point
while row['pingdatetime'] > pingdt_plus_interval + dtm.timedelta(minutes = interval) :
nextdatapoint += 1
pingdt_plus_interval = startpingdt + dtm.timedelta(minutes = nextdatapoint*interval)
tmpdf.loc[len(tmpdf.index)] = [startseqnum,pingdt_plus_interval,startdomain,startstate]
startseqnum = i
startpingdt = row['pingdatetime']
startstate = row['statenow']
#after completing through all the TS datapoints check if a none empty dataframe was created
if not tmpdf.empty:
tmpdf = pd.concat([tmpdf,synthdf])
tmpdf = tmpdf.set_index('seqnum')
#whether null or not return a dataframe with syntheseized TS data
    tmpdf = tmpdf.dropna(thresh=2)
return tmpdf
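# Worked example (illustrative, not from the original script): with
# interval=10, two real pings at 12:00 and 12:35 gain synthetic rows at
# 12:10, 12:20 and 12:30, each carrying forward the state seen at 12:00.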
#
########## PLOT HISTOGRAM TO FIGURE ##########
#
def plot_hist_to_fig(histdf, dname):
#get date range of the plot to use in suptitile
begdt = histdf['pingdatetime'].min().date()
findt = histdf['pingdatetime'].max().date()
#create a new x-axis index using dataframe index; starting from 1 instead of 0
histdf['pingdate'] = histdf['pingdatetime'].apply(lambda x: x.date())
downdf = pd.DataFrame(columns=['xlabel','pingdate', 'downcount'])
datelist = list(histdf.pingdate.unique())
for uniquedate in datelist:
xlabel = str('{:02d}'.format(uniquedate.month))+'-'+str('{:02d}'.format(uniquedate.day))
downcount = len(histdf[(histdf.statenow == '0') & (histdf.pingdate == uniquedate)])
totalcount = len(histdf[(histdf.pingdate == uniquedate)])
downdf.loc[len(downdf.index)] = [xlabel, uniquedate,100*downcount//totalcount]
    downdf = downdf.values
#x-axis values are in the newly generated xvalues column
xl = np.array(downdf[:,0])
x = np.array(downdf[:,1])
#y-axis values (1 or 0) are in the dateframe statenow column
y = np.array(downdf[:,2])
histfig, ax = plt.subplots()
ax.bar(x,y,color='red',width=0.5, align="center")
#to give enough spacing for the suptitle; otherwise overlaps with title
histfig.subplots_adjust(top=0.87)
# plt.figure(figsize=(8,6), dpi=150)
#beautify the plot and name the labels, titles
ax.set_title('Percentage of time Server Failed each Day', fontsize=14, fontweight='bold', color='gray')
histfig.suptitle(dname+'\n'+str(begdt)+' --- '+str(findt), fontsize=10, color='blue')
ax.set_xlabel('Month-Day', fontsize=12, color='gray')
    ax.set_ylabel('Failure Rate (%)', fontsize=12, color='gray')
plt.yticks(fontsize=10, color='gray', rotation='horizontal')
plt.xticks(x, xl, fontsize=10, color='gray', rotation='vertical')
ax.grid(True)
return histfig
#
########## PLOT DOWN TIMES FREQUENCY TO FIGURE ##########
#
def plot_freq_to_fig(plotdf, dname):
#get date range of the plot to use in suptitile
begdt = plotdf['pingdatetime'].min().date()
findt = plotdf['pingdatetime'].max().date()
failrate = 100-(sum(100*plotdf['statenow'].astype(int))/len(plotdf))
failrate = failrate.astype(float)
#create a new x-axis index using dataframe index; starting from 1 instead of 0
plotdf['xvalues'] = range(1,len(plotdf)+1)
    plotdf = plotdf.values
#x-axis values are in the newly generated xvalues column
x = np.array(plotdf[:,3].astype(int))
#y-axis values (1 or 0) are in the dateframe statenow column
y = np.array(plotdf[:,2].astype(int))
#setup to catputure the plot into a figure
plotfig = plt.figure(num=None, figsize=(8, 6), dpi=150, facecolor='y', edgecolor='k')
ax = plotfig.add_subplot(311)
ax.fill_between(x, 0, y, color='green')
ax.plot(x,y,color='green',lw=2)
#to give enough spacing for the suptitle; otherwise overlaps with title
plotfig.subplots_adjust(top=0.87)
#beautify the plot and name the labels, titles
ax.set_title('Frequency of Server Access Failure ('+str(failrate)+'%)', fontsize=14, fontweight='bold', color='gray')
plotfig.suptitle(dname+'\n'+str(begdt)+' --- '+str(findt), fontsize=10, color='blue')
ax.set_xlabel('Attempted Machine Accesss Times', fontsize=12, color='gray')
ax.set_ylabel('Machine State', fontsize=12, color='gray')
plt.yticks(y, ['UP','DOWN'], fontsize=10, color='gray', rotation='vertical')
plt.xticks(fontsize=10, color='gray', rotation='horizontal')
plt.ylim(0,1.1)
plt.xlim(0,x.max()+10)
ax.grid(True)
return plotfig
#
############# MAIN ################################
#
print("Complile data from file the log files")
#os.system('./analytics.sh')
print("Reading data from file "+pingfile)
with open(pingfile, 'r') as f:
    data = [i.split(",") for i in f.read().split()]
df = pd.DataFrame(data, columns=['seqnum', 'pingdatetime', 'domain', 'statenow'])
#parse the timestamps directly on the column; mutating the rows yielded by
#iterrows() would not write the changes back into the dataframe
df['pingdatetime'] = pd.to_datetime(df['pingdatetime'], format='%Y-%m-%d:%H:%M:%S')
#to avoid duplicate data and to reflect ping time to be on the minute
df['pingdatetime'] = df['pingdatetime'].apply(lambda dt: dt.replace(second=0))
#set the index and then order the rows by ping time
df = df.sort_values(by='pingdatetime')
df = df.set_index('seqnum')
#begin processing for each unique domain
print(str(len(df.index))+" data rows added to the dataframe, ready for processing ...")
print ('-----------------------------------------------------')
for thedomain in df.domain.unique():
#insert syntheseised data points
dompingdf = df[df['domain']==thedomain]
print("Begin data synthesis for "+thedomain+" with data rows = "+str(len(dompingdf.index)))
amenddf = synth_data(dompingdf,plotinterval)
if not amenddf.empty:
#output the syntheseized dataframe to output file
print(str(len(amenddf.index))+" data rows of syntheseised added to "+thedomain )
amenddf['pingdatetime'] = pd.to_datetime(amenddf.pingdatetime)
        amenddf = amenddf.sort_values(by='pingdatetime')
amenddf.index = range(0,len(amenddf))
print('writing data to file: ./data/syndata_'+thedomain+'.csv')
amenddf.to_csv('./data/syndata_'+thedomain+'.csv')
#plot timeseries with function (need to add if conditions to check if function returns valid fig)
fig = plot_freq_to_fig(amenddf, thedomain)
fig.savefig('./plots/freqplot_'+thedomain+'.png', bbox_inches='tight')
print ('frequency plot created in file: ./plots/freqplot_'+thedomain+'.png')
fig = plot_hist_to_fig(amenddf, thedomain)
fig.savefig('./plots/histplot_'+thedomain+'.png', bbox_inches='tight')
print ('histogram plot created in file: ./plots/histplot_'+thedomain+'.png')
print ('process complete for '+thedomain)
print ('-----------------------------------------------------')
else:
print ("Warning: no syntheseized data was added to: "+thedomain)
print ('-----------------------------------------------------')
print ('End processing data for visualization !!! ')
|
mit
| -8,745,131,300,681,286,000
| 48.531429
| 121
| 0.639132
| false
| 3.552459
| false
| false
| false
|
pbauman/libmesh
|
doc/statistics/libmesh_pagehits.py
|
1
|
10542
|
#!/usr/bin/env python
import matplotlib.pyplot as plt
import numpy as np
# Import stuff for working with dates
from datetime import datetime
from matplotlib.dates import date2num
# Hits/month, pages, and gigabytes served.
# To get the Google analytics data:
# .) Go to analytics.google.com.
# .) There should be (as of July 2017) a "Google Analytics Home" box at the top left of the dashboard.
# .) Click the "Audience Overview" link at the bottom right corner of this box.
# .) Adjust date range to previous month.
# .) Record the number of "Pageviews" in the "Hits" column below.
# The data below are from the libmesh.github.io site, which uses the
# number UA-24978333-1.
#
# Note: we do not have control over the analytics for the
# https://www.github.com/libMesh/libmesh page. If you look at the page
# source, analytics code UA-3769691-2 appears, but if I try to add
# this property in my analytics account, Google assigns me the number
# UA-24978333-{2,3,...} (where the last digit may change depending on
# how many times you tried to add/remove this property in the
# Analytics Dashboard) and there does not seem to be a straightforward
# way of inserting this code into the source. There have been some
# README.md based hacks for doing this in the past, but I don't think
# they are particularly reliable...
# Hits, pages, GB served
data = [
# 'Jan 2003', 616, 616, 0
# 'Feb 2003', 2078, 2078, 0,
# 'Mar 2003', 3157, 3157, 0,
# 'Apr 2003', 7800, 7800, 0,
# 'May 2003', 4627, 4627, 0,
# 'Jun 2003', 6156, 6156, 0,
# 'Jul 2003', 6389, 6389, 0,
# 'Aug 2003', 10136, 10136, 0,
# 'Sep 2003', 8871, 8871, 0,
# 'Oct 2003', 9703, 9703, 0,
# 'Nov 2003', 9802, 9802, 0,
# 'Dec 2003', 9123, 9123, 0,
# 'Jan 2004', 13599, 13599, 0,
# 'Feb 2004', 11018, 11018, 0,
# 'Mar 2004', 11713, 11713, 0,
# 'Apr 2004', 14995, 14995, 0,
# 'May 2004', 11285, 11285, 0,
# 'Jun 2004', 12974, 12974, 0,
# 'Jul 2004', 12939, 12939, 0,
# 'Aug 2004', 9708, 9708, 0,
# 'Sep 2004', 7994, 7994, 0,
# 'Oct 2004', 6920, 6920, 0,
# 'Nov 2004', 10261, 10261, 0,
# 'Dec 2004', 7483, 7483, 0,
# 'Jan 2005', 3184, 3184, 0,
# 'Feb 2005', 37733, 14077, .4373,
# 'Mar 2005', 43927, 16408, .5637,
# 'Apr 2005', 29792, 8518, .2890,
# 'May 2005', 51288, 17629, .5689,
# 'Jun 2005', 40617, 16599, .5379,
# 'Jul 2005', 29944, 10006, .3363,
# 'Aug 2005', 39592, 14556, .4577,
# 'Sep 2005', 57638, 14666, .4881,
# 'Oct 2005', 48336, 17976, .5749,
# 'Nov 2005', 49563, 15308, .5810,
# 'Dec 2005', 90863, 40736, .9415,
# 'Jan 2006', 46723, 13487, .5662,
# 'Feb 2006', 62285, 26567, .8229,
# 'Mar 2006', 47446, 14711, .6534,
# 'Apr 2006', 90314, 29635, .9762,
# 'May 2006', 68209, 20998, .7949,
# 'Jun 2006', 50495, 17128, .6881,
# 'Jul 2006', 42387, 10958, .6016,
# 'Aug 2006', 55658, 11793, .6174,
# 'Sep 2006', 54919, 20591, .9056,
# 'Oct 2006', 52916, 17944, .9015,
# 'Nov 2006', 55382, 19833, .9439,
# 'Dec 2006', 54265, 22688, .9162,
# 'Jan 2007', 53813, 19881, 1.0 ,
# 'Feb 2007', 52434, 17920, .9472,
# 'Mar 2007', 61530, 21172, 1.2,
# 'Apr 2007', 125578, 77539, 1.3,
# 'May 2007', 182764, 129596, 1.6,
# 'Jun 2007', 115730, 38571, 1.7,
# 'Jul 2007', 121054, 42757, 1.8,
# 'Aug 2007', 81192, 28187, 1.3,
# 'Sep 2007', 143553, 39734, 2.3,
# 'Oct 2007', 110449, 42111, 2.4,
# 'Nov 2007', 128307, 57851, 2.3,
# 'Dec 2007', 80584, 42631, 2.0,
# 'Jan 2008', 69623, 34155, 2.0,
# 'Feb 2008', 144881, 111751, 2.5,
# 'Mar 2008', 69801, 29211, 1.9,
# 'Apr 2008', 74023, 31149, 2.0,
# 'May 2008', 63123, 23277, 1.8,
# 'Jun 2008', 66055, 25418, 2.1,
# 'Jul 2008', 60046, 22082, 2.0,
# 'Aug 2008', 60206, 24543, 2.0,
# 'Sep 2008', 53057, 18635, 1.6,
# 'Oct 2008', 64828, 27042, 2.1,
# 'Nov 2008', 72406, 29767, 2.3,
# 'Dec 2008', 76248, 31690, 2.3,
# 'Jan 2009', 73002, 29744, 2.0,
# 'Feb 2009', 70801, 29156, 2.1,
# 'Mar 2009', 78200, 31139, 2.1,
# 'Apr 2009', 70888, 26182, 1.7,
# 'May 2009', 67263, 26210, 1.8,
# 'Jun 2009', 73146, 31328, 2.6,
# 'Jul 2009', 77828, 33711, 2.4,
# 'Aug 2009', 64378, 28542, 1.9,
# 'Sep 2009', 76167, 33484, 2.2,
# 'Oct 2009', 95727, 41062, 2.8,
# 'Nov 2009', 88042, 38869, 2.5,
# 'Dec 2009', 76148, 37609, 2.3,
# 'Jan 2010', 268856, 45983, 3.2,
# 'Feb 2010', 208210, 42680, 3.0,
# 'Mar 2010', 116263, 42660, 2.6,
# 'Apr 2010', 102493, 32942, 2.4,
# 'May 2010', 117023, 37107, 2.5,
# 'Jun 2010', 128589, 38019, 2.5,
# 'Jul 2010', 87183, 34026, 2.2,
# 'Aug 2010', 99161, 33199, 2.5,
# 'Sep 2010', 81657, 32305, 2.5,
# 'Oct 2010', 98236, 42091, 3.4,
# 'Nov 2010', 115603, 48695, 3.4,
# 'Dec 2010', 105030, 45570, 3.4,
# 'Jan 2011', 133476, 43549, 3.1,
# 'Feb 2011', 34483, 15002, 1.1,
# 'Mar 2011', 0, 0, 0.0,
# 'Apr 2011', 0, 0, 0.0,
# 'May 2011', 0, 0, 0.0,
# 'Jun 2011', 0, 0, 0.0,
# 'Jul 2011', 0, 0, 0.0,
'Aug 2011', 10185, 0, 0.0, # New "Pageviews" data from google analytics, does not seem comparable to sf.net pagehits data
'Sep 2011', 10305, 0, 0.0,
'Oct 2011', 14081, 0, 0.0,
'Nov 2011', 13397, 0, 0.0,
'Dec 2011', 13729, 0, 0.0,
'Jan 2012', 11050, 0, 0.0,
'Feb 2012', 12779, 0, 0.0,
'Mar 2012', 12970, 0, 0.0,
'Apr 2012', 13051, 0, 0.0,
'May 2012', 11857, 0, 0.0,
'Jun 2012', 12584, 0, 0.0,
'Jul 2012', 12995, 0, 0.0,
'Aug 2012', 13204, 0, 0.0,
'Sep 2012', 13170, 0, 0.0,
'Oct 2012', 13335, 0, 0.0,
'Nov 2012', 11337, 0, 0.0,
'Dec 2012', 10108, 0, 0.0, # libmesh switched to github on December 10, 2012
'Jan 2013', 13029, 0, 0.0,
'Feb 2013', 10420, 0, 0.0,
'Mar 2013', 13400, 0, 0.0,
'Apr 2013', 14416, 0, 0.0,
'May 2013', 13875, 0, 0.0,
'Jun 2013', 13747, 0, 0.0,
'Jul 2013', 14019, 0, 0.0,
'Aug 2013', 10828, 0, 0.0,
'Sep 2013', 9969, 0, 0.0,
'Oct 2013', 13083, 0, 0.0,
'Nov 2013', 12938, 0, 0.0,
'Dec 2013', 9079, 0, 0.0,
'Jan 2014', 9736, 0, 0.0,
'Feb 2014', 11824, 0, 0.0,
'Mar 2014', 10861, 0, 0.0,
'Apr 2014', 12711, 0, 0.0,
'May 2014', 11177, 0, 0.0,
'Jun 2014', 10738, 0, 0.0,
'Jul 2014', 10349, 0, 0.0,
'Aug 2014', 8877, 0, 0.0,
'Sep 2014', 9226, 0, 0.0,
'Oct 2014', 8052, 0, 0.0, # Google analytics number moved over to libmesh.github.io in Oct 2014
'Nov 2014', 9243, 0, 0.0,
'Dec 2014', 10714, 0, 0.0,
'Jan 2015', 11508, 0, 0.0,
'Feb 2015', 11278, 0, 0.0,
'Mar 2015', 13305, 0, 0.0,
'Apr 2015', 12347, 0, 0.0,
'May 2015', 11368, 0, 0.0,
'Jun 2015', 11203, 0, 0.0,
'Jul 2015', 10419, 0, 0.0,
'Aug 2015', 11282, 0, 0.0,
'Sep 2015', 13535, 0, 0.0,
'Oct 2015', 12912, 0, 0.0,
'Nov 2015', 13894, 0, 0.0,
'Dec 2015', 11694, 0, 0.0,
'Jan 2016', 11837, 0, 0.0,
'Feb 2016', 14102, 0, 0.0,
'Mar 2016', 13212, 0, 0.0,
'Apr 2016', 13355, 0, 0.0,
'May 2016', 12486, 0, 0.0,
'Jun 2016', 13973, 0, 0.0,
'Jul 2016', 10688, 0, 0.0,
'Aug 2016', 10048, 0, 0.0,
'Sep 2016', 10847, 0, 0.0,
'Oct 2016', 10984, 0, 0.0,
'Nov 2016', 12233, 0, 0.0,
'Dec 2016', 11430, 0, 0.0,
'Jan 2017', 10327, 0, 0.0,
'Feb 2017', 11039, 0, 0.0,
'Mar 2017', 12986, 0, 0.0,
'Apr 2017', 9773, 0, 0.0,
'May 2017', 10880, 0, 0.0,
'Jun 2017', 9179, 0, 0.0,
'Jul 2017', 8344, 0, 0.0,
'Aug 2017', 8617, 0, 0.0,
'Sep 2017', 8576, 0, 0.0,
'Oct 2017', 11255, 0, 0.0,
'Nov 2017', 10362, 0, 0.0,
'Dec 2017', 7948, 0, 0.0,
'Jan 2018', 9376, 0, 0.0,
'Feb 2018', 8864, 0, 0.0,
'Mar 2018', 10339, 0, 0.0,
'Apr 2018', 10958, 0, 0.0,
'May 2018', 10151, 0, 0.0,
'Jun 2018', 8981, 0, 0.0,
'Jul 2018', 8619, 0, 0.0,
'Aug 2018', 9226, 0, 0.0,
'Sep 2018', 8507, 0, 0.0,
'Oct 2018', 9150, 0, 0.0,
'Nov 2018', 8135, 0, 0.0,
'Dec 2018', 7522, 0, 0.0,
'Jan 2019', 8643, 0, 0.0,
'Feb 2019', 8729, 0, 0.0,
'Mar 2019', 7916, 0, 0.0,
]
# Extract number of hits/month
n_hits_month = data[1::4]
# Divide by 1000 for plotting...
n_hits_month = np.divide(n_hits_month, 1000.)
# Extract list of date strings
date_strings = data[0::4]
# Convert date strings into numbers
date_nums = []
for d in date_strings:
date_nums.append(date2num(datetime.strptime(d, '%b %Y')))
# Get a reference to the figure
fig = plt.figure()
# 111 is equivalent to Matlab's subplot(1,1,1) command
ax = fig.add_subplot(111)
# Plot the number of hits/month as a line with circular markers.
# The color used comes from sns.color_palette("muted").as_hex() They
# are the "same basic order of hues as the default matplotlib color
# cycle but more attractive colors."
ax.plot(date_nums, n_hits_month, marker='o', linewidth=2, color=u'#4878cf')
# Create title
fig.suptitle('libmesh.github.io Hits/Month (in Thousands)')
# Set up x-tick locations -- January of each year
ticks_names = ['2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019']
# Get numerical values for the names
tick_nums = []
for x in ticks_names:
tick_nums.append(date2num(datetime.strptime('Jan ' + x, '%b %Y')))
# Set tick labels and positions
ax.set_xticks(tick_nums)
ax.set_xticklabels(ticks_names)
# Set x limits for the plot
plt.xlim(date_nums[0], date_nums[-1]+30)
# Make x-axis ticks point outward
ax.get_xaxis().set_tick_params(direction='out')
# Save as PDF
plt.savefig('libmesh_pagehits.pdf')
# Local Variables:
# python-indent: 2
# End:
|
lgpl-2.1
| -5,319,884,989,136,103,000
| 36.119718
| 131
| 0.525232
| false
| 2.376465
| false
| false
| false
|
not-na/peng3d
|
peng3d/gui/layout.py
|
1
|
8877
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# layout.py
#
# Copyright 2020 notna <notna@apparat.org>
#
# This file is part of peng3d.
#
# peng3d is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# peng3d is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with peng3d. If not, see <http://www.gnu.org/licenses/>.
#
__all__ = [
"Layout", "GridLayout",
"LayoutCell",
]
import peng3d
from peng3d import util
from peng3d.util import WatchingList
try:
import pyglet
from pyglet.gl import *
except ImportError:
pass # Headless mode
class Layout(util.ActionDispatcher):
"""
Base Layout class.
This class does not serve any purpose directly other than to be a common base class
for all layouts.
Note that layouts can be nested, e.g. usually the first layouts parent is a SubMenu
and sub-layouts get a LayoutCell of their parent layout as their parent.
"""
def __init__(self, peng, parent):
self.peng = peng
self.parent = parent
@property
def pos(self):
return self.parent.pos
@property
def size(self):
return self.parent.size
class GridLayout(Layout):
"""
Grid-based layout helper class.
This class provides a grid-like layout to its sub-widgets. A border between widgets
can be defined. Additionally, all widgets using this layout should automatically scale
with screen size.
"""
def __init__(self, peng, parent, res, border):
super().__init__(peng, parent)
self.res = res
self.bordersize = border
@property
def cell_size(self):
"""
Helper property defining the current size of cells in both x and y axis.
:return: 2-tuple of float
"""
return self.size[0]/self.res[0], self.size[1]/self.res[1]
def get_cell(self, pos, size, anchor_x="left", anchor_y="bottom", border=1):
"""
Returns a grid cell suitable for use as the ``pos`` parameter of any widget.
The ``size`` parameter of the widget will automatically be overwritten.
:param pos: Grid position, in cell
:param size: Size, in cells
:param anchor_x: either ``left``\\ , ``center`` or ``right``
:param anchor_y: either ``bottom``\\ , ``center`` or ``top``
:return: LayoutCell subclass
"""
return _GridCell(self.peng, self, pos, size, anchor_x, anchor_y, border)
class LayoutCell(object):
"""
Base Layout Cell.
Not to be used directly. Usually subclasses of this class are returned by layouts.
Instances can be passed to Widgets as the ``pos`` argument. The ``size`` argument will
be automatically overridden.
Note that manually setting ``size`` will override the size set by the layout cell,
though the position will be kept.
"""
@property
def pos(self):
"""
Property accessing the position of the cell.
This usually refers to the bottom-left corner, but may change depending on arguments
passed during creation.
Note that results can be floats.
:return: 2-tuple of ``(x,y)``
"""
raise NotImplementedError("pos property has to be overridden")
@property
def size(self):
"""
Property accessing the size of the cell.
Note that results can be floats.
:return: 2-tuple of ``(width, height)``
"""
raise NotImplementedError("size property has to be overridden")
class DumbLayoutCell(LayoutCell):
"""
Dumb layout cell that behaves like a widget.
Note that this class is not actually widget and should only be used as the ``pos``
argument to a widget or the ``parent`` to another Layout.
It can be used to create, for example, a :py:class:`GridLayout()` over only a portion
of the screen.
Even though setting the :py:attr:`pos` and :py:attr:`size` attributes is possible,
sometimes a redraw cannot be triggered correctly if e.g. the parent is not submenu.
"""
def __init__(self, parent, pos, size):
self.parent = parent
self._pos = pos
self._size = size
@property
def pos(self):
"""
Property that will always be a 2-tuple representing the position of the widget.
Note that this method may call the method given as ``pos`` in the initializer.
The returned object will actually be an instance of a helper class to allow for setting only the x/y coordinate.
This property also respects any :py:class:`Container` set as its parent, any offset will be added automatically.
Note that setting this property will override any callable set permanently.
"""
if isinstance(self._pos, list) or isinstance(self._pos, tuple):
r = self._pos
elif callable(self._pos):
w, h = self.parent.size[:]
r = self._pos(w, h, *self.size)
elif isinstance(self._pos, LayoutCell):
r = self._pos.pos
else:
raise TypeError("Invalid position type")
ox, oy = self.parent.pos
r = r[0] + ox, r[1] + oy
# if isinstance(self.submenu,ScrollableContainer) and not self._is_scrollbar:# and self.name != "__scrollbar_%s"%self.submenu.name: # Widget inside scrollable container and not the scrollbar
# r = r[0],r[1]+self.submenu.offset_y
return WatchingList(r, self._wlredraw_pos)
@pos.setter
def pos(self, value):
self._pos = value
if hasattr(self.parent, "redraw"):
self.parent.redraw()
@property
def size(self):
"""
Similar to :py:attr:`pos` but for the size instead.
"""
if isinstance(getattr(self, "_pos", None), LayoutCell):
s = self._pos.size
elif isinstance(self._size, list) or isinstance(self._size, tuple):
s = self._size
elif callable(self._size):
w, h = self.parent.size[:]
s = self._size(w, h)
else:
raise TypeError("Invalid size type")
s = s[:]
if s[0] == -1 or s[1] == -1:
raise ValueError("Cannot set size to -1 in DumbLayoutCell")
# Prevents crashes with negative size
s = [max(s[0], 0), max(s[1], 0)]
return WatchingList(s, self._wlredraw_size)
@size.setter
def size(self, value):
self._size = value
if hasattr(self.parent, "redraw"):
self.parent.redraw()
    def _wlredraw_pos(self, wl):
self._pos = wl[:]
if hasattr(self.parent, "redraw"):
self.parent.redraw()
    def _wlredraw_size(self, wl):
self._size = wl[:]
if hasattr(self.parent, "redraw"):
self.parent.redraw()
class _GridCell(LayoutCell):
def __init__(self, peng, parent, offset, size, anchor_x, anchor_y, border=1):
self.peng = peng
self.parent = parent
self.offset = offset
self._size = size
self.anchor_x = anchor_x
self.anchor_y = anchor_y
self.border = border
@property
def pos(self):
dx, dy = self.parent.bordersize
dx *= self.border
dy *= self.border
px, py = self.parent.pos # Parent position in px
oxc, oyc = self.offset # Offset in cells
csx, csy = self.parent.cell_size # Cell size in px
ox, oy = oxc*csx, oyc*csy # Offset in px
        sxc, syc = self._size  # Size in cells
        sx, sy = sxc*csx, syc*csy  # Size in px
if self.anchor_x == "left":
x = px+ox+dx/2
elif self.anchor_x == "center":
x = px+ox+sx/2
elif self.anchor_x == "right":
x = px+ox+sx-dx/2
else:
raise ValueError(f"Invalid anchor_x of {self.anchor_x}")
if self.anchor_y == "bottom":
y = py+oy+dy/2
elif self.anchor_y == "center":
y = py+oy+sy/2
elif self.anchor_y == "top":
y = py+oy+sy-dy/2
else:
raise ValueError(f"Invalid anchor_y of {self.anchor_y}")
return x, y
@property
def size(self):
dx, dy = self.parent.bordersize
csx, csy = self.parent.cell_size # Cell size in px
        sxc, syc = self._size  # Size in cells
        sx, sy = sxc * csx - dx * self.border, syc * csy - dy * self.border
return sx, sy
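# Hedged usage sketch (not part of the module; ``peng`` and ``submenu`` stand
# in for real peng3d objects): a 4x3 grid over a submenu, with one cell
# spanning two columns.
#
#     grid = GridLayout(peng, submenu, res=(4, 3), border=2)
#     cell = grid.get_cell(pos=(0, 2), size=(2, 1))
#     # pass ``cell`` as the ``pos`` argument of a widget; the widget's size
#     # is then derived from the cell automatically.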
|
gpl-2.0
| -6,435,422,355,169,676,000
| 30.038462
| 198
| 0.600315
| false
| 3.806604
| false
| false
| false
|
andrewgiessel/folium
|
folium/utilities.py
|
1
|
19979
|
# -*- coding: utf-8 -*-
"""
Utilities
-------
Utility module for Folium helper functions.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import time
import math
import zlib
import struct
import json
import base64
from jinja2 import Environment, PackageLoader
try:
import pandas as pd
except ImportError:
pd = None
try:
import numpy as np
except ImportError:
np = None
from folium.six import iteritems, text_type, binary_type
def get_templates():
"""Get Jinja templates."""
return Environment(loader=PackageLoader('folium', 'templates'))
def legend_scaler(legend_values, max_labels=10.0):
"""
Downsamples the number of legend values so that there isn't a collision
of text on the legend colorbar (within reason). The colorbar seems to
support ~10 entries as a maximum.
"""
if len(legend_values) < max_labels:
legend_ticks = legend_values
else:
spacer = int(math.ceil(len(legend_values)/max_labels))
legend_ticks = []
for i in legend_values[::spacer]:
legend_ticks += [i]
legend_ticks += ['']*(spacer-1)
return legend_ticks
def linear_gradient(hexList, nColors):
"""
    Given a list of hex color strings, will return a list of length
    nColors where the colors are linearly interpolated between the
    given values.
    Example:
    linear_gradient(['#000000', '#FF0000', '#FFFF00'], 100)
"""
def _scale(start, finish, length, i):
"""
        Return the correct value of a number that is in between start
and finish, for use in a loop of length *length*.
"""
base = 16
fraction = float(i) / (length - 1)
raynge = int(finish, base) - int(start, base)
thex = hex(int(int(start, base) + fraction * raynge)).split('x')[-1]
if len(thex) != 2:
thex = '0' + thex
return thex
allColors = []
# Separate (R, G, B) pairs.
for start, end in zip(hexList[:-1], hexList[1:]):
# Linearly intepolate between pair of hex ###### values and
# add to list.
nInterpolate = 765
for index in range(nInterpolate):
r = _scale(start[1:3], end[1:3], nInterpolate, index)
g = _scale(start[3:5], end[3:5], nInterpolate, index)
b = _scale(start[5:7], end[5:7], nInterpolate, index)
allColors.append(''.join(['#', r, g, b]))
# Pick only nColors colors from the total list.
result = []
for counter in range(nColors):
fraction = float(counter) / (nColors - 1)
index = int(fraction * (len(allColors) - 1))
result.append(allColors[index])
return result
def color_brewer(color_code, n=6):
"""
    Generate a ColorBrewer color scheme named 'color_code' with 'n' colors.
Live examples can be seen at http://colorbrewer2.org/
"""
maximum_n = 253
scheme_info = {'BuGn': 'Sequential',
'BuPu': 'Sequential',
'GnBu': 'Sequential',
'OrRd': 'Sequential',
'PuBu': 'Sequential',
'PuBuGn': 'Sequential',
'PuRd': 'Sequential',
'RdPu': 'Sequential',
'YlGn': 'Sequential',
'YlGnBu': 'Sequential',
'YlOrBr': 'Sequential',
'YlOrRd': 'Sequential',
'BrBg': 'Diverging',
'PiYG': 'Diverging',
'PRGn': 'Diverging',
'PuOr': 'Diverging',
'RdBu': 'Diverging',
'RdGy': 'Diverging',
'RdYlBu': 'Diverging',
'RdYlGn': 'Diverging',
'Spectral': 'Diverging',
'Accent': 'Qualitative',
'Dark2': 'Qualitative',
'Paired': 'Qualitative',
'Pastel1': 'Qualitative',
'Pastel2': 'Qualitative',
'Set1': 'Qualitative',
'Set2': 'Qualitative',
'Set3': 'Qualitative',
}
    schemes = {'BuGn': ['#EDF8FB', '#CCECE6', '#99D8C9',
'#66C2A4', '#41AE76', '#238B45', '#005824'],
'BuPu': ['#EDF8FB', '#BFD3E6', '#9EBCDA',
'#8C96C6', '#8C6BB1', '#88419D', '#6E016B'],
'GnBu': ['#F0F9E8', '#CCEBC5', '#A8DDB5',
'#7BCCC4', '#4EB3D3', '#2B8CBE', '#08589E'],
'OrRd': ['#FEF0D9', '#FDD49E', '#FDBB84',
'#FC8D59', '#EF6548', '#D7301F', '#990000'],
'PuBu': ['#F1EEF6', '#D0D1E6', '#A6BDDB',
'#74A9CF', '#3690C0', '#0570B0', '#034E7B'],
'PuBuGn': ['#F6EFF7', '#D0D1E6', '#A6BDDB',
'#67A9CF', '#3690C0', '#02818A', '#016450'],
'PuRd': ['#F1EEF6', '#D4B9DA', '#C994C7',
'#DF65B0', '#E7298A', '#CE1256', '#91003F'],
'RdPu': ['#FEEBE2', '#FCC5C0', '#FA9FB5',
'#F768A1', '#DD3497', '#AE017E', '#7A0177'],
'YlGn': ['#FFFFCC', '#D9F0A3', '#ADDD8E',
'#78C679', '#41AB5D', '#238443', '#005A32'],
'YlGnBu': ['#FFFFCC', '#C7E9B4', '#7FCDBB',
'#41B6C4', '#1D91C0', '#225EA8', '#0C2C84'],
'YlOrBr': ['#FFFFD4', '#FEE391', '#FEC44F',
'#FE9929', '#EC7014', '#CC4C02', '#8C2D04'],
'YlOrRd': ['#FFFFB2', '#FED976', '#FEB24C',
'#FD8D3C', '#FC4E2A', '#E31A1C', '#B10026'],
'BrBg': ['#8c510a', '#d8b365', '#f6e8c3',
'#c7eae5', '#5ab4ac', '#01665e'],
'PiYG': ['#c51b7d', '#e9a3c9', '#fde0ef',
'#e6f5d0', '#a1d76a', '#4d9221'],
'PRGn': ['#762a83', '#af8dc3', '#e7d4e8',
'#d9f0d3', '#7fbf7b', '#1b7837'],
'PuOr': ['#b35806', '#f1a340', '#fee0b6',
'#d8daeb', '#998ec3', '#542788'],
'RdBu': ['#b2182b', '#ef8a62', '#fddbc7',
'#d1e5f0', '#67a9cf', '#2166ac'],
'RdGy': ['#b2182b', '#ef8a62', '#fddbc7',
'#e0e0e0', '#999999', '#4d4d4d'],
'RdYlBu': ['#d73027', '#fc8d59', '#fee090',
'#e0f3f8', '#91bfdb', '#4575b4'],
'RdYlGn': ['#d73027', '#fc8d59', '#fee08b',
'#d9ef8b', '#91cf60', '#1a9850'],
'Spectral': ['#d53e4f', '#fc8d59', '#fee08b',
'#e6f598', '#99d594', '#3288bd'],
'Accent': ['#7fc97f', '#beaed4', '#fdc086',
'#ffff99', '#386cb0', '#f0027f'],
'Dark2': ['#1b9e77', '#d95f02', '#7570b3',
'#e7298a', '#66a61e', '#e6ab02'],
'Paired': ['#a6cee3', '#1f78b4', '#b2df8a',
'#33a02c', '#fb9a99', '#e31a1c'],
'Pastel1': ['#fbb4ae', '#b3cde3', '#ccebc5',
'#decbe4', '#fed9a6', '#ffffcc'],
'Pastel2': ['#b3e2cd', '#fdcdac', '#cbd5e8',
'#f4cae4', '#e6f5c9', '#fff2ae'],
'Set1': ['#e41a1c', '#377eb8', '#4daf4a',
'#984ea3', '#ff7f00', '#ffff33'],
'Set2': ['#66c2a5', '#fc8d62', '#8da0cb',
'#e78ac3', '#a6d854', '#ffd92f'],
'Set3': ['#8dd3c7', '#ffffb3', '#bebada',
'#fb8072', '#80b1d3', '#fdb462'],
}
# Raise an error if the n requested is greater than the maximum.
if n > maximum_n:
raise ValueError("The maximum number of colors in a"
" ColorBrewer sequential color series is 253")
# Only if n is greater than six do we interpolate values.
if n > 6:
if color_code not in schemes:
color_scheme = None
else:
# Check to make sure that it is not a qualitative scheme.
if scheme_info[color_code] == 'Qualitative':
raise ValueError("Expanded color support is not available"
" for Qualitative schemes, restrict"
" number of colors to 6")
else:
color_scheme = linear_gradient(schemes.get(color_code), n)
else:
color_scheme = schemes.get(color_code, None)
return color_scheme
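# Hedged usage sketch (not part of the module): six or fewer colors come
# straight from the lookup table; a larger n triggers linear interpolation.
#
#     color_brewer('YlGn')        # base scheme straight from the table
#     color_brewer('YlGn', n=12)  # twelve interpolated hex colors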
def transform_data(data):
"""
Transform Pandas DataFrame into JSON format.
Parameters
----------
data: DataFrame or Series
Pandas DataFrame or Series
Returns
-------
JSON compatible dict
Example
-------
>>> transform_data(df)
"""
if pd is None:
raise ImportError("The Pandas package is required"
" for this functionality")
if np is None:
raise ImportError("The NumPy package is required"
" for this functionality")
def type_check(value):
"""
Type check values for JSON serialization. Native Python JSON
serialization will not recognize some Numpy data types properly,
so they must be explicitly converted.
"""
if pd.isnull(value):
return None
elif (isinstance(value, pd.tslib.Timestamp) or
isinstance(value, pd.Period)):
return time.mktime(value.timetuple())
elif isinstance(value, (int, np.integer)):
return int(value)
elif isinstance(value, (float, np.float_)):
return float(value)
elif isinstance(value, str):
return str(value)
else:
return value
if isinstance(data, pd.Series):
json_data = [{type_check(x): type_check(y) for
x, y in iteritems(data)}]
elif isinstance(data, pd.DataFrame):
json_data = [{type_check(y): type_check(z) for
x, y, z in data.itertuples()}]
    else:
        raise TypeError("data must be a Pandas Series or DataFrame")
    return json_data
def split_six(series=None):
"""
Given a Pandas Series, get a domain of values from zero to the 90% quantile
rounded to the nearest order-of-magnitude integer. For example, 2100 is
rounded to 2000, 2790 to 3000.
Parameters
----------
series: Pandas series, default None
Returns
-------
list
"""
if pd is None:
raise ImportError("The Pandas package is required"
" for this functionality")
if np is None:
raise ImportError("The NumPy package is required"
" for this functionality")
def base(x):
if x > 0:
base = pow(10, math.floor(math.log10(x)))
return round(x/base)*base
else:
return 0
quants = [0, 50, 75, 85, 90]
# Some weirdness in series quantiles a la 0.13.
arr = series.values
return [base(np.percentile(arr, x)) for x in quants]
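# Worked example (illustrative): for a series whose 0/50/75/85/90 percentiles
# are 0, 120, 800, 2100 and 2790, split_six() returns
# [0, 100, 800, 2000, 3000].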
def mercator_transform(data, lat_bounds, origin='upper', height_out=None):
"""Transforms an image computed in (longitude,latitude) coordinates into
the a Mercator projection image.
Parameters
----------
data: numpy array or equivalent list-like object.
Must be NxM (mono), NxMx3 (RGB) or NxMx4 (RGBA)
lat_bounds : length 2 tuple
Minimal and maximal value of the latitude of the image.
origin : ['upper' | 'lower'], optional, default 'upper'
Place the [0,0] index of the array in the upper left or lower left
corner of the axes.
height_out : int, default None
The expected height of the output.
If None, the height of the input is used.
"""
if np is None:
raise ImportError("The NumPy package is required"
" for this functionality")
mercator = lambda x: np.arcsinh(np.tan(x*np.pi/180.))*180./np.pi
array = np.atleast_3d(data).copy()
height, width, nblayers = array.shape
lat_min, lat_max = lat_bounds
if height_out is None:
height_out = height
# Eventually flip the image
if origin == 'upper':
array = array[::-1, :, :]
lats = (lat_min + np.linspace(0.5/height, 1.-0.5/height, height) *
(lat_max-lat_min))
latslats = (mercator(lat_min) +
np.linspace(0.5/height_out, 1.-0.5/height_out, height_out) *
(mercator(lat_max)-mercator(lat_min)))
out = np.zeros((height_out, width, nblayers))
for i in range(width):
        for j in range(nblayers):
out[:, i, j] = np.interp(latslats, mercator(lats), array[:, i, j])
# Eventually flip the image.
if origin == 'upper':
out = out[::-1, :, :]
return out
def image_to_url(image, mercator_project=False, colormap=None,
origin='upper', bounds=((-90, -180), (90, 180))):
"""Infers the type of an image argument and transforms it into a URL.
Parameters
----------
image: string, file or array-like object
* If string, it will be written directly in the output file.
* If file, it's content will be converted as embedded in the
output file.
* If array-like, it will be converted to PNG base64 string and
embedded in the output.
origin : ['upper' | 'lower'], optional, default 'upper'
Place the [0, 0] index of the array in the upper left or
lower left corner of the axes.
colormap : callable, used only for `mono` image.
Function of the form [x -> (r,g,b)] or [x -> (r,g,b,a)]
for transforming a mono image into RGB.
It must output iterables of length 3 or 4, with values between
0. and 1. Hint : you can use colormaps from `matplotlib.cm`.
mercator_project : bool, default False, used for array-like image.
Transforms the data to project (longitude,latitude)
coordinates to the Mercator projection.
bounds: list-like, default ((-90, -180), (90, 180))
Image bounds on the map in the form
[[lat_min, lon_min], [lat_max, lon_max]].
Only used if mercator_project is True.
"""
if hasattr(image, 'read'):
# We got an image file.
if hasattr(image, 'name'):
# We try to get the image format from the file name.
fileformat = image.name.lower().split('.')[-1]
else:
fileformat = 'png'
url = "data:image/{};base64,{}".format(
fileformat, base64.b64encode(image.read()).decode('utf-8'))
elif (not (isinstance(image, text_type) or
isinstance(image, binary_type))) and hasattr(image, '__iter__'):
# We got an array-like object.
if mercator_project:
data = mercator_transform(image,
[bounds[0][0], bounds[1][0]],
origin=origin)
else:
data = image
png = write_png(data, origin=origin, colormap=colormap)
url = "data:image/png;base64," + base64.b64encode(png).decode('utf-8')
else:
# We got an URL.
url = json.loads(json.dumps(image))
return url.replace('\n', ' ')
def write_png(data, origin='upper', colormap=None):
"""
Transform an array of data into a PNG string.
This can be written to disk using binary I/O, or encoded using base64
for an inline PNG like this:
>>> png_str = write_png(array)
>>> "data:image/png;base64,"+png_str.encode('base64')
Inspired from
http://stackoverflow.com/questions/902761/saving-a-numpy-array-as-an-image
Parameters
----------
data: numpy array or equivalent list-like object.
Must be NxM (mono), NxMx3 (RGB) or NxMx4 (RGBA)
origin : ['upper' | 'lower'], optional, default 'upper'
Place the [0,0] index of the array in the upper left or lower left
corner of the axes.
colormap : callable, used only for `mono` image.
Function of the form [x -> (r,g,b)] or [x -> (r,g,b,a)]
for transforming a mono image into RGB.
It must output iterables of length 3 or 4, with values between
0. and 1. Hint: you can use colormaps from `matplotlib.cm`.
Returns
-------
PNG formatted byte string
"""
if np is None:
raise ImportError("The NumPy package is required"
" for this functionality")
if colormap is None:
colormap = lambda x: (x, x, x, 1)
array = np.atleast_3d(data)
height, width, nblayers = array.shape
if nblayers not in [1, 3, 4]:
raise ValueError("Data must be NxM (mono), "
"NxMx3 (RGB), or NxMx4 (RGBA)")
assert array.shape == (height, width, nblayers)
if nblayers == 1:
array = np.array(list(map(colormap, array.ravel())))
nblayers = array.shape[1]
if nblayers not in [3, 4]:
raise ValueError("colormap must provide colors of"
"length 3 (RGB) or 4 (RGBA)")
array = array.reshape((height, width, nblayers))
assert array.shape == (height, width, nblayers)
if nblayers == 3:
array = np.concatenate((array, np.ones((height, width, 1))), axis=2)
nblayers = 4
assert array.shape == (height, width, nblayers)
assert nblayers == 4
# Normalize to uint8 if it isn't already.
if array.dtype != 'uint8':
array = array * 255./array.max(axis=(0, 1)).reshape((1, 1, 4))
array = array.astype('uint8')
# Eventually flip the image.
if origin == 'lower':
array = array[::-1, :, :]
# Transform the array to bytes.
raw_data = b''.join([b'\x00' + array[i, :, :].tobytes()
for i in range(height)])
def png_pack(png_tag, data):
chunk_head = png_tag + data
return (struct.pack("!I", len(data)) +
chunk_head +
struct.pack("!I", 0xFFFFFFFF & zlib.crc32(chunk_head)))
return b''.join([
b'\x89PNG\r\n\x1a\n',
png_pack(b'IHDR', struct.pack("!2I5B", width, height, 8, 6, 0, 0, 0)),
png_pack(b'IDAT', zlib.compress(raw_data, 9)),
png_pack(b'IEND', b'')])
def _camelify(out):
return (''.join(["_" + x.lower() if i < len(out)-1 and x.isupper() and out[i+1].islower() # noqa
else x.lower() + "_" if i < len(out)-1 and x.islower() and out[i+1].isupper() # noqa
else x.lower() for i, x in enumerate(list(out))])).lstrip('_').replace('__', '_') # noqa
def _parse_size(value):
try:
if isinstance(value, int) or isinstance(value, float):
value_type = 'px'
value = float(value)
assert value > 0
else:
value_type = '%'
value = float(value.strip('%'))
assert 0 <= value <= 100
    except Exception:
msg = "Cannot parse value {!r} as {!r}".format
raise ValueError(msg(value, value_type))
return value, value_type
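# Illustrative sketch of the two forms _parse_size accepts:
#   _parse_size(300)   -> (300.0, 'px')   # raw numbers are pixel sizes
#   _parse_size('75%') -> (75.0, '%')     # strings are percentages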
def _locations_mirror(x):
"""Mirrors the points in a list-of-list-of-...-of-list-of-points.
For example:
>>> _locations_mirror([[[1, 2], [3, 4]], [5, 6], [7, 8]])
[[[2, 1], [4, 3]], [6, 5], [8, 7]]
"""
if hasattr(x, '__iter__'):
if hasattr(x[0], '__iter__'):
return list(map(_locations_mirror, x))
else:
return list(x[::-1])
else:
return x
def _locations_tolist(x):
"""Transforms recursively a list of iterables into a list of list.
"""
if hasattr(x, '__iter__'):
return list(map(_locations_tolist, x))
else:
return x
|
mit
| -2,512,162,047,460,299,000
| 34.740608
| 101
| 0.516743
| false
| 3.55056
| false
| false
| false
|
FDelporte/PiGameConsole
|
Main.py
|
1
|
5104
|
'''
Created on 22/09/2017
@author: Frank Delporte
'''
import thread
import Tkinter as tk
import tkFont
import time
from ButtonHandler import *
from KeyReader import *
from PongGui import *
from SlideShow import *
from ConsoleMenu import *
from Legend import *
try:
import keyboard # pip install keyboard
keyAvailable = True
except ImportError:
keyAvailable = False
class PiGameConsole():
# general vars
pongBusy = False
slideShowBusy = False
keepRunning = False
# frame holders
menu = None
legend = None
win = None
slideShow = None
pong = None
def __init__(self):
print("PiGameConsole initiated")
def preventScreensaver(self):
while (self.keepRunning):
if keyAvailable == True:
keyboard.write('A', delay=0)
time.sleep(10)
def checkInput(self):
btn = ButtonHandler()
key = KeyReader()
while (self.keepRunning):
if btn.getButton(2) == True or key.getKey("1") == True:
#print("Controller red")
if self.slideShowBusy == True and self.slideShow != None:
self.slideShow.stop()
self.startPong()
elif self.pongBusy == True and self.pong != None:
self.pong.stop()
self.startSlideShow()
if btn.getButton(1) == True or key.getKey("2") == True:
#print("Controller green")
print("Controller green")
if btn.getButton(4) == True or key.getKey("3") == True:
#print("Player1 red")
if self.pongBusy == True and self.pong != None:
self.pong.move_player(1, "up")
if btn.getButton(3) == True or key.getKey("4") == True:
#print("Player1 green")
if self.pongBusy == True and self.pong != None:
self.pong.move_player(1, "down")
if btn.getButton(6) == True or key.getKey("5") == True:
#print("Player2 red")
if self.pongBusy == True and self.pong != None:
self.pong.move_player(2, "up")
if btn.getButton(5) == True or key.getKey("6") == True:
#print("Player2 green")
if self.pongBusy == True and self.pong != None:
self.pong.move_player(2, "down")
time.sleep(0.1)
def startGUI(self):
# Start the GUI
self.win = tk.Tk()
self.win.title("PI Gaming console")
self.win.attributes("-fullscreen", True)
self.exitButton = tk.Button(self.win, text = "Quit", command = self.exitProgram)
self.exitButton.grid(row = 0, column = 0, sticky=tk.NW, padx=(10, 0), pady=(10, 0))
self.menu = ConsoleMenu(self.win, 300, 250)
self.menu.grid(row = 1, column = 0, sticky=tk.NW, padx=(10, 10), pady=(0, 0))
self.legend = Legend(self.win, 300, 400)
self.legend.grid(row = 2, column = 0, sticky=tk.NW, padx=(10, 10), pady=(0, 0))
self.startSlideShow()
self.win.mainloop()
def exitProgram(self):
self.keepRunning = False
print "Finished"
self.win.quit()
def clearWindow(self):
if self.slideShow != None:
self.slideShow.stop()
self.slideShow = None
if self.pong != None:
self.pong.stop()
self.pong = None
self.slideShowBusy = False
self.pongBusy = False
time.sleep(0.5)
def startSlideShow(self):
self.clearWindow()
self.menu.setSelected(1)
self.legend.setLegend(1)
self.slideShow = SlideShow(self.win, self.win.winfo_screenwidth() - 300, self.win.winfo_screenheight() - 50)
self.slideShow.grid(row = 0, column = 2, rowspan = 3, sticky=tk.NSEW, pady=(10, 10))
self.slideShowBusy = True
def startPong(self):
self.clearWindow()
self.menu.setSelected(2)
self.legend.setLegend(2)
self.pong = PongGui(self.win, self.win.winfo_screenwidth() - 300, self.win.winfo_screenheight() - 50)
self.pong.grid(row = 0, column = 2, rowspan = 3, sticky=tk.NSEW, pady=(10, 10))
self.pongBusy = True
if __name__ == "__main__":
piGameConsole = PiGameConsole()
# Start a thread to check if a game is running
piGameConsole.keepRunning = True
thread.start_new_thread(piGameConsole.preventScreensaver, ())
thread.start_new_thread(piGameConsole.checkInput, ())
piGameConsole.startGUI()
|
apache-2.0
| 330,923,120,042,564,540
| 30.121951
| 116
| 0.508817
| false
| 4.054011
| false
| false
| false
|
xiiicyw/Data-Wrangling-with-MongoDB
|
Lesson_4_Problem_Set/03-Updating_Schema/update.py
|
1
|
3482
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
In this problem set you work with another type of infobox data, audit it, clean it,
come up with a data model, insert it into a MongoDB and then run some queries against your database.
The set contains data about Arachnid class.
The data is already in the database. But you have been given a task to also include 'binomialAuthority'
information in the data, so you have to go through the data and update the existing entries.
The following things should be done in the function add_field:
- process the csv file and extract 2 fields - 'rdf-schema#label' and 'binomialAuthority_label'
- clean up the 'rdf-schema#label' same way as in the first exercise - removing redundant "(spider)" suffixes
- return a dictionary, with 'label' being the key, and 'binomialAuthority_label' the value
- if 'binomialAuthority_label' is "NULL", skip the item
The following should be done in the function update_db:
- query the database by using the field 'label'
- update the data, by adding a new item under 'classification' with a key 'binomialAuthority'
The resulting data should look like this:
- the output structure should be as follows:
{ 'label': 'Argiope',
'uri': 'http://dbpedia.org/resource/Argiope_(spider)',
'description': 'The genus Argiope includes rather large and spectacular spiders that often ...',
'name': 'Argiope',
'synonym': ["One", "Two"],
'classification': {
'binomialAuthority': None,
'family': 'Orb-weaver spider',
'class': 'Arachnid',
'phylum': 'Arthropod',
'order': 'Spider',
'kingdom': 'Animal',
'genus': None
}
}
"""
import codecs
import csv
import json
import pprint
DATAFILE = 'arachnid.csv'
FIELDS = {'rdf-schema#label': 'label',
          'binomialAuthority_label': 'binomialAuthority'}
def add_field(filename, fields):
process_fields = fields.keys()
data = {}
with open(filename, "r") as f:
reader = csv.DictReader(f)
        # Skip the three extra metadata rows that follow the csv header.
        for i in range(3):
            l = reader.next()
        for line in reader:
            # Clean the label: drop redundant "(spider)"-style suffixes.
            label = line['rdf-schema#label']
            if label.find('(') != -1:
                label = label.split('(')[0].strip()
            value = line['binomialAuthority_label']
            # Skip items without binomial authority information.
            if value != "NULL":
                data[label] = value
    return data
def update_db(data, db):
# YOUR CODE HERE
for element in data:
query = db.arachnid.update({'label': element},
{"$set": {"classification.binomialAuthority" : data[element]}})
def test():
# Please change only the add_field and update_db functions!
# Changes done to this function will not be taken into account
# when doing a Test Run or Submit, they are just for your own reference
# and as an example for running this code locally!
data = add_field(DATAFILE, FIELDS)
from pymongo import MongoClient
client = MongoClient("mongodb://localhost:27017")
db = client.examples
update_db(data, db)
updated = db.arachnid.find_one({'label': 'Opisthoncana'})
assert updated['classification']['binomialAuthority'] == 'Embrik Strand'
pprint.pprint(data)
if __name__ == "__main__":
test()
|
agpl-3.0
| -41,363,206,728,056,340
| 32.815534
| 108
| 0.63297
| false
| 3.903587
| false
| false
| false
|
yunhaowang/IDP-APA
|
utilities/py_idpapa_assign_sr.py
|
1
|
7753
|
#!/usr/bin/env python
import sys,re,time,argparse
from multiprocessing import cpu_count,Pool
def main(args):
# print >>sys.stdout, "Start analysis: " + time.strftime("%a,%d %b %Y %H:%M:%S")
output_gpd = args.output
iso_list = get_iso_info(args.isoform)
p = Pool(processes=args.cpu)
csize = 100
results = p.imap(func=assignment,iterable=generate_tx(args.short_reads,iso_list),chunksize=csize)
for res in results:
if not res: continue
output_gpd.write(res+"\n")
output_gpd.close()
# print >>sys.stdout, "Finish analysis: " + time.strftime("%a,%d %b %Y %H:%M:%S")
def generate_tx(input_sr,iso_list):
z = 0
for line in input_sr:
z += 1
yield (line,z,iso_list)
# align first mate without splice alignment
def align_first_mate_s(strand,iso_exon_start,iso_exon_end,sr_exon_start,sr_exon_end):
indic = "mismatch"
if strand == "+":
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[-2]) and int(sr_exon_end.split(",")[0]) <= int(iso_exon_end.split(",")[-2]):
indic = "match"
else:
indic = "mismatch"
else:
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[0]) and int(sr_exon_end.split(",")[0]) <= int(iso_exon_end.split(",")[0]):
indic = "match"
else:
indic = "mismatch"
return indic
# align first mate with splice alignment
def align_first_mate_m(strand,iso_exon_number,iso_exon_start,iso_exon_end,sr_exon_number,sr_exon_start,sr_exon_end):
iso_junc_list = []
sr_junc_list = []
indic = "mismatch"
for i in range(0,int(iso_exon_number)-1):
iso_junc_list.append(iso_exon_end.split(",")[i])
iso_junc_list.append(iso_exon_start.split(",")[i+1])
iso_junc_set = "," + ",".join(iso_junc_list) + ","
iso_whole_set = "," + iso_exon_start.split(",")[0] + iso_junc_set + iso_exon_end.split(",")[-2] + ","
for i in range(0,int(sr_exon_number)-1):
sr_junc_list.append(sr_exon_end.split(",")[i])
sr_junc_list.append(sr_exon_start.split(",")[i+1])
sr_junc_set = "," + ",".join(sr_junc_list) + ","
if strand == "+":
pattern = sr_junc_set + "$"
if int(sr_exon_end.split(",")[-2]) <= int(iso_exon_end.split(",")[-2]) and re.search(pattern,iso_junc_set) and int(sr_exon_start.split(",")[0]) >= int(iso_whole_set.split(sr_junc_set)[0].split(",")[-1]):
indic = "match"
else:
indic = "mismatch"
else:
pattern = "^" + sr_junc_set
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[0]) and re.search(pattern,iso_junc_set) and int(sr_exon_end.split(",")[-2]) <= int(iso_whole_set.split(sr_junc_set)[1].split(",")[0]):
indic = "match"
else:
indic = "mismatch"
return indic
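# Minimal sketch with hypothetical coordinates (not taken from a real GPD
# file): a 3-exon "+"-strand isoform (100-150, 200-250, 300-350) and a
# spliced first mate spanning 220-250/300-340 share the junction set
# ",250,300,", so the mate is accepted.
def _demo_align_first_mate_m():
    return align_first_mate_m("+", 3, "100,200,300,", "150,250,350,",
                              2, "220,300,", "250,340,")  # -> "match"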
# align second mate without splice alignment
def align_second_mate_s(iso_exon_number,iso_exon_start,iso_exon_end,sr_exon_start,sr_exon_end):
indic = "mismatch"
if int(iso_exon_number) == 1:
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[0]) and int(sr_exon_end.split(",")[0]) <= int(iso_exon_end.split(",")[0]):
indic = "match"
else:
indic = "mismatch"
else:
for i in range(0,int(iso_exon_number)):
if int(sr_exon_start.split(",")[0]) >= int(iso_exon_start.split(",")[i]) and int(sr_exon_end.split(",")[0]) <= int(iso_exon_end.split(",")[i]):
indic = "match"
break
else:
indic = "mismatch"
return indic
# align second mate with splice alignment
def align_second_mate_m(iso_exon_number,iso_exon_start,iso_exon_end,sr_exon_number,sr_exon_start,sr_exon_end):
iso_junc_list = []
sr_junc_list = []
indic = "mismatch"
for i in range(0,int(iso_exon_number)-1):
iso_junc_list.append(iso_exon_end.split(",")[i])
iso_junc_list.append(iso_exon_start.split(",")[i+1])
iso_junc_set = "," + ",".join(iso_junc_list) + ","
iso_whole_set = "," + iso_exon_start.split(",")[0] + iso_junc_set + iso_exon_end.split(",")[-2] + ","
for i in range(0,int(sr_exon_number)-1):
sr_junc_list.append(sr_exon_end.split(",")[i])
sr_junc_list.append(sr_exon_start.split(",")[i+1])
sr_junc_set = "," + ",".join(sr_junc_list) + ","
if re.search(sr_junc_set,iso_junc_set) and len(iso_whole_set.split(sr_junc_set)[0].split(","))%2 == 0 and int(sr_exon_start.split(",")[0]) >= int(iso_whole_set.split(sr_junc_set)[0].split(",")[-1]) and int(sr_exon_end.split(",")[-2]) <= int(iso_whole_set.split(sr_junc_set)[1].split(",")[0]):
indic = "match"
else:
indic = "mismatch"
return indic
# extract pseudo isoform information
def get_iso_info(iso_gpd):
iso_list = []
for line in iso_gpd:
iso_list.append(line.strip())
return iso_list
iso_gpd.close()
def assignment(inputs):
(line,z,iso_list) = inputs
read_id,chr,strand,start,end,mapq_1,sf_1,exon_number_1,exon_start_1,exon_end_1,mapq_2,sf_2,exon_number_2,exon_start_2,exon_end_2 = line.rstrip("\n").split("\t")
sr_info = line.rstrip("\n")
sr_polya_iso = []
for iso in iso_list:
gene_id,isoform_id,iso_chr,iso_strand,tss,tts,cds_start,cds_end,exon_number,exon_start,exon_end = iso.split("\t")
if iso_chr == chr and iso_strand == strand and int(tss) <= int(start) and int(tts) >= int(end) and int(exon_number) >= int(exon_number_1) and int(exon_number) >= int(exon_number_2):
if int(exon_number_1) == 1 and int(exon_number_2) == 1:
indic_1 = align_first_mate_s(strand,exon_start,exon_end,exon_start_1,exon_end_1)
indic_2 = align_second_mate_s(exon_number,exon_start,exon_end,exon_start_2,exon_end_2)
if indic_1 == "match" and indic_2 == "match":
sr_polya_iso.append(isoform_id)
elif int(exon_number_1) == 1 and int(exon_number_2) > 1:
indic_1 = align_first_mate_s(strand,exon_start,exon_end,exon_start_1,exon_end_1)
indic_2 = align_second_mate_m(exon_number,exon_start,exon_end,exon_number_2,exon_start_2,exon_end_2)
if indic_1 == "match" and indic_2 == "match":
sr_polya_iso.append(isoform_id)
elif int(exon_number_1) > 1 and int(exon_number_2) == 1:
indic_1 = align_first_mate_m(strand,exon_number,exon_start,exon_end,exon_number_1,exon_start_1,exon_end_1)
indic_2 = align_second_mate_s(exon_number,exon_start,exon_end,exon_start_2,exon_end_2)
if indic_1 == "match" and indic_2 == "match":
sr_polya_iso.append(isoform_id)
else:
indic_1 = align_first_mate_m(strand,exon_number,exon_start,exon_end,exon_number_1,exon_start_1,exon_end_1)
indic_2 = align_second_mate_m(exon_number,exon_start,exon_end,exon_number_2,exon_start_2,exon_end_2)
if indic_1 == "match" and indic_2 == "match":
sr_polya_iso.append(isoform_id)
if sr_polya_iso != []:
return line.rstrip("\n") + "\t" + ",".join(sr_polya_iso)
else:
return None
def do_inputs():
output_gpd_format = '''
1. read id
2. chromosome
3. strand
4. start site of alignment of fragment
5. end site of alignment of fragment
6. MAPQ of read1 (mate1)
7. Number of nucleotides that are softly-clipped by aligner (mate1)
8. exon number (mate1)
9. exon start set (mate1)
10. exon end set (mate1)
11. MAPQ of read2 (mate2)
12. Number of nucleotides that are softly-clipped by aligner (mate2)
13. exon number (mate2)
14. exon start set (mate2)
15. exon end set (mate2)
16. isoform set containing this polyA site'''
parser = argparse.ArgumentParser(description="Function: assign the polyA sites identified by short reads to specific isoforms",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-r','--short_reads',type=argparse.FileType('r'),required=True,help="Short reads gpd file")
parser.add_argument('-i','--isoform',type=argparse.FileType('r'),required=True,help="Input: isoform gpd file")
parser.add_argument('-o','--output',type=argparse.FileType('w'),required=True,help="Output: short reads with assigned isoforms")
parser.add_argument('-p','--cpu',type=int,default=cpu_count(),help="Number of process")
args = parser.parse_args()
return args
if __name__=="__main__":
args = do_inputs()
main(args)
|
apache-2.0
| -6,389,944,135,169,664,000
| 43.815029
| 293
| 0.659229
| false
| 2.449605
| false
| false
| false
|
CybOXProject/python-cybox
|
cybox/objects/win_prefetch_object.py
|
1
|
2182
|
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import entities
from mixbox import fields
import cybox.bindings.win_prefetch_object as win_prefetch_binding
from cybox.objects.device_object import Device
from cybox.objects.win_volume_object import WinVolume
from cybox.common import String, DateTime, Long, ObjectProperties
class AccessedFileList(entities.EntityList):
_binding = win_prefetch_binding
_binding_class = win_prefetch_binding.AccessedFileListType
_namespace = "http://cybox.mitre.org/objects#WinPrefetchObject-2"
accessed_file = fields.TypedField("Accessed_File", String, multiple=True)
class AccessedDirectoryList(entities.EntityList):
_binding = win_prefetch_binding
_binding_class = win_prefetch_binding.AccessedDirectoryListType
_namespace = "http://cybox.mitre.org/objects#WinPrefetchObject-2"
accessed_directory = fields.TypedField("Accessed_Directory", String, multiple=True)
class Volume(entities.Entity):
_binding = win_prefetch_binding
_binding_class = win_prefetch_binding.VolumeType
_namespace = "http://cybox.mitre.org/objects#WinPrefetchObject-2"
volumeitem = fields.TypedField("VolumeItem", WinVolume, multiple=True)
deviceitem = fields.TypedField("DeviceItem", Device, multiple=True)
class WinPrefetch(ObjectProperties):
_binding = win_prefetch_binding
_binding_class = win_prefetch_binding.WindowsPrefetchObjectType
_namespace = "http://cybox.mitre.org/objects#WinPrefetchObject-2"
_XSI_NS = "WinPrefetchObj"
_XSI_TYPE = "WindowsPrefetchObjectType"
application_file_name = fields.TypedField("Application_File_Name", String)
prefetch_hash = fields.TypedField("Prefetch_Hash", String)
times_executed = fields.TypedField("Times_Executed", Long)
first_run = fields.TypedField("First_Run", DateTime)
last_run = fields.TypedField("Last_Run", DateTime)
volume = fields.TypedField("Volume", WinVolume)
accessed_file_list = fields.TypedField("Accessed_File_List", AccessedFileList)
accessed_directory_list = fields.TypedField("Accessed_Directory_List", AccessedDirectoryList)
|
bsd-3-clause
| -1,243,254,732,700,253,700
| 40.961538
| 97
| 0.762603
| false
| 3.673401
| false
| false
| false
|
Hannimal/raspicar
|
ps3Controller/ps3joy.py
|
1
|
1520
|
#!/usr/bin/env python
# coding: Latin-1
import sys
import smbus
import time
bus = smbus.SMBus(1)
address = 0x2a
try:
pipe = open('/dev/input/js0', 'r')
print('/dev/input/js0 Available')
except:
print('/dev/input/js0 not Available')
sys.exit(0)
msg = []
position = [0,0,0,0]
def StringToBytes(val):
retVal = []
for c in val:
retVal.append(ord(c))
return retVal
def sendData(val):
    try:
        #print(val)
        bus.write_i2c_block_data(address, 1, val)
    except:
        # ignore transient I2C write errors so the control loop keeps running
        pass
def getRange(device):
status = bus.read_byte(device)
#time.sleep(0.01)
return status
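# /dev/input/js0 delivers 8-byte Linux joystick events: bytes 0-3 are a
# timestamp, bytes 4-5 a little-endian value, byte 6 the event type
# (1 = button, 2 = axis) and byte 7 the button/axis number.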
while 1:
try:
for char in pipe.read(1):
msg += [char]
#print(msg)
if len(msg) == 8:
# Button event if 6th byte is 1
if ord(msg[6]) == 1:
position[3] = ord(msg[7])
position[2] = ord(msg[4])
print(getRange(address))
# Axis event if 6th byte is 2
if ord(msg[6]) == 2: # define Axis
if ord(msg[7]) == 2: # define right joy
position[0] = ord(msg[5])
if ord(msg[7]) == 1: # define left joy
position[1] = ord(msg[5])
sendData(position)
msg = []
except KeyboardInterrupt:
sendData([0,0])
raise
except:
print ('Lost Connection')
sendData([0,0])
sys.exit(0)
|
unlicense
| 6,163,764,360,669,899,000
| 23.126984
| 59
| 0.483553
| false
| 3.478261
| false
| false
| false
|
shouldmakemusic/yaas
|
controller/RedFrameController.py
|
1
|
3038
|
# Copyright (C) 2015 Manuel Hirschauer (manuel@hirschauer.net)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# For questions regarding this module contact
# Manuel Hirschauer <manuel@hirschauer.net>
"""
Control the behavior of the red frame
"""
from YaasController import *
class RedFrameController (YaasController):
"""
Control the behavior of the red frame
"""
def __init__(self, yaas):
YaasController.__init__(self, yaas)
self.log.debug("(RedFrameController) init")
def play_clip(self, params, value):
"""
Plays the xth clip in the red frame
At the moment this works only for the track style red frame
        Has to be tested when trying different styles for the red frame
@param params[0]: clip_number
"""
self.log.verbose("(RedFrameController) play_clip called")
clip_number = params[0]
self.log.verbose("(RedFrameController) for clip " + str(clip_number))
self.log.verbose("(RedFrameController) scene_offset: " + str(self.yaas.get_session()._scene_offset))
        #if (clip_number > 4):
        #    clip_number = clip_number - 1
        clip_number = self.yaas.get_session()._scene_offset + clip_number
        self.log.verbose("(RedFrameController) calculated number " + str(clip_number))
        self.song_helper().get_selected_track().fire(clip_number)
def move_track_view_vertical(self, params, value):
"""
Moves the current position down or up
@param params[0]: True ? down : up
"""
self.log.verbose("(RedFrameController) move_track_view_vertical called")
down = params[0]
self.log.verbose("(RedFrameController) down? " + str(down))
self.view_helper().move_track_view_vertical(down)
def move_track_view_horizontal(self, params, value):
"""
Moves the red frame left or right
@param params[0]: True ? right : left
"""
self.log.verbose("(RedFrameController) move_track_view_horizontal called")
right = params[0]
self.log.verbose("(RedFrameController) right? " + str(right))
self.view_helper().move_track_view_horizontal(right)
|
gpl-2.0
| -3,350,587,529,291,833,000
| 38.973684
| 108
| 0.634628
| false
| 4.077852
| false
| false
| false
|
3dfxsoftware/cbss-addons
|
report_profit/wizard/wiz_trial_cost.py
|
1
|
3732
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (C) OpenERP Venezuela (<http://openerp.com.ve>).
# All Rights Reserved
# Credits######################################################
# Coded by: javier@vauxoo.com
# Planified by: Nhomar Hernandez
# Audited by: Vauxoo C.A.
#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
from openerp.osv import osv, fields
import openerp.tools as tools
from openerp.tools.translate import _
import openerp.netsvc as netsvc
import time
import datetime
from mx.DateTime import *
class trial_cost(osv.TransientModel):
logger = netsvc.Logger()
_name = "trial.cost"
_columns = {
'date_start': fields.date('Start Date', required=True),
'period_length': fields.integer('Period length (days)', required=True),
'user_res_id': fields.many2one('res.users', 'Salesman'),
'partner_res_id': fields.many2one('res.partner', 'Partner'),
'cat_res_id': fields.many2one('product.category', 'Category'),
'u_check': fields.boolean('Check salesman?'),
'p_check': fields.boolean('Check partner?'),
'c_check': fields.boolean('Check category?'),
}
_defaults = {
'period_length': lambda *a: 30,
}
def action_print(self, cr, uid, ids, data, context=None):
if context is None:
context = {}
data = {}
data['ids'] = context.get('active_ids', [])
data['model'] = context.get('active_model', 'ir.ui.menu')
data['form'] = self.read(cr, uid, ids[0])
form = data['form']
if not form['u_check'] and not form['p_check'] and not form['c_check']:
raise osv.except_osv(_('User Error'), _(
'You must check one box !'))
res = {}
period_length = data['form']['period_length']
if period_length <= 0:
raise osv.except_osv(_('UserError'), _(
'You must enter a period length that cannot be 0 or below !'))
start = datetime.date.fromtimestamp(time.mktime(
time.strptime(data['form']['date_start'], "%Y-%m-%d")))
start = DateTime(int(start.year), int(start.month), int(start.day))
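        # Build four consecutive windows counting back from date_start:
        # res['3'] is the most recent ('0-30' with the default 30-day period
        # length), then res['2'] ('30-60'), res['1'] ('60-90'), res['0'] ('90-120').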
for i in range(4)[::-1]:
stop = start - RelativeDateTime(days=period_length)
res[str(i)] = {
'name': str((4-(i+1))*period_length) +
'-' + str((4-i)*period_length),
'stop': start.strftime('%Y-%m-%d'),
'start': stop.strftime('%Y-%m-%d'),
}
start = stop - RelativeDateTime(days=1)
data['form'].update(res)
return {'type': 'ir.actions.report.xml',
'report_name': 'profit.trial.cost',
'datas': data}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
gpl-2.0
| 1,987,104,676,452,728,800
| 40.010989
| 79
| 0.55761
| false
| 3.949206
| false
| false
| false
|
gannetson/sportschooldeopenlucht
|
apps/fund/migrations/0002_add_recurring_direct_debit_payment.py
|
1
|
17485
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'RecurringDirectDebitPayment'
db.create_table(u'fund_recurringdirectdebitpayment', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['accounts.BlueBottleUser'], unique=True)),
('active', self.gf('django.db.models.fields.BooleanField')(default=False)),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('updated', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=35)),
('city', self.gf('django.db.models.fields.CharField')(max_length=35)),
('account', self.gf('apps.fund.fields.DutchBankAccountField')(max_length=10)),
))
db.send_create_signal(u'fund', ['RecurringDirectDebitPayment'])
def backwards(self, orm):
# Deleting model 'RecurringDirectDebitPayment'
db.delete_table(u'fund_recurringdirectdebitpayment')
models = {
u'accounts.bluebottleuser': {
'Meta': {'object_name': 'BlueBottleUser'},
'about': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'}),
'availability': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'available_time': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'birthdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'contribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'newsletter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'picture': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'primary_language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'share_money': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'share_time_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'user_type': ('django.db.models.fields.CharField', [], {'default': "'person'", 'max_length': '25'}),
'username': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'why': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'cowry.payment': {
'Meta': {'object_name': 'Payment'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '3'}),
'fee': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payment_method_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20', 'blank': 'True'}),
'payment_submethod_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'polymorphic_cowry.payment_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '15', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'fund.customvoucherrequest': {
'Meta': {'object_name': 'CustomVoucherRequest'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True'}),
'contact_email': ('django.db.models.fields.EmailField', [], {'default': "''", 'max_length': '75', 'blank': 'True'}),
'contact_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'contact_phone': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
'number': ('django.db.models.fields.PositiveIntegerField', [], {}),
'organization': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '20'}),
'value': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'})
},
u'fund.donation': {
'Meta': {'object_name': 'Donation'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'donation_type': ('django.db.models.fields.CharField', [], {'default': "'one_off'", 'max_length': '20', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.Project']"}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True', 'blank': 'True'})
},
u'fund.order': {
'Meta': {'object_name': 'Order'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'orders'", 'symmetrical': 'False', 'to': u"orm['cowry.Payment']"}),
'recurring': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'current'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.BlueBottleUser']", 'null': 'True', 'blank': 'True'})
},
u'fund.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['fund.Order']"})
},
u'fund.recurringdirectdebitpayment': {
'Meta': {'object_name': 'RecurringDirectDebitPayment'},
'account': ('apps.fund.fields.DutchBankAccountField', [], {'max_length': '10'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '35'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['accounts.BlueBottleUser']", 'unique': 'True'})
},
u'fund.voucher': {
'Meta': {'object_name': 'Voucher'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
'code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
'donations': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['fund.Donation']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '2'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '500', 'blank': 'True'}),
'receiver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'receiver'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'receiver_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'receiver_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sender'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'sender_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'sender_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '20', 'db_index': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'projects.partnerorganization': {
'Meta': {'object_name': 'PartnerOrganization'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'projects.project': {
'Meta': {'ordering': "['title']", 'object_name': 'Project'},
'coach': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'team_member'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['accounts.BlueBottleUser']"}),
'partner_organization': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.PartnerOrganization']", 'null': 'True', 'blank': 'True'}),
'phase': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'popularity': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['fund']
|
bsd-3-clause
| 2,081,662,664,729,660,000
| 83.883495
| 196
| 0.561796
| false
| 3.66178
| false
| false
| false
|
khosrow/metpx
|
sundew/lib/MasterConfigurator.py
|
1
|
10676
|
"""
MetPX Copyright (C) 2004-2007 Environment Canada
MetPX comes with ABSOLUTELY NO WARRANTY; For details type see the file
named COPYING in the root of the source directory tree.
"""
"""
#############################################################################################
# Name: MasterConfigurator.py
#
# Author: Daniel Lemay
#
# Date: 2007-11-15
#
# Description:
#
#############################################################################################
"""
import sys, os, os.path, commands, re, time, fnmatch
import PXPaths
from SystemManager import SystemManager
from PXManager import PXManager
class MasterConfigurator(object):
def __init__(self, rootPath=""):
if os.path.isdir('/users/dor/aspy/dan/data/master/'):
            self.rootPath = '/users/dor/aspy/dan/data/master/' # development machine
elif rootPath:
self.rootPath = os.path.normpath(rootPath) + '/'
else:
self.rootPath = '/apps/master/' # path under wich are the clusters and all the configs. files
self.types = ['source', 'client', 'sourlient'] # Possible type of flows
self.initAll()
def initAll(self):
self.clusters = [] # cluster names (same as dsh)
self.dupSources = [] # Duplicate sources (when you combine sources from all clusters)
self.dupClients = [] # Duplicate clients (when you combine clients from all clusters)
self.dupSourlients = [] # Duplicate sourlients (when you combine sourlients from all clusters)
self.dupFlows = [] # Duplicate flows (when you combine flows (sources, clients, sourlients) from all clusters)
self.allSources = [] # All sources from all clusters (with duplicates removed)
self.allClients = [] # All clients from all clusters (with duplicates removed)
self.allSourlients = [] # All sourlients from all clusters (with duplicated removed)
self.allFlows = [] # All flows (sources, clients, sourlients) from all clusters (with duplicated removed)
self.sourceCluster = {} # A mapping from a source to it's cluster
self.clientCluster = {} # A mapping from a client to it's cluster
self.sourlientCluster = {} # A mapping from a sourlient to it's cluster
self.flowCluster = {} # A mapping from a flow to it's cluster
def printClusterInfos(self, flowCluster):
keys = flowCluster.keys()
keys.sort()
for key in keys:
print "%s: %s" % (key, flowCluster[key])
def setMachine(self, machine):
self.machine = machine
def setUser(self, user):
self.user = user
def setClusters(self, list):
self.clusters = list
def findClient(self, clusters=None, ip="", name=""):
"""
clusters: a list of clusters (ex: ['pds', 'px', 'pxatx'])
ip: IP address (ex: '192.168.1.1')
name: hostname (ex: 'metmgr')
Only one argument in (ip, name) must be non null
"""
import socket
clusters = clusters or self.clusters
cliClust = []
if ip:
try:
# get the first part of the fully qualified domain name
name = socket.gethostbyaddr(ip)[0].split('.')[0]
except:
pass
elif name:
try:
ip = socket.gethostbyname(name)
except:
pass
for cluster in clusters:
clusterRoot = self.rootPath + cluster
PXPaths.normalPaths(clusterRoot)
if ip and name:
command = "grep -l -E '%s|%s' %s" % (ip, name, PXPaths.TX_CONF + "*.conf")
elif ip:
command = "grep -l -E '%s' %s" % (ip, PXPaths.TX_CONF + "*.conf")
elif name:
command = "grep -l -E '%s' %s" % (name, PXPaths.TX_CONF + "*.conf")
#print "%s" % cluster.upper()
output = commands.getoutput(command)
clients = [ (os.path.basename(cli)[:-5], cluster) for cli in output.split()]
cliClust.extend(clients)
PXPaths.normalPaths() # Reset PXPaths variables
return cliClust
def getTypeCluster(self, flow, init=False):
"""
When init is not False, it is a cluster list
flow is the name of a client, source, sourlient
return a list of tuple
getTypeCluster('aftn') => [('sourlient', 'pxatx')]
getTypeCluster('pds5') => [('source', 'pxatx')]
getTypeCluster('metmgr3') => [('client', 'pds'), ('client', 'pxatx')]
"""
if init:
self.initAll()
self.clusters = init
self.getAllFlows()
return self.flowCluster.get(flow, [])
def getType(self, flow, init=False):
"""
When init is not False, it is a cluster list
flow is the name of a client, source, sourlient
return type of the flow
getType('aftn') => 'sourlient'
getType('pds5') => 'source'
getType('metmgr3') => 'client'
"""
if init:
self.initAll()
self.clusters = init
self.getAllFlows()
type_cluster = self.flowCluster.get(flow, [])
if len(type_cluster) == 1:
return type_cluster[0][0]
else:
return len(type_cluster)
def getCluster(self, flow, init=False):
"""
When init is not False, it is a cluster list
flow is the name of a client, source, sourlient
return the cluster's name on which the flow is present
or the number of clusters, if more than one.
getCluster('aftn') => 'pxatx'
getCluster('pds5') => 'pxatx'
        getCluster('metmgr3') => 2
"""
if init:
self.initAll()
self.clusters = init
self.getAllFlows()
type_cluster = self.flowCluster.get(flow, [])
if len(type_cluster) == 1:
return type_cluster[0][1]
else:
return len(type_cluster)
def createFlowDict(self):
mergedDict = SystemManager.mergeTwoDict(self.sourceCluster, self.clientCluster)
return SystemManager.mergeTwoDict(mergedDict, self.sourlientCluster)
def getAllFlows(self, noPrint=True):
if noPrint:
iprint = lambda *x: None
else:
iprint = lambda *x:sys.stdout.write(" ".join(map(str, x)) + '\n')
allSources = []
allClients = []
allSourlients = []
allFlows = []
if not os.path.isdir(self.rootPath):
return 1
for cluster in self.clusters:
pxm = PXManager(self.rootPath + cluster + '/')
if pxm.initNames():
#print (self.rootPath + cluster + " inexistant!")
continue
clients, sourlients, sources, aliases = pxm.getFlowNames(tuple=True)
# Populate flowCluster for current cluster
pxm.getFlowDict(self.sourceCluster, sources, 'source', cluster)
pxm.getFlowDict(self.clientCluster, clients, 'client', cluster)
pxm.getFlowDict(self.sourlientCluster, sourlients, 'sourlient', cluster)
allSources.extend(sources)
allClients.extend(clients)
allSourlients.extend(sourlients)
iprint("%s" % (80*'#'))
iprint("CLUSTER %s" % cluster.upper())
iprint("%s" % (80*'#'))
iprint("sources (%s): %s" % (len(sources), sources))
iprint("clients (%s): %s" % (len(clients), clients))
iprint("sourlients (%s): %s" % (len(sourlients), sourlients))
#print "aliases: %s" % aliases
iprint()
pxm = PXManager()
pxm.initNames()
self.flowCluster = self.createFlowDict()
self.dupSources = pxm.identifyDuplicate(allSources)
self.dupClients = pxm.identifyDuplicate(allClients)
self.dupSourlients = pxm.identifyDuplicate(allSourlients)
self.allSources = pxm.removeDuplicate(allSources)
self.allClients = pxm.removeDuplicate(allClients)
self.allSourlients = pxm.removeDuplicate(allSourlients)
        allFlows.extend(allSources)
        allFlows.extend(allClients)
        allFlows.extend(allSourlients)
        self.dupFlows = pxm.identifyDuplicate(allFlows)
        self.allFlows = pxm.removeDuplicate(allFlows)
iprint("Duplicate between sources from all clusters: %s" % self.dupSources)
iprint("Duplicate between clients from all clusters: %s" % self.dupClients)
iprint("Duplicate between sourlients from all clusters: %s" % self.dupSourlients)
iprint("Duplicate beetween flows (sources, clients, sourlients) from all clusters: %s" % self.dupFlows)
iprint()
keys = self.flowCluster.keys()
keys.sort()
for key in keys:
if len(self.flowCluster[key]) > 1:
iprint("%s: %s" % (key, self.flowCluster[key]))
iprint("source cluster(%s)" % len(self.sourceCluster))
iprint(self.sourceCluster)
iprint("client cluster(%s)" % len(self.clientCluster))
iprint(self.clientCluster)
iprint("sourlient cluster(%s)" % len(self.sourlientCluster))
iprint(self.sourlientCluster)
iprint("flow cluster(%s)" % len(self.flowCluster))
iprint()
if __name__ == '__main__':
mc = MasterConfigurator()
mc.setClusters(['px', 'pds', 'pxatx'])
mc.getAllFlows(noPrint=True)
print("%s: %s" % ('metmgr1', mc.getTypeCluster('metmgr1')))
print mc.getType('metmgr1')
print mc.getCluster('metmgr1')
print("%s: %s" % ('aftn', mc.getTypeCluster('aftn')))
print("%s: %s" % ('pds5', mc.getTypeCluster('pds5')))
print("%s: %s" % ('metmgr3', mc.getTypeCluster('metmgr3')))
print mc.getType('metmgr3')
print mc.getCluster('metmgr3')
print("%s: %s" % ('px-stage', mc.getTypeCluster('px-stage')))
print mc.getType('px-stage')
print mc.getCluster('px-stage')
print("%s: %s" % ('pds_metser', mc.getTypeCluster('pds_metser')))
print mc.getType('pds_metser')
print mc.getCluster('pds_metser')
#print mc.sourceCluster
#print mc.clientCluster
#print mc.sourlientCluster
#print mc.flowCluster
mc1 = MasterConfigurator()
print mc1.getType('metmgr1', ['px', 'pds', 'pxatx'])
print mc1.getCluster('metmgr1')
mc1.findClient(ip='199.212.17.60', clusters=['px', 'pxatx', 'pds'])
|
gpl-2.0
| -6,967,763,091,637,412,000
| 35.941176
| 133
| 0.569314
| false
| 3.757832
| false
| false
| false
|
saicoco/mxnet_image_caption
|
old/main.py
|
1
|
6142
|
# -*- coding: utf-8 -*-
"""
train module
"""
import mxnet as mx
import numpy as np
import json
import config
import logging
import time
import collections
from sym import vgg16_fc7, caption_module
from data_provider import caption_dataIter, init_cnn
from mxnet.model import save_checkpoint
import argparse
logging.basicConfig(level=logging.INFO)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--epoches', default=20, type=int, help="epoches in training-stage", dest='epoches')
parser.add_argument('--batch_size', default=50, type=int, help="batch_size in training-stage", dest='batch_size')
parser.add_argument('--num_hidden', default=256, type=int, help="the number of hidden unit", dest='num_hidden')
parser.add_argument('--lr', default=0.01, type=float, help="learning rate in training-stage", dest='lr')
parser.add_argument('--freq_val', default=5, type=int, help="frequence of validation", dest='freq_val')
parser.add_argument('--num_embed', default=256, type=int, help="the number of embedding dimension", dest='num_embed')
parser.add_argument('--num_lstm_layer', default=256, type=int, help="the number of hidden_unit", dest='num_lstm_layer')
parser.add_argument('--gpu', default=None, type=str, help="wether run on gpu device", dest='gpu')
parser.add_argument('--prefix', default='./checkpoint/train', type=str, help="prefix of save checkpoint", dest='prefix')
parser.add_argument('--period', default=5, type=int, help="times to save checkpoint in training-stage", dest='period')
return parser.parse_args()
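# Lightweight stand-in for mx.model.BatchEndParam: mx.callback.Speedometer
# only reads the .nbatch, .eval_metric and .epoch attributes of the object
# it is called with, so a mutable holder is enough for this custom loop.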
class callbacks:
def __init__(self, nbatch, eval_metric, epoch):
self.nbatch = nbatch
self.eval_metric = eval_metric
self.epoch = epoch
def main(args):
learning_rate = args.lr
epoches = args.epoches
batch_size = args.batch_size
num_hidden = args.num_hidden
num_embed = args.num_embed
num_lstm_layer = args.num_lstm_layer
freq_val = args.freq_val
val_flag = True if args.freq_val > 0 else False
ctx = mx.cpu(0) if args.gpu is None else mx.gpu(int(args.gpu))
prefix = args.prefix
period = args.period
with open(config.text_root, 'r') as f:
captions = json.load(f)
buckets = [10, 20, 30]
# buckets = None
train_data = caption_dataIter(
captions=captions, batch_size=batch_size, mode='train')
val_data = caption_dataIter(
captions=captions, batch_size=batch_size, mode='val')
##########################################################################
########################### custom train process #########################
##########################################################################
cnn_shapes = {
'image_data': (batch_size, 3, 224, 224)
}
cnn_sym = vgg16_fc7('image_data')
cnn_exec = cnn_sym.simple_bind(ctx=ctx, is_train=False, **cnn_shapes)
lstm = caption_module(num_lstm_layer=num_lstm_layer, seq_len=train_data.sent_length+2,
vocab_size=train_data.vocab_size, num_hidden=num_hidden, num_embed=num_embed, batch_size=batch_size)
lstm_shapes = {
'image_feature': (batch_size, 4096),
'word_data': (batch_size, train_data.sent_length+2),
'softmax_label': (batch_size, train_data.sent_length+2)
}
lstm_exec = lstm.simple_bind(
ctx=ctx, is_train=True, **lstm_shapes)
# init params
pretrain = mx.nd.load(config.vgg_pretrain)
init_cnn(cnn_exec, pretrain)
# init optimazer
optimazer = mx.optimizer.create('adam')
optimazer.lr = learning_rate
updater = mx.optimizer.get_updater(optimazer)
# init metric
perplexity = mx.metric.Perplexity(ignore_label=-1)
perplexity.reset()
# callback
params = callbacks(nbatch=0, eval_metric=perplexity, epoch=0)
speedometer = mx.callback.Speedometer(batch_size=batch_size, frequent=20)
for epoch in range(epoches):
for i, batch in enumerate(train_data):
# cnn forward, get image_feature
cnn_exec.arg_dict['image_data'] = batch.data[0]
cnn_exec.forward()
image_feature = cnn_exec.outputs[0]
# lstm forward
lstm_exec.arg_dict['image_feature'] = image_feature
lstm_exec.arg_dict['word_data'] = batch.data[1]
lstm_exec.arg_dict['softmax_label'] = batch.label
lstm_exec.forward(is_train=True)
print batch.label
params.eval_metric.update(labels=batch.label,
preds=lstm_exec.outputs)
lstm_exec.backward()
params.epoch = epoch
params.nbatch += 1
speedometer(params)
for j, name in enumerate(lstm.list_arguments()):
if name not in lstm_shapes.keys():
updater(j, lstm_exec.grad_dict[
name], lstm_exec.arg_dict[name])
train_data.reset()
params.nbatch = 0
if val_flag and epoch % freq_val == 0:
for i, batch in enumerate(val_data):
# cnn forward, get image_feature
cnn_exec.arg_dict['image_data'] = batch.data[0]
cnn_exec.forward()
image_feature = cnn_exec.outputs[0]
# lstm forward
lstm_exec.arg_dict['image_feature'] = image_feature
lstm_exec.arg_dict['word_data'] = batch.data[1]
lstm_exec.arg_dict['softmax_label'] = batch.label
lstm_exec.forward(is_train=False)
params.eval_metric.update(labels=batch.label,
preds=lstm_exec.outputs)
params.epoch = epoch
params.nbatch += 1
speedometer(params)
params.nbatch = 0
val_data.reset()
if period:
save_checkpoint(prefix=prefix, epoch=epoch, symbol=lstm,
arg_params=lstm_exec.arg_dict,
aux_params=lstm_exec.aux_dict)
if __name__ == '__main__':
args = parse_args()
main(args)
|
mit
| -3,475,309,289,568,188,000
| 38.371795
| 126
| 0.588733
| false
| 3.625738
| false
| false
| false
|
gghezzo/prettypython
|
PythonEveryDay2015/bigballs.py
|
1
|
1044
|
# Teaching Python Classes by Peter Farrell
# From http://hackingmathclass.blogspot.com/2015/08/finally-some-class.html
# Typer: Ginny C Ghezzo
# What I learned:
# why doesn't the first import bring in locals? (pygame.locals is a
# submodule; "from pygame.locals import *" puts names like QUIT in scope)
import pygame
from pygame.locals import *
black = (0,0,0)
white = (255,255,255)
green = (0,255, 0)
# ball position
xcor = 100
ycor = 100
# velocity
xvel = 2
yvel = 1
diameter = 20
pygame.init()
screen = pygame.display.set_mode((600,500))
pygame.display.set_caption('Classy Balls')
done = False # loop until close is clicked
clock = pygame.time.Clock() # used to manage the screen updates
while not done:
for event in pygame.event.get():
if event.type == QUIT:
done = True
screen.fill(black)
if xcor < 0 or xcor > 600 - diameter:
xvel = -xvel # make it go the opposite direction
if ycor < 0 or ycor > 500 - diameter:
yvel = -yvel
xcor += xvel
ycor += yvel
pygame.draw.ellipse(screen, white, [xcor,ycor,diameter,diameter])
pygame.display.update()
clock.tick(120)
pygame.quit()
|
mit
| 98,853,344,471,153,950
| 24.487805
| 75
| 0.681992
| false
| 2.704663
| false
| false
| false
|
panholt/sparkpy
|
sparkpy/models/webhook.py
|
1
|
2523
|
# -*- coding: utf-8 -*-
from .base import SparkBase, SparkProperty
from .time import SparkTime
class SparkWebhook(SparkBase):
''' Cisco Spark Webhook Model
:param session: SparkSession object
:type session: `SparkSession`
:param \**kwargs: All standard Spark API properties for a Webhook
'''
# | Start of class attributes |-------------------------------------------|
API_BASE = 'https://api.ciscospark.com/v1/webhooks/'
WEBHOOK_RESOURCES = ['memberships', 'messages', 'rooms', 'all']
WEBHOOK_EVENTS = ['created', 'updated', 'deleted', 'all']
WEBHOOK_FILTERS = {'memberships': ['roomId',
'personId',
'personEmail',
'isModerator'],
'messages': ['roomId',
'roomType',
'personId',
'personEmail',
'mentionedPeople',
'hasFiles'],
'rooms': ['type',
'isLocked']}
PROPERTIES = {'id': SparkProperty('id'),
'name': SparkProperty('name', mutable=True),
'targetUrl': SparkProperty('targetUrl', mutable=True),
'event': SparkProperty('event'),
'resource': SparkProperty('resource'),
'filter': SparkProperty('filter', optional=True),
'secret': SparkProperty('secret', optional=True),
'orgId': SparkProperty('orgId', optional=True),
'createdBy': SparkProperty('createdBy', optional=True),
'appId': SparkProperty('appId', optional=True),
'ownedBy': SparkProperty('ownedBy', optional=True),
'status': SparkProperty('status', optional=True),
'created': SparkProperty('created', optional=True)}
# | Start of instance attributes |----------------------------------------|
def __init__(self, *args, **kwargs):
super().__init__(*args, path='webhooks', **kwargs)
def update(self, name, targetUrl):
data = {'name': name, 'targetUrl': targetUrl}
        # an update must address the specific webhook resource (base URL + id)
        self.parent.session.put(self.API_BASE + self.id, json=data)
return
def __repr__(self):
return f'SparkWebhook("{self.id}")'
def __str__(self):
return f'SparkWebhook({self.name})'
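# --- Illustrative usage sketch (an editorial addition, not part of the
# original module). The session wiring and the webhook id below are
# hypothetical; a live SparkSession and valid credentials would be required:
#
#   hook = SparkWebhook(session, id='<webhook-id>', name='room events',
#                       targetUrl='https://example.com/hook',
#                       resource='messages', event='created')
#   hook.update(name='room events v2',
#               targetUrl='https://example.com/hook2')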
|
mit
| 2,799,580,334,978,640,400
| 41.05
| 79
| 0.479191
| false
| 4.769376
| false
| false
| false
|
chris48s/UK-Polling-Stations
|
polling_stations/apps/feedback/migrations/0001_initial.py
|
1
|
1210
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Feedback',
fields=[
('id', models.AutoField(auto_created=True, verbose_name='ID', primary_key=True, serialize=False)),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(verbose_name='modified', auto_now=True)),
('found_useful', models.CharField(choices=[('YES', 'Yes'), ('NO', 'No')], max_length=100, blank=True)),
('comments', models.TextField(blank=True)),
('source_url', models.CharField(max_length=800, blank=True)),
('token', models.CharField(max_length=100, blank=True)),
],
options={
'ordering': ('-modified', '-created'),
'abstract': False,
'get_latest_by': 'modified',
},
),
]
|
bsd-3-clause
| 8,918,969,895,613,432,000
| 38.032258
| 124
| 0.567769
| false
| 4.368231
| false
| false
| false
|
Abjad/abjad
|
abjad/obgc.py
|
1
|
30314
|
import typing
from . import _iterate, mutate, typings
from .attach import attach, detach
from .bundle import LilyPondFormatBundle
from .duration import Duration
from .overrides import LilyPondLiteral, tweak
from .parentage import Parentage
from .pitch.sets import PitchSet
from .score import Chord, Container, Note, Voice
from .select import Selection
from .spanners import beam, slur
from .tag import Tag
class OnBeatGraceContainer(Container):
r"""
On-beat grace container.
.. note:: On-beat grace containers must be included in a named voice.
.. container:: example
On-beat grace containers implement custom formatting not available in
LilyPond:
>>> music_voice = abjad.Voice("c'4 d'4 e'4 f'4", name="Music_Voice")
>>> string = "<d' g'>8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> container = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 24)
... )
>>> abjad.attach(abjad.Articulation(">"), container[0])
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
>8 * 1/3
- \accent
[
(
a'8 * 1/3
b'8 * 1/3
c''8 * 1/3
d''8 * 1/3
c''8 * 1/3
b'8 * 1/3
a'8 * 1/3
b'8 * 1/3
c''8 * 1/3
d''8 * 1/3
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
d'4
e'4
}
>>
\oneVoice
f'4
}
}
"""
### CLASS VARIABLES ###
__slots__ = ("_leaf_duration",)
### INITIALIZER ###
def __init__(
self,
components=None,
identifier: str = None,
leaf_duration: typings.DurationTyping = None,
name: str = None,
tag: Tag = None,
) -> None:
super().__init__(components, identifier=identifier, name=name, tag=tag)
if leaf_duration is not None:
leaf_duration = Duration(leaf_duration)
self._leaf_duration = leaf_duration
### SPECIAL METHODS ###
def __getnewargs__(self):
"""
Gets new after grace container arguments.
Returns tuple of single empty list.
"""
return ([],)
### PRIVATE METHODS ###
# NOTE: format="absolute_before" for \oneVoice so that this works:
#
# \oneVoice
# \override Stem.direction = #down
#
# ... because this ...
#
# \override Stem.direction = #down
# \oneVoice
#
# ... doesn't work.
#
# This is hackish, and some sort of longer term solution should
# happen later.
def _attach_lilypond_one_voice(self):
anchor_leaf = self._get_on_beat_anchor_leaf()
anchor_voice = Parentage(anchor_leaf).get(Voice)
final_anchor_leaf = _iterate._get_leaf(anchor_voice, -1)
next_leaf = _iterate._get_leaf(final_anchor_leaf, 1)
literal = LilyPondLiteral(r"\oneVoice", format_slot="absolute_before")
if next_leaf._has_indicator(literal):
return
if isinstance(next_leaf._parent, OnBeatGraceContainer):
return
if self._is_on_beat_anchor_voice(next_leaf._parent):
return
site = "abjad.OnBeatGraceContainer._attach_lilypond_one_voice()"
tag = Tag(site)
tag = tag.append(Tag("ONE_VOICE_COMMAND"))
attach(literal, next_leaf, tag=tag)
def _format_invocation(self):
return r'\context Voice = "On_Beat_Grace_Container"'
def _format_open_brackets_slot(self, bundle):
indent = LilyPondFormatBundle.indent
result = []
if self.identifier:
open_bracket = f"{{ {self.identifier}"
else:
open_bracket = "{"
overrides = bundle.grob_overrides
settings = bundle.context_settings
if overrides or settings:
contributions = [self._format_invocation(), r"\with", "{"]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("context_brackets", "open")
result.append((identifier_pair, contributions))
contributions = [indent + _ for _ in overrides]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("overrides", "overrides")
result.append((identifier_pair, contributions))
contributions = [indent + _ for _ in settings]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("settings", "settings")
result.append((identifier_pair, contributions))
contributions = [f"}} {brackets_open[0]}"]
contributions = ["}", open_bracket]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("context_brackets", "open")
result.append((identifier_pair, contributions))
else:
contributions = [self._format_invocation(), open_bracket]
contributions = self._tag_strings(contributions)
contributions = tuple(contributions)
identifier_pair = ("context_brackets", "open")
result.append((identifier_pair, contributions))
return tuple(result)
def _get_on_beat_anchor_leaf(self):
container = self._parent
if container is None:
return None
if len(container) != 2:
raise Exception("Combine on-beat grace container with one other voice.")
if container.index(self) == 0:
anchor_voice = container[-1]
else:
assert container.index(self) == 1
anchor_voice = container[0]
anchor_leaf = Selection(anchor_voice).leaf(0, grace=False)
return anchor_leaf
@staticmethod
    def _is_on_beat_anchor_voice(container):
        wrapper = container._parent
        if wrapper is None:
            return False
        if not isinstance(container, Voice):
            return False
        return OnBeatGraceContainer._is_on_beat_wrapper(wrapper)
    @staticmethod
    def _is_on_beat_wrapper(container):
        if not container.simultaneous:
            return False
        if len(container) != 2:
            return False
        if isinstance(container[0], OnBeatGraceContainer) and isinstance(
            container[1], Voice
        ):
            return True
        if isinstance(container[0], Voice) and isinstance(
            container[1], OnBeatGraceContainer
        ):
            return True
        return False
def _match_anchor_leaf(self):
first_grace = _iterate._get_leaf(self, 0)
if not isinstance(first_grace, (Note, Chord)):
message = "must start with note or chord:\n"
message += f" {repr(self)}"
raise Exception(message)
anchor_leaf = self._get_on_beat_anchor_leaf()
if isinstance(anchor_leaf, (Note, Chord)) and isinstance(
first_grace, (Note, Chord)
):
if isinstance(first_grace, Note):
chord = Chord(first_grace)
mutate.replace(first_grace, chord)
first_grace = chord
selection = Selection(anchor_leaf)
anchor_pitches = PitchSet.from_selection(selection)
highest_pitch = list(sorted(anchor_pitches))[-1]
if highest_pitch not in first_grace.note_heads:
first_grace.note_heads.append(highest_pitch)
grace_mate_head = first_grace.note_heads.get(highest_pitch)
tweak(grace_mate_head).font_size = 0
tweak(grace_mate_head).transparent = True
def _set_leaf_durations(self):
if self.leaf_duration is None:
return
for leaf in Selection(self).leaves():
duration = leaf._get_duration()
if duration != self.leaf_duration:
multiplier = self.leaf_duration / duration
leaf.multiplier = multiplier
### PUBLIC PROPERTIES ###
@property
def leaf_duration(self) -> typing.Optional[Duration]:
"""
Gets leaf duration.
"""
return self._leaf_duration
### FACTORY FUNCTIONS ###
def on_beat_grace_container(
contents,
anchor_voice_selection,
*,
anchor_voice_number=2,
do_not_beam=None,
do_not_slash=None,
do_not_slur=None,
do_not_stop_polyphony=None,
font_size=-3,
grace_voice_number=1,
leaf_duration=None,
):
r"""
Makes on-beat grace container and wraps around ``selection``.
.. container:: example
GRACE NOTES ABOVE.
Note-to-note anchor:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "g'8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 30)
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
>8 * 4/15
[
(
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
c''8 * 4/15
b'8 * 4/15
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
d'4
e'4
}
>>
\oneVoice
f'4
}
}
Note-to-chord anchor:
>>> music_voice = abjad.Voice(
... "<a c'>4 <b d'> <c' e'> <d' f'>", name="Music_Voice"
... )
>>> string = "g'8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 30)
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
<a c'>4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
>8 * 4/15
[
(
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
c''8 * 4/15
b'8 * 4/15
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
<b d'>4
<c' e'>4
}
>>
\oneVoice
<d' f'>4
}
}
Chord-to-note anchor:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "<g' b'>8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 30)
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
b'
>8 * 4/15
[
(
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
c''8 * 4/15
b'8 * 4/15
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
d'4
e'4
}
>>
\oneVoice
f'4
}
}
Chord-to-chord anchor:
>>> music_voice = abjad.Voice(
... "<a c'>4 <b d'> <c' e'> <d' f'>", name="Music_Voice"
... )
>>> string = "<g' b'>8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:3], leaf_duration=(1, 30)
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
<a c'>4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceOne
<
\tweak font-size 0
\tweak transparent ##t
d'
g'
b'
>8 * 4/15
[
(
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
c''8 * 4/15
b'8 * 4/15
a'8 * 4/15
b'8 * 4/15
c''8 * 4/15
d''8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceTwo
<b d'>4
<c' e'>4
}
>>
\oneVoice
<d' f'>4
}
}
.. container:: example
GRACE NOTES BELOW.
Note-to-note anchor:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "g8 a b c' d' c' b a b c' d'"
>>> result = abjad.on_beat_grace_container(
... string,
... music_voice[1:3],
... anchor_voice_number=1,
... grace_voice_number=2,
... leaf_duration=(1, 30),
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceTwo
<
g
\tweak font-size 0
\tweak transparent ##t
d'
>8 * 4/15
[
(
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
c'8 * 4/15
b8 * 4/15
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceOne
d'4
e'4
}
>>
\oneVoice
f'4
}
}
Note-to-chord anchor:
>>> music_voice = abjad.Voice(
... "<c' e'>4 <d' f'> <e' g'> <f' a'>", name="Music_Voice"
... )
>>> string = "g8 a b c' d' c' b a b c' d'"
>>> result = abjad.on_beat_grace_container(
... string,
... music_voice[1:3],
... anchor_voice_number=1,
... grace_voice_number=2,
... leaf_duration=(1, 30),
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
<c' e'>4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceTwo
<
g
\tweak font-size 0
\tweak transparent ##t
f'
>8 * 4/15
[
(
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
c'8 * 4/15
b8 * 4/15
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceOne
<d' f'>4
<e' g'>4
}
>>
\oneVoice
<f' a'>4
}
}
Chord-to-note anchor:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "<e g>8 a b c' d' c' b a b c' d'"
>>> result = abjad.on_beat_grace_container(
... string,
... music_voice[1:3],
... anchor_voice_number=1,
... grace_voice_number=2,
... leaf_duration=(1, 30),
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
c'4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceTwo
<
e
g
\tweak font-size 0
\tweak transparent ##t
d'
>8 * 4/15
[
(
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
c'8 * 4/15
b8 * 4/15
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceOne
d'4
e'4
}
>>
\oneVoice
f'4
}
}
Chord-to-chord anchor:
>>> music_voice = abjad.Voice(
... "<c' e'>4 <d' f'> <e' g'> <f' a'>", name="Music_Voice"
... )
>>> string = "<e g>8 a b c' d' c' b a b c' d'"
>>> result = abjad.on_beat_grace_container(
... string,
... music_voice[1:3],
... anchor_voice_number=1,
... grace_voice_number=2,
... leaf_duration=(1, 30),
... )
>>> staff = abjad.Staff([music_voice])
>>> abjad.show(staff) # doctest: +SKIP
.. docs::
>>> string = abjad.lilypond(staff)
>>> print(string)
\new Staff
{
\context Voice = "Music_Voice"
{
<c' e'>4
<<
\context Voice = "On_Beat_Grace_Container"
{
\set fontSize = #-3
\slash
\voiceTwo
<
e
g
\tweak font-size 0
\tweak transparent ##t
f'
>8 * 4/15
[
(
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
c'8 * 4/15
b8 * 4/15
a8 * 4/15
b8 * 4/15
c'8 * 4/15
d'8 * 4/15
)
]
}
\context Voice = "Music_Voice"
{
\voiceOne
<d' f'>4
<e' g'>4
}
>>
\oneVoice
<f' a'>4
}
}
.. container:: example
Raises exception when duration of on-beat grace container exceeds
duration of anchor container:
>>> music_voice = abjad.Voice("c'4 d' e' f'", name="Music_Voice")
>>> string = "g'8 a' b' c'' d'' c'' b' a' b' c'' d''"
>>> result = abjad.on_beat_grace_container(
... string, music_voice[1:2], leaf_duration=(1, 8)
... )
Traceback (most recent call last):
...
Exception: grace Duration(11, 8) exceeds anchor Duration(1, 4).
"""
def _site(n):
return Tag(f"abjad.on_beat_grace_container({n})")
assert isinstance(anchor_voice_selection, Selection)
if not anchor_voice_selection.are_contiguous_same_parent(
ignore_before_after_grace=True
):
message = "selection must be contiguous in same parent:\n"
message += f" {repr(anchor_voice_selection)}"
raise Exception(message)
on_beat_grace_container = OnBeatGraceContainer(
contents, leaf_duration=leaf_duration
)
anchor_leaf = _iterate._get_leaf(anchor_voice_selection, 0)
anchor_voice = Parentage(anchor_leaf).get(Voice)
if anchor_voice.name is None:
raise Exception(f"anchor voice must be named:\n {repr(anchor_voice)}")
anchor_voice_insert = Voice(name=anchor_voice.name)
mutate.wrap(anchor_voice_selection, anchor_voice_insert)
container = Container(simultaneous=True)
mutate.wrap(anchor_voice_insert, container)
container.insert(0, on_beat_grace_container)
on_beat_grace_container._match_anchor_leaf()
on_beat_grace_container._set_leaf_durations()
insert_duration = anchor_voice_insert._get_duration()
grace_container_duration = on_beat_grace_container._get_duration()
if insert_duration < grace_container_duration:
message = f"grace {repr(grace_container_duration)}"
message += f" exceeds anchor {repr(insert_duration)}."
raise Exception(message)
if font_size is not None:
string = rf"\set fontSize = #{font_size}"
literal = LilyPondLiteral(string)
attach(literal, on_beat_grace_container, tag=_site(1))
if not do_not_beam:
beam(on_beat_grace_container[:])
if not do_not_slash:
literal = LilyPondLiteral(r"\slash")
attach(literal, on_beat_grace_container[0], tag=_site(2))
if not do_not_slur:
slur(on_beat_grace_container[:])
voice_number_to_string = {
1: r"\voiceOne",
2: r"\voiceTwo",
3: r"\voiceThree",
4: r"\voiceFour",
}
first_grace = _iterate._get_leaf(on_beat_grace_container, 0)
one_voice_literal = LilyPondLiteral(r"\oneVoice", format_slot="absolute_before")
string = voice_number_to_string.get(grace_voice_number, None)
if string is not None:
detach(one_voice_literal, anchor_leaf)
attach(LilyPondLiteral(string), first_grace, tag=_site(3))
string = voice_number_to_string.get(anchor_voice_number, None)
if string is not None:
detach(one_voice_literal, anchor_leaf)
attach(LilyPondLiteral(string), anchor_leaf, tag=_site(4))
if not do_not_stop_polyphony:
last_anchor_leaf = _iterate._get_leaf(anchor_voice_selection, -1)
next_leaf = _iterate._get_leaf(last_anchor_leaf, 1)
if next_leaf is not None:
literal = LilyPondLiteral(r"\oneVoice", format_slot="absolute_before")
attach(literal, next_leaf, tag=_site(5))
return on_beat_grace_container
|
gpl-3.0
| 6,486,861,756,668,666,000
| 33.565564
| 84
| 0.377911
| false
| 4.506318
| false
| false
| false
|
UB-info/estructura-datos
|
RafaelArqueroGimeno_S6/ABB_Rafael_Arquero_Gimeno.py
|
1
|
8132
|
import copy
__author__ = "Rafael Arquero Gimeno"
class Node(object):
def __init__(self):
self.data = []
self.left = None
self.right = None
def clear(self):
"""Empty Node"""
self.data = []
self.left = None
self.right = None
def clearData(self):
"""Empty stored values"""
self.data = []
def append(self, data):
"""Appends given value"""
self.data.append(data)
return self # allow method chaining
def delete(self, data):
"""Deletes the given value from Node"""
self.data.remove(data)
@property
def key(self):
return self.data[0] if self else None
@property
def leftmost(self):
return self.left.leftmost if self.left else self.key
@property
def rightmost(self):
return self.right.rightmost if self.right else self.key
@property
def depth(self):
if self:
left_depth = self.left.depth if self.left else 0
right_depth = self.right.depth if self.right else 0
return 1 + max(left_depth, right_depth)
else:
return 0
def __copy__(self):
"""Returns a copy of self
:rtype : Node
"""
result = Node()
result.data = copy.copy(self.data)
if self.left:
result.left = copy.copy(self.left)
if self.right:
result.right = copy.copy(self.right)
return result
def __nonzero__(self):
return bool(self.data)
def __cmp__(self, other):
return cmp(self.key, other.key) if isinstance(other, Node) else cmp(self.key, other)
def __str__(self):
return reduce(lambda x, y: x + str(y) + "\n", self.data, "")
class ABB(object):
def __init__(self):
self.root = Node()
def clear(self):
"""Empty the tree"""
self.root.clear()
def insert(self, data):
"""Insert a value in tree
:param data: value to be inserted
:return: self to allow method chaining
"""
if not self:
self.root.append(data)
return self
parent, current = self._lookup(data)
if current: # data equivalent node found!
current.append(data)
else: # equivalent node not found!
setattr(parent, "right" if parent < data else "left", Node().append(data))
return self
def delete(self, data, wholeNode=False):
"""Deletes the given Node or Value if it is contained, Therefore do nothing
:type data: Node or ValueType (e.g. User)
:type wholeNode: bool
:param data: The node or value to delete
:param wholeNode: if whole matched node should be deleted or only the matched value
"""
parent, current = self._lookup(data)
if current: # data was found
current.clearData() if wholeNode else current.delete(data)
if not current: # we have deleted the last element from current node!
if current.left and current.right: # 2 children
                newData = current.right.leftmost  # leftmost is a property, not a method
current.clearData()
current.append(newData)
self.delete(newData)
elif current.left: # only left child
current.data = current.left.data
current.right = current.left.right
current.left = current.left.left
# TODO
elif current.right: # only right child
current.data = current.right.data
current.left = current.right.left
current.right = current.right.right
# TODO
else: # no children
if not parent:
parent = self.root
setattr(parent, "right" if parent < data else "left", None)
def deleteLower(self, threshold, current=None, parent=None):
"""Deletes all values below threshold
:param threshold: All values below that will be deleted
:param current: The current inspected node (default root)
:param parent: The parent of current node
:return: self, allows method chaining
"""
if current is None:
if self:
current = self.root
else:
return self # break
if current > threshold:
if current.left:
self.deleteLower(threshold, current.left, current)
elif current < threshold:
if current.right:
current.data = current.right.data
current.left = current.right.left
current.right = current.right.right
self.deleteLower(threshold, current, parent)
else:
if parent:
                parent.left = None  # detach current subtree from its parent
else:
self.clear() # restart root
else: # equals
current.left = None
return self
def deleteHigher(self, threshold, current=None, parent=None):
"""Deletes all values above threshold
:param threshold: All values above that will be deleted
:param current: The current inspected current (default root)
:param parent: The parent of current node
:return: self, allows method chaining
"""
if current is None:
if self:
current = self.root
else:
return self # break
if current < threshold:
if current.right:
self.deleteHigher(threshold, current.right, current)
elif current > threshold:
if current.left:
current.data = current.left.data
current.right = current.left.right
current.left = current.left.left
self.deleteHigher(threshold, current, parent)
else:
if parent:
                parent.right = None  # detach current subtree from its parent
else:
self.clear() # restart root
else: # equals
current.right = None
return self
def _lookup(self, data):
"""Internal method. Finds the given value and return the node where it IS or where it SHOULD BE (i.e. None) and
also his parent node.
:rtype: Node, Node
"""
parent, current = None, self.root
while current:
if current < data: # data should be in right
parent, current = current, current.right
elif current > data: # data should be in left
parent, current = current, current.left
else: # equals
return parent, current
return parent, current
@property
def min(self):
"""Returns the minimum value of the tree"""
return self.root.leftmost
@property
def max(self):
"""Returns the maximum value of the tree"""
return self.root.rightmost
@property
def depth(self):
return self.root.depth
def __copy__(self):
"""Returns a copy of self
:rtype : ABB
"""
result = ABB()
result.root = copy.copy(self.root)
return result
def __nonzero__(self):
"""Returns false if the tree is empty, therefore returns true"""
return self.root.__nonzero__()
def __iter__(self, current=None):
"""Creates a generator that walks through the tree in descending order
:param current: The current node
:type current: Node
"""
if current is None: # first call
current = self.root
if current.right:
for x in self.__iter__(current.right):
yield x
for x in current.data:
yield x
if current.left:
for x in self.__iter__(current.left):
yield x
def __str__(self):
return reduce(lambda x, y: x + str(y) + "\n", self, "")
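# Minimal usage sketch (an editorial addition, not part of the original
# module): exercises the tree with plain integers, which works because
# Node.__cmp__ falls back to comparing stored values directly.
if __name__ == "__main__":
    tree = ABB()
    for value in [50, 30, 70, 20, 40, 60, 80]:
        tree.insert(value)
    print "min:", tree.min, "max:", tree.max, "depth:", tree.depth
    tree.deleteLower(35)  # drops 20 and 30
    print [x for x in tree]  # descending order: [80, 70, 60, 50, 40]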
|
mit
| -3,202,886,532,054,247,400
| 30.280769
| 119
| 0.539843
| false
| 4.568539
| false
| false
| false
|
msaadat/paper
|
password_dlg.py
|
1
|
2143
|
from PyQt5.QtWidgets import (QApplication, QMessageBox, QDialog, QGridLayout,
QHBoxLayout, QLabel, QPushButton, QLineEdit)
class PasswordDialog(QDialog):
def __init__(self, parent=None):
super(PasswordDialog, self).__init__(parent)
self.password = None
okButton = QPushButton("&Ok")
okButton.clicked.connect(self.ok_pressed)
self.pass1_edit = QLineEdit()
self.pass1_edit.setEchoMode(QLineEdit.Password)
self.pass2_edit = QLineEdit()
self.pass2_edit.setEchoMode(QLineEdit.Password)
        label1 = QLabel("Password:")
        label2 = QLabel("Repeat password:")
buttonsLayout = QHBoxLayout()
buttonsLayout.addStretch()
buttonsLayout.addWidget(okButton)
mainLayout = QGridLayout()
        mainLayout.addWidget(label1, 0, 0)
mainLayout.addWidget(self.pass1_edit, 0, 1)
        mainLayout.addWidget(label2, 1, 0)
mainLayout.addWidget(self.pass2_edit, 1, 1)
mainLayout.addLayout(buttonsLayout, 2, 1)
self.setLayout(mainLayout)
self.setWindowTitle("Set Password")
def ok_pressed(self):
pass1 = self.pass1_edit.text()
pass2 = self.pass2_edit.text()
if pass1 != pass2:
QMessageBox.warning(self, "Password",
"Passwords do not match.")
self.pass1_edit.setFocus()
self.pass1_edit.selectAll()
elif pass1 == '':
QMessageBox.information(self, "Password",
"Passwords cannot be empty.")
self.pass1_edit.setFocus()
self.pass1_edit.selectAll()
else:
self.password = pass1
self.accept()
@staticmethod
def getPassword(parent):
dialog = PasswordDialog(parent)
result = dialog.exec_()
return dialog.password, result
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
window = PasswordDialog()
window.show()
sys.exit(app.exec_())
|
gpl-3.0
| -6,078,596,863,458,804,000
| 29.057971
| 77
| 0.569295
| false
| 4.145068
| false
| false
| false
|
ircah/cah-js
|
util/convert-csv.py
|
1
|
1612
|
#!/usr/bin/env python3
import re
import json # turns out the dump function of the json5 module just calls the normal json module (╯°□°)╯︵ ┻━┻
INPUT = "cards-DevOpsAgainstHumanity.csv"
META_NAME = "DevOps Against Humanity"
DELIM = ","
QUOTE = "\""
SKIPLINES = 2
def parse_csv(line):
a = []
tmp = ""
at_elem_start = True
in_quotes = False
in_escape = False
for c in line:
if at_elem_start:
if c == DELIM: # empty element
a.append("")
continue
in_quotes = (c == QUOTE)
if not in_quotes:
tmp += c
at_elem_start = False
continue
if c == QUOTE and in_quotes and not in_escape:
in_escape = True
elif c == QUOTE and in_quotes and in_escape:
tmp += QUOTE
in_escape = False
elif (c == DELIM and in_quotes and in_escape) or (c == DELIM and not in_quotes):
a.append(tmp)
tmp = ""
in_escape = False
at_elem_start = True
else:
tmp += c
a.append(tmp)
return a
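# Quick illustrative self-check (an editorial addition): quoted fields may
# contain the delimiter, and a doubled quote inside a quoted field collapses
# to a literal quote character.
assert parse_csv('a,"b,c","say ""hi"""') == ['a', 'b,c', 'say "hi"']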
r_blank = re.compile(r"_+")
odict = {}
odict["questions"] = []
odict["answers"] = []
odict["meta"] = {}
odict["meta"]["name"] = META_NAME
ifd = open(INPUT, "r")
for i in range(SKIPLINES):
ifd.readline()
n = 0
while True:
l = ifd.readline()
if not l:
break
l = l.rstrip("\r\n")
l = parse_csv(l)
if l[0] != "":
odict["answers"].append(l[0])
n += 1
if l[1] != "":
tmp = {}
tmp["text"] = re.sub(r_blank, "%s", l[1])
# pick is inferred from number of %s
odict["questions"].append(tmp)
n += 1
ifd.close()
ofd = open(INPUT.replace(".csv", ".json5"), "w")
json.dump(odict, ofd, indent=2, sort_keys=True)
ofd.close()
print("Processed %d cards." % (n, ))
|
mit
| 5,042,605,997,194,050,000
| 19.410256
| 108
| 0.600503
| false
| 2.479751
| false
| false
| false
|
miguelgrinberg/python-socketio
|
tests/common/test_pubsub_manager.py
|
1
|
13274
|
import functools
import logging
import unittest
from unittest import mock
import pytest
from socketio import base_manager
from socketio import pubsub_manager
class TestPubSubManager(unittest.TestCase):
def setUp(self):
id = 0
def generate_id():
nonlocal id
id += 1
return str(id)
mock_server = mock.MagicMock()
mock_server.eio.generate_id = generate_id
self.pm = pubsub_manager.PubSubManager()
self.pm._publish = mock.MagicMock()
self.pm.set_server(mock_server)
self.pm.host_id = '123456'
self.pm.initialize()
def test_default_init(self):
assert self.pm.channel == 'socketio'
self.pm.server.start_background_task.assert_called_once_with(
self.pm._thread
)
def test_custom_init(self):
pubsub = pubsub_manager.PubSubManager(channel='foo')
assert pubsub.channel == 'foo'
assert len(pubsub.host_id) == 32
def test_write_only_init(self):
mock_server = mock.MagicMock()
pm = pubsub_manager.PubSubManager(write_only=True)
pm.set_server(mock_server)
pm.initialize()
assert pm.channel == 'socketio'
assert len(pm.host_id) == 32
assert pm.server.start_background_task.call_count == 0
def test_write_only_default_logger(self):
pm = pubsub_manager.PubSubManager(write_only=True)
pm.initialize()
assert pm.channel == 'socketio'
assert len(pm.host_id) == 32
assert pm._get_logger() == logging.getLogger('socketio')
def test_write_only_with_provided_logger(self):
test_logger = logging.getLogger('new_logger')
pm = pubsub_manager.PubSubManager(write_only=True, logger=test_logger)
pm.initialize()
assert pm.channel == 'socketio'
assert len(pm.host_id) == 32
assert pm._get_logger() == test_logger
def test_emit(self):
self.pm.emit('foo', 'bar')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/',
'room': None,
'skip_sid': None,
'callback': None,
'host_id': '123456',
}
)
def test_emit_with_namespace(self):
self.pm.emit('foo', 'bar', namespace='/baz')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/baz',
'room': None,
'skip_sid': None,
'callback': None,
'host_id': '123456',
}
)
def test_emit_with_room(self):
self.pm.emit('foo', 'bar', room='baz')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/',
'room': 'baz',
'skip_sid': None,
'callback': None,
'host_id': '123456',
}
)
def test_emit_with_skip_sid(self):
self.pm.emit('foo', 'bar', skip_sid='baz')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/',
'room': None,
'skip_sid': 'baz',
'callback': None,
'host_id': '123456',
}
)
def test_emit_with_callback(self):
with mock.patch.object(
self.pm, '_generate_ack_id', return_value='123'
):
self.pm.emit('foo', 'bar', room='baz', callback='cb')
self.pm._publish.assert_called_once_with(
{
'method': 'emit',
'event': 'foo',
'data': 'bar',
'namespace': '/',
'room': 'baz',
'skip_sid': None,
'callback': ('baz', '/', '123'),
'host_id': '123456',
}
)
def test_emit_with_callback_without_server(self):
standalone_pm = pubsub_manager.PubSubManager()
with pytest.raises(RuntimeError):
standalone_pm.emit('foo', 'bar', callback='cb')
def test_emit_with_callback_missing_room(self):
with mock.patch.object(
self.pm, '_generate_ack_id', return_value='123'
):
with pytest.raises(ValueError):
self.pm.emit('foo', 'bar', callback='cb')
def test_emit_with_ignore_queue(self):
sid = self.pm.connect('123', '/')
self.pm.emit(
'foo', 'bar', room=sid, namespace='/', ignore_queue=True
)
self.pm._publish.assert_not_called()
self.pm.server._emit_internal.assert_called_once_with(
'123', 'foo', 'bar', '/', None
)
def test_can_disconnect(self):
sid = self.pm.connect('123', '/')
assert self.pm.can_disconnect(sid, '/')
self.pm.can_disconnect(sid, '/foo')
self.pm._publish.assert_called_once_with(
{'method': 'disconnect', 'sid': sid, 'namespace': '/foo'}
)
def test_close_room(self):
self.pm.close_room('foo')
self.pm._publish.assert_called_once_with(
{'method': 'close_room', 'room': 'foo', 'namespace': '/'}
)
def test_close_room_with_namespace(self):
self.pm.close_room('foo', '/bar')
self.pm._publish.assert_called_once_with(
{'method': 'close_room', 'room': 'foo', 'namespace': '/bar'}
)
def test_handle_emit(self):
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit({'event': 'foo', 'data': 'bar'})
super_emit.assert_called_once_with(
'foo',
'bar',
namespace=None,
room=None,
skip_sid=None,
callback=None,
)
def test_handle_emit_with_namespace(self):
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit(
{'event': 'foo', 'data': 'bar', 'namespace': '/baz'}
)
super_emit.assert_called_once_with(
'foo',
'bar',
namespace='/baz',
room=None,
skip_sid=None,
callback=None,
)
def test_handle_emit_with_room(self):
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit(
{'event': 'foo', 'data': 'bar', 'room': 'baz'}
)
super_emit.assert_called_once_with(
'foo',
'bar',
namespace=None,
room='baz',
skip_sid=None,
callback=None,
)
def test_handle_emit_with_skip_sid(self):
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit(
{'event': 'foo', 'data': 'bar', 'skip_sid': '123'}
)
super_emit.assert_called_once_with(
'foo',
'bar',
namespace=None,
room=None,
skip_sid='123',
callback=None,
)
def test_handle_emit_with_callback(self):
host_id = self.pm.host_id
with mock.patch.object(base_manager.BaseManager, 'emit') as super_emit:
self.pm._handle_emit(
{
'event': 'foo',
'data': 'bar',
'namespace': '/baz',
'callback': ('sid', '/baz', 123),
'host_id': host_id,
}
)
assert super_emit.call_count == 1
assert super_emit.call_args[0] == ('foo', 'bar')
assert super_emit.call_args[1]['namespace'] == '/baz'
assert super_emit.call_args[1]['room'] is None
assert super_emit.call_args[1]['skip_sid'] is None
assert isinstance(
super_emit.call_args[1]['callback'], functools.partial
)
super_emit.call_args[1]['callback']('one', 2, 'three')
self.pm._publish.assert_called_once_with(
{
'method': 'callback',
'host_id': host_id,
'sid': 'sid',
'namespace': '/baz',
'id': 123,
'args': ('one', 2, 'three'),
}
)
def test_handle_callback(self):
host_id = self.pm.host_id
with mock.patch.object(self.pm, 'trigger_callback') as trigger:
self.pm._handle_callback(
{
'method': 'callback',
'host_id': host_id,
'sid': 'sid',
'namespace': '/',
'id': 123,
'args': ('one', 2),
}
)
trigger.assert_called_once_with('sid', 123, ('one', 2))
def test_handle_callback_bad_host_id(self):
with mock.patch.object(self.pm, 'trigger_callback') as trigger:
self.pm._handle_callback(
{
'method': 'callback',
'host_id': 'bad',
'sid': 'sid',
'namespace': '/',
'id': 123,
'args': ('one', 2),
}
)
assert trigger.call_count == 0
def test_handle_callback_missing_args(self):
host_id = self.pm.host_id
with mock.patch.object(self.pm, 'trigger_callback') as trigger:
self.pm._handle_callback(
{
'method': 'callback',
'host_id': host_id,
'sid': 'sid',
'namespace': '/',
'id': 123,
}
)
self.pm._handle_callback(
{
'method': 'callback',
'host_id': host_id,
'sid': 'sid',
'namespace': '/',
}
)
self.pm._handle_callback(
{'method': 'callback', 'host_id': host_id, 'sid': 'sid'}
)
self.pm._handle_callback(
{'method': 'callback', 'host_id': host_id}
)
assert trigger.call_count == 0
def test_handle_disconnect(self):
self.pm._handle_disconnect(
{'method': 'disconnect', 'sid': '123', 'namespace': '/foo'}
)
self.pm.server.disconnect.assert_called_once_with(
sid='123', namespace='/foo', ignore_queue=True
)
def test_handle_close_room(self):
with mock.patch.object(
base_manager.BaseManager, 'close_room'
) as super_close_room:
self.pm._handle_close_room({'method': 'close_room', 'room': 'foo'})
super_close_room.assert_called_once_with(
room='foo', namespace=None
)
def test_handle_close_room_with_namespace(self):
with mock.patch.object(
base_manager.BaseManager, 'close_room'
) as super_close_room:
self.pm._handle_close_room(
{'method': 'close_room', 'room': 'foo', 'namespace': '/bar'}
)
super_close_room.assert_called_once_with(
room='foo', namespace='/bar'
)
def test_background_thread(self):
self.pm._handle_emit = mock.MagicMock()
self.pm._handle_callback = mock.MagicMock()
self.pm._handle_disconnect = mock.MagicMock()
self.pm._handle_close_room = mock.MagicMock()
def messages():
import pickle
yield {'method': 'emit', 'value': 'foo'}
yield {'missing': 'method'}
yield '{"method": "callback", "value": "bar"}'
yield {'method': 'disconnect', 'sid': '123', 'namespace': '/foo'}
yield {'method': 'bogus'}
yield pickle.dumps({'method': 'close_room', 'value': 'baz'})
yield 'bad json'
yield b'bad pickled'
self.pm._listen = mock.MagicMock(side_effect=messages)
try:
self.pm._thread()
except StopIteration:
pass
self.pm._handle_emit.assert_called_once_with(
{'method': 'emit', 'value': 'foo'}
)
self.pm._handle_callback.assert_called_once_with(
{'method': 'callback', 'value': 'bar'}
)
self.pm._handle_disconnect.assert_called_once_with(
{'method': 'disconnect', 'sid': '123', 'namespace': '/foo'}
)
self.pm._handle_close_room.assert_called_once_with(
{'method': 'close_room', 'value': 'baz'}
)
|
mit
| 408,921,371,623,106,050
| 33.035897
| 79
| 0.469715
| false
| 4.027306
| true
| false
| false
|
rschnapka/account-closing
|
account_cutoff_prepaid/account.py
|
1
|
6258
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Account Cut-off Prepaid module for OpenERP
# Copyright (C) 2013 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.tools.translate import _
class account_invoice_line(orm.Model):
_inherit = 'account.invoice.line'
_columns = {
'start_date': fields.date('Start Date'),
'end_date': fields.date('End Date'),
}
def _check_start_end_dates(self, cr, uid, ids):
for invline in self.browse(cr, uid, ids):
if invline.start_date and not invline.end_date:
raise orm.except_orm(
_('Error:'),
_("Missing End Date for invoice line with "
"Description '%s'.")
% (invline.name))
if invline.end_date and not invline.start_date:
raise orm.except_orm(
_('Error:'),
_("Missing Start Date for invoice line with "
"Description '%s'.")
% (invline.name))
if invline.end_date and invline.start_date and \
invline.start_date > invline.end_date:
raise orm.except_orm(
_('Error:'),
_("Start Date should be before or be the same as "
"End Date for invoice line with Description '%s'.")
% (invline.name))
# Note : we can't check invline.product_id.must_have_dates
# have start_date and end_date here, because it would
# block automatic invoice generation. So we do the check
# upon validation of the invoice (see below the function
# action_move_create)
return True
_constraints = [
(_check_start_end_dates, "Error msg in raise",
['start_date', 'end_date', 'product_id']),
]
def move_line_get_item(self, cr, uid, line, context=None):
res = super(account_invoice_line, self).move_line_get_item(
cr, uid, line, context=context)
res['start_date'] = line.start_date
res['end_date'] = line.end_date
return res
class account_move_line(orm.Model):
_inherit = "account.move.line"
_columns = {
'start_date': fields.date('Start Date'),
'end_date': fields.date('End Date'),
}
def _check_start_end_dates(self, cr, uid, ids):
for moveline in self.browse(cr, uid, ids):
if moveline.start_date and not moveline.end_date:
raise orm.except_orm(
_('Error:'),
_("Missing End Date for move line with Name '%s'.")
% (moveline.name))
if moveline.end_date and not moveline.start_date:
raise orm.except_orm(
_('Error:'),
_("Missing Start Date for move line with Name '%s'.")
% (moveline.name))
if moveline.end_date and moveline.start_date and \
moveline.start_date > moveline.end_date:
raise orm.except_orm(
_('Error:'),
_("Start Date should be before End Date for move line "
"with Name '%s'.")
% (moveline.name))
# should we check that it's related to an expense / revenue ?
# -> I don't think so
return True
_constraints = [(
_check_start_end_dates,
"Error msg in raise",
['start_date', 'end_date']
)]
class account_invoice(orm.Model):
_inherit = 'account.invoice'
def inv_line_characteristic_hashcode(self, invoice, invoice_line):
'''Add start and end dates to hashcode used when the option "Group
Invoice Lines" is active on the Account Journal'''
code = super(account_invoice, self).inv_line_characteristic_hashcode(
invoice, invoice_line)
hashcode = '%s-%s-%s' % (
code, invoice_line.get('start_date', 'False'),
invoice_line.get('end_date', 'False'),
)
return hashcode
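    # Illustrative note (an editorial addition): with a base hashcode 'X', a
    # line covering 2013-01-01 .. 2013-03-31 hashes to
    # 'X-2013-01-01-2013-03-31', while a dateless line hashes to
    # 'X-False-False', so grouping only merges lines sharing the same period.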
def line_get_convert(self, cr, uid, x, part, date, context=None):
res = super(account_invoice, self).line_get_convert(
cr, uid, x, part, date, context=context)
res['start_date'] = x.get('start_date', False)
res['end_date'] = x.get('end_date', False)
return res
def action_move_create(self, cr, uid, ids, context=None):
'''Check that products with must_have_dates=True have
Start and End Dates'''
for invoice in self.browse(cr, uid, ids, context=context):
for invline in invoice.invoice_line:
if invline.product_id and invline.product_id.must_have_dates:
if not invline.start_date or not invline.end_date:
raise orm.except_orm(
_('Error:'),
_("Missing Start Date and End Date for invoice "
"line with Product '%s' which has the "
"property 'Must Have Start and End Dates'.")
% (invline.product_id.name))
return super(account_invoice, self).action_move_create(
cr, uid, ids, context=context)
|
agpl-3.0
| 7,632,116,702,459,464,000
| 40.72
| 78
| 0.541067
| false
| 4.147117
| false
| false
| false
|
PaddlePaddle/Paddle
|
python/paddle/fluid/tests/unittests/hybrid_parallel_mp_clip_grad.py
|
1
|
1391
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
import paddle
import numpy as np
from hybrid_parallel_mp_model import TestDistMPTraning
import unittest
import logging
#log = logging.getLogger("HybridParallel")
#log.setLevel(logging.WARNING)
class TestMPClipGrad(TestDistMPTraning):
def build_optimizer(self, model):
grad_clip = paddle.nn.ClipGradByGlobalNorm(2.0)
scheduler = paddle.optimizer.lr.ExponentialDecay(
learning_rate=0.001, gamma=0.999, verbose=True)
optimizer = paddle.optimizer.SGD(scheduler,
grad_clip=grad_clip,
parameters=model.parameters())
return optimizer
if __name__ == "__main__":
unittest.main()
|
apache-2.0
| -7,350,561,299,220,626,000
| 33.775
| 74
| 0.698059
| false
| 4.079179
| false
| false
| false
|
zsiciarz/django-pgallery
|
pgallery/admin.py
|
1
|
2105
|
"""
Administration for photos and galleries.
"""
from django.contrib import admin
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from .forms import PhotoForm
from .models import Gallery, Photo
class PhotoInline(admin.TabularInline):
"""
Administration for photos.
"""
model = Photo
form = PhotoForm
ordering = ["created"]
def get_extra(self, request, obj=None, **kwargs):
return 0 if obj else 3
class GalleryAdmin(admin.ModelAdmin):
"""
Administration for galleries.
"""
list_display = (
"author",
"title",
"status",
# Having "description" here raises SystemCheckError (admin.E108).
# We need to remove description from list_display for Django 2.1-2.2
# See https://code.djangoproject.com/ticket/30543
# "description",
"shot_date",
"modified",
"photo_count",
)
list_display_links = ("title",)
list_editable = ("status",)
list_filter = ("status",)
date_hierarchy = "shot_date"
prepopulated_fields = {"slug": ("title",)}
inlines = [PhotoInline]
def photo_count(self, obj):
return obj.photo_count
photo_count.short_description = _("Photo count")
def get_queryset(self, request):
"""
Add number of photos to each gallery.
"""
qs = super(GalleryAdmin, self).get_queryset(request)
return qs.annotate(photo_count=Count("photos"))
def save_model(self, request, obj, form, change):
"""
        Set the currently authenticated user as the author of the gallery.
"""
obj.author = request.user
obj.save()
def save_formset(self, request, form, formset, change):
"""
        For each photo, set its author to the currently authenticated user.
"""
instances = formset.save(commit=False)
for instance in instances:
if isinstance(instance, Photo):
instance.author = request.user
instance.save()
admin.site.register(Gallery, GalleryAdmin)
|
mit
| 6,763,024,374,916,542,000
| 25.3125
| 76
| 0.614252
| false
| 4.095331
| false
| false
| false
|
dgketchum/satellite_image
|
sat_image/image.py
|
1
|
27285
|
# =============================================================================================
# Copyright 2017 dgketchum
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================================
import os
import shutil
from rasterio import open as rasopen
from numpy import where, pi, cos, nan, inf, true_divide, errstate, log
from numpy import float32, sin, deg2rad, array, isnan
from shapely.geometry import Polygon, mapping
from fiona import open as fiopen
from fiona.crs import from_epsg
from tempfile import mkdtemp
from datetime import datetime
from bounds import RasterBounds
from sat_image import mtl
class UnmatchedStackGeoError(ValueError):
pass
class InvalidObjectError(TypeError):
pass
class LandsatImage(object):
'''
    Object to process Landsat images. The parent class LandsatImage takes a
    directory containing untarred files. For now this ingests images that have
    been downloaded from USGS Earth Explorer using our Landsat578 package.
'''
def __init__(self, obj):
'''
:param obj: Directory containing an unzipped Landsat 5, 7, or 8 image. This should include at least
a tif for each band, and a .mtl file.
'''
self.obj = obj
if os.path.isdir(obj):
self.isdir = True
self.date_acquired = None
self.file_list = os.listdir(obj)
self.tif_list = [x for x in os.listdir(obj) if x.endswith('.TIF')]
self.tif_list.sort()
# parse metadata file into attributes
# structure: {HEADER: {SUBHEADER: {key(attribute), val(attribute value)}}}
self.mtl = mtl.parsemeta(obj)
self.meta_header = list(self.mtl)[0]
self.super_dict = self.mtl[self.meta_header]
for key, val in self.super_dict.items():
for sub_key, sub_val in val.items():
# print(sub_key.lower(), sub_val)
setattr(self, sub_key.lower(), sub_val)
self.satellite = self.landsat_scene_id[:3]
# create numpy nd_array objects for each band
self.band_list = []
self.tif_dict = {}
for i, tif in enumerate(self.tif_list):
raster = os.path.join(self.obj, tif)
# set all lower case attributes
tif = tif.lower()
front_ind = tif.index('b')
end_ind = tif.index('.tif')
att_string = tif[front_ind: end_ind]
self.band_list.append(att_string)
self.tif_dict[att_string] = raster
self.band_count = i + 1
if i == 0:
with rasopen(raster) as src:
transform = src.transform
profile = src.profile
meta = src.meta.copy()
self.rasterio_geometry = meta
self.profile = profile
self.transform = transform
self.shape = (1, profile['height'], profile['width'])
bounds = RasterBounds(affine_transform=transform,
profile=profile,
latlon=False)
self.bounds = bounds
self.north, self.west, self.south, self.east = bounds.get_nwse_tuple()
self.coords = bounds.as_tuple('nsew')
self.solar_zenith = 90. - self.sun_elevation
self.solar_zenith_rad = self.solar_zenith * pi / 180
self.sun_elevation_rad = self.sun_elevation * pi / 180
self.earth_sun_dist = self.earth_sun_d(self.date_acquired)
dtime = datetime.strptime(str(self.date_acquired), '%Y-%m-%d')
julian_day = dtime.strftime('%j')
self.doy = int(julian_day)
self.scene_coords_deg = self._scene_centroid()
self.scene_coords_rad = deg2rad(self.scene_coords_deg[0]), deg2rad(self.scene_coords_deg[1])
def _get_band(self, band_str):
path = self.tif_dict[band_str]
with rasopen(path) as src:
arr = src.read(1)
arr = array(arr, dtype=float32)
arr[arr < 1.] = nan
return arr
def _scene_centroid(self):
""" Compute image center coordinates
:return: Tuple of image center in lat, lon
"""
ul_lat = self.corner_ul_lat_product
ll_lat = self.corner_ll_lat_product
ul_lon = self.corner_ul_lon_product
ur_lon = self.corner_ur_lon_product
lat = (ul_lat + ll_lat) / 2.
lon = (ul_lon + ur_lon) / 2.
return lat, lon
@staticmethod
def earth_sun_d(dtime):
""" Earth-sun distance in AU
:param dtime time, e.g. datetime.datetime(2007, 5, 1)
:type datetime object
:return float(distance from sun to earth in astronomical units)
"""
doy = int(dtime.strftime('%j'))
rad_term = 0.9856 * (doy - 4) * pi / 180
distance_au = 1 - 0.01672 * cos(rad_term)
return distance_au
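    # Illustrative check (an editorial addition): for 2007-05-01 (doy 121)
    # the radian term is ~2.01, so the distance comes out near 1.007 AU,
    # consistent with aphelion falling in early July when the cosine reaches -1.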
@staticmethod
def _divide_zero(a, b, replace=0):
with errstate(divide='ignore', invalid='ignore'):
c = true_divide(a, b)
c[c == inf] = replace
return c
def get_tile_geometry(self, output_filename=None, geographic_coords=False):
if not output_filename:
temp_dir = mkdtemp()
temp = os.path.join(temp_dir, 'shape.shp')
else:
temp = output_filename
# corners = {'ul': (self.corner_ul_projection_x_product,
# self.corner_ul_projection_y_product),
# 'll': (self.corner_ll_projection_x_product,
# self.corner_ll_projection_y_product),
# 'lr': (self.corner_lr_projection_x_product,
# self.corner_lr_projection_y_product),
# 'ur': (self.corner_ur_projection_x_product,
# self.corner_ur_projection_y_product)}
if geographic_coords:
points = [(self.north, self.west), (self.south, self.west),
(self.south, self.east), (self.north, self.east),
(self.north, self.west)]
else:
points = [(self.west, self.north), (self.west, self.south),
(self.east, self.south), (self.east, self.north),
(self.west, self.north)]
polygon = Polygon(points)
schema = {'geometry': 'Polygon',
'properties': {'id': 'int'}}
crs = from_epsg(int(self.rasterio_geometry['crs']['init'].split(':')[1]))
with fiopen(temp, 'w', 'ESRI Shapefile', schema=schema, crs=crs) as shp:
shp.write({
'geometry': mapping(polygon),
'properties': {'id': 1}})
if output_filename:
return None
with fiopen(temp, 'r') as src:
features = [f['geometry'] for f in src]
if not output_filename:
try:
shutil.rmtree(temp_dir)
except UnboundLocalError:
pass
return features
def save_array(self, arr, output_filename):
geometry = self.rasterio_geometry
arr = arr.reshape(1, arr.shape[0], arr.shape[1])
geometry['dtype'] = arr.dtype
with rasopen(output_filename, 'w', **geometry) as dst:
dst.write(arr)
return None
def mask_by_image(self, arr):
image = self._get_band('b1')
image = array(image, dtype=float32)
image[image < 1.] = nan
arr = where(isnan(image), nan, arr)
return arr
def mask(self):
image = self._get_band('b1')
image = array(image, dtype=float32)
image[image < 1.] = nan
arr = where(isnan(image), 0, 1)
return arr
class Landsat5(LandsatImage):
def __init__(self, obj):
LandsatImage.__init__(self, obj)
if self.satellite != 'LT5':
raise ValueError('Must init Landsat5 object with Landsat5 data, not {}'.format(self.satellite))
# https://landsat.usgs.gov/esun
self.ex_atm_irrad = (1958.0, 1827.0, 1551.0,
1036.0, 214.9, nan, 80.65)
# old values from fmask.exe
# self.ex_atm_irrad = (1983.0, 1796.0, 1536.0, 1031.0, 220.0, nan, 83.44)
self.k1, self.k2 = 607.76, 1260.56
def radiance(self, band):
qcal_min = getattr(self, 'quantize_cal_min_band_{}'.format(band))
qcal_max = getattr(self, 'quantize_cal_max_band_{}'.format(band))
l_min = getattr(self, 'radiance_minimum_band_{}'.format(band))
l_max = getattr(self, 'radiance_maximum_band_{}'.format(band))
qcal = self._get_band('b{}'.format(band))
rad = ((l_max - l_min) / (qcal_max - qcal_min)) * (qcal - qcal_min) + l_min
return rad.astype(float32)
def brightness_temp(self, band, temp_scale='K'):
if band in [1, 2, 3, 4, 5, 7]:
raise ValueError('LT5 brightness must be band 6')
rad = self.radiance(band)
brightness = self.k2 / (log((self.k1 / rad) + 1))
if temp_scale == 'K':
return brightness
elif temp_scale == 'F':
return brightness * (9 / 5.0) - 459.67
elif temp_scale == 'C':
return brightness - 273.15
else:
raise ValueError('{} is not a valid temperature scale'.format(temp_scale))
def reflectance(self, band):
"""
:param band: An optical band, i.e. 1-5, 7
:return: At satellite reflectance, [-]
"""
if band == 6:
raise ValueError('LT5 reflectance must be other than band 6')
rad = self.radiance(band)
esun = self.ex_atm_irrad[band - 1]
toa_reflect = (pi * rad * self.earth_sun_dist ** 2) / (esun * cos(self.solar_zenith_rad))
return toa_reflect
def albedo(self, model='smith'):
"""Finds broad-band surface reflectance (albedo)
Smith (2010), “The heat budget of the earth’s surface deduced from space”
LT5 toa reflectance bands 1, 3, 4, 5, 7
# normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014
Should have option for Liang, 2000;
Tasumi (2008), "At-Surface Reflectance and Albedo from Satellite for
Operational Calculation of Land Surface Energy Balance"
:return albedo array of floats
"""
if model == 'smith':
blue, red, nir, swir1, swir2 = (self.reflectance(1), self.reflectance(3), self.reflectance(4),
self.reflectance(5), self.reflectance(7))
alb = (0.356 * blue + 0.130 * red + 0.373 * nir + 0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014
        elif model == 'tasumi':
            # add tasumi algorithm TODO
            raise NotImplementedError("'tasumi' albedo model not implemented yet")
        else:
            raise ValueError('{} is not a valid albedo model'.format(model))
return alb
def saturation_mask(self, band, value=255):
""" Mask saturated pixels, 1 (True) is saturated.
:param band: Image band with dn values, type: array
:param value: Maximum (saturated) value, i.e. 255 for 8-bit data, type: int
:return: boolean array
"""
dn = self._get_band('b{}'.format(band))
mask = self.mask()
mask = where((dn == value) & (mask > 0), True, False)
return mask
def ndvi(self):
""" Normalized difference vegetation index.
:return: NDVI
"""
red, nir = self.reflectance(3), self.reflectance(4)
ndvi = self._divide_zero((nir - red), (nir + red), nan)
return ndvi
def lai(self):
"""
Leaf area index (LAI), or the surface area of leaves to surface area ground.
Trezza and Allen, 2014
:param ndvi: normalized difference vegetation index [-]
:return: LAI [-]
"""
ndvi = self.ndvi()
lai = 7.0 * (ndvi ** 3)
lai = where(lai > 6., 6., lai)
return lai
def emissivity(self, approach='tasumi'):
ndvi = self.ndvi()
if approach == 'tasumi':
lai = self.lai()
# Tasumi et al., 2003
# narrow-band emissivity
nb_epsilon = where((ndvi > 0) & (lai <= 3), 0.97 + 0.0033 * lai, nan)
nb_epsilon = where((ndvi > 0) & (lai > 3), 0.98, nb_epsilon)
nb_epsilon = where(ndvi <= 0, 0.99, nb_epsilon)
return nb_epsilon
        if approach == 'sobrino':
            # Sobrino et al., 2004
            red = self.reflectance(3)
            # emissivity is 0.99 where NDVI > 0.5, a red-reflectance proxy where NDVI < 0.2
            bound_ndvi = where(ndvi > 0.5, 0.99, ndvi)
            bound_ndvi = where(ndvi < 0.2, red, bound_ndvi)
            pv = ((ndvi - 0.2) / (0.5 - 0.2)) ** 2
            pv_emiss = 0.004 * pv + 0.986
            emissivity = where((ndvi >= 0.2) & (ndvi <= 0.5), pv_emiss, bound_ndvi)
            return emissivity
        raise ValueError('{} is not a valid emissivity approach'.format(approach))
    def land_surface_temp(self):
        """
        Land surface temperature from the thermal band, using mean
        atmospheric correction values from Allen et al. (2007).
        :return: land surface temperature [K]
        """
        rp = 0.91  # path radiance
        tau = 0.866  # narrow-band transmissivity of air
        rsky = 1.32  # narrow-band clear-sky downward thermal radiation
        epsilon = self.emissivity(approach='tasumi')
        radiance = self.radiance(6)
        rc = ((radiance - rp) / tau) - ((1 - epsilon) * rsky)
        lst = self.k2 / (log((epsilon * self.k1 / rc) + 1))
        return lst
def ndsi(self):
""" Normalized difference snow index.
:return: NDSI
"""
green, swir1 = self.reflectance(2), self.reflectance(5)
ndsi = self._divide_zero((green - swir1), (green + swir1), nan)
return ndsi
class Landsat7(LandsatImage):
def __init__(self, obj):
LandsatImage.__init__(self, obj)
        if self.satellite != 'LE7':
            raise ValueError('Must init Landsat7 object with Landsat7 data, not {}'.format(self.satellite))
# https://landsat.usgs.gov/esun; Landsat 7 Handbook
self.ex_atm_irrad = (1970.0, 1842.0, 1547.0, 1044.0,
255.700, nan, 82.06, 1369.00)
self.k1, self.k2 = 666.09, 1282.71
def radiance(self, band):
if band == 6:
band = '6_vcid_1'
qcal_min = getattr(self, 'quantize_cal_min_band_{}'.format(band))
qcal_max = getattr(self, 'quantize_cal_max_band_{}'.format(band))
l_min = getattr(self, 'radiance_minimum_band_{}'.format(band))
l_max = getattr(self, 'radiance_maximum_band_{}'.format(band))
qcal = self._get_band('b{}'.format(band))
rad = ((l_max - l_min) / (qcal_max - qcal_min)) * (qcal - qcal_min) + l_min
        return rad.astype(float32)
    def brightness_temp(self, band=6, gain='low', temp_scale='K'):
        if band != 6:
            raise ValueError('LE7 brightness must be band 6 (vcid_1 or vcid_2)')
        if gain == 'low':
            # low gain : b6_vcid_1
            band_gain = '6_vcid_1'
        else:
            # high gain : b6_vcid_2
            band_gain = '6_vcid_2'
rad = self.radiance(band_gain)
brightness = self.k2 / (log((self.k1 / rad) + 1))
if temp_scale == 'K':
return brightness
elif temp_scale == 'F':
return brightness * (9 / 5.0) - 459.67
elif temp_scale == 'C':
return brightness - 273.15
else:
raise ValueError('{} is not a valid temperature scale'.format(temp_scale))
def reflectance(self, band):
"""
:param band: An optical band, i.e. 1-5, 7
:return: At satellite reflectance, [-]
"""
        if band in [6, 'b6_vcid_1', 'b6_vcid_2']:
            raise ValueError('LE7 reflectance must not be the thermal band (6, b6_vcid_1 or b6_vcid_2)')
rad = self.radiance(band)
esun = self.ex_atm_irrad[band - 1]
toa_reflect = (pi * rad * self.earth_sun_dist ** 2) / (esun * cos(self.solar_zenith_rad))
return toa_reflect
def albedo(self):
"""Finds broad-band surface reflectance (albedo)
Smith (2010), “The heat budget of the earth’s surface deduced from space”
Should have option for Liang, 2000;
LE7 toa reflectance bands 1, 3, 4, 5, 7
# normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014
:return albedo array of floats
"""
blue, red, nir, swir1, swir2 = (self.reflectance(1), self.reflectance(3), self.reflectance(4),
self.reflectance(5), self.reflectance(7))
alb = (0.356 * blue + 0.130 * red + 0.373 * nir + 0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014
return alb
def saturation_mask(self, band, value=255):
""" Mask saturated pixels, 1 (True) is saturated.
:param band: Image band with dn values, type: array
:param value: Maximum (saturated) value, i.e. 255 for 8-bit data, type: int
:return: boolean array
"""
dn = self._get_band('b{}'.format(band))
mask = where((dn == value) & (self.mask() > 0), True, False)
return mask
def ndvi(self):
""" Normalized difference vegetation index.
:return: NDVI
"""
red, nir = self.reflectance(3), self.reflectance(4)
ndvi = self._divide_zero((nir - red), (nir + red), nan)
return ndvi
def lai(self):
"""
Leaf area index (LAI), or the surface area of leaves to surface area ground.
Trezza and Allen, 2014
:param ndvi: normalized difference vegetation index [-]
:return: LAI [-]
"""
ndvi = self.ndvi()
lai = 7.0 * (ndvi ** 3)
lai = where(lai > 6., 6., lai)
return lai
def emissivity(self, approach='tasumi'):
ndvi = self.ndvi()
if approach == 'tasumi':
lai = self.lai()
# Tasumi et al., 2003
# narrow-band emissivity
nb_epsilon = where((ndvi > 0) & (lai <= 3), 0.97 + 0.0033 * lai, nan)
nb_epsilon = where((ndvi > 0) & (lai > 3), 0.98, nb_epsilon)
nb_epsilon = where(ndvi <= 0, 0.99, nb_epsilon)
return nb_epsilon
        if approach == 'sobrino':
            # Sobrino et al., 2004
            red = self.reflectance(3)
            # emissivity is 0.99 where NDVI > 0.5, a red-reflectance proxy where NDVI < 0.2
            bound_ndvi = where(ndvi > 0.5, 0.99, ndvi)
            bound_ndvi = where(ndvi < 0.2, red, bound_ndvi)
            pv = ((ndvi - 0.2) / (0.5 - 0.2)) ** 2
            pv_emiss = 0.004 * pv + 0.986
            emissivity = where((ndvi >= 0.2) & (ndvi <= 0.5), pv_emiss, bound_ndvi)
            return emissivity
        raise ValueError('{} is not a valid emissivity approach'.format(approach))
    def land_surface_temp(self):
        # mean atmospheric correction values from Allen et al. (2007)
        rp = 0.91
        tau = 0.866
        rsky = 1.32
epsilon = self.emissivity()
rc = ((self.radiance(6) - rp) / tau) - ((1 - epsilon) * rsky)
lst = self.k2 / (log((epsilon * self.k1 / rc) + 1))
return lst
def ndsi(self):
""" Normalized difference snow index.
        :return: NDSI
"""
green, swir1 = self.reflectance(2), self.reflectance(5)
ndsi = self._divide_zero((green - swir1), (green + swir1), nan)
return ndsi
class Landsat8(LandsatImage):
def __init__(self, obj):
LandsatImage.__init__(self, obj)
self.oli_bands = [1, 2, 3, 4, 5, 6, 7, 8, 9]
def brightness_temp(self, band, temp_scale='K'):
"""Calculate brightness temperature of Landsat 8
as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php
T = K2 / log((K1 / L) + 1)
and
L = ML * Q + AL
where:
T = At-satellite brightness temperature (degrees kelvin)
L = TOA spectral radiance (Watts / (m2 * srad * mm))
ML = Band-specific multiplicative rescaling factor from the metadata
(RADIANCE_MULT_BAND_x, where x is the band number)
AL = Band-specific additive rescaling factor from the metadata
(RADIANCE_ADD_BAND_x, where x is the band number)
Q = Quantized and calibrated standard product pixel values (DN)
(ndarray img)
K1 = Band-specific thermal conversion constant from the metadata
(K1_CONSTANT_BAND_x, where x is the thermal band number)
        K2 = Band-specific thermal conversion constant from the metadata
            (K2_CONSTANT_BAND_x, where x is the thermal band number)
Returns
--------
ndarray:
float32 ndarray with shape == input shape
"""
if band in self.oli_bands:
raise ValueError('Landsat 8 brightness should be TIRS band (i.e. 10 or 11)')
k1 = getattr(self, 'k1_constant_band_{}'.format(band))
k2 = getattr(self, 'k2_constant_band_{}'.format(band))
rad = self.radiance(band)
brightness = k2 / log((k1 / rad) + 1)
if temp_scale == 'K':
return brightness
elif temp_scale == 'F':
return brightness * (9 / 5.0) - 459.67
elif temp_scale == 'C':
return brightness - 273.15
else:
raise ValueError('{} is not a valid temperature scale'.format(temp_scale))
def reflectance(self, band):
"""Calculate top of atmosphere reflectance of Landsat 8
as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php
R_raw = MR * Q + AR
R = R_raw / cos(Z) = R_raw / sin(E)
Z = 90 - E (in degrees)
where:
R_raw = TOA planetary reflectance, without correction for solar angle.
R = TOA reflectance with a correction for the sun angle.
MR = Band-specific multiplicative rescaling factor from the metadata
(REFLECTANCE_MULT_BAND_x, where x is the band number)
AR = Band-specific additive rescaling factor from the metadata
(REFLECTANCE_ADD_BAND_x, where x is the band number)
Q = Quantized and calibrated standard product pixel values (DN)
E = Local sun elevation angle. The scene center sun elevation angle
in degrees is provided in the metadata (SUN_ELEVATION).
Z = Local solar zenith angle (same angle as E, but measured from the
zenith instead of from the horizon).
Returns
--------
ndarray:
float32 ndarray with shape == input shape
"""
        if band not in self.oli_bands:
            raise ValueError('Landsat 8 reflectance should be an OLI band (i.e. bands 1-9)')
elev = getattr(self, 'sun_elevation')
dn = self._get_band('b{}'.format(band))
mr = getattr(self, 'reflectance_mult_band_{}'.format(band))
ar = getattr(self, 'reflectance_add_band_{}'.format(band))
if elev < 0.0:
raise ValueError("Sun elevation must be non-negative "
"(sun must be above horizon for entire scene)")
rf = ((mr * dn.astype(float32)) + ar) / sin(deg2rad(elev))
return rf
def radiance(self, band):
"""Calculate top of atmosphere radiance of Landsat 8
as outlined here: http://landsat.usgs.gov/Landsat8_Using_Product.php
L = ML * Q + AL
where:
L = TOA spectral radiance (Watts / (m2 * srad * mm))
ML = Band-specific multiplicative rescaling factor from the metadata
(RADIANCE_MULT_BAND_x, where x is the band number)
AL = Band-specific additive rescaling factor from the metadata
(RADIANCE_ADD_BAND_x, where x is the band number)
Q = Quantized and calibrated standard product pixel values (DN)
(ndarray img)
Returns
--------
ndarray:
float32 ndarray with shape == input shape
"""
ml = getattr(self, 'radiance_mult_band_{}'.format(band))
al = getattr(self, 'radiance_add_band_{}'.format(band))
dn = self._get_band('b{}'.format(band))
rad = ml * dn.astype(float32) + al
return rad
def albedo(self):
"""Smith (2010), finds broad-band surface reflectance (albedo)
Should have option for Liang, 2000; Tasumi, 2008;
LC8 toa reflectance bands 2, 4, 5, 6, 7
# normalized i.e. 0.356 + 0.130 + 0.373 + 0.085 + 0.07 = 1.014
:return albedo array of floats
"""
blue, red, nir, swir1, swir2 = (self.reflectance(2), self.reflectance(4), self.reflectance(5),
self.reflectance(6), self.reflectance(7))
alb = (0.356 * blue + 0.130 * red + 0.373 * nir + 0.085 * swir1 + 0.072 * swir2 - 0.0018) / 1.014
return alb
def ndvi(self):
""" Normalized difference vegetation index.
:return: NDVI
"""
red, nir = self.reflectance(4), self.reflectance(5)
ndvi = self._divide_zero((nir - red), (nir + red), nan)
return ndvi
def lai(self):
"""
Leaf area index (LAI), or the surface area of leaves to surface area ground.
Trezza and Allen, 2014
:param ndvi: normalized difference vegetation index [-]
:return: LAI [-]
"""
ndvi = self.ndvi()
lai = 7.0 * (ndvi ** 3)
lai = where(lai > 6., 6., lai)
return lai
def emissivity(self, approach='tasumi'):
ndvi = self.ndvi()
if approach == 'tasumi':
lai = self.lai()
# Tasumi et al., 2003
# narrow-band emissivity
nb_epsilon = where((ndvi > 0) & (lai <= 3), 0.97 + 0.0033 * lai, nan)
nb_epsilon = where((ndvi > 0) & (lai > 3), 0.98, nb_epsilon)
nb_epsilon = where(ndvi <= 0, 0.99, nb_epsilon)
return nb_epsilon
        if approach == 'sobrino':
            # Sobrino et al., 2004
            red = self.reflectance(4)  # OLI red is band 4 on Landsat 8
            # emissivity is 0.99 where NDVI > 0.5, a red-reflectance proxy where NDVI < 0.2
            bound_ndvi = where(ndvi > 0.5, 0.99, ndvi)
            bound_ndvi = where(ndvi < 0.2, red, bound_ndvi)
            pv = ((ndvi - 0.2) / (0.5 - 0.2)) ** 2
            pv_emiss = 0.004 * pv + 0.986
            emissivity = where((ndvi >= 0.2) & (ndvi <= 0.5), pv_emiss, bound_ndvi)
            return emissivity
        raise ValueError('{} is not a valid emissivity approach'.format(approach))
def land_surface_temp(self):
band = 10
k1 = getattr(self, 'k1_constant_band_{}'.format(band))
k2 = getattr(self, 'k2_constant_band_{}'.format(band))
        # mean atmospheric correction values from Allen et al. (2007)
        rp = 0.91
        tau = 0.866
        rsky = 1.32
epsilon = self.emissivity()
rc = ((self.radiance(band) - rp) / tau) - ((1 - epsilon) * rsky)
lst = k2 / (log((epsilon * k1 / rc) + 1))
return lst
def ndsi(self):
""" Normalized difference snow index.
:return: NDSI
"""
green, swir1 = self.reflectance(3), self.reflectance(6)
ndsi = self._divide_zero((green - swir1), (green + swir1), nan)
return ndsi
# =============================================================================================
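# A minimal usage sketch for the classes above, assuming a LandsatImage
# subclass is constructed from a scene directory holding the band rasters and
# MTL metadata (as the _get_band and metadata attribute access implies); the
# path below is hypothetical.
if __name__ == '__main__':
    scene = Landsat8('/data/LC08_L1TP_038027_20160623')  # hypothetical scene dir
    ndvi = scene.ndvi()  # NDVI from OLI red (band 4) and NIR (band 5)
    alb = scene.albedo()  # Smith (2010) broad-band albedo
    lst = scene.land_surface_temp()  # single-channel LST from TIRS band 10
    print(ndvi.mean(), alb.mean(), lst.mean())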
|
apache-2.0
| -1,773,811,191,002,346,000
| 34.419481
| 109
| 0.541011
| false
| 3.510942
| false
| false
| false
|
gchq/gaffer-tools
|
python-shell/src/example.py
|
1
|
26676
|
#
# Copyright 2016-2019 Crown Copyright
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from gafferpy import gaffer as g
from gafferpy import gaffer_connector
def run(host, verbose=False):
return run_with_connector(create_connector(host, verbose))
def run_with_connector(gc):
print()
print('Running operations')
print('--------------------------')
print()
get_schema(gc)
get_filter_functions(gc)
get_class_filter_functions(gc)
get_element_generators(gc)
get_object_generators(gc)
get_operations(gc)
get_serialised_fields(gc)
get_store_traits(gc)
is_operation_supported(gc)
add_elements(gc)
get_elements(gc)
get_adj_seeds(gc)
get_all_elements(gc)
get_walks(gc)
generate_elements(gc)
generate_domain_objs(gc)
generate_domain_objects_chain(gc)
get_element_group_counts(gc)
get_sub_graph(gc)
export_to_gaffer_result_cache(gc)
get_job_details(gc)
get_all_job_details(gc)
add_named_operation(gc)
get_all_named_operations(gc)
named_operation(gc)
delete_named_operation(gc)
add_named_view_summarise(gc)
add_named_view_date_range(gc)
get_all_named_views(gc)
named_view_summarise(gc)
named_view_date_range(gc)
named_views(gc)
delete_named_views(gc)
sort_elements(gc)
max_element(gc)
min_element(gc)
to_vertices_to_entity_seeds(gc)
complex_op_chain(gc)
op_chain_in_json(gc)
def create_connector(host, verbose=False):
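    # 'host' is the Gaffer REST endpoint, e.g. 'http://localhost:8080/rest/latest'
    # as used in the __main__ block at the end of this example.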
return gaffer_connector.GafferConnector(host, verbose)
def get_schema(gc):
# Get Schema
result = gc.execute_get(
g.GetSchema()
)
print('Schema:')
print(result)
print()
def get_filter_functions(gc):
# Get filter functions
result = gc.execute_get(
g.GetFilterFunctions()
)
print('Filter Functions:')
print(result)
print()
def get_class_filter_functions(gc):
# Get class filter functions
class_name = 'uk.gov.gchq.koryphe.impl.predicate.IsMoreThan'
result = gc.execute_get(
g.GetClassFilterFunctions(class_name=class_name)
)
print('Class Filter Functions (IsMoreThan):')
print(result)
print()
def get_element_generators(gc):
# Get Element generators
result = gc.execute_get(
g.GetElementGenerators()
)
print('Element generators:')
print(result)
print()
def get_object_generators(gc):
# Get Object generators
result = gc.execute_get(
g.GetObjectGenerators()
)
print('Object generators:')
print(result)
print()
def get_operations(gc):
# Get operations
result = gc.execute_get(
g.GetOperations()
)
print('Operations:')
print(result)
print()
def get_serialised_fields(gc):
# Get serialised fields
class_name = 'uk.gov.gchq.koryphe.impl.predicate.IsMoreThan'
result = gc.execute_get(
g.GetSerialisedFields(class_name=class_name)
)
print('Serialised Fields (IsMoreThan):')
print(result)
print()
def get_store_traits(gc):
# Get Store Traits
result = gc.execute_get(
g.GetStoreTraits()
)
print('Store Traits:')
print(result)
print()
def is_operation_supported(gc):
# Is operation supported
operation = 'uk.gov.gchq.gaffer.operation.impl.add.AddElements'
result = gc.is_operation_supported(
g.IsOperationSupported(operation=operation)
)
print(
'\nOperation supported ("uk.gov.gchq.gaffer.operation.impl.add.AddElements"):')
print(result)
print()
def add_elements(gc):
# Add Elements
gc.execute_operation(
g.AddElements(
input=[
g.Entity(
group='JunctionUse',
vertex='M1:1',
properties={
'countByVehicleType': g.freq_map({
'BUS': 10,
'CAR': 50
}),
'endDate': g.date(1034319600000),
'count': g.long(60),
'startDate': g.date(1034316000000)
}
),
g.Edge(
group='RoadHasJunction',
source='M1',
destination='M1:1',
directed=True,
properties={}
)
]
)
)
print('Elements have been added')
print()
def get_elements(gc):
# Get Elements
    elements = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed('M5:10'),
                # Edge seeds can also be provided as follows
g.EdgeSeed('M5:10', 'M5:11', g.DirectedType.EITHER),
g.EdgeSeed('M5:10', 'M5:11', g.DirectedType.DIRECTED),
# Or you can use True or False for the direction
g.EdgeSeed('M5:10', 'M5:11', True)
],
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[],
transient_properties=[
g.Property('description', 'java.lang.String')
],
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['count'],
predicate=g.IsMoreThan(
value=g.long(1)
)
)
],
transform_functions=[
g.FunctionContext(
selection=['SOURCE', 'DESTINATION', 'count'],
function=g.Function(
class_name='uk.gov.gchq.gaffer.traffic.transform.DescriptionTransform'
),
projection=['description']
)
]
)
]
),
directed_type=g.DirectedType.EITHER
)
)
    print('Related elements')
    print(elements)
print()
def get_adj_seeds(gc):
# Adjacent Elements - chain 2 adjacent entities together
adj_seeds = gc.execute_operations(
[
g.GetAdjacentIds(
input=[
g.EntitySeed(
vertex='M5'
)
],
view=g.View(
edges=[
g.ElementDefinition(
'RoadHasJunction',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
'RoadUse',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
)
]
)
print('Adjacent entities - 2 hop')
print(adj_seeds)
print()
def get_all_elements(gc):
    # Get all elements, but limit the total results to 3
all_elements = gc.execute_operations(
operations=[
g.GetAllElements(),
g.Limit(result_limit=3)
]
)
    print('All elements (limited to first 3)')
print(all_elements)
print()
def get_walks(gc):
# Get walks from M32 traversing down RoadHasJunction then JunctionLocatedAt
walks = gc.execute_operation(
g.GetWalks(
input=[
g.EntitySeed('M32'),
],
operations=[
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction'
)
]
)
),
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group='JunctionLocatedAt'
)
]
)
)
]
)
)
print(
'Walks from M32 traversing down RoadHasJunction then JunctionLocatedAt')
print(walks)
print()
def generate_elements(gc):
# Generate Elements
    elements = gc.execute_operation(
g.GenerateElements(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.traffic.generator.RoadTrafficStringElementGenerator'
),
input=[
'"South West","E06000054","Wiltshire","6016","389200","179080","M4","LA Boundary","381800","180030","17","391646","179560","TM","E","2000","2000-05-03 00:00:00","7","0","9","2243","15","426","127","21","20","37","106","56","367","3060"'
]
)
)
    print('Generated elements from provided domain data')
    print(elements)
print()
def generate_domain_objs(gc):
    # Generate Domain Objects - from provided elements
    objects = gc.execute_operation(
g.GenerateObjects(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.rest.example.ExampleDomainObjectGenerator'
),
input=[
g.Entity('entity', '1'),
g.Edge('edge', '1', '2', True)
]
)
)
    print('Generated domain objects from provided elements')
    print(objects)
print()
def generate_domain_objects_chain(gc):
    # Generate Domain Objects - chain of get elements then generate objects
    objects = gc.execute_operations(
[
g.GetElements(
input=[g.EntitySeed(vertex='M5')],
seed_matching_type=g.SeedMatchingType.RELATED,
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction',
group_by=[]
)
]
)
),
g.GenerateObjects(
element_generator=g.ElementGenerator(
class_name='uk.gov.gchq.gaffer.rest.example.ExampleDomainObjectGenerator'
)
)
]
)
    print('Generated domain objects from elements fetched by seed')
    print(objects)
print()
def get_element_group_counts(gc):
    # Get elements for a seed, then count their groups
group_counts = gc.execute_operations([
g.GetElements(
input=[g.EntitySeed('M5')]
),
g.CountGroups(limit=1000)
])
    print('Group counts (limited to 1000 elements)')
print(group_counts)
print()
def get_sub_graph(gc):
# Export and Get to/from an in memory set
entity_seeds = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('South West')],
include_incoming_out_going=g.InOutType.OUT
),
g.ExportToSet(),
g.GetAdjacentIds(include_incoming_out_going=g.InOutType.OUT),
g.ExportToSet(),
g.DiscardOutput(),
g.GetSetExport()
]
)
print('Export and Get to/from an in memory set')
print(entity_seeds)
print()
def export_to_gaffer_result_cache(gc):
# Export to Gaffer Result Cache and Get from Gaffer Result Cache
job_details = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('South West')],
include_incoming_out_going=g.InOutType.OUT
),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetJobDetails()
]
)
print('Export to Gaffer Result Cache. Job Details:')
print(job_details)
print()
job_id = job_details['jobId']
entity_seeds = gc.execute_operation(
g.GetGafferResultCacheExport(job_id=job_id),
)
print('Get Gaffer Result Cache Export.')
print(entity_seeds)
print()
def get_job_details(gc):
# Get all job details
job_details_initial = gc.execute_operations(
[
g.GetAdjacentIds(
input=[g.EntitySeed('1')],
),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetJobDetails()
]
)
job_id = job_details_initial['jobId']
job_details = gc.execute_operation(
g.GetJobDetails(job_id=job_id),
)
print('Get job details')
print(job_details)
print()
def get_all_job_details(gc):
# Get all job details
all_job_details = gc.execute_operation(
g.GetAllJobDetails(),
)
print('Get all job details (just prints the first 3 results)')
print(all_job_details[:3])
print()
def delete_named_operation(gc):
gc.execute_operation(
g.DeleteNamedOperation('2-hop-with-limit')
)
print('Deleted named operation: 2-hop-with-limit')
print()
def add_named_operation(gc):
gc.execute_operation(
g.AddNamedOperation(
operation_chain={
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing": "OUTGOING"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing": "OUTGOING"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit": "${param1}"
}]
},
operation_name='2-hop-with-limit',
description='2 hop query with limit',
overwrite_flag=True,
read_access_roles=["read-user"],
write_access_roles=["write-user"],
parameters=[
g.NamedOperationParameter(
name="param1",
description="Limit param",
default_value=1,
value_class="java.lang.Long",
required=False
)
]
)
)
print('Added named operation: 2-hop-with-limit')
print()
def get_all_named_operations(gc):
namedOperations = gc.execute_operation(
g.GetAllNamedOperations()
)
print('Named operations')
print(namedOperations)
print()
def named_operation(gc):
result = gc.execute_operation(
g.NamedOperation(
operation_name='2-hop-with-limit',
parameters={
'param1': 2
},
input=[
g.EntitySeed('M5')
]
)
)
print('Execute named operation')
print(result)
print()
def delete_named_views(gc):
gc.execute_operation(
g.DeleteNamedView(name='summarise')
)
print('Deleted named view: summarise')
gc.execute_operation(
g.DeleteNamedView(name='dateRange')
)
print('Deleted named view: dateRange')
print()
def add_named_view_summarise(gc):
gc.execute_operation(
g.AddNamedView(
view=g.View(
global_elements=[
g.GlobalElementDefinition(group_by=[])
]
),
name='summarise',
description='Summarises all results (overrides the groupBy to an empty array).',
overwrite_flag=True
)
)
print('Added named view: summarise')
print()
def add_named_view_date_range(gc):
gc.execute_operation(
g.AddNamedView(
view=g.View(
global_elements=g.GlobalElementDefinition(
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['startDate'],
predicate=g.InDateRange(
start='${start}',
end='${end}'
)
)
]
)
),
name='dateRange',
description='Filters results to a provided date range.',
overwrite_flag=True,
parameters=[
g.NamedViewParameter(
name="start",
description="A date string for the start of date range.",
value_class="java.lang.String",
required=False
),
g.NamedViewParameter(
name="end",
description="A date string for the end of the date range.",
value_class="java.lang.String",
required=False
)
]
)
)
print('Added named view: dateRange')
print()
def get_all_named_views(gc):
namedViews = gc.execute_operation(
g.GetAllNamedViews()
)
print('Named views')
print(namedViews)
print()
def named_view_summarise(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=g.NamedView(
name="summarise"
)
)
)
print('Execute get elements with summarised named view')
print(result)
print()
def named_view_date_range(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=g.NamedView(
name="dateRange",
parameters={
'start': '2005/05/03 06:00',
'end': '2005/05/03 09:00'
}
)
)
)
print('Execute get elements with date range named view')
print(result)
print()
def named_views(gc):
result = gc.execute_operation(
g.GetElements(
input=[
g.EntitySeed(
vertex='M32:1'
)
],
view=[
g.NamedView(
name="summarise"
),
g.NamedView(
name="dateRange",
parameters={
'start': '2005/05/03 06:00',
'end': '2005/05/03 09:00'
}
)
]
)
)
print('Execute get elements with summarised and date range named views')
print(result)
print()
def sort_elements(gc):
# Get sorted Elements
    elements = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Sort(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
],
result_limit=5
)
])
    print('Sorted elements')
    print(elements)
print()
def max_element(gc):
    # Get the element with the maximum 'count' property
    elements = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Max(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
]
)
])
print('Max element')
    print(elements)
print()
def min_element(gc):
    # Get the element with the minimum 'count' property
    elements = gc.execute_operations([
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadUse',
group_by=[]
)
]
)
),
g.Min(
comparators=[
g.ElementPropertyComparator(
groups=['RoadUse'],
property='count'
)
]
)
])
print('Min element')
    print(elements)
print()
def to_vertices_to_entity_seeds(gc):
    # Convert edge vertices to entity seeds and traverse on to further elements
    elements = gc.execute_operations([
g.GetElements(
input=[
g.EntitySeed(
vertex='South West'
)
],
view=g.View(
edges=[
g.ElementDefinition(
'RegionContainsLocation',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.ToVertices(
edge_vertices=g.EdgeVertices.DESTINATION,
use_matched_vertex=g.UseMatchedVertex.OPPOSITE
),
g.ToEntitySeeds(),
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
'LocationContainsRoad',
group_by=[]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.Limit(5)
])
print('ToVertices then ToEntitySeeds')
    print(elements)
print()
def complex_op_chain(gc):
# All road junctions in the South West that were heavily used by buses in year 2000.
junctions = gc.execute_operations(
operations=[
g.GetAdjacentIds(
input=[g.EntitySeed(vertex='South West')],
view=g.View(
edges=[
g.ElementDefinition(
group='RegionContainsLocation',
group_by=[]
)
]
)
),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
group='LocationContainsRoad',
group_by=[]
)
]
)
),
g.ToSet(),
g.GetAdjacentIds(
view=g.View(
edges=[
g.ElementDefinition(
group='RoadHasJunction',
group_by=[]
)
]
)
),
g.GetElements(
view=g.View(
entities=[
g.ElementDefinition(
group='JunctionUse',
group_by=[],
transient_properties=[
g.Property('busCount', 'java.lang.Long')
],
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=['startDate'],
predicate=g.InDateRange(
start='2000/01/01',
end='2001/01/01'
)
)
],
post_aggregation_filter_functions=[
g.PredicateContext(
selection=['countByVehicleType'],
predicate=g.PredicateMap(
predicate=g.IsMoreThan(
value={'java.lang.Long': 1000},
or_equal_to=False
),
key='BUS'
)
)
],
transform_functions=[
g.FunctionContext(
selection=['countByVehicleType'],
function=g.FreqMapExtractor(key='BUS'),
projection=['busCount']
)
]
)
]
),
include_incoming_out_going=g.InOutType.OUT
),
g.ToCsv(
element_generator=g.CsvGenerator(
fields={
'VERTEX': 'Junction',
'busCount': 'Bus Count'
},
quoted=False
),
include_header=True
)
]
)
print(
'All road junctions in the South West that were heavily used by buses in year 2000.')
print(junctions)
print()
def op_chain_in_json(gc):
# Operation chain defined in json
result = gc.execute_operation_chain(
{
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.CountGroups"
}]
}
)
print('Operation chain defined in json')
print(result)
print()
if __name__ == "__main__":
run('http://localhost:8080/rest/latest', False)
|
apache-2.0
| -6,482,966,961,602,001,000
| 26.700935
| 252
| 0.458315
| false
| 4.535963
| false
| false
| false
|
menegazzo/travispy
|
setup.py
|
2
|
1873
|
from setuptools import setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', 'Arguments to pass to py.test')]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
args = ['travispy']
if self.pytest_args:
args.insert(0, self.pytest_args)
errno = pytest.main(args)
sys.exit(errno)
setup(
name='TravisPy',
version='0.3.5',
packages=['travispy', 'travispy.entities'],
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
install_requires=['requests'],
# metadata for upload to PyPI
author='Fabio Menegazzo',
author_email='menegazzo@gmail.com',
description='Python API for Travis CI.',
long_description=open('README.rst').read(),
license='GPL',
keywords='travis ci continuous integration travisci',
url='https://github.com/menegazzo/travispy',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
# tests
tests_require=['pytest'],
cmdclass={'test': PyTest},
)
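# Typical invocations (a sketch, assuming a local checkout):
#   pip install -e .                       # editable install, pulls in 'requests'
#   python setup.py test                   # runs py.test over the 'travispy' package
#   python setup.py test --pytest-args=-x  # '-a'/'--pytest-args' forwards extra args to py.test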
|
gpl-3.0
| -5,545,387,666,978,756,000
| 29.704918
| 75
| 0.608649
| false
| 3.893971
| true
| false
| false
|
tulikavijay/vms
|
vms/administrator/tests/test_report.py
|
1
|
16008
|
# third party
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
# Django
from django.contrib.staticfiles.testing import LiveServerTestCase
from django.db import IntegrityError
# local Django
from pom.locators.administratorReportPageLocators import *
from pom.pages.administratorReportPage import AdministratorReportPage
from pom.pages.authenticationPage import AuthenticationPage
from shift.utils import (
create_admin,
create_volunteer,
create_organization_with_details,
create_event_with_details,
create_job_with_details,
create_shift_with_details,
log_hours_with_details,
register_volunteer_for_shift_utility
)
class Report(LiveServerTestCase):
    '''
    Selenium-based tests for the administrator report page.
    '''
@classmethod
def setUpClass(cls):
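        # Assumes a local Firefox installation (and, on Selenium 3+, a
        # geckodriver binary on the PATH), since these tests drive a real browser.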
cls.driver = webdriver.Firefox()
cls.driver.implicitly_wait(5)
cls.driver.maximize_window()
cls.authentication_page = AuthenticationPage(cls.driver)
cls.report_page = AdministratorReportPage(cls.driver)
cls.elements = AdministratorReportPageLocators()
super(Report, cls).setUpClass()
def setUp(self):
create_admin()
self.login_admin()
self.report_page.go_to_admin_report()
def tearDown(self):
pass
@classmethod
def tearDownClass(cls):
cls.driver.quit()
super(Report, cls).tearDownClass()
def login_admin(self):
self.authentication_page.server_url = self.live_server_url
self.authentication_page.login({ 'username' : 'admin', 'password' : 'admin'})
    def verify_shift_details(self, total_shifts, hours):
        # The shift summary is rendered as plain text, so pull the totals out
        # of the whitespace-separated summary string.
        total_no_of_shifts = self.report_page.get_shift_summary().split(' ')[10].strip('\nTotal')
        total_no_of_hours = self.report_page.get_shift_summary().split(' ')[-1].strip('\n')
self.assertEqual(total_no_of_shifts, total_shifts)
self.assertEqual(total_no_of_hours, hours)
    # Failing test case which has been documented
    # Test commented out to prevent travis build failure - bug #327
"""def test_null_values_with_dataset(self):
# register dataset
org = create_organization_with_details('organization-one')
volunteer = create_volunteer()
volunteer.organization = org
volunteer.save()
# create shift and log hours
# register event first to create job
event = ['Hackathon', '2017-08-21', '2017-09-28']
created_event = create_event_with_details(event)
# create job
job = ['Developer', '2017-08-21', '2017-08-30', '',created_event]
created_job = create_job_with_details(job)
# create shift
shift = ['2017-08-21', '09:00', '15:00', '10', created_job]
created_shift = create_shift_with_details(shift)
logged_shift = log_hours_with_details(volunteer, created_shift, "09:00", "12:00")
report_page = self.report_page
# check admin report with null fields, should return the above shift
report_page.fill_report_form(['','','','',''])
self.verify_shift_details('1','3.0')
self.assertEqual(report_page.element_by_xpath(
self.elements.NAME).text, created_event.name)
self.assertEqual(report_page.element_by_xpath(
self.elements.DATE).text, 'Aug. 21, 2016')
self.assertEqual(report_page.element_by_xpath(
self.elements.START_TIME).text, '9 a.m.')
self.assertEqual(report_page.element_by_xpath(
self.elements.END_TIME).text, '12 p.m.')
self.assertEqual(report_page.element_by_xpath(
self.elements.HOURS).text, '3.0')"""
def test_null_values_with_empty_dataset(self):
# should return no entries
report_page = self.report_page
report_page.fill_report_form(['','','','',''])
self.assertEqual(report_page.get_alert_box_text(),report_page.no_results_message)
def test_only_logged_shifts_are_reported(self):
# register dataset
org = create_organization_with_details('organization-one')
volunteer = create_volunteer()
volunteer.organization = org
volunteer.save()
# register event first to create job
event = ['Hackathon', '2017-08-21', '2017-09-28']
created_event = create_event_with_details(event)
# create job
job = ['Developer', '2017-08-21', '2017-08-30', '',created_event]
created_job = create_job_with_details(job)
# create shift
shift = ['2017-08-21', '09:00', '15:00', '10', created_job]
created_shift = create_shift_with_details(shift)
# shift is assigned to volunteer-one, but hours have not been logged
volunteer_shift = register_volunteer_for_shift_utility(created_shift, volunteer)
report_page = self.report_page
# check admin report with null fields, should not return the above shift
report_page.fill_report_form(['','','','',''])
self.assertEqual(report_page.get_alert_box_text(),report_page.no_results_message)
    # Failing test case which has been documented - bug #327
    # Test commented out to prevent travis build failure
"""def test_check_intersection_of_fields(self):
self.create_dataset()
report_page = self.report_page
search_parameters_1 = ['tom','','','','']
report_page.fill_report_form(search_parameters_1)
self.verify_shift_details('2','2.0')
search_parameters_2 = ['','','','','org-one']
report_page.fill_report_form(search_parameters_2)
self.verify_shift_details('3','3.0')
search_parameters_3 = ['','','event-four','Two','']
report_page.fill_report_form(search_parameters_3)
# 1 shift of 1:30 hrs
self.verify_shift_details('1','1.5')
search_parameters_4 = ['','','one','','']
report_page.fill_report_form(search_parameters_4)
# 3 shifts of 0:30 hrs, 1:00 hrs, 1:00 hrs
self.verify_shift_details('3','2.5')
# check case-insensitive
search_parameters_5 = ['','sherlock','two','','']
report_page.fill_report_form(search_parameters_5)
self.verify_shift_details('1','2.0')
def create_dataset(self):
parameters = {'org' : 'org-one',
'volunteer' : {
'username' : 'uname1',
'password' : 'uname1',
'email' : 'email1@email.com',
'first_name' : 'tom-fname',
'last_name' : 'tom-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-four',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventFour',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '11:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '09:30',
'end_time' : '10:00',}}
self.register_dataset(parameters)
parameters = {'org' : 'org-one',
'volunteer' : {
'username' : 'uname2',
'password' : 'uname2',
'email' : 'email2@email.com',
'first_name' : 'peter-fname',
'last_name' : 'peter-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-one',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventOne',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '18:00',
'end_time' : '23:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '19:00',
'end_time' : '20:00'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-one',
'volunteer' : {
'username' : 'uname3',
'password' : 'uname3',
'email' : 'email3@email.com',
'first_name' : 'tom-fname',
'last_name' : 'tom-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-four',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobTwoInEventFour',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '15:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '10:00',
'end_time' : '11:30'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-two',
'volunteer' : {
'username' : 'uname4',
'password' : 'uname4',
'email' : 'email4@email.com',
'first_name' : 'harry-fname',
'last_name' : 'harry-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-one',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobTwoInEventOne',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '11:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '09:00',
'end_time' : '10:00'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-two',
'volunteer' : {
'username' : 'uname5',
'password' : 'uname5',
'email' : 'email5@email.com',
'first_name' : 'harry-fname',
'last_name' : 'harry-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-two',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventTwo',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '18:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '12:00',
'end_time' : '15:00'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-three',
'volunteer' : {
'username' : 'uname6',
'password' : 'uname6',
'email' : 'email6@email.com',
'first_name' : 'sherlock-fname',
'last_name' : 'sherlock-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-two',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventTwo',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '16:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '12:00',
'end_time' : '14:00'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-four',
'volunteer' : {
'username' : 'uname7',
'password' : 'uname7',
'email' : 'email7@email.com',
'first_name' : 'harvey-fname',
'last_name' : 'harvey-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-one',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobThreeInEventOne',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '09:00',
'end_time' : '13:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '12:00',
'end_time' : '12:30'}}
self.register_dataset(parameters)
parameters = {'org' : 'org-four',
'volunteer' : {
'username' : 'uname8',
'password' : 'uname8',
'email' : 'email8@email.com',
'first_name' : 'mike-fname',
'last_name' : 'mike-lname',
'address' : 'address',
'city' : 'city',
'state' : 'state',
'country' : 'country',
'phone-no' : '9999999999'},
'event' : {
'name' : 'event-three',
'start_date' : '2016-06-01',
'end_date' : '2016-06-10'},
'job' : {
'name' : 'jobOneInEventThree',
'start_date' : '2016-06-01',
'end_date' : '2016-06-01'},
'shift' : {
'date' : '2016-06-01',
'start_time' : '01:00',
'end_time' : '10:00',
'max_volunteers' : '10'},
'vshift' : {
'start_time' : '01:00',
'end_time' : '04:00'}}
self.register_dataset(parameters)"""
|
gpl-2.0
| 6,484,563,254,342,688,000
| 37.760291
| 97
| 0.446777
| false
| 3.889213
| true
| false
| false
|
uclouvain/osis_louvain
|
base/models/offer.py
|
1
|
2121
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2018 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.db import models
from osis_common.models.serializable_model import SerializableModel, SerializableModelAdmin
class OfferAdmin(SerializableModelAdmin):
list_display = ('id', 'title', 'changed')
search_fields = ['title']
class Offer(SerializableModel):
external_id = models.CharField(max_length=100, blank=True, null=True, db_index=True)
changed = models.DateTimeField(null=True, auto_now=True)
title = models.CharField(max_length=255)
def __str__(self):
return "{} {}".format(self.id, self.title)
class Meta:
permissions = (
("can_access_offer", "Can access offer"),
("can_access_catalog", "Can access catalog"),
)
def find_by_id(offer_id):
try:
return Offer.objects.get(pk=offer_id)
except Offer.DoesNotExist:
return None
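# Usage sketch: find_by_id returns None rather than raising, so callers should
# guard against a missing offer (the id below is hypothetical):
#   offer = find_by_id(42)
#   if offer is not None:
#       print(offer)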
|
agpl-3.0
| 137,301,873,159,780,320
| 38.259259
| 91
| 0.649528
| false
| 4.007561
| false
| false
| false
|
django-oscar/django-oscar-mws
|
oscar_mws/migrations/0002_auto__add_field_fulfillmentorderline_shipment__add_field_fulfillmentor.py
|
1
|
34713
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'FulfillmentOrderLine.shipment'
db.add_column('oscar_mws_fulfillmentorderline', 'shipment',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='order_lines', null=True, to=orm['oscar_mws.FulfillmentShipment']),
keep_default=False)
# Adding field 'FulfillmentOrderLine.package'
db.add_column('oscar_mws_fulfillmentorderline', 'package',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='order_lines', null=True, to=orm['oscar_mws.ShipmentPackage']),
keep_default=False)
# Changing field 'FulfillmentOrderLine.line'
db.alter_column('oscar_mws_fulfillmentorderline', 'line_id', self.gf('django.db.models.fields.related.OneToOneField')(unique=True, to=orm['order.Line']))
# Adding unique constraint on 'FulfillmentOrderLine', fields ['line']
db.create_unique('oscar_mws_fulfillmentorderline', ['line_id'])
# Adding field 'ShipmentPackage.package_number'
db.add_column('oscar_mws_shipmentpackage', 'package_number',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Removing unique constraint on 'FulfillmentOrderLine', fields ['line']
db.delete_unique('oscar_mws_fulfillmentorderline', ['line_id'])
# Deleting field 'FulfillmentOrderLine.shipment'
db.delete_column('oscar_mws_fulfillmentorderline', 'shipment_id')
# Deleting field 'FulfillmentOrderLine.package'
db.delete_column('oscar_mws_fulfillmentorderline', 'package_id')
# Changing field 'FulfillmentOrderLine.line'
db.alter_column('oscar_mws_fulfillmentorderline', 'line_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['order.Line']))
# Deleting field 'ShipmentPackage.package_number'
db.delete_column('oscar_mws_shipmentpackage', 'package_number')
models = {
'address.country': {
'Meta': {'ordering': "('-display_order', 'name')", 'object_name': 'Country'},
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': "orm['auth.User']"})
},
'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.shippingevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'ShippingEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shipping_events'", 'symmetrical': 'False', 'through': "orm['order.ShippingEventQuantity']", 'to': "orm['order.Line']"}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': "orm['order.Order']"})
},
'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.ShippingEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_event_quantities'", 'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.shippingeventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'oscar_mws.amazonprofile': {
'Meta': {'object_name': 'AmazonProfile'},
'asin': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'fulfillment_by': ('django.db.models.fields.CharField', [], {'default': "'MFN'", 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_package_quantity': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'launch_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'number_of_items': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'product': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'amazon_profile'", 'unique': 'True', 'to': "orm['catalogue.Product']"}),
'product_tax_code': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'release_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'oscar_mws.feedreport': {
'Meta': {'object_name': 'FeedReport'},
'errors': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processed': ('django.db.models.fields.PositiveIntegerField', [], {}),
'status_code': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'submission': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'report'", 'unique': 'True', 'to': "orm['oscar_mws.FeedSubmission']"}),
'successful': ('django.db.models.fields.PositiveIntegerField', [], {}),
'warnings': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'oscar_mws.feedresult': {
'Meta': {'object_name': 'FeedResult'},
'description': ('django.db.models.fields.TextField', [], {}),
'feed_report': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['oscar_mws.FeedReport']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_code': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'oscar_mws.feedsubmission': {
'Meta': {'ordering': "['-date_updated']", 'object_name': 'FeedSubmission'},
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_submitted': ('django.db.models.fields.DateTimeField', [], {}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processing_status': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'submission_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'submitted_products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'feed_submissions'", 'symmetrical': 'False', 'to': "orm['catalogue.Product']"})
},
'oscar_mws.fulfillmentorder': {
'Meta': {'object_name': 'FulfillmentOrder'},
'date_updated': ('django.db.models.fields.DateTimeField', [], {}),
'fulfillment_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'fulfillment_orders'", 'symmetrical': 'False', 'through': "orm['oscar_mws.FulfillmentOrderLine']", 'to': "orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fulfillment_orders'", 'to': "orm['order.Order']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'})
},
'oscar_mws.fulfillmentorderline': {
'Meta': {'object_name': 'FulfillmentOrderLine'},
'fulfillment_order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fulfillment_lines'", 'to': "orm['oscar_mws.FulfillmentOrder']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'fulfillment_line'", 'unique': 'True', 'to': "orm['order.Line']"}),
'order_item_id': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'package': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'to': "orm['oscar_mws.ShipmentPackage']"}),
'shipment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'to': "orm['oscar_mws.FulfillmentShipment']"})
},
'oscar_mws.fulfillmentshipment': {
'Meta': {'object_name': 'FulfillmentShipment'},
'date_estimated_arrival': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_shipped': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'fulfillment_center_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'fulfillment_shipments'", 'to': "orm['order.Order']"}),
'shipment_events': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'fulfillment_shipments'", 'symmetrical': 'False', 'to': "orm['order.ShippingEvent']"}),
'shipment_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '24'})
},
'oscar_mws.shipmentpackage': {
'Meta': {'object_name': 'ShipmentPackage'},
'carrier_code': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fulfillment_shipment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['oscar_mws.FulfillmentShipment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'package_number': ('django.db.models.fields.IntegerField', [], {}),
'tracking_number': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['oscar_mws']
|
bsd-3-clause
| -6,517,976,486,046,583,000
| 85.568579
| 222
| 0.563046
| false
| 3.627273
| false
| false
| false
|
BrAwnyTime/RayTracer
|
Textures/makeScaledTex.py
|
1
|
1585
|
import numpy as np
import time
import tables
import sys
'''---------------------------------------------------------'''
''' Setup PyTables Files '''
'''---------------------------------------------------------'''
scale = 2
originalName = "earthScaled8"
scaledName = "earthScaled16"
h5tex = tables.open_file("/home/brad/rayTracer/Textures/textures.h5", mode = 'a', title = "HDF5 Texture File")
og = h5tex.get_node(h5tex.root, name=originalName)
texWidth = og.shape[1] / 3
texHeight = og.shape[0]
scaledWidth = texWidth/scale
scaledHeight = texHeight/scale
scaled = np.zeros((scaledHeight, scaledWidth * 3))
str_time = time.time()
curPercent = 0
lastPercent = 0
for y in range(0, scaledHeight):
for x in range(0, scaledWidth):
scaledValue = np.zeros(3)
t_y = y * scale
t_x = x * scale
curPercent = np.floor((((y*scaledWidth)+(x+1))/float(scaledWidth*scaledHeight))*1000) / 10.0
if (curPercent > lastPercent):
lastPercent = curPercent
cur_sec = time.time() - str_time
sys.stdout.write("\rScale Texture %.1f%% [%ds]" % (curPercent, cur_sec))
sys.stdout.flush()
for iy in range(0, scale):
for ix in range(0, scale):
scaledValue += og[t_y + iy, (3 * (t_x + ix)):(3 * (t_x + ix)) + 3]
scaledValue = scaledValue / float(scale**2)
scaled[y, (3 * x):(3 * x) + 3] = scaledValue
earthsmall = h5tex.create_array(h5tex.root, scaledName, scaled, "Scaled texture map of the Earth's surface")
h5tex.close()
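# Note: the nested loops above implement a plain box filter (the mean of each
# scale x scale block). An equivalent vectorized sketch -- assuming, as the
# loop version implicitly does, that the texture dimensions divide evenly by
# `scale` -- would be:
#
#   rgb = og[:].reshape(texHeight, texWidth, 3)
#   blocks = rgb.reshape(scaledHeight, scale, scaledWidth, scale, 3)
#   scaled = blocks.mean(axis=(1, 3)).reshape(scaledHeight, scaledWidth * 3)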
|
mit
| 3,638,641,487,451,038,000
| 27.818182
| 110
| 0.557098
| false
| 3.274793
| false
| false
| false
|
pattarapol-iamngamsup/projecteuler_python
|
problem_011.py
|
1
|
6237
|
""" Copyright 2012, July 31
Written by Pattarapol (Cheer) Iamngamsup
E-mail: IAM.PATTARAPOL@GMAIL.COM
Largest product in a grid
Problem 11
In the 20×20 grid below, four numbers
along a diagonal line have been marked in red.
08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48
The product of these numbers is 26 × 63 × 78 × 14 = 1788696.
What is the greatest product of four adjacent numbers in any direction
(up, down, left, right, or diagonally) in the 20×20 grid?
"""
#################################################
# Importing libraries & modules
import datetime
#################################################
# Global variables
ADJACENT_NUM = 4
ROW_NUM = 20
COL_NUM = 20
GridNumberStr = '08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08 '
GridNumberStr += '49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00 '
GridNumberStr += '81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65 '
GridNumberStr += '52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91 '
GridNumberStr += '22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80 '
GridNumberStr += '24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50 '
GridNumberStr += '32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70 '
GridNumberStr += '67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21 '
GridNumberStr += '24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72 '
GridNumberStr += '21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95 '
GridNumberStr += '78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92 '
GridNumberStr += '16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57 '
GridNumberStr += '86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58 '
GridNumberStr += '19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40 '
GridNumberStr += '04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66 '
GridNumberStr += '88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69 '
GridNumberStr += '04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36 '
GridNumberStr += '20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16 '
GridNumberStr += '20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54 '
GridNumberStr += '01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48 '
#################################################
# Functions
#################################################
# Classes
#################################################
# Main function
def main():
numberStrList = GridNumberStr.split()
numList = list()
for index in range( 0, len( numberStrList ) ):
numList.append( int( numberStrList[index] ) )
greatestProduct = 0
adjacentProduct = 0
for i in range( 0, ROW_NUM ):
for j in range( 0, COL_NUM ):
# left to right
if j + ( ADJACENT_NUM - 1 ) < COL_NUM:
adjacentProduct = ( numList[ ROW_NUM * i + j ]
* numList[ ROW_NUM * i + j + 1 ]
* numList[ ROW_NUM * i + j + 2 ]
* numList[ ROW_NUM * i + j + 3 ] )
if adjacentProduct > greatestProduct:
greatestProduct = adjacentProduct
########################
            # top to bottom
if i + ( ADJACENT_NUM - 1 ) < ROW_NUM:
adjacentProduct = ( numList[ ROW_NUM * i + j ]
* numList[ ROW_NUM * i + 1 + j ]
* numList[ ROW_NUM * i + 2 + j ]
* numList[ ROW_NUM * i + 3 + j ] )
if adjacentProduct > greatestProduct:
greatestProduct = adjacentProduct
########################
            # diagonal, top-left to bottom-right
if j + ( ADJACENT_NUM - 1 ) < COL_NUM \
and i + ( ADJACENT_NUM - 1) < ROW_NUM:
adjacentProduct = ( numList[ ROW_NUM * i + j ]
* numList[ ROW_NUM * ( i + 1 ) + j + 1 ]
* numList[ ROW_NUM * ( i + 2 ) + j + 2 ]
* numList[ ROW_NUM * ( i + 3 ) + j + 3 ] )
if adjacentProduct > greatestProduct:
greatestProduct = adjacentProduct
########################
            # diagonal, top-right to bottom-left
            if j - ( ADJACENT_NUM - 1 ) >= 0 \
and i + ( ADJACENT_NUM - 1 ) < ROW_NUM:
adjacentProduct = ( numList[ ROW_NUM * i + j ]
* numList[ ROW_NUM * ( i + 1 ) + j - 1 ]
* numList[ ROW_NUM * ( i + 2 ) + j - 2 ]
* numList[ ROW_NUM * ( i + 3 ) + j - 3 ] )
if adjacentProduct > greatestProduct:
greatestProduct = adjacentProduct
print( 'answer = {0}'.format( greatestProduct ) )
#################################################
# Main execution
if __name__ == '__main__':
# get starting date time
startingDateTime = datetime.datetime.utcnow()
print( 'startingDateTime = {0} UTC'.format( startingDateTime ) )
# call main function
main()
# get ending date time
endingdateTime = datetime.datetime.utcnow()
print( 'endingdateTime = {0} UTC'.format( endingdateTime ) )
# compute delta date time
deltaDateTime = endingdateTime - startingDateTime
print( 'deltaDateTime = {0}'.format( deltaDateTime ) )
|
gpl-3.0
| 6,276,150,481,619,951,000
| 40.141892
| 79
| 0.575758
| false
| 3.03356
| true
| false
| false
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_07_01/operations/_available_delegations_operations.py
|
1
|
5366
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class AvailableDelegationsOperations(object):
"""AvailableDelegationsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
location, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AvailableDelegationsResult"]
"""Gets all of the available subnet delegations for this subscription in this region.
:param location: The location of the subnet.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AvailableDelegationsResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_07_01.models.AvailableDelegationsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AvailableDelegationsResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AvailableDelegationsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/availableDelegations'} # type: ignore
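# Illustrative usage sketch (not part of the generated client; assumes the
# azure-identity package and a valid subscription id):
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.network import NetworkManagementClient
#
#   client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
#   for delegation in client.available_delegations.list("westus"):
#       print(delegation.service_name)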
|
mit
| 7,378,669,600,206,450,000
| 44.863248
| 148
| 0.641446
| false
| 4.590248
| true
| false
| false
|
ThomasMcVay/MediaApp
|
MediaAppKnobs/KnobElements/FloatWidget.py
|
1
|
1798
|
#===============================================================================
# @Author: Madison Aster
# @ModuleDescription:
# @License:
# MediaApp Library - Python Package framework for developing robust Media
# Applications with Qt Library
# Copyright (C) 2013 Madison Aster
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 2.1 as published by the Free Software Foundation;
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See LICENSE in the root directory of this library for copy of
# GNU Lesser General Public License and other license details.
#===============================================================================
from Qt import QtGui, QtCore, QtWidgets
class FloatWidget(QtWidgets.QLineEdit):
def __init__(self):
super(FloatWidget, self).__init__()
self.setSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
self.setAlignment(QtCore.Qt.AlignLeft)
def setValue(self, value):
self.setText(str(value))
        # setText() above already emits textChanged for us
self.update()
def getValue(self):
return float(self.text())
def sizeHint(self):
return QtCore.QSize(150,16)
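# Minimal usage sketch (assumes a running QApplication):
#
#   app = QtWidgets.QApplication([])
#   w = FloatWidget()
#   w.setValue(3.14)
#   assert w.getValue() == 3.14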
|
lgpl-2.1
| 156,117,284,344,290,800
| 41.902439
| 88
| 0.619577
| false
| 4.374696
| false
| false
| false
|
ARM-software/astc-encoder
|
Test/astc_quality_test.py
|
1
|
3498
|
#!/usr/bin/env python3
# SPDX-License-Identifier: Apache-2.0
# -----------------------------------------------------------------------------
# Copyright 2021 Arm Limited
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# -----------------------------------------------------------------------------
"""
The ``astc_quality_test`` utility provides a tool to sweep quality settings.
"""
import numpy as np
import re
import subprocess as sp
import sys
def get_psnr_pattern():
return r"\s*PSNR \(LDR-RGB\):\s*([0-9.]*) dB"
def get_coding_rate_pattern():
return r"\s*Coding rate:\s*([0-9.]*) MT/s"
def parse_output(output):
# Regex pattern for image quality
patternPSNR = re.compile(get_psnr_pattern())
patternCRate = re.compile(get_coding_rate_pattern())
# Extract results from the log
runPSNR = None
runCRate = None
for line in output:
match = patternPSNR.match(line)
if match:
runPSNR = float(match.group(1))
match = patternCRate.match(line)
if match:
runCRate = float(match.group(1))
assert runPSNR is not None, "No coding PSNR found"
assert runCRate is not None, "No coding rate found"
return (runPSNR, runCRate)
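# Illustrative behaviour of parse_output() on a synthetic log fragment (the
# format is inferred from the regexes above, not captured from a real run):
#
#   parse_output(["    PSNR (LDR-RGB):    38.472 dB",
#                 "    Coding rate:       12.340 MT/s"])
#   # -> (38.472, 12.34)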
def execute(command):
"""
Run a subprocess with the specified command.
Args:
command (list(str)): The list of command line arguments.
Returns:
list(str): The output log (stdout) split into lines.
"""
try:
result = sp.run(command, stdout=sp.PIPE, stderr=sp.PIPE,
check=True, universal_newlines=True)
except (OSError, sp.CalledProcessError):
print("ERROR: Test run failed")
print(" + %s" % " ".join(command))
qcommand = ["\"%s\"" % x for x in command]
print(" + %s" % ", ".join(qcommand))
sys.exit(1)
return result.stdout.splitlines()
def main():
"""
The main function.
Returns:
int: The process return code.
"""
for block in ("4x4", "5x5", "6x6", "8x8", "10x10"):
        for quality in range(0, 101, 2):
resultsQ = []
resultsS = []
if (quality < 40):
repeats = 20
elif (quality < 75):
repeats = 10
else:
repeats = 5
for _ in range(0, repeats):
command = [
"./astcenc/astcenc-avx2",
"-tl",
"./Test/Images/Kodak/LDR-RGB/ldr-rgb-kodak23.png",
"/dev/null",
block,
"%s" % quality,
"-silent"
]
stdout = execute(command)
psnr, mts = parse_output(stdout)
resultsQ.append(psnr)
resultsS.append(mts)
print("%s, %u, %0.3f, %0.3f" % (block, quality, np.mean(resultsS), np.mean(resultsQ)))
return 0
if __name__ == "__main__":
sys.exit(main())
|
apache-2.0
| -8,849,309,807,774,193,000
| 27.209677
| 98
| 0.540309
| false
| 3.85667
| false
| false
| false
|
tensorflow/models
|
official/vision/image_classification/optimizer_factory.py
|
1
|
6894
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Optimizer factory for vision tasks."""
from __future__ import absolute_import
from __future__ import division
# from __future__ import google_type_annotations
from __future__ import print_function
from typing import Any, Dict, Optional, Text
from absl import logging
import tensorflow as tf
import tensorflow_addons as tfa
from official.modeling import optimization
from official.vision.image_classification import learning_rate
from official.vision.image_classification.configs import base_configs
# pylint: disable=protected-access
def build_optimizer(
optimizer_name: Text,
base_learning_rate: tf.keras.optimizers.schedules.LearningRateSchedule,
params: Dict[Text, Any],
model: Optional[tf.keras.Model] = None):
"""Build the optimizer based on name.
Args:
optimizer_name: String representation of the optimizer name. Examples: sgd,
momentum, rmsprop.
base_learning_rate: `tf.keras.optimizers.schedules.LearningRateSchedule`
base learning rate.
params: String -> Any dictionary representing the optimizer params. This
should contain optimizer specific parameters such as `base_learning_rate`,
`decay`, etc.
model: The `tf.keras.Model`. This is used for the shadow copy if using
`ExponentialMovingAverage`.
Returns:
A tf.keras.Optimizer.
Raises:
ValueError if the provided optimizer_name is not supported.
"""
optimizer_name = optimizer_name.lower()
logging.info('Building %s optimizer with params %s', optimizer_name, params)
if optimizer_name == 'sgd':
logging.info('Using SGD optimizer')
nesterov = params.get('nesterov', False)
optimizer = tf.keras.optimizers.SGD(
learning_rate=base_learning_rate, nesterov=nesterov)
elif optimizer_name == 'momentum':
logging.info('Using momentum optimizer')
nesterov = params.get('nesterov', False)
optimizer = tf.keras.optimizers.SGD(
learning_rate=base_learning_rate,
momentum=params['momentum'],
nesterov=nesterov)
elif optimizer_name == 'rmsprop':
logging.info('Using RMSProp')
rho = params.get('decay', None) or params.get('rho', 0.9)
momentum = params.get('momentum', 0.9)
epsilon = params.get('epsilon', 1e-07)
optimizer = tf.keras.optimizers.RMSprop(
learning_rate=base_learning_rate,
rho=rho,
momentum=momentum,
epsilon=epsilon)
elif optimizer_name == 'adam':
logging.info('Using Adam')
beta_1 = params.get('beta_1', 0.9)
beta_2 = params.get('beta_2', 0.999)
epsilon = params.get('epsilon', 1e-07)
optimizer = tf.keras.optimizers.Adam(
learning_rate=base_learning_rate,
beta_1=beta_1,
beta_2=beta_2,
epsilon=epsilon)
elif optimizer_name == 'adamw':
logging.info('Using AdamW')
weight_decay = params.get('weight_decay', 0.01)
beta_1 = params.get('beta_1', 0.9)
beta_2 = params.get('beta_2', 0.999)
epsilon = params.get('epsilon', 1e-07)
optimizer = tfa.optimizers.AdamW(
weight_decay=weight_decay,
learning_rate=base_learning_rate,
beta_1=beta_1,
beta_2=beta_2,
epsilon=epsilon)
else:
raise ValueError('Unknown optimizer %s' % optimizer_name)
if params.get('lookahead', None):
logging.info('Using lookahead optimizer.')
optimizer = tfa.optimizers.Lookahead(optimizer)
# Moving average should be applied last, as it's applied at test time
moving_average_decay = params.get('moving_average_decay', 0.)
if moving_average_decay is not None and moving_average_decay > 0.:
if model is None:
raise ValueError(
'`model` must be provided if using `ExponentialMovingAverage`.')
logging.info('Including moving average decay.')
optimizer = optimization.ExponentialMovingAverage(
optimizer=optimizer, average_decay=moving_average_decay)
optimizer.shadow_copy(model)
return optimizer
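# Illustrative usage sketch (assumes TensorFlow is available; the parameter
# names follow the handling above):
#
#   lr = tf.keras.optimizers.schedules.ExponentialDecay(
#       initial_learning_rate=0.1, decay_steps=1000, decay_rate=0.9)
#   opt = build_optimizer('momentum', lr, {'momentum': 0.9, 'nesterov': True})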
def build_learning_rate(params: base_configs.LearningRateConfig,
batch_size: Optional[int] = None,
train_epochs: Optional[int] = None,
train_steps: Optional[int] = None):
"""Build the learning rate given the provided configuration."""
decay_type = params.name
base_lr = params.initial_lr
decay_rate = params.decay_rate
if params.decay_epochs is not None:
decay_steps = params.decay_epochs * train_steps
else:
decay_steps = 0
if params.warmup_epochs is not None:
warmup_steps = params.warmup_epochs * train_steps
else:
warmup_steps = 0
lr_multiplier = params.scale_by_batch_size
if lr_multiplier and lr_multiplier > 0:
# Scale the learning rate based on the batch size and a multiplier
base_lr *= lr_multiplier * batch_size
logging.info(
'Scaling the learning rate based on the batch size '
'multiplier. New base_lr: %f', base_lr)
if decay_type == 'exponential':
logging.info(
'Using exponential learning rate with: '
'initial_learning_rate: %f, decay_steps: %d, '
'decay_rate: %f', base_lr, decay_steps, decay_rate)
lr = tf.keras.optimizers.schedules.ExponentialDecay(
initial_learning_rate=base_lr,
decay_steps=decay_steps,
decay_rate=decay_rate,
staircase=params.staircase)
elif decay_type == 'stepwise':
steps_per_epoch = params.examples_per_epoch // batch_size
boundaries = [boundary * steps_per_epoch for boundary in params.boundaries]
multipliers = [batch_size * multiplier for multiplier in params.multipliers]
logging.info(
'Using stepwise learning rate. Parameters: '
'boundaries: %s, values: %s', boundaries, multipliers)
lr = tf.keras.optimizers.schedules.PiecewiseConstantDecay(
boundaries=boundaries, values=multipliers)
elif decay_type == 'cosine_with_warmup':
lr = learning_rate.CosineDecayWithWarmup(
batch_size=batch_size,
total_steps=train_epochs * train_steps,
warmup_steps=warmup_steps)
if warmup_steps > 0:
if decay_type not in ['cosine_with_warmup']:
logging.info('Applying %d warmup steps to the learning rate',
warmup_steps)
lr = learning_rate.WarmupDecaySchedule(
lr, warmup_steps, warmup_lr=base_lr)
return lr
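# Illustrative call (assumes a base_configs.LearningRateConfig whose fields
# match the attributes read above, e.g. name='exponential', initial_lr=0.1,
# decay_rate=0.9, decay_epochs=3):
#
#   lr = build_learning_rate(params, batch_size=256,
#                            train_epochs=90, train_steps=500)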
|
apache-2.0
| 6,286,244,039,494,606,000
| 36.879121
| 80
| 0.687554
| false
| 3.7631
| false
| false
| false
|
mahabs/nitro
|
nssrc/com/citrix/netscaler/nitro/resource/config/cs/cspolicylabel_cspolicy_binding.py
|
1
|
9664
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class cspolicylabel_cspolicy_binding(base_resource) :
""" Binding class showing the cspolicy that can be bound to cspolicylabel.
"""
def __init__(self) :
self._policyname = ""
self._priority = 0
self._targetvserver = ""
self._gotopriorityexpression = ""
self._invoke = False
self._labeltype = ""
self._invoke_labelname = ""
self._labelname = ""
self.___count = 0
@property
def priority(self) :
"""Specifies the priority of the policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
"""Specifies the priority of the policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def gotopriorityexpression(self) :
"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
"""
try :
return self._gotopriorityexpression
except Exception as e:
raise e
@gotopriorityexpression.setter
def gotopriorityexpression(self, gotopriorityexpression) :
"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
"""
try :
self._gotopriorityexpression = gotopriorityexpression
except Exception as e:
raise e
@property
def policyname(self) :
"""Name of the content switching policy.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
"""Name of the content switching policy.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def targetvserver(self) :
"""Name of the virtual server to which to forward requests that match the policy.
"""
try :
return self._targetvserver
except Exception as e:
raise e
@targetvserver.setter
def targetvserver(self, targetvserver) :
"""Name of the virtual server to which to forward requests that match the policy.
"""
try :
self._targetvserver = targetvserver
except Exception as e:
raise e
@property
def labeltype(self) :
"""Type of policy label invocation.<br/>Possible values = policylabel.
"""
try :
return self._labeltype
except Exception as e:
raise e
@labeltype.setter
def labeltype(self, labeltype) :
"""Type of policy label invocation.<br/>Possible values = policylabel
"""
try :
self._labeltype = labeltype
except Exception as e:
raise e
@property
def labelname(self) :
"""Name of the policy label to which to bind a content switching policy.
"""
try :
return self._labelname
except Exception as e:
raise e
@labelname.setter
def labelname(self, labelname) :
"""Name of the policy label to which to bind a content switching policy.
"""
try :
self._labelname = labelname
except Exception as e:
raise e
@property
def invoke_labelname(self) :
"""Name of the label to invoke if the current policy rule evaluates to TRUE.
"""
try :
return self._invoke_labelname
except Exception as e:
raise e
@invoke_labelname.setter
def invoke_labelname(self, invoke_labelname) :
"""Name of the label to invoke if the current policy rule evaluates to TRUE.
"""
try :
self._invoke_labelname = invoke_labelname
except Exception as e:
raise e
@property
def invoke(self) :
try :
return self._invoke
except Exception as e:
raise e
@invoke.setter
def invoke(self, invoke) :
try :
self._invoke = invoke
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(cspolicylabel_cspolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.cspolicylabel_cspolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.labelname) :
return str(self.labelname)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = cspolicylabel_cspolicy_binding()
updateresource.labelname = resource.labelname
updateresource.policyname = resource.policyname
updateresource.targetvserver = resource.targetvserver
updateresource.gotopriorityexpression = resource.gotopriorityexpression
updateresource.invoke = resource.invoke
updateresource.labeltype = resource.labeltype
updateresource.invoke_labelname = resource.invoke_labelname
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [cspolicylabel_cspolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].labelname = resource[i].labelname
updateresources[i].policyname = resource[i].policyname
updateresources[i].targetvserver = resource[i].targetvserver
updateresources[i].gotopriorityexpression = resource[i].gotopriorityexpression
updateresources[i].invoke = resource[i].invoke
updateresources[i].labeltype = resource[i].labeltype
updateresources[i].invoke_labelname = resource[i].invoke_labelname
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = cspolicylabel_cspolicy_binding()
deleteresource.labelname = resource.labelname
deleteresource.policyname = resource.policyname
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [cspolicylabel_cspolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].labelname = resource[i].labelname
deleteresources[i].policyname = resource[i].policyname
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, labelname) :
""" Use this API to fetch cspolicylabel_cspolicy_binding resources.
"""
try :
obj = cspolicylabel_cspolicy_binding()
obj.labelname = labelname
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, labelname, filter_) :
""" Use this API to fetch filtered set of cspolicylabel_cspolicy_binding resources.
	Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = cspolicylabel_cspolicy_binding()
obj.labelname = labelname
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, labelname) :
""" Use this API to count cspolicylabel_cspolicy_binding resources configued on NetScaler.
"""
try :
obj = cspolicylabel_cspolicy_binding()
obj.labelname = labelname
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, labelname, filter_) :
""" Use this API to count the filtered set of cspolicylabel_cspolicy_binding resources.
	Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
"""
try :
obj = cspolicylabel_cspolicy_binding()
obj.labelname = labelname
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Labeltype:
policylabel = "policylabel"
class cspolicylabel_cspolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.cspolicylabel_cspolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.cspolicylabel_cspolicy_binding = [cspolicylabel_cspolicy_binding() for _ in range(length)]
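# Illustrative usage sketch (assumes an authenticated nitro_service session
# named `client` and an existing policy label "my_cs_label"):
#
#   bindings = cspolicylabel_cspolicy_binding.get(client, "my_cs_label")
#   total = cspolicylabel_cspolicy_binding.count(client, "my_cs_label")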
|
apache-2.0
| 3,208,087,756,175,860,700
| 28.735385
| 132
| 0.714714
| false
| 3.505259
| false
| false
| false
|
gfetterman/bark
|
bark/tools/barkutils.py
|
1
|
12062
|
import os.path
from glob import glob
import bark
import argparse
from bark import stream
import arrow
from dateutil import tz
import numpy
import sys
import subprocess
def meta_attr():
p = argparse.ArgumentParser(
description="Create/Modify a metadata attribute")
p.add_argument("name", help="name of bark object (Entry or Dataset)")
p.add_argument("attribute",
help="name of bark attribute to create or modify")
p.add_argument("value", help="value of attribute")
args = p.parse_args()
name, attr, val = (args.name, args.attribute, args.value)
attrs = bark.read_metadata(name)
try:
attrs[attr] = eval(val) # try to parse
except Exception:
attrs[attr] = val # assign as string
bark.write_metadata(name, **attrs)
def meta_column_attr():
p = argparse.ArgumentParser(
description="Create/Modify a metadata attribute for a column of data")
p.add_argument("name", help="name of bark object (Entry or Dataset)")
p.add_argument("column", help="name of the column of a Dataset")
p.add_argument("attribute",
help="name of bark attribute to create or modify")
p.add_argument("value", help="value of attribute")
args = p.parse_args()
name, column, attr, val = (args.name, args.column, args.attribute, args.value)
attrs = bark.read_metadata(name)
columns = attrs['columns']
if 'dtype' in attrs:
column = int(column)
try:
columns[column][attr] = eval(val) # try to parse
except Exception:
columns[column][attr] = val # assign as string
bark.write_metadata(name, **attrs)
def mk_entry():
p = argparse.ArgumentParser(description="create a bark entry")
p.add_argument("name", help="name of bark entry")
p.add_argument("-a",
"--attributes",
action='append',
type=lambda kv: kv.split("="),
dest='keyvalues',
help="extra metadata in the form of KEY=VALUE")
p.add_argument("-t",
"--timestamp",
help="format: YYYY-MM-DD or YYYY-MM-DD_HH-MM-SS.S")
p.add_argument("-p",
"--parents",
help="no error if already exists, new meta-data written",
action="store_true")
p.add_argument('--timezone',
help="timezone of timestamp, default: America/Chicago",
default='America/Chicago')
args = p.parse_args()
timestamp = arrow.get(args.timestamp).replace(
tzinfo=tz.gettz(args.timezone)).datetime
attrs = dict(args.keyvalues) if args.keyvalues else {}
bark.create_entry(args.name, timestamp, args.parents, **attrs)
def _clean_metafiles(path, recursive, meta='.meta.yaml'):
metafiles = glob(os.path.join(path, "*" + meta))
for mfile in metafiles:
if not os.path.isfile(mfile[:-len(meta)]):
os.remove(mfile)
if recursive:
dirs = [x
for x in os.listdir(path)
if os.path.isdir(os.path.join(path, x))]
for d in dirs:
_clean_metafiles(os.path.join(path, d), True, meta)
def clean_metafiles():
"""
remove x.meta.yaml files with no associated file (x)
"""
p = argparse.ArgumentParser(
description="remove x.meta.yaml files with no associated file (x)")
p.add_argument("path", help="name of bark entry", default=".")
p.add_argument("-r",
"--recursive",
help="search recursively",
action="store_true")
args = p.parse_args()
_clean_metafiles(args.path, args.recursive)
def rb_concat():
p = argparse.ArgumentParser(
description="""Concatenate raw binary files by adding new samples.
Do not confuse with merge, which combines channels""")
p.add_argument("input", help="input raw binary files", nargs="+")
p.add_argument("-a",
"--attributes",
action='append',
type=lambda kv: kv.split("="),
dest='keyvalues',
help="extra metadata in the form of KEY=VALUE")
p.add_argument("-o", "--out", help="name of output file", required=True)
args = p.parse_args()
if args.keyvalues:
attrs = dict(args.keyvalues)
else:
attrs = {}
streams = [stream.read(x) for x in args.input]
streams[0].chain(*streams[1:]).write(args.out, **attrs)
def rb_decimate():
' Downsample raw binary file.'
p = argparse.ArgumentParser(description="Downsample raw binary file")
p.add_argument("input", help="input bark file")
p.add_argument("--factor",
required=True,
type=int,
help="downsample factor")
p.add_argument("-a",
"--attributes",
action='append',
type=lambda kv: kv.split("="),
dest='keyvalues',
help="extra metadata in the form of KEY=VALUE")
p.add_argument("-o", "--out", help="name of output file", required=True)
args = p.parse_args()
if args.keyvalues:
attrs = dict(args.keyvalues)
else:
attrs = {}
stream.read(args.input).decimate(args.factor).write(args.out, **attrs)
def rb_select():
p = argparse.ArgumentParser(description='''
Select a subset of channels from a sampled dataset
''')
p.add_argument('dat', help='dat file')
p.add_argument('-o', '--out', help='name of output datfile')
p.add_argument('-c',
'--channels',
help='''channels to extract,
zero indexed channel numbers
unless --col-attr is set, in which case
channels are metadata values''',
nargs='+',
required=True)
p.add_argument('--col-attr',
help='name of column attribute to select channels with')
args = p.parse_args()
fname, outfname, channels, col_attr = (args.dat, args.out, args.channels,
args.col_attr)
stream = bark.read_sampled(fname).toStream()
if col_attr:
columns = stream.attrs['columns']
rev_attr = {col[col_attr]: idx
for idx, col in columns.items()
if col_attr in col} # so you can tag only some channels
channels = [rev_attr[c] for c in channels]
else:
channels = [int(c) for c in channels]
stream[channels].write(outfname)
def rb_filter():
p = argparse.ArgumentParser(description="""
filter a sampled dataset
""")
p.add_argument("dat", help="dat file")
p.add_argument("-o", "--out", help="name of output dat file")
p.add_argument("--order", help="filter order", default=3, type=int)
p.add_argument("--highpass", help="highpass frequency", type=float)
p.add_argument("--lowpass", help="lowpass frequency", type=float)
p.add_argument("-f",
"--filter",
help="filter type: butter or bessel",
default="bessel")
opt = p.parse_args()
dtype = bark.read_metadata(opt.dat)['dtype']
stream.read(opt.dat)._analog_filter(opt.filter,
highpass=opt.highpass,
lowpass=opt.lowpass,
order=opt.order).write(opt.out, dtype)
attrs = bark.read_metadata(opt.out)
attrs['highpass'] = opt.highpass
attrs['lowpass'] = opt.lowpass
attrs['filter'] = opt.filter
attrs['filter_order'] = opt.order
bark.write_metadata(opt.out, **attrs)
def rb_diff():
p = argparse.ArgumentParser(description="""
Subtracts one channel from another
""")
p.add_argument("dat", help="dat file")
p.add_argument("-c",
"--channels",
help="""channels to difference, zero indexed, default: 0 1,
subtracts second channel from first.""",
type=int,
nargs="+")
p.add_argument("-o", "--out", help="name of output dat file")
opt = p.parse_args()
dat, out, channels = opt.dat, opt.out, opt.channels
if not channels:
channels = (0, 1)
(stream.read(dat)[channels[0]] - stream.read(dat)[channels[1]]).write(out)
def rb_join():
p = argparse.ArgumentParser(description="""
Combines dat files by adding new channels with the same number
samples. To add additional samples, use dat-cat""")
p.add_argument("dat", help="dat files", nargs="+")
p.add_argument("-o", "--out", help="name of output dat file")
opt = p.parse_args()
streams = [stream.read(fname) for fname in opt.dat]
streams[0].merge(*streams[1:]).write(opt.out)
def rb_to_audio():
p = argparse.ArgumentParser()
p.add_argument("dat",
help="""dat file to convert to audio,
can be any number of channels but you probably want 1 or 2""")
p.add_argument("out", help="name of output file, with filetype extension")
opt = p.parse_args()
attrs = bark.read_metadata(opt.dat)
sr = str(attrs['sampling_rate'])
ch = str(len(attrs['columns']))
dt = numpy.dtype(attrs['dtype'])
bd = str(dt.itemsize * 8)
if dt.name[:5] == 'float':
enc = 'floating-point'
elif dt.name[:3] == 'int':
enc = 'signed-integer'
elif dt.name[:4] == 'uint':
enc = 'unsigned-integer'
else:
        raise TypeError('cannot handle dtype of ' + dt.name)
if dt.byteorder == '<':
order = 'little'
elif dt.byteorder == '>':
order = 'big'
elif dt.byteorder == '=': # native
order = sys.byteorder
else:
raise ValueError('unrecognized endianness: ' + dt.byteorder)
sox_cmd = ['sox', '-r', sr, '-c', ch, '-b', bd, '-e', enc,
'--endian', order, '-t', 'raw', opt.dat, opt.out]
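    # For illustration only (file names and parameters are hypothetical): for
    # an int16, little-endian, 2-channel dat file sampled at 30 kHz, the
    # assembled command would be
    #   sox -r 30000 -c 2 -b 16 -e signed-integer --endian little -t raw in.dat out.wav
    # i.e. sox is told the full raw layout, since .dat files carry no header.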
try:
subprocess.run(sox_cmd)
except FileNotFoundError as e:
if "'sox'" in str(e):
raise FileNotFoundError(str(e) + '. dat-to-audio requires SOX')
else:
raise
def rb_to_wave_clus():
import argparse
p = argparse.ArgumentParser(prog="dat2wave_clus",
description="""
Converts a raw binary file to a wav_clus compatible matlab file
""")
p.add_argument("dat", help="dat file")
p.add_argument("-o", "--out", help="name of output .mat file")
opt = p.parse_args()
from scipy.io import savemat
dataset = bark.read_sampled(opt.dat)
savemat(opt.out,
{'data': dataset.data.T,
'sr': dataset.attrs['sampling_rate']},
appendmat=False)
def _datchunk():
p = argparse.ArgumentParser(description="split a dat file by samples")
p.add_argument("dat", help="datfile")
p.add_argument("stride",
type=float,
help="number of samples to chunk together")
p.add_argument("--seconds",
help="specify seconds instead of samples",
action='store_true')
p.add_argument("--onecut",
help="only perform the first cut",
action="store_true")
args = p.parse_args()
datchunk(args.dat, args.stride, args.seconds, args.onecut)
def datchunk(dat, stride, use_seconds, one_cut):
def write_chunk(chunk, attrs, i):
filename = "{}-chunk-{}.dat".format(basename, i)
attrs['offset'] = stride * i
bark.write_sampled(filename, chunk, **attrs)
attrs = bark.read_metadata(dat)
if use_seconds:
stride = stride * attrs['sampling_rate']
stride = int(stride)
basename = os.path.splitext(dat)[0]
if one_cut:
sds = bark.read_sampled(dat)
write_chunk(sds.data[:stride,:], attrs, 0)
write_chunk(sds.data[stride:,:], attrs, 1)
else:
for i, chunk in enumerate(stream.read(dat, chunksize=stride)):
write_chunk(chunk, attrs, i)
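# Minimal usage sketch (file names are hypothetical):
#   datchunk('rec.dat', 60.0, use_seconds=True, one_cut=False)
# would write rec-chunk-0.dat, rec-chunk-1.dat, ..., each holding 60 s of
# samples, with each chunk's 'offset' attribute recording how many samples
# precede it in the original recording.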
|
gpl-2.0
| -8,583,137,634,202,377,000
| 36.113846
| 82
| 0.570718
| false
| 3.852443
| false
| false
| false
|
taosheng/jarvis
|
chatbot/src/socialEnBrain.py
|
1
|
2090
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import json
import requests
import re
import random
import time
import sys
import csv
from genericKB import genericHandler
from esHealth import esHealthHandler
from wikiFinder import findWikiEn
#from io import open
import codecs
#from pttChat import pttHandler
#from wikiChat import wikiHandler
import os
class GenericEnBrain():
listIdx = [('enbasic1',0.8), ('enbot1',2.0)]
kb = {}
notFoundResList = []
def __init__(self):
with open('basickb_en.csv') as csvfile:
spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
for row in spamreader:
if(len(row)>=2):
self.kb[row[0].strip()] = row[1].strip()
def randomAct(self, actKey):
res_act = self.kb[actKey].split(";")
return random.choice(res_act)
def think(self, msg):
response = ''
dirtylist = self.kb['dirty_words'].lower().split(";")
msg = msg.strip()
for dword in dirtylist:
dword = dword.strip()
if dword in msg:
return self.randomAct('dirty_words_res')
for cnf in self.listIdx:
response = genericHandler(cnf[0], 'fb', msg, min_score=cnf[1])
if response != '':
return response
        if response == '': # WikiFinder fallback
nltk_data_path = os.getcwd()+'/nltk_data'
print(nltk_data_path)
os.environ['NLTK_DATA'] = nltk_data_path
from textblob import TextBlob
b = TextBlob(msg.lower())
if len(b.noun_phrases) > 0:
toFindInWiki = b.noun_phrases[0]
wikiResponse = findWikiEn(toFindInWiki)
response = wikiResponse[0:256] + "...<search from wiki>"
if response == '':
response = self.randomAct('act_no_info')
return response
genBrain = GenericEnBrain()
if __name__ == '__main__':
msg = sys.argv[1]
print(genBrain.think(msg))
# print(gBrain.think(msg))
# print(fbBrain.think(msg))
|
apache-2.0
| -2,871,116,191,798,822,400
| 26.142857
| 74
| 0.571292
| false
| 3.591065
| false
| false
| false
|
jas14/khmer
|
scripts/extract-paired-reads.py
|
1
|
3488
|
#! /usr/bin/env python
#
# This script is part of khmer, https://github.com/dib-lab/khmer/, and is
# Copyright (C) Michigan State University, 2009-2015. It is licensed under
# the three-clause BSD license; see LICENSE.
# Contact: khmer-project@idyll.org
#
# pylint: disable=invalid-name,missing-docstring
"""
Split up pairs and singletons.
Take a file containing a mixture of interleaved and orphaned reads, and
extract them into separate files (.pe and .se).
% scripts/extract-paired-reads.py <infile>
Reads FASTQ and FASTA input, retains format for output.
"""
from __future__ import print_function
import screed
import sys
import os.path
import textwrap
import argparse
import khmer
from khmer.kfile import check_input_files, check_space
from khmer.khmer_args import info
from khmer.utils import broken_paired_reader, write_record, write_record_pair
def get_parser():
epilog = """
The output is two files, <input file>.pe and <input file>.se, placed in the
current directory. The .pe file contains interleaved and properly paired
sequences, while the .se file contains orphan sequences.
Many assemblers (e.g. Velvet) require that you give them either perfectly
interleaved files, or files containing only single reads. This script takes
files that were originally interleaved but where reads may have been
orphaned via error filtering, application of abundance filtering, digital
normalization in non-paired mode, or partitioning.
Example::
extract-paired-reads.py tests/test-data/paired.fq
"""
parser = argparse.ArgumentParser(
description='Take a mixture of reads and split into pairs and '
'orphans.', epilog=textwrap.dedent(epilog))
parser.add_argument('infile')
parser.add_argument('--version', action='version', version='%(prog)s ' +
khmer.__version__)
parser.add_argument('-f', '--force', default=False, action='store_true',
help='Overwrite output file if it exists')
return parser
def main():
info('extract-paired-reads.py')
args = get_parser().parse_args()
check_input_files(args.infile, args.force)
infiles = [args.infile]
check_space(infiles, args.force)
outfile = os.path.basename(args.infile)
if len(sys.argv) > 2:
outfile = sys.argv[2]
single_fp = open(outfile + '.se', 'w')
paired_fp = open(outfile + '.pe', 'w')
print('reading file "%s"' % args.infile, file=sys.stderr)
print('outputting interleaved pairs to "%s.pe"' % outfile, file=sys.stderr)
print('outputting orphans to "%s.se"' % outfile, file=sys.stderr)
n_pe = 0
n_se = 0
screed_iter = screed.open(args.infile, parse_description=False)
for index, is_pair, read1, read2 in broken_paired_reader(screed_iter):
if index % 100000 == 0 and index > 0:
print('...', index, file=sys.stderr)
if is_pair:
write_record_pair(read1, read2, paired_fp)
n_pe += 1
else:
write_record(read1, single_fp)
n_se += 1
single_fp.close()
paired_fp.close()
if n_pe == 0:
raise Exception("no paired reads!? check file formats...")
print('DONE; read %d sequences,'
' %d pairs and %d singletons' %
(n_pe * 2 + n_se, n_pe, n_se), file=sys.stderr)
print('wrote to: ' + outfile + '.se' + ' and ' + outfile + '.pe',
file=sys.stderr)
if __name__ == '__main__':
main()
|
bsd-3-clause
| -76,326,370,296,495,920
| 31.296296
| 79
| 0.653956
| false
| 3.544715
| false
| false
| false
|
CondensedOtters/PHYSIX_Utils
|
Projects/Moog_2016-2019/CO2/CO2_NN/analysis.py
|
1
|
9175
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jun 14 05:54:11 2020
@author: mathieumoog
"""
import cpmd
import filexyz
import numpy as np
import matplotlib.pyplot as plt
# MSMbuilder ( lacks CK validation )
from msmbuilder.msm import MarkovStateModel
from msmbuilder.msm import BayesianMarkovStateModel
from msmbuilder.utils import dump
# PyEMMMA ( has CK validation )
import pyemma as pe
from pyemma.datasets import double_well_discrete
def getDistance1Dsq( position1, position2, length):
dist = position1-position2
half_length = length*0.5
if dist > half_length :
dist -= length
elif dist < -half_length:
dist += length
return dist*dist
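# Minimum-image sketch: in a periodic box of length 10, positions 9 and 1 are
# separated by 2, not 8 -- the raw difference 8 exceeds half the box length
# (5), so one box length is subtracted before squaring. E.g.
# getDistance1Dsq(9.0, 1.0, 10.0) == 4.0 (the squared distance).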
def getDistanceOrtho( positions, index1, index2, cell_lengths ):
dist=0
for i in range(3):
dist += getDistance1Dsq( positions[index1,i], positions[index2,i], cell_lengths[i] )
return np.sqrt(dist)
def computeContactMatrix( positions, cell_lengths, cut_off ):
nb_atoms = len(positions[:,0])
matrix = np.zeros(( nb_atoms, nb_atoms ))
for atom in range(nb_atoms):
for atom2 in range(atom+1,nb_atoms):
if getDistanceOrtho( positions, atom, atom2, cell_lengths ) < cut_off :
matrix[atom,atom2] = 1
matrix[atom2,atom] = 1
return matrix
def computeTransitionMatrix( states, nb_states, tau, step_max ):
nb_step = len(states)
matrix = np.zeros((nb_states,nb_states))
for step in range( nb_step-step_max ):
matrix[ states[step], states[step+tau] ] += 1
return matrix
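# Illustrative count sketch: matrix[i, j] counts how often the trajectory is
# in state i at step t and in state j at step t+tau; rows are turned into
# probabilities by the callers below. E.g. states = [0, 0, 1, 1] with
# tau = 1 and step_max = 1 yields raw counts
#   [[1, 1],
#    [0, 1]]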
def computeChapmanKolmogorov( matrix, nb_states ):
matrix_ck = np.zeros((nb_states,nb_states),dtype=float)
for state_i in range( nb_states ):
for state_j in range( nb_states ):
for i in range(nb_states):
matrix_ck[ state_i, state_j ] += matrix[state_i,i]*matrix[i,state_j]
return matrix_ck
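# Chapman-Kolmogorov sketch: matrix_ck is the matrix square P(tau)^2, i.e.
# matrix_ck[i, j] = sum_k P(tau)[i, k] * P(tau)[k, j]. For a Markovian
# process this should approximately match the directly estimated P(2*tau);
# the RMSE figures computed below quantify the deviation.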
volume=8.82
temperature=3000
# run_nb=1
path_sim = str( "/Users/mathieumoog/Documents/CO2/" +
str(volume) + "/" +
str(temperature) + "K/"
# + str(run_nb) + "-run/"
)
cell_lengths = np.ones(3)*volume
traj_path = str( path_sim + "TRAJEC_fdb_wrapped.xyz" )
traj = filexyz.readAsArray( traj_path )
nbC=32
nbO=64
nb_atoms=nbC+nbO
max_neigh=5
nb_step=len(traj[:,0,0])
cut_off = 1.75
min_stat=1000
# Build States
coordC = np.zeros( (nb_step,nbC), dtype=int )
coordO = np.zeros( (nb_step,nbO), dtype=int )
for step in range(nb_step):
matrix = computeContactMatrix( traj[step,:,:], cell_lengths, cut_off)
for carbon in range(0,nbC):
coordC[ step, carbon ] = int( sum(matrix[carbon,:]) )
for oxygen in range(nbC,nb_atoms):
coordO[ step, oxygen-nbC ] = int( sum(matrix[oxygen,:]) )
c_min = coordC.min()
o_min = coordO.min()
# Adapting the labels to make sure they are in the 0-nb_states range
coordC -= c_min
coordO -= o_min
msm = MarkovStateModel( lag_time=1, n_timescales=6)
msm.fit( coordC[:,0] )
msm.timescales_
# Computing nb of states (max)
nb_states_C = coordC.max()+1
nb_states_O = coordO.max()+1
# Computing Equilibrium States Probabilities
coordC_hist = np.zeros( nb_states_C )
ones_ = np.ones((nb_step,nbC), dtype=int )
for i in range( nb_states_C ):
coordC_hist[i] = sum( ones_[ coordC == i ] )
# Clean marginal states
# for state in range( nb_states_C ):
# if coordC_hist[state] < min_stat:
# mask_to_clean = coordC[ :, : ]
coordC_hist /= sum(coordC_hist[:])
# Computing Equilibrium States Probabilities, cleaning marginals
ones_ = np.ones((nb_step,nbO), dtype=int )
coordO_hist = np.zeros( nb_states_O )
for i in range( nb_states_O ):
coordO_hist[i] = sum( ones_[ coordO == i ] )
coordO_hist /= sum(coordO_hist[:])
# Plotting Oxygens
plt.figure()
plt.plot(coordC_hist,"b.-")
plt.plot(coordO_hist,"r.-")
plt.legend(["C states","O states"])
plt.show()
dt=5*0.001
frac = 0.75
max_step=int(nb_step*frac)
nb_tau_min=int(250)
nb_tau_max=int(2*nb_tau_min)
# Computing Transition Matrix for a given tau
matrix_tot=np.zeros((nb_states_C,nb_states_C,nb_tau_max), dtype=float )
matrix_tot_ck=np.zeros((nb_states_C,nb_states_C,nb_tau_min), dtype=float )
for tau in range(nb_tau_max):
matrix = np.zeros((nb_states_C,nb_states_C),dtype=float)
for carbon in range(nbC):
matrix += computeTransitionMatrix( coordC[:,carbon], nb_states_C, tau+1, max_step )
for state in range(nb_states_C):
matrix[state,:] /= sum( matrix[state,:] )
matrix_tot[:,:,tau] = matrix[:,:]
if tau < nb_tau_min:
matrix_tot_ck[:,:,tau] = computeChapmanKolmogorov( matrix_tot[:,:,tau], nb_states_C )
carbon_target=3
matrix_markov = np.zeros( (4,4,nb_tau_min), dtype=float )
matrix_markov_ck = np.zeros( (4,4,nb_tau_min), dtype=float )
for tau in range(1,nb_tau_min+1):
msm_matrix = MarkovStateModel( lag_time=tau, reversible_type="mle" ,n_timescales=nb_states_C, ergodic_cutoff="on", sliding_window=True, verbose=True)
msm_matrix.fit( coordC[:,carbon_target] )
matrix_markov[:,:,tau-1] = msm_matrix.transmat_
for state_i in range( len(matrix_markov) ):
for state_j in range( len(matrix_markov) ):
for i in range( len(matrix_markov) ):
matrix_markov_ck[ state_i, state_j, tau-1 ] += matrix_markov[state_i,i,tau-1]*matrix_markov[i,state_j,tau-1]
# PyEMMA
lags = [1,5,10,15,20,50,100,200]
implied_timescales = pe.msm.its(dtrajs=coordC[:,carbon_target].tolist(),lags=lags)
pe.plots.plot_implied_timescales(implied_timescales,units='time-steps', ylog=False)
M = pe.msm.estimate_markov_model(dtrajs=coordC[:,carbon_target].tolist(), lag = 10 )
cktest = M.cktest(nsets=3)
cktplt = pe.plots.plot_cktest(cktest)
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("P_ij, P_ij^CK")
# plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[0,0,:], "k-" )
# plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[0,0,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_min,dt*1), matrix_markov[0,0,:], "k-" )
plt.plot( np.arange(0,2*dt*nb_tau_min,dt*2), matrix_markov_ck[0,0,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,1,:], "r-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,1,:], "r--" )
plt.plot( np.arange(0,dt*nb_tau_min,dt*1), matrix_markov[0,1,:], "k-" )
plt.plot( np.arange(0,2*dt*nb_tau_min,dt*2), matrix_markov_ck[0,1,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,2,:], "b-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,2,:], "b--" )
plt.plot( np.arange(0,dt*nb_tau_min,dt*1), matrix_markov[0,2,:], "k-" )
plt.plot( np.arange(0,2*dt*nb_tau_min,dt*2), matrix_markov_ck[0,2,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,3,:], "g-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,3,:], "g--" )
plt.plot( np.arange(0,dt*nb_tau_min,dt*1), matrix_markov[0,3,:], "k-" )
plt.plot( np.arange(0,2*dt*nb_tau_min,dt*2), matrix_markov_ck[0,3,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,4,:], "m-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,4,:], "m--" )
plt.show()
rmseC = np.zeros(nb_tau_min, dtype=float)
for tau in range(nb_tau_min):
mat = matrix_tot[:,:,2*tau]-matrix_tot_ck[:,:,tau]
rmseC[tau] = sum(sum( mat*mat ))/(nb_states_C*nb_states_C)
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("RMSE C (%)")
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), rmseC*100 )
plt.show()
matrix_tot=np.zeros((nb_states_O,nb_states_O,nb_tau_max), dtype=float )
matrix_tot_ck=np.zeros((nb_states_O,nb_states_O,nb_tau_min), dtype=float )
for tau in range(nb_tau_max):
matrix = np.zeros((nb_states_O,nb_states_O),dtype=float)
    for oxygen in range(nbO):
        matrix += computeTransitionMatrix( coordO[:,oxygen], nb_states_O, tau+1, max_step )
for state in range(nb_states_O):
matrix[state,:] /= sum( matrix[state,:] )
matrix_tot[:,:,tau] = matrix[:,:]
if tau < nb_tau_min:
matrix_tot_ck[:,:,tau] = computeChapmanKolmogorov( matrix_tot[:,:,tau], nb_states_O )
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("P_ij, P_ij^CK")
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[0,0,:], "k-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[0,0,:], "k--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[1,1,:], "r-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[1,1,:], "r--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[2,2,:], "b-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[2,2,:], "b--" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*1), matrix_tot[3,3,:], "g-" )
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), matrix_tot_ck[3,3,:], "g--" )
plt.show()
rmseO = np.zeros(nb_tau_min, dtype=float)
for tau in range(nb_tau_min):
mat = matrix_tot[:,:,2*tau]-matrix_tot_ck[:,:,tau]
rmseO[tau] = sum(sum( mat*mat ))/(nb_states_O*nb_states_O)
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("RMSE O (%)")
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), rmseO*100 )
plt.show()
plt.figure()
plt.xlabel("Time lag (ps)")
plt.ylabel("RMSE all (%)")
plt.plot( np.arange(0,dt*nb_tau_max,dt*2), (rmseO+rmseC)*100*0.5 )
plt.show()
|
gpl-3.0
| -8,227,085,920,944,906,000
| 35.7
| 153
| 0.637275
| false
| 2.506146
| false
| false
| false
|
Ecogenomics/CheckM
|
checkm/plot/distributionPlots.py
|
1
|
2841
|
###############################################################################
#
# distributionPlots.py - Create GC, tetranucleotide and coding density distribution plots.
#
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
from checkm.plot.AbstractPlot import AbstractPlot
from checkm.plot.gcPlots import GcPlots
from checkm.plot.codingDensityPlots import CodingDensityPlots
from checkm.plot.tetraDistPlots import TetraDistPlots
class DistributionPlots(AbstractPlot):
def __init__(self, options):
AbstractPlot.__init__(self, options)
self.options = options
def plot(self, fastaFile, tetraSigs, distributionsToPlot):
# Set size of figure
self.fig.clear()
self.fig.set_size_inches(self.options.width, self.options.height)
axesHistGC = self.fig.add_subplot(321)
axesDeltaGC = self.fig.add_subplot(322)
axesHistTD = self.fig.add_subplot(323)
axesDeltaTD = self.fig.add_subplot(324)
axesHistCD = self.fig.add_subplot(325)
axesDeltaCD = self.fig.add_subplot(326)
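        # Subplot codes follow matplotlib's 3-digit convention: 321 means a
        # 3-row x 2-column grid, position 1 (top-left). Histograms fill the
        # left column and the corresponding delta plots the right column.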
gcPlots = GcPlots(self.options)
gcPlots.plotOnAxes(fastaFile, distributionsToPlot, axesHistGC, axesDeltaGC)
tetraDistPlots = TetraDistPlots(self.options)
tetraDistPlots.plotOnAxes(fastaFile, tetraSigs, distributionsToPlot, axesHistTD, axesDeltaTD)
codingDensityPlots = CodingDensityPlots(self.options)
codingDensityPlots.plotOnAxes(fastaFile, distributionsToPlot, axesHistCD, axesDeltaCD)
self.fig.tight_layout(pad=1, w_pad=2, h_pad=2)
self.draw()
|
gpl-3.0
| 5,811,296,671,881,534,000
| 48.732143
| 101
| 0.529743
| false
| 4.397833
| false
| false
| false
|
dotmpe/htcache
|
Protocol.py
|
1
|
18422
|
"""
The Protocol object relays the client request, accumulates the server response
data, and combines it with the cached. From there the Response object
reads this to the client.
"""
import calendar, os, time, socket, re
import Params, Runtime, Response, Resource, Rules
import HTTP
#from util import *
import log
mainlog = log.get_log('main')
class DNSLookupException(Exception):
def __init__(self, addr, exc):
self.addr = addr
self.exc = exc
def __str__(self):
return "DNS lookup error for %s: %s" % ( self.addr, self.exc )
DNSCache = {}
def connect(addr):
# FIXME: return HTTP 5xx
assert Runtime.ONLINE, \
'operating in off-line mode'
if addr not in DNSCache:
mainlog.debug('Requesting address info for %s:%i', *addr)
try:
DNSCache[ addr ] = socket.getaddrinfo(
addr[ 0 ], addr[ 1 ], Runtime.FAMILY, socket.SOCK_STREAM )
except Exception, e:
raise DNSLookupException(addr, e)
family, socktype, proto, canonname, sockaddr = DNSCache[ addr ][ 0 ]
mainlog.info('Connecting to %s:%i', *sockaddr)
sock = socket.socket( family, socktype, proto )
sock.setblocking( 0 )
sock.connect_ex( sockaddr )
return sock
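# Note on the non-blocking connect above: connect_ex() on a non-blocking
# socket normally returns immediately (typically with errno EINPROGRESS)
# instead of raising, and the fiber layer is expected to wait for the socket
# to become writable before use. Resolved addresses are memoized in DNSCache,
# so only the first request per (host, port) pays for getaddrinfo().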
class BlindProtocol:
"""
    Blind protocol is used to aim for graceful recovery upon unexpected
requests.
"""
Response = None
data = None
def __init__(self, request):
self.__socket = connect( request.hostinfo )
self.__sendbuf = request.recvbuf()
def socket(self):
return self.__socket
def recvbuf(self):
return ''
def hasdata(self):
return True
def send(self, sock):
bytecnt = sock.send( self.__sendbuf )
self.__sendbuf = self.__sendbuf[ bytecnt: ]
if not self.__sendbuf:
self.Response = Response.BlindResponse
def done(self):
pass
class CachingProtocol(object):
"""
Open cache and descriptor index for requested resources.
Filter requests using Drop, NoCache and .. rules.
"""
Response = None
"the htcache response class"
capture = None
"XXX: old indicator to track hashsum of response entity."
data = None
@property
def url(self):
# XXX: update this with data from content-location
return self.request.url
def __init__(self, request):
"Determine and open cache location, get descriptor backend. "
super(CachingProtocol, self).__init__()
self.request = request
self.data = None
# Track server response
self.__status, self.__message = None, None
def has_response(self):
return self.__status and self.__message
def prepare_direct_response(self,request):
"""
        Serve either a proxy page, a replacement for blocked content, or static
        content, all served directly from local storage.
Returns true on direct-response ready.
"""
host, port = request.hostinfo
verb, path, proto = request.envelope
# XXX: move this to request phase
if port == Runtime.PORT:
mainlog.info("Direct request: %s", path)
localhosts = ( 'localhost', Runtime.HOSTNAME, '127.0.0.1', '127.0.1.1' )
assert host in localhosts, "Cannot service for %s, use from %s" % (host, localhosts)
self.Response = Response.ProxyResponse
# XXX: Respond by writing message as plain text, e.g echo/debug it:
#self.Response = Response.DirectResponse
# Filter request by regex from rules.drop
filtered_path = "%s%s" % ( host, path )
m = Rules.Drop.match( filtered_path )
if m:
self.set_blocked_response( path )
mainlog.note('Dropping connection, '
'request matches pattern: %r.', m)
def prepare_nocache_response(self):
"Blindly respond for NoCache rule matches. "
pattern = Rules.NoCache.match( self.url )
if pattern:
mainlog.note('Not caching request, matches pattern: %r.', pattern)
self.Response = Response.BlindResponse
return True
def set_blocked_response(self, path):
"Respond to client by writing filter warning about blocked content. "
if '?' in path or '#' in path:
pf = path.find( '#' )
pq = path.find( '?' )
p = len( path )
if pf > 0: p = pf
if pq > 0: p = pq
nameext = os.path.splitext( path[:p] )
else:
nameext = os.path.splitext( path )
if len( nameext ) == 2 and nameext[1][1:] in Params.IMG_TYPE_EXT:
self.Response = Response.BlockedImageContentResponse
else:
self.Response = Response.BlockedContentResponse
def get_size(self):
return self.data.descriptor.size;
def set_size(self, size):
self.data.descriptor.size = size
size = property( get_size, set_size )
def get_mtime(self):
return self.cache.mtime;
def set_mtime(self, mtime):
self.cache.mtime = mtime
mtime = property( get_mtime, set_mtime )
def read(self, pos, size):
return self.cache.read( pos, size )
def write(self, chunk):
return self.cache.write( chunk )
def tell(self):
return self.cache.tell()
def finish(self):
self.data.finish_response()
def __str__(self):
return "[CachingProtocol %s]" % hex(id(self))
class HttpProtocol(CachingProtocol):
rewrite = None
def __init__(self,request):
super(HttpProtocol, self).__init__(request)
host, port = request.hostinfo
verb, path, proto = request.envelope
# Serve direct response
self.prepare_direct_response(request)
if self.Response:
self.__socket = None
return
# Prepare to forward request
self.data = Resource.ProxyData(self)
# Skip server-round trip in static mode
if Runtime.STATIC: # and self.cache.full: # FIXME
mainlog.note('Static mode; serving file directly from cache')
self.__socket = None
if self.data.prepare_static():
self.Response = Response.DataResponse
else:
self.Response = Response.NotFoundResponse
return
proxy_req_headers = self.data.prepare_request( request )
mainlog.debug("Prepared request headers")
for key in proxy_req_headers:
mainlog.debug('> %s: %s',
key, proxy_req_headers[ key ].replace( '\r\n', ' > ' ) )
# Forward request to remote server, fiber will handle this
head = 'GET /%s HTTP/1.1' % path
# FIXME return proper HTTP error upon connection failure
try:
self.__socket = connect(request.hostinfo)
except Exception, e:
self.Response = Response.ExceptionResponse(self, request, e )
return
self.__sendbuf = '\r\n'.join(
[ head ] + map( ': '.join, proxy_req_headers.items() ) + [ '', '' ] )
self.__recvbuf = ''
        # Proxy protocol continues in self.recv after server response headers are
        # parsed, before the response entity is read from the remote server
self.__parse = HttpProtocol.__parse_head
@property
def cache(self):
# XXX: the other way around?
return self.data.cache
def hasdata(self):
"Indicator wether Protocol object has more request data available. "
return bool( self.__sendbuf )
def send(self, sock):
"fiber hook to send request data. "
assert self.hasdata(), "no data"
bytecnt = sock.send( self.__sendbuf )
self.__sendbuf = self.__sendbuf[ bytecnt: ]
def __parse_head(self, chunk):
eol = chunk.find( '\n' ) + 1
assert eol
line = chunk[ :eol ]
mainlog.note("%s: Server responds %r",self, line.strip())
fields = line.split()
assert (2 <= len( fields )) \
and fields[ 0 ].startswith( 'HTTP/' ) \
and fields[ 1 ].isdigit(), 'invalid header line: %r' % line
self.__status = int( fields[ 1 ] )
self.__message = ' '.join( fields[ 2: ] )
self.__args = {}
mainlog.info("%s: finished parse_head (%s, %s)",self, self.__status, self.__message)
self.__parse = HttpProtocol.__parse_args
return eol
def __parse_args(self, chunk):
eol = chunk.find( '\n' ) + 1
assert eol
line = chunk[ :eol ]
if ':' in line:
mainlog.debug('> '+ line.rstrip())
key, value = line.split( ':', 1 )
if key.lower() in HTTP.Header_Map:
key = HTTP.Header_Map[key.lower()]
else:
mainlog.warn("Warning: %r not a known HTTP (response) header (%r)",
key,value.strip())
key = key.title() # XXX: bad? :)
if key in self.__args:
self.__args[ key ] += '\r\n' + key + ': ' + value.strip()
else:
self.__args[ key ] = value.strip()
elif line in ( '\r\n', '\n' ):
mainlog.note("%s: finished parsing args", self)
self.__parse = None
else:
mainlog.err('Error: ignored server response header line: '+ line)
return eol
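    # Parsing is driven as a small state machine: self.__parse points at the
    # current parser (__parse_head, then __parse_args, then None). Each parser
    # consumes exactly one header line from the buffer and returns the number
    # of bytes it used, so recv() below can keep calling
    # self.__parse(self, self.__recvbuf) and trimming the buffer until the
    # whole header block has been read.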
def recv(self, sock):
""""
Process server response until headers are fully parsed, then
prepare response handler.
"""
assert not self.hasdata(), "has data"
chunk = sock.recv( Params.MAXCHUNK, socket.MSG_PEEK )
mainlog.info("%s: recv'd chunk (%i)",self, len(chunk))
assert chunk, 'server closed connection before sending '\
'a complete message header, '\
'parser: %r, data: %r' % (self.__parse, self.__recvbuf)
self.__recvbuf += chunk
while self.__parse:
bytecnt = self.__parse(self, self.__recvbuf )
assert bytecnt
# sock.recv( len( chunk ) )
# return
self.__recvbuf = self.__recvbuf[ bytecnt: ]
sock.recv( len( chunk ) - len( self.__recvbuf ) )
# Server response header was parsed
self.chunked = self.__args.pop( 'Transfer-Encoding', None )
# XXX: transfer-encoding, chunking.. to client too?
        # Check whether to step back now
if self.prepare_nocache_response():
self.data.descriptor = None
return
# Process and update headers before deferring to response class
# 2xx
if self.__status in ( HTTP.OK, ):
mainlog.info("%s: Caching new download. ", self)
self.data.finish_request()
# self.recv_entity()
self.set_dataresponse();
elif self.__status in ( HTTP.MULTIPLE_CHOICES, ):
assert False, HTTP.MULTIPLE_CHOICES
elif self.__status == HTTP.PARTIAL_CONTENT \
and self.cache.partial:
mainlog.debug("Updating partial download. ")
self.__args = self.data.prepare_response()
startpos, endpos = HTTP.parse_content_range(self.__args['Content-Range'])
assert endpos == '*' or endpos == self.data.descriptor.size, \
"Expected server to continue to end of resource."
if self.__args['ETag']:
assert self.__args['ETag'].strip('"') == self.data.descriptor.etag, (
self.__args['ETag'], self.data.descriptor.etag )
self.recv_part()
self.set_dataresponse();
# 3xx: redirects
elif self.__status in (HTTP.FOUND,
HTTP.MOVED_PERMANENTLY,
HTTP.TEMPORARY_REDIRECT):
self.data.finish_request()
# XXX:
#location = self.__args.pop( 'Location', None )
# self.descriptor.move( self.cache.path, self.__args )
# self.cache.remove_partial()
self.Response = Response.BlindResponse
elif self.__status == HTTP.NOT_MODIFIED:
assert self.cache.full, "XXX sanity"
mainlog.info("Reading complete file from cache at %s" %
self.cache.path)
self.data.finish_request()
self.Response = Response.DataResponse
# 4xx: client error
elif self.__status in ( HTTP.FORBIDDEN, HTTP.METHOD_NOT_ALLOWED ):
if self.data:
self.data.set_broken( self.__status )
self.Response = Response.BlindResponse
elif self.__status in ( HTTP.NOT_FOUND, HTTP.GONE ):
self.Response = Response.BlindResponse
#if self.descriptor:
# self.descriptor.update( self.__args )
elif self.__status in ( HTTP.REQUEST_RANGE_NOT_STATISFIABLE, ):
if self.cache.partial:
mainlog.warn("Warning: Cache corrupted?: %s", self.url)
self.cache.remove_partial()
elif self.cache.full:
self.cache.remove_full()
# XXX
# if self.descriptor:
# self.descriptor.drop()
# log("Dropped descriptor: %s" % self.url)
self.Response = Response.BlindResponse
else:
mainlog.warn("Warning: unhandled: %s, %s", self.__status, self.url)
self.Response = Response.BlindResponse
# def recv_entity(self):
# """
# Prepare to receive new entity.
# """
# if self.cache.full:
# log("HttpProtocol.recv_entity: overwriting cache: %s" %
# self.url, Params.LOG_NOTE)
# self.cache.remove_full()
# self.cache.open_new()
# else:
# log("HttpProtocol.recv_entity: new cache: %s" %
# self.url, Params.LOG_NOTE)
# self.cache.open_new()
# self.cache.stat()
# assert self.cache.partial
def recv_part(self):
"""
Prepare to receive partial entity.
"""
byterange = self.__args.pop( 'Content-Range', 'none specified' )
assert byterange.startswith( 'bytes ' ), \
'unhandled content-range type: %s' % byterange
byterange, size = byterange[ 6: ].split( '/' )
beg, end = byterange.split( '-' )
self.size = int( size )
# Sanity check
assert self.size == int( end ) + 1, \
"Complete range %r should match entity size of %s"%(end, self.size)
self.cache.open_partial( int( beg ) )
assert self.cache.partial, "Missing cache but receiving partial entity. "
def set_dataresponse(self):
mediatype = self.data.descriptor.mediatype
if Runtime.PROXY_INJECT and mediatype and 'html' in mediatype:
mainlog.note("XXX: Rewriting HTML resource: "+self.url)
self.rewrite = True
#te = self.__args.get( 'Transfer-Encoding', None )
if self.chunked:#te == 'chunked':
mainlog.info("%s: Chunked response", self)
self.Response = Response.ChunkedDataResponse
else:
self.Response = Response.DataResponse
def recvbuf(self):
return self.print_message()
def print_message(self, args=None):
if not args:
args = self.__args
return '\r\n'.join(
[ '%s %i %s' % (
self.request.envelope[2],
self.__status,
self.__message ) ] +
map( ': '.join, args.items() ) + [ '', '' ] )
def responsebuf(self):
return self.print_message(self.__args)
def args(self):
try:
return self.__args.copy()
except AttributeError, e:
return {}
#return hasattr(self, '__args') and self.__args.copy() or {}
def socket(self):
return self.__socket
def __str__(self):
return "[HttpProtocol %s]" % hex(id(self))
"""
class FtpProtocol( CachingProtocol ):
Response = None
def __init__(self,request):
super(FtpProtocol, self).__init__( request )
if Runtime.STATIC and self.cache.full:
self.__socket = None
log("Static FTP cache : %s" % self.url)
self.cache.open_full()
self.Response = Response.DataResponse
return
self.__socket = connect(request.hostinfo)
self.__path = request.envelope[1]
self.__sendbuf = ''
self.__recvbuf = ''
self.__handle = FtpProtocol.__handle_serviceready
def socket(self):
return self.__socket
def hasdata(self):
return self.__sendbuf != ''
def send(self, sock):
assert self.hasdata()
bytecnt = sock.send( self.__sendbuf )
self.__sendbuf = self.__sendbuf[ bytecnt: ]
def recv(self, sock):
assert not self.hasdata()
chunk = sock.recv( Params.MAXCHUNK )
assert chunk, 'server closed connection prematurely'
self.__recvbuf += chunk
while '\n' in self.__recvbuf:
reply, self.__recvbuf = self.__recvbuf.split( '\n', 1 )
log('S: %s' % reply.rstrip(), 2)
if reply[ :3 ].isdigit() and reply[ 3 ] != '-':
self.__handle(self, int( reply[ :3 ] ), reply[ 4: ] )
log('C: %s' % self.__sendbuf.rstrip(), 2)
def __handle_serviceready(self, code, line):
assert code == 220, \
'server sends %i; expected 220 (service ready)' % code
self.__sendbuf = 'USER anonymous\r\n'
self.__handle = FtpProtocol.__handle_password
def __handle_password(self, code, line):
assert code == 331, \
'server sends %i; expected 331 (need password)' % code
self.__sendbuf = 'PASS anonymous@\r\n'
self.__handle = FtpProtocol.__handle_loggedin
def __handle_loggedin(self, code, line):
assert code == 230, \
'server sends %i; expected 230 (user logged in)' % code
self.__sendbuf = 'TYPE I\r\n'
self.__handle = FtpProtocol.__handle_binarymode
def __handle_binarymode(self, code, line):
assert code == 200,\
'server sends %i; expected 200 (binary mode ok)' % code
self.__sendbuf = 'PASV\r\n'
self.__handle = FtpProtocol.__handle_passivemode
def __handle_passivemode(self, code, line):
assert code == 227, \
'server sends %i; expected 227 (passive mode)' % code
channel = eval( line.strip('.').split()[ -1 ] )
addr = '%i.%i.%i.%i' % channel[ :4 ], channel[ 4 ] * 256 + channel[ 5 ]
self.__socket = connect( addr )
self.__sendbuf = 'SIZE %s\r\n' % self.__path
self.__handle = FtpProtocol.__handle_size
def __handle_size(self, code, line):
if code == 550:
self.Response = Response.NotFoundResponse
return
assert code == 213,\
'server sends %i; expected 213 (file status)' % code
self.size = int( line )
log('File size: %s' % self.size)
self.__sendbuf = 'MDTM %s\r\n' % self.__path
self.__handle = FtpProtocol.__handle_mtime
def __handle_mtime(self, code, line):
if code == 550:
self.Response = Response.NotFoundResponse
return
assert code == 213, \
'server sends %i; expected 213 (file status)' % code
self.mtime = calendar.timegm( time.strptime(
line.rstrip(), '%Y%m%d%H%M%S' ) )
log('Modification time: %s' % time.strftime(
Params.TIMEFMT, time.gmtime( self.mtime ) ))
stat = self.cache.partial
if stat and stat.st_mtime == self.mtime:
self.__sendbuf = 'REST %i\r\n' % stat.st_size
self.__handle = FtpProtocol.__handle_resume
else:
stat = self.cache.full
if stat and stat.st_mtime == self.mtime:
log("Unmodified FTP cache : %s" % self.url)
self.cache.open_full()
self.Response = Response.DataResponse
else:
self.cache.open_new()
self.__sendbuf = 'RETR %s\r\n' % self.__path
self.__handle = FtpProtocol.__handle_data
def __handle_resume(self, code, line):
assert code == 350, 'server sends %i; ' \
'expected 350 (pending further information)' % code
self.cache.open_partial()
self.__sendbuf = 'RETR %s\r\n' % self.__path
self.__handle = FtpProtocol.__handle_data
def __handle_data(self, code, line):
if code == 550:
self.Response = Response.NotFoundResponse
return
assert code == 150, \
'server sends %i; expected 150 (file ok)' % code
self.Response = Response.DataResponse
"""
class ProxyProtocol:
"""
"""
Response = Response.ProxyResponse
data = None
def __init__(self,request):
method, reqname, proto = request.envelope
assert reqname.startswith('/'), reqname
self.reqname = reqname[1:]
self.status = HTTP.OK
        if method != 'GET':
self.status = HTTP.METHOD_NOT_ALLOWED
if self.reqname not in Response.ProxyResponse.urlmap.keys():
self.status = HTTP.NOT_FOUND
assert proto in ('', 'HTTP/1.0', 'HTTP/1.1'), proto
def socket(self):
return None
def recvbuf(self):
return ''
def hasdata(self):
return True
def send(self, sock):
bytecnt = sock.send( self.__sendbuf )
self.__sendbuf = self.__sendbuf[ bytecnt: ]
if not self.__sendbuf:
self.Response = Response.BlindResponse
def done(self):
pass
def has_response(self):
return False
|
gpl-3.0
| -626,849,070,475,962,400
| 26.827795
| 87
| 0.660026
| false
| 3.096134
| false
| false
| false
|
TakashiMatsuda/sag_svm
|
scaling.py
|
1
|
1287
|
#!/Users/takashi/.pyenv/shims/python
import numpy as np
import math
def scaling(data):
"""
    Scaling. Make each column's average 0 and variance 1
    by subtracting the column mean and dividing by the standard deviation
"""
print("input:")
print(data)
scaled_data = np.zeros_like(data)
"""
average section
"""
sumlist = np.sum(data, axis=0)
avglist = np.array([d / len(data) for d in sumlist])
print("avglist:")
print(avglist)
for i, x in enumerate(data):
scaled_data[i] = np.array([x[j] - avglist[j] for j in range(len(x))])
"""
variance section
"""
vrlist = np.var(scaled_data, axis=0)
print("average=0 data:")
print(scaled_data)
    return np.divide(scaled_data, np.sqrt(vrlist))
"""
vr = (math.sqrt(np.sum(np.square(scaled_data)))) / len(data)
scaled_data = np.array([x / vr for x in scaled_data])
"""
# print(scaled_data)
# return scaled_data
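# Worked example (illustrative): scaling([[1, 2], [3, 4]])
# column means are [2, 3], so the centered data is [[-1, -1], [1, 1]];
# both column standard deviations are 1, so the final division leaves the
# values unchanged and each column has mean 0 and variance 1.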
def test_scaling():
"""
TODO: More Precise Test is necessary
"""
data = [[(i+1) * (j+1) for i in range(5)] for j in range(2)]
res = scaling(data)
print("res:")
print(res)
"""
average test
"""
assert np.sum(res, axis=0)[1] == 0
"""
variance test
"""
assert np.var(res, axis=0)[1] == 1
|
mit
| -6,234,009,568,127,102,000
| 22.4
| 77
| 0.554002
| false
| 3.177778
| false
| false
| false
|
andyneff/voxel-globe
|
voxel_globe/build_voxel_world/tasks.py
|
1
|
6207
|
from voxel_globe.common_tasks import shared_task, VipTask
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
import logging
import os
@shared_task(base=VipTask, bind=True)
def run_build_voxel_model(self, image_collection_id, scene_id, bbox,
skip_frames, cleanup=True, history=None):
from distutils.dir_util import remove_tree
from shutil import move
import random
from vsi.tools.redirect import Redirect, Logger as LoggerWrapper
from voxel_globe.meta import models
from voxel_globe.tools.camera import get_krt
import voxel_globe.tools
from boxm2_scene_adaptor import boxm2_scene_adaptor
from vil_adaptor import load_image
from vpgl_adaptor import load_perspective_camera
from voxel_globe.tools.wget import download as wget
from vsi.vxl.create_scene_xml import create_scene_xml
from vsi.tools.dir_util import copytree, mkdtemp
with Redirect(stdout_c=LoggerWrapper(logger, lvl=logging.INFO),
stderr_c=LoggerWrapper(logger, lvl=logging.WARNING)):
openclDevice = os.environ['VIP_OPENCL_DEVICE']
opencl_memory = os.environ.get('VIP_OPENCL_MEMORY', None)
scene = models.Scene.objects.get(id=scene_id)
imageCollection = models.ImageCollection.objects.get(\
id=image_collection_id).history(history);
imageList = imageCollection.images.all();
with voxel_globe.tools.task_dir('voxel_world') as processing_dir:
logger.warning(bbox)
if bbox['geolocated']:
create_scene_xml(openclDevice, 3, float(bbox['voxel_size']),
lla1=(float(bbox['x_min']), float(bbox['y_min']),
float(bbox['z_min'])),
lla2=(float(bbox['x_max']), float(bbox['y_max']),
float(bbox['z_max'])),
origin=scene.origin, model_dir='.', number_bins=1,
output_file=open(os.path.join(processing_dir, 'scene.xml'), 'w'),
n_bytes_gpu=opencl_memory)
else:
create_scene_xml(openclDevice, 3, float(bbox['voxel_size']),
lvcs1=(float(bbox['x_min']), float(bbox['y_min']),
float(bbox['z_min'])),
lvcs2=(float(bbox['x_max']), float(bbox['y_max']),
float(bbox['z_max'])),
origin=scene.origin, model_dir='.', number_bins=1,
output_file=open(os.path.join(processing_dir, 'scene.xml'), 'w'),
n_bytes_gpu=opencl_memory)
counter = 1;
imageNames = []
cameraNames = []
os.mkdir(os.path.join(processing_dir, 'local'))
#Prepping
for image in imageList:
self.update_state(state='INITIALIZE', meta={'stage':'image fetch',
'i':counter,
'total':len(imageList)})
image = image.history(history)
(K,R,T,o) = get_krt(image.history(history), history=history)
krtName = os.path.join(processing_dir, 'local', 'frame_%05d.krt' % counter)
with open(krtName, 'w') as fid:
print >>fid, (("%0.18f "*3+"\n")*3) % (K[0,0], K[0,1], K[0,2],
K[1,0], K[1,1], K[1,2], K[2,0], K[2,1], K[2,2]);
print >>fid, (("%0.18f "*3+"\n")*3) % (R[0,0], R[0,1], R[0,2],
R[1,0], R[1,1], R[1,2], R[2,0], R[2,1], R[2,2]);
print >>fid, ("%0.18f "*3+"\n") % (T[0,0], T[1,0], T[2,0]);
imageName = image.originalImageUrl;
extension = os.path.splitext(imageName)[1]
localName = os.path.join(processing_dir, 'local',
'frame_%05d%s' % (counter, extension));
wget(imageName, localName, secret=True)
counter += 1;
imageNames.append(localName)
cameraNames.append(krtName)
variance = 0.06
vxl_scene = boxm2_scene_adaptor(os.path.join(processing_dir, "scene.xml"),
openclDevice);
current_level = 0;
loaded_imgs = [];
loaded_cams = [];
for i in range(0, len(imageNames), skip_frames):
logger.debug("i: %d img name: %s cam name: %s", i, imageNames[i],
cameraNames[i])
self.update_state(state='PRELOADING', meta={'stage':'image load',
'i':i,
'total':len(imageNames)})
img, ni, nj = load_image(imageNames[i])
loaded_imgs.append(img)
pcam = load_perspective_camera(cameraNames[i])
loaded_cams.append(pcam)
refine_cnt = 5;
for rfk in range(0, refine_cnt, 1):
pair = zip(loaded_imgs, loaded_cams)
random.shuffle(pair)
for idx, (img, cam) in enumerate(pair):
self.update_state(state='PROCESSING', meta={'stage':'update',
'i':rfk+1, 'total':refine_cnt, 'image':idx+1,
'images':len(loaded_imgs)})
logger.debug("refine_cnt: %d, idx: %d", rfk, idx)
vxl_scene.update(cam,img,True,True,None,openclDevice[0:3],variance,
tnear = 1000.0, tfar = 100000.0);
logger.debug("writing cache: %d", rfk)
vxl_scene.write_cache();
logger.debug("wrote cache: %d", rfk)
if rfk < refine_cnt-1:
self.update_state(state='PROCESSING', meta={'stage':'refine',
'i':rfk,
'total':refine_cnt})
logger.debug("refining %d...", rfk)
refine_device = openclDevice[0:3]
if refine_device == 'cpu':
refine_device = 'cpp'
vxl_scene.refine(0.3, refine_device);
vxl_scene.write_cache();
voxel_world_dir = mkdtemp(dir=os.environ['VIP_STORAGE_DIR'])
copytree(processing_dir, voxel_world_dir, ignore=lambda x,y:['images'])
models.VoxelWorld.create(
name='%s world (%s)' % (imageCollection.name, self.request.id),
origin=scene.origin,
directory=voxel_world_dir,
service_id=self.request.id).save();
|
mit
| 2,948,493,370,364,752,000
| 38.535032
| 83
| 0.544546
| false
| 3.401096
| false
| false
| false
|
sunlightlabs/tcamp
|
tcamp/reg/forms.py
|
1
|
1825
|
from django import forms
from localflavor.us.us_states import STATE_CHOICES
from bootstrap_toolkit.widgets import BootstrapTextInput
import datetime
from reg.models import Sale, Ticket, AMBASSADOR_PROGRAM_CHOICES
class SaleForm(forms.ModelForm):
class Meta:
model = Sale
class TicketForm(forms.ModelForm):
#ambassador_program = forms.ChoiceField(initial="no", widget=forms.RadioSelect, choices=AMBASSADOR_PROGRAM_CHOICES, label="Would you like to be part of the TCamp Ambassador Program?")
class Meta:
model = Ticket
exclude = ['event', 'sale', 'success', 'checked_in', 'lobby_day', 'ambassador_program']
widgets = {
'twitter': BootstrapTextInput(attrs={'placeholder': "e.g., \"tcampdc\""}),
}
_current_year = datetime.datetime.now().year
class PaymentForm(forms.Form):
first_name = forms.CharField(max_length=255)
last_name = forms.CharField(max_length=255)
email = forms.EmailField()
address1 = forms.CharField(max_length=1024, label="Address Line 1")
address2 = forms.CharField(max_length=1024, label="Address Line 2", required=False)
city = forms.CharField(max_length=255)
state = forms.CharField(max_length=255, widget=forms.Select(choices=STATE_CHOICES + (('non-us', 'Outside the USA'),)))
zip = forms.CharField(max_length=255, label="Zip/Postal Code")
exp_month = forms.ChoiceField(initial="01", label="Expiration", choices=(("01","01"),("02","02"),("03","03"),("04","04"),("05","05"),("06","06"),("07","07"),("08","08"),("09","09"),("10","10"),("11","11"),("12","12")))
exp_year = forms.ChoiceField(initial="2014", label="Year", choices=tuple([2*(str(_current_year + i),) for i in xrange(11)]))
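    # The exp_year choices above build ((year, year), ...) pairs for the
    # current year plus the following ten: 2*(s,) duplicates a one-tuple, so
    # each choice's stored value and display label are the same string,
    # e.g. ('2014', '2014').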
# will be encrypted
number = forms.CharField(max_length=4096)
cvv = forms.CharField(max_length=4096)
|
bsd-3-clause
| 5,021,559,516,469,790,000
| 48.351351
| 222
| 0.673973
| false
| 3.430451
| false
| false
| false
|
ewheeler/vaxtrack
|
vaxapp/migrations/0018_auto__add_field_countrystockstats_days_of_stock_data.py
|
1
|
12586
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'CountryStockStats.days_of_stock_data'
db.add_column('vaxapp_countrystockstats', 'days_of_stock_data', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='days_of_stock_data', null=True, to=orm['vaxapp.Dicty']), keep_default=False)
def backwards(self, orm):
# Deleting field 'CountryStockStats.days_of_stock_data'
db.delete_column('vaxapp_countrystockstats', 'days_of_stock_data_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'vaxapp.alert': {
'Meta': {'object_name': 'Alert'},
'analyzed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'countrystock': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.CountryStock']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reference_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'risk': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '2', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '2', 'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '2', 'null': 'True', 'blank': 'True'})
},
'vaxapp.country': {
'Meta': {'object_name': 'Country'},
'iso2_code': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso3_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'numerical_code': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'vaxapp.countrystock': {
'Meta': {'object_name': 'CountryStock'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5_hash': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'vaccine': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.Vaccine']"})
},
'vaxapp.countrystockstats': {
'Meta': {'object_name': 'CountryStockStats'},
'actual_cons_rate': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'actual_cons_rate'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'analyzed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'annual_demand': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'annual_demand'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'consumed_in_year': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'consumed_in_year'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'countrystock': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.CountryStock']"}),
'days_of_stock': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'days_of_stock_data': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'days_of_stock_data'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'demand_for_period': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'doses_delivered_this_year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'doses_on_orders': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'est_daily_cons': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nine_by_year': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nine_by_year'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"}),
'percent_coverage': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'reference_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'three_by_year': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'three_by_year'", 'null': 'True', 'to': "orm['vaxapp.Dicty']"})
},
'vaxapp.dicty': {
'Meta': {'object_name': 'Dicty'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '160'})
},
'vaxapp.document': {
'Meta': {'object_name': 'Document'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.utcnow'}),
'date_exception': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_process_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_process_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_queued': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_stored': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_uploaded': ('django.db.models.fields.DateTimeField', [], {}),
'exception': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'local_document': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'remote_document': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'U'", 'max_length': '1'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
'vaxapp.keyval': {
'Meta': {'object_name': 'KeyVal'},
'dicty': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.Dicty']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '160'}),
'val': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'})
},
'vaxapp.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.Country']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'vaxapp.vaccine': {
'Meta': {'object_name': 'Vaccine'},
'abbr_en': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'abbr_en_alt': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'abbr_fr': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'abbr_fr_alt': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vaxapp.VaccineGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'})
},
'vaxapp.vaccinegroup': {
'Meta': {'object_name': 'VaccineGroup'},
'abbr_en': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'abbr_fr': ('django.db.models.fields.CharField', [], {'max_length': '160', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['vaxapp']
|
bsd-3-clause
| -6,147,009,584,548,764,000
| 78.658228
| 228
| 0.546719
| false
| 3.49514
| false
| false
| false
|
georgekis/salary
|
main/control/pay.py
|
1
|
2523
|
from flask.ext import wtf
import auth
import flask
import model
import wtforms
import util
from main import app
###############################################################################
# Create
###############################################################################
class PayUpdateForm(wtf.Form):
name = wtforms.StringField('Name', [wtforms.validators.required()])
date_for = wtforms.DateField('Date For', [wtforms.validators.required()])
date_paid = wtforms.DateField('Date Paid', [wtforms.validators.required()])
code = wtforms.StringField('Code', [wtforms.validators.required()])
amount = wtforms.FloatField('Amount', [wtforms.validators.required()])
add_more = wtforms.BooleanField('Add more', [wtforms.validators.optional()], default=True)
@app.route('/pay/<int:pay_id>/', methods=['GET', 'POST'])
@app.route('/pay/create/', methods=['GET', 'POST'])
@auth.login_required
def pay_update(pay_id=0):
if pay_id:
pay_db = model.Pay.get_by_id(pay_id, parent=auth.current_user_key())
else:
pay_db = model.Pay(parent=auth.current_user_key())
if not pay_db:
flask.abort(404)
form = PayUpdateForm(obj=pay_db)
if form.validate_on_submit():
form.populate_obj(pay_db)
pay_db.put()
if form.add_more.data:
return flask.redirect(flask.url_for('pay_update'))
return flask.redirect(flask.url_for('pay_list'))
return flask.render_template(
'pay/pay_update.html',
html_class='pay-update',
title=pay_db.name or 'Create Pay',
form=form,
pay_db=pay_db,
)
###############################################################################
# List
###############################################################################
@app.route('/pay/')
@auth.login_required
def pay_list():
pay_dbs, pay_cursor = auth.current_user_db().get_pay_dbs()
return flask.render_template(
'pay/pay_list.html',
html_class='pay-list',
title='Pay List',
pay_dbs=pay_dbs,
next_url=util.generate_next_url(pay_cursor),
)
###############################################################################
# Admin Pay List
###############################################################################
@app.route('/admin/pay/')
@auth.admin_required
def admin_pay_list():
pay_dbs, pay_cursor = model.Pay.get_dbs()
return flask.render_template(
'admin/pay_list.html',
html_class='admin-pay-list',
title='Pay List',
pay_dbs=pay_dbs,
next_url=util.generate_next_url(pay_cursor),
)
|
mit
| 1,797,666,842,330,151,400
| 30.148148
| 92
| 0.537852
| false
| 3.776946
| false
| false
| false
|
goujonpa/jeankevin
|
modules/numberCoupleClass.py
|
1
|
2707
|
#!/usr/local/bin/python
# -*-coding:Utf-8 -*
from modules.individualClass import Individual
import random
class NumberCouple(Individual):
"""NumberCouple class: represent one couple of real individual, inherits from the Individual class
Properties:
key : standardized representation of the problem [[x1, 'real'][x2, 'real']]
        fitness : 1/(1+f(x)) with f(x) = 100(x2 - x1^2)^2 + (x1 - 1)^2
+ every property from the Individual class
Methods:
__init__()
get_binary_standard()
get_real_standard()
get_binary_unstandardized()
get_real_unstandardized
_calcul_fitness()
_random_initialisation()
+ every method from the Individual Class
"""
def __init__(self, key=None):
"""Class constuctor"""
super(NumberCouple, self).__init__(key)
def _random_initialisation(self):
"""Randomly initialises an individual, Returns a random key"""
key = list()
for i in range(0, 2):
x = random.uniform(-2.048, 2.048)
key.append((x, 'real'))
return key
def _calcul_fitness(self):
"""Calculates the individuals fitness"""
x1, x2 = self._key
x1 = x1[0]
x2 = x2[0]
functionResult = 100 * pow((x2 - pow(x1, 2)), 2) + pow((x1 - 1), 2)
fitness = 1.0 / (1 + functionResult)
return fitness
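    # Worked example (a sketch, not part of the original class): for the key
    # [(1.0, 'real'), (1.0, 'real')], f(x) = 100*(1 - 1^2)^2 + (1 - 1)^2 = 0,
    # so the fitness 1/(1 + f(x)) is exactly 1.0, the global optimum of the
    # Rosenbrock function used here.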
def get_binary_standard(self):
"""Returns the standardised representation of the key for binary manipulations"""
x1, x2 = self.key
x1 = 1000 * x1[0]
x2 = 1000 * x2[0]
result = list()
result.append((self._binarize(x1, 12), 15, 3, 14))
result.append((self._binarize(x2, 12), 15, 3, 14))
return result
def get_real_standard(self):
"""Returns the standardised representation of the key for real manipulations"""
x1, x2 = self.key
x1 = 1000 * x1[0]
x2 = 1000 * x2[0]
result = list()
result.append((self._realize(x1, 12), 13, 9, 12))
result.append((self._realize(x2, 12), 13, 9, 12))
return result
@staticmethod
def get_binary_unstandardized(l):
"""Returns the unstandardisation of a standardised binary representation of the key"""
key = list()
for element in l:
a = int(element, 2)
a = a / 1000.0
key.append((a, 'real'))
return key
@staticmethod
def get_real_unstandardized(l):
"""Returns the unstandardisation of a real binary representation of the key"""
key = list()
for element in l:
a = int(element)
a = a / 1000.0
key.append((a, 'real'))
return key
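# Minimal usage sketch (not part of the original file; assumes that
# Individual.__init__ falls back to _random_initialisation when key is None):
#   couple = NumberCouple()
#   fitness = couple._calcul_fitness()  # 1 / (1 + Rosenbrock(x1, x2))
#   NumberCouple.get_binary_unstandardized(['11111010000'])
#   # -> [(2.0, 'real')], since int('11111010000', 2) == 2000 and 2000/1000.0 == 2.0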
|
mit
| -5,521,277,182,482,253,000
| 30.847059
| 102
| 0.574437
| false
| 3.57124
| false
| false
| false
|
makerhanoi/tagio
|
tagio/views/api/__init__.py
|
1
|
1249
|
"""API."""
from flask import Blueprint, jsonify, request
from tagio.models.user import User
from tagio.extensions import csrf_protect
from . import user
__all__ = ('user',)
blueprint = Blueprint('api',
__name__,
url_prefix='/api/v<string:version>')
@blueprint.route('/login', methods=['POST'])
@csrf_protect.exempt
def login(version):
"""Login.
    Log in to retrieve a token.
"""
if version == '1':
return _login_first_version()
return jsonify({'code': 1, 'msg': 'Invalid version'})
def _login_first_version():
username = request.form.get('username')
password = request.form.get('password')
if username is None or password is None:
return jsonify({'code': 2, 'msg': 'Invalid parameter'})
username = username.strip().lower()
obj = User.query.filter(User.username == username).first()
if obj is None:
return jsonify({'code': 2, 'msg': 'Invalid parameter'})
flag = obj.check_password(password)
if not flag:
return jsonify({'code': 2, 'msg': 'Invalid parameter'})
if not obj.active:
return jsonify({'code': 2, 'msg': 'Invalid parameter'})
return jsonify({'code': 0, 'token': obj.get_auth_token()})
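# Hedged usage sketch (not part of the original module; host and credentials
# are assumptions):
#   import requests
#   resp = requests.post('http://localhost:5000/api/v1/login',
#                        data={'username': 'alice', 'password': 'secret'})
#   resp.json()  # {'code': 0, 'token': '...'} on success, a non-zero 'code' otherwise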
|
bsd-3-clause
| 8,983,722,349,411,378,000
| 23.98
| 63
| 0.610088
| false
| 3.878882
| false
| false
| false
|
kushankr/approval_frame
|
approval_frame/urls.py
|
1
|
1127
|
from django.conf.urls import include, patterns, url
from django.contrib import admin
from approval_frame import views
from approval_frame.views import CustomRegistrationView
# autodiscover is required only for older versions of Django
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^approval_polls/', include('approval_polls.urls', namespace="approval_polls")),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/register/$', CustomRegistrationView.as_view(),
name='registration_register'),
url(r'^accounts/', include('registration.backends.default.urls')),
url(r'^accounts/username/change/$', views.changeUsername, name="username_change"),
url(r'^accounts/username/change/done/$', views.changeUsernameDone, name="username_change_done"),
url(r'^accounts/password/change/$', 'django.contrib.auth.views.password_change',
{'post_change_redirect': '/accounts/password_change/done/'}, name="password_change"),
url(r'^accounts/password/change/done/$', 'django.contrib.auth.views.password_change_done'),
url('', include('social.apps.django_app.urls', namespace='social'))
)
|
gpl-3.0
| -25,690,787,189,746,730
| 48
| 100
| 0.723159
| false
| 3.872852
| false
| false
| false
|
beiko-lab/gengis
|
bin/Lib/site-packages/numpy/lib/arraysetops.py
|
1
|
12374
|
"""
Set operations for 1D numeric arrays based on sorting.
:Contains:
ediff1d,
unique,
intersect1d,
setxor1d,
in1d,
union1d,
setdiff1d
:Notes:
For floating point arrays, inaccurate results may appear due to usual round-off
and floating point comparison issues.
Speed could be gained in some operations by an implementation of
sort(), that can provide directly the permutation vectors, avoiding
thus calls to argsort().
To do: Optionally return indices analogously to unique for all functions.
:Author: Robert Cimrman
"""
__all__ = ['ediff1d', 'intersect1d', 'setxor1d', 'union1d', 'setdiff1d',
'unique', 'in1d']
import numpy as np
from numpy.lib.utils import deprecate
def ediff1d(ary, to_end=None, to_begin=None):
"""
The differences between consecutive elements of an array.
Parameters
----------
ary : array_like
If necessary, will be flattened before the differences are taken.
to_end : array_like, optional
Number(s) to append at the end of the returned differences.
to_begin : array_like, optional
Number(s) to prepend at the beginning of the returned differences.
Returns
-------
ediff1d : ndarray
The differences. Loosely, this is ``ary.flat[1:] - ary.flat[:-1]``.
See Also
--------
diff, gradient
Notes
-----
When applied to masked arrays, this function drops the mask information
if the `to_begin` and/or `to_end` parameters are used.
Examples
--------
>>> x = np.array([1, 2, 4, 7, 0])
>>> np.ediff1d(x)
array([ 1, 2, 3, -7])
>>> np.ediff1d(x, to_begin=-99, to_end=np.array([88, 99]))
array([-99, 1, 2, 3, -7, 88, 99])
The returned array is always 1D.
>>> y = [[1, 2, 4], [1, 6, 24]]
>>> np.ediff1d(y)
array([ 1, 2, -3, 5, 18])
"""
ary = np.asanyarray(ary).flat
ed = ary[1:] - ary[:-1]
arrays = [ed]
if to_begin is not None:
arrays.insert(0, to_begin)
if to_end is not None:
arrays.append(to_end)
if len(arrays) != 1:
        # We'll save ourselves a copy of a potentially large array in
        # the common case where neither to_begin nor to_end was given.
ed = np.hstack(arrays)
return ed
def unique(ar, return_index=False, return_inverse=False):
"""
Find the unique elements of an array.
Returns the sorted unique elements of an array. There are two optional
outputs in addition to the unique elements: the indices of the input array
that give the unique values, and the indices of the unique array that
reconstruct the input array.
Parameters
----------
ar : array_like
Input array. This will be flattened if it is not already 1-D.
return_index : bool, optional
If True, also return the indices of `ar` that result in the unique
array.
return_inverse : bool, optional
If True, also return the indices of the unique array that can be used
to reconstruct `ar`.
Returns
-------
unique : ndarray
The sorted unique values.
unique_indices : ndarray, optional
The indices of the first occurrences of the unique values in the
(flattened) original array. Only provided if `return_index` is True.
unique_inverse : ndarray, optional
The indices to reconstruct the (flattened) original array from the
unique array. Only provided if `return_inverse` is True.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Examples
--------
>>> np.unique([1, 1, 2, 2, 3, 3])
array([1, 2, 3])
>>> a = np.array([[1, 1], [2, 3]])
>>> np.unique(a)
array([1, 2, 3])
Return the indices of the original array that give the unique values:
>>> a = np.array(['a', 'b', 'b', 'c', 'a'])
>>> u, indices = np.unique(a, return_index=True)
>>> u
array(['a', 'b', 'c'],
dtype='|S1')
>>> indices
array([0, 1, 3])
>>> a[indices]
array(['a', 'b', 'c'],
dtype='|S1')
Reconstruct the input array from the unique values:
>>> a = np.array([1, 2, 6, 4, 2, 3, 2])
>>> u, indices = np.unique(a, return_inverse=True)
>>> u
array([1, 2, 3, 4, 6])
>>> indices
array([0, 1, 4, 3, 1, 2, 1])
>>> u[indices]
array([1, 2, 6, 4, 2, 3, 2])
"""
try:
ar = ar.flatten()
except AttributeError:
if not return_inverse and not return_index:
items = sorted(set(ar))
return np.asarray(items)
else:
ar = np.asanyarray(ar).flatten()
if ar.size == 0:
if return_inverse and return_index:
return ar, np.empty(0, np.bool), np.empty(0, np.bool)
elif return_inverse or return_index:
return ar, np.empty(0, np.bool)
else:
return ar
if return_inverse or return_index:
if return_index:
perm = ar.argsort(kind='mergesort')
else:
perm = ar.argsort()
aux = ar[perm]
flag = np.concatenate(([True], aux[1:] != aux[:-1]))
if return_inverse:
iflag = np.cumsum(flag) - 1
iperm = perm.argsort()
if return_index:
return aux[flag], perm[flag], iflag[iperm]
else:
return aux[flag], iflag[iperm]
else:
return aux[flag], perm[flag]
else:
ar.sort()
flag = np.concatenate(([True], ar[1:] != ar[:-1]))
return ar[flag]
def intersect1d(ar1, ar2, assume_unique=False):
"""
Find the intersection of two arrays.
Return the sorted, unique values that are in both of the input arrays.
Parameters
----------
ar1, ar2 : array_like
Input arrays.
assume_unique : bool
If True, the input arrays are both assumed to be unique, which
can speed up the calculation. Default is False.
Returns
-------
intersect1d : ndarray
Sorted 1D array of common and unique elements.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Examples
--------
>>> np.intersect1d([1, 3, 4, 3], [3, 1, 2, 1])
array([1, 3])
"""
if not assume_unique:
# Might be faster than unique( intersect1d( ar1, ar2 ) )?
ar1 = unique(ar1)
ar2 = unique(ar2)
aux = np.concatenate( (ar1, ar2) )
aux.sort()
return aux[:-1][aux[1:] == aux[:-1]]
def setxor1d(ar1, ar2, assume_unique=False):
"""
Find the set exclusive-or of two arrays.
Return the sorted, unique values that are in only one (not both) of the
input arrays.
Parameters
----------
ar1, ar2 : array_like
Input arrays.
assume_unique : bool
If True, the input arrays are both assumed to be unique, which
can speed up the calculation. Default is False.
Returns
-------
setxor1d : ndarray
Sorted 1D array of unique values that are in only one of the input
arrays.
Examples
--------
>>> a = np.array([1, 2, 3, 2, 4])
>>> b = np.array([2, 3, 5, 7, 5])
>>> np.setxor1d(a,b)
array([1, 4, 5, 7])
"""
if not assume_unique:
ar1 = unique(ar1)
ar2 = unique(ar2)
aux = np.concatenate( (ar1, ar2) )
if aux.size == 0:
return aux
aux.sort()
# flag = ediff1d( aux, to_end = 1, to_begin = 1 ) == 0
flag = np.concatenate( ([True], aux[1:] != aux[:-1], [True] ) )
# flag2 = ediff1d( flag ) == 0
flag2 = flag[1:] == flag[:-1]
return aux[flag2]
def in1d(ar1, ar2, assume_unique=False):
"""
Test whether each element of a 1-D array is also present in a second array.
Returns a boolean array the same length as `ar1` that is True
where an element of `ar1` is in `ar2` and False otherwise.
Parameters
----------
ar1 : (M,) array_like
Input array.
ar2 : array_like
The values against which to test each value of `ar1`.
assume_unique : bool, optional
If True, the input arrays are both assumed to be unique, which
can speed up the calculation. Default is False.
Returns
-------
in1d : (M,) ndarray, bool
The values `ar1[in1d]` are in `ar2`.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Notes
-----
`in1d` can be considered as an element-wise function version of the
python keyword `in`, for 1-D sequences. ``in1d(a, b)`` is roughly
equivalent to ``np.array([item in b for item in a])``.
.. versionadded:: 1.4.0
Examples
--------
>>> test = np.array([0, 1, 2, 5, 0])
>>> states = [0, 2]
>>> mask = np.in1d(test, states)
>>> mask
array([ True, False, True, False, True], dtype=bool)
>>> test[mask]
array([0, 2, 0])
"""
# Ravel both arrays, behavior for the first array could be different
ar1 = np.asarray(ar1).ravel()
ar2 = np.asarray(ar2).ravel()
# This code is significantly faster when the condition is satisfied.
if len(ar2) < 10 * len(ar1) ** 0.145:
mask = np.zeros(len(ar1), dtype=np.bool)
for a in ar2:
mask |= (ar1 == a)
return mask
# Otherwise use sorting
if not assume_unique:
ar1, rev_idx = np.unique(ar1, return_inverse=True)
ar2 = np.unique(ar2)
ar = np.concatenate( (ar1, ar2) )
# We need this to be a stable sort, so always use 'mergesort'
# here. The values from the first array should always come before
# the values from the second array.
order = ar.argsort(kind='mergesort')
sar = ar[order]
equal_adj = (sar[1:] == sar[:-1])
flag = np.concatenate( (equal_adj, [False] ) )
indx = order.argsort(kind='mergesort')[:len( ar1 )]
if assume_unique:
return flag[indx]
else:
return flag[indx][rev_idx]
def union1d(ar1, ar2):
"""
Find the union of two arrays.
Return the unique, sorted array of values that are in either of the two
input arrays.
Parameters
----------
ar1, ar2 : array_like
Input arrays. They are flattened if they are not already 1D.
Returns
-------
union1d : ndarray
Unique, sorted union of the input arrays.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Examples
--------
>>> np.union1d([-1, 0, 1], [-2, 0, 2])
array([-2, -1, 0, 1, 2])
"""
return unique( np.concatenate( (ar1, ar2) ) )
def setdiff1d(ar1, ar2, assume_unique=False):
"""
Find the set difference of two arrays.
Return the sorted, unique values in `ar1` that are not in `ar2`.
Parameters
----------
ar1 : array_like
Input array.
ar2 : array_like
Input comparison array.
assume_unique : bool
If True, the input arrays are both assumed to be unique, which
can speed up the calculation. Default is False.
Returns
-------
setdiff1d : ndarray
Sorted 1D array of values in `ar1` that are not in `ar2`.
See Also
--------
numpy.lib.arraysetops : Module with a number of other functions for
performing set operations on arrays.
Examples
--------
>>> a = np.array([1, 2, 3, 2, 4, 1])
>>> b = np.array([3, 4, 5, 6])
>>> np.setdiff1d(a, b)
array([1, 2])
"""
if not assume_unique:
ar1 = unique(ar1)
ar2 = unique(ar2)
aux = in1d(ar1, ar2, assume_unique=True)
if aux.size == 0:
return aux
else:
return np.asarray(ar1)[aux == 0]
|
gpl-3.0
| 5,377,771,176,711,761,000
| 26.843823
| 79
| 0.552772
| false
| 3.717032
| false
| false
| false
|
seed2014/kraken
|
kraken-panel/panel/models.py
|
1
|
2573
|
from django.db import models
from django.utils import timezone
from datetime import timedelta
# Create your models here.
class Bot(models.Model):
computer_name = models.CharField(max_length=100)
system = models.CharField(max_length=100)
node = models.CharField(max_length=100)
release = models.CharField(max_length=100)
version = models.CharField(max_length=100)
machine = models.CharField(max_length=100)
processor = models.CharField(max_length=100)
first_checkin = models.DateTimeField('first check-in')
last_checkin = models.DateTimeField('last check-in')
ip = models.CharField(max_length=16)
def __str__(self):
return "%s (%s %s)" % (self.computer_name, self.system, self.release)
def artifact_count(self):
return self.artifact_set.count()
def is_alive(self):
# return str(timezone.now())
return self.last_checkin > timezone.now() - timedelta(hours=3) - timedelta(minutes=5)
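    # Example (a sketch): with the three-hour window plus five minutes of slack
    # above, a bot whose last_checkin was two hours ago reads as alive, while
    # one last seen four hours ago does not.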
# class Hunt(models.Model):
# date_found = models.DateTimeField('date found')
# bot = models.ForeignKey(Bot)
# def __str__(self):
# return "%s found %s matches on %s" % (self.bot.computer_name, self.artifact_set.count(), self.date_found)
class Query(models.Model):
QUERY_TYPES = (('hash', 'Cryptographic hash'), ('ctph', 'Context-triggered piecewise hash'), ('fs-regex', 'Filesystem regular expression'))
type = models.CharField(max_length=50, choices=QUERY_TYPES)
body = models.CharField(max_length=200)
def __str__(self):
return "%s (%s)" % (self.body, self.get_type_display())
class Artifact(models.Model):
data = models.CharField(max_length=200)
original_query = models.ForeignKey(Query)
bot = models.ForeignKey(Bot)
last_spotted = models.DateTimeField('last spotted')
def __str__(self):
return "%s" % (self.data)
def get_query_body(self):
return self.original_query.body
class Command(models.Model):
COMMAND_TYPES = (('regget', 'Retrieve arbitrary registry key'), ('regfind','Locate registry key'), ('ramdump', 'Dump volatile memory'), ('getfile', "Retrieve arbitrary file"), ('getfileenc', "Retrieve arbitrary file (encrypted)"))
RESULTS = ((0, 'Unknown'), (1, 'Success'), (-1, 'Error'))
type = models.CharField(max_length=50, choices=COMMAND_TYPES)
target = models.ForeignKey(Bot)
body = models.CharField(max_length=300)
done = models.BooleanField(default=False)
data = models.TextField(default="", null=True, blank=True)
def __str__(self):
return "%s on %s" % (self.get_type_display(), self.target)
class Config(models.Model):
key = models.CharField(max_length=50)
value = models.CharField(max_length=200)
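# Hedged shell sketch (assumes a configured Django project; not part of the
# original file):
#   bot = Bot.objects.create(computer_name='lab-01', system='Linux', node='lab-01',
#                            release='3.13', version='#1 SMP', machine='x86_64',
#                            processor='x86_64', first_checkin=timezone.now(),
#                            last_checkin=timezone.now(), ip='10.0.0.5')
#   bot.is_alive()  # True immediately after a check-in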
|
gpl-2.0
| 5,171,206,282,541,833,000
| 31.175
| 231
| 0.710455
| false
| 3.172626
| false
| false
| false
|
ForestClaw/forestclaw
|
applications/geoclaw/tohoku/make_plots.py
|
1
|
11633
|
"""
Set up the plot figures, axes, and items to be done for each frame.
This module is imported by the plotting routines and then the
function setplot is called to set the plot parameters.
"""
#--------------------------
def setplot(plotdata):
#--------------------------
"""
Specify what is to be plotted at each frame.
Input: plotdata, an instance of clawpack.visclaw.data.ClawPlotData.
Output: a modified version of plotdata.
"""
from clawpack.visclaw import colormaps, geoplot
from numpy import linspace
plotdata.clearfigures() # clear any old figures,axes,items data
# To plot gauge locations on pcolor or contour plot, use this as
    # an afteraxes function:
def addgauges(current_data):
from clawpack.visclaw import gaugetools
gaugetools.plot_gauge_locations(current_data.plotdata, \
gaugenos='all', format_string='ko', add_labels=False)
def fixup(current_data):
import pylab
addgauges(current_data)
t = current_data.t
t = t / 3600. # hours
pylab.title('Surface at %4.2f hours' % t, fontsize=20)
#pylab.xticks(fontsize=15)
#pylab.yticks(fontsize=15)
#-----------------------------------------
# Figure for imshow plot
#-----------------------------------------
plotfigure = plotdata.new_plotfigure(name='Domain', figno=1)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes('imshow')
plotaxes.title = 'Surface'
plotaxes.scaled = True
plotaxes.afteraxes = fixup
# Water
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
# plotitem.plot_var = geoplot.surface
plotitem.plot_var = geoplot.surface_or_depth
plotitem.imshow_cmap = geoplot.tsunami_colormap
plotitem.imshow_cmin = -0.5
plotitem.imshow_cmax = 0.5
plotitem.add_colorbar = True
plotitem.amr_celledges_show = [0,0,0]
plotitem.patchedges_show = 0
#plotitem.amr_patchedges_show = [1,1,1,0,0] # only coarse levels
# Land
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
plotitem.plot_var = geoplot.land
plotitem.imshow_cmap = geoplot.land_colors
plotitem.imshow_cmin = 0.0
plotitem.imshow_cmax = 100.0
plotitem.add_colorbar = False
plotitem.amr_celledges_show = [0,0,0]
plotitem.patchedges_show = 0
#plotitem.amr_patchedges_show = [1,1,1,0,0] # only coarse levels
plotaxes.xlimits = 'auto'
plotaxes.ylimits = 'auto'
# add contour lines of bathy if desired:
plotitem = plotaxes.new_plotitem(plot_type='2d_contour')
plotitem.show = False
plotitem.plot_var = geoplot.topo
plotitem.contour_levels = linspace(-2000,0,5)
plotitem.amr_contour_colors = ['y'] # color on each level
plotitem.kwargs = {'linestyles':'solid','linewidths':2}
plotitem.amr_contour_show = [1,0,0]
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
#-----------------------------------------
# Figure for zoom plot
#-----------------------------------------
plotfigure = plotdata.new_plotfigure(name='Maui', figno=2)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes('imshow')
plotaxes.title = 'Surface'
plotaxes.scaled = True
plotaxes.afteraxes = fixup
# Water
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
# plotitem.plot_var = geoplot.surface
plotitem.plot_var = geoplot.surface_or_depth
plotitem.imshow_cmap = geoplot.tsunami_colormap
plotitem.imshow_cmin = -1.
plotitem.imshow_cmax = 1.
plotitem.add_colorbar = True
plotitem.amr_celledges_show = [0,0,0]
plotitem.patchedges_show = 0
# Land
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
plotitem.plot_var = geoplot.land
plotitem.imshow_cmap = geoplot.land_colors
plotitem.imshow_cmin = 0.0
plotitem.imshow_cmax = 100.0
plotitem.add_colorbar = False
plotitem.amr_celledges_show = [0,0,0]
plotitem.patchedges_show = 0
plotaxes.xlimits = [203.2, 204.1]
plotaxes.ylimits = [20.4, 21.3]
# add contour lines of bathy if desired:
plotitem = plotaxes.new_plotitem(plot_type='2d_contour')
plotitem.show = False
plotitem.plot_var = geoplot.topo
plotitem.contour_levels = linspace(-2000,0,5)
plotitem.amr_contour_colors = ['y'] # color on each level
plotitem.kwargs = {'linestyles':'solid','linewidths':2}
plotitem.amr_contour_show = [1,0,0]
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
#-----------------------------------------
# Figure for zoom plot
#-----------------------------------------
plotfigure = plotdata.new_plotfigure(name='Kahului Harbor', figno=3)
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes('imshow')
plotaxes.title = 'Surface'
plotaxes.scaled = True
plotaxes.afteraxes = fixup
# Water
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
# plotitem.plot_var = geoplot.surface
plotitem.plot_var = geoplot.surface_or_depth
plotitem.imshow_cmap = geoplot.tsunami_colormap
plotitem.imshow_cmin = -0.2
plotitem.imshow_cmax = 0.2
plotitem.add_colorbar = True
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
# Land
plotitem = plotaxes.new_plotitem(plot_type='2d_imshow')
plotitem.plot_var = geoplot.land
plotitem.imshow_cmap = geoplot.land_colors
plotitem.imshow_cmin = 0.0
plotitem.imshow_cmax = 10.0
plotitem.add_colorbar = False
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
plotaxes.xlimits = [203.48, 203.57]
plotaxes.ylimits = [20.88, 20.94]
# add contour lines of bathy if desired:
plotitem = plotaxes.new_plotitem(plot_type='2d_contour')
plotitem.show = False
plotitem.plot_var = geoplot.topo
#plotitem.contour_levels = linspace(-2000,0,5)
plotitem.contour_levels = linspace(0,8,9)
plotitem.amr_contour_colors = ['y'] # color on each level
plotitem.kwargs = {'linestyles':'solid','linewidths':2}
plotitem.amr_contour_show = [0,0,0,0,0,1]
plotitem.celledges_show = 0
plotitem.patchedges_show = 0
#-----------------------------------------
# Figures for gauges
#-----------------------------------------
plotfigure = plotdata.new_plotfigure(name='Surface', figno=300, \
type='each_gauge')
plotfigure.clf_each_gauge = True
# Set up for axes in this figure:
plotaxes = plotfigure.new_plotaxes()
#plotaxes.axescmd = 'subplot(2,1,1)'
plotaxes.title = 'Surface'
# Plot surface as blue curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.plot_var = 3
plotitem.plotstyle = 'b-'
plotitem.kwargs = {'linewidth':2}
# Plot topo as green curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.show = False
def gaugetopo(current_data):
q = current_data.q
h = q[0,:]
eta = q[3,:]
topo = eta - h
return topo
plotitem.plot_var = gaugetopo
plotitem.plotstyle = 'g-'
def add_zeroline(current_data):
from pylab import plot, legend, xticks, floor, xlim,ylim
t = current_data.t
#legend(('surface','topography'),loc='lower left')
plot(t, 0*t, 'k')
#n = int(floor(t.max()/1800.)) + 2
#xticks([1800*i for i in range(n)],[str(0.5*i) for i in range(n)])
#xlim(25000,t.max())
#ylim(-0.5,0.5)
print("+++ gaugeno = ",current_data.gaugeno)
def add_legend_eta(current_data):
from pylab import legend
legend(('Surface'),loc='lower left')
add_zeroline(current_data)
plotaxes.ylimits = [-2.5, 2.5]
plotaxes.afteraxes = add_zeroline
plotfigure = plotdata.new_plotfigure(name='Velocities', figno=301, \
type='each_gauge')
plotfigure.clf_each_gauge = True
plotaxes = plotfigure.new_plotaxes()
#plotaxes.axescmd = 'subplot(2,1,2)'
plotaxes.title = 'Velocities'
plotaxes.afteraxes = add_zeroline
# Plot velocity as red curve:
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
plotitem.show = True
def speed(current_data):
from numpy import where, sqrt
h = current_data.q[0,:]
h = where(h>0.01, h, 1.e6)
u = 100. * current_data.q[1,:] / h
v = 100. * current_data.q[2,:] / h
s = sqrt(u**2 + v**2)
return s
plotitem.plot_var = speed
plotitem.plotstyle = 'k-'
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
def uvel(current_data):
from numpy import where, sqrt
h = current_data.q[0,:]
h = where(h>0.01, h, 1.e6)
u = 100. * current_data.q[1,:] / h
return u
plotitem.plot_var = uvel
plotitem.plotstyle = 'r-'
plotitem.kwargs = {'linewidth':2}
plotitem = plotaxes.new_plotitem(plot_type='1d_plot')
def vvel(current_data):
from numpy import where, sqrt
h = current_data.q[0,:]
h = where(h>0.01, h, 1.e6)
v = 100. * current_data.q[2,:] / h
return v
plotitem.plot_var = vvel
plotitem.plotstyle = 'g-'
plotitem.kwargs = {'linewidth':2}
def add_legend_vel(current_data):
from pylab import legend
# legend(["u","v"],'upper left')
legend(['Speed','uvel','vvel'],loc='upper left')
add_zeroline(current_data)
plotaxes.ylimits = [-50,50]
plotaxes.afteraxes = add_legend_vel
#-----------------------------------------
# Plots of timing (CPU and wall time):
def make_timing_plots(plotdata):
from clawpack.visclaw import plot_timing_stats
import os,sys
try:
timing_plotdir = plotdata.plotdir + '/_timing_figures'
os.system('mkdir -p %s' % timing_plotdir)
# adjust units for plots based on problem:
units = {'comptime':'seconds', 'simtime':'hours',
'cell':'millions'}
plot_timing_stats.make_plots(outdir=plotdata.outdir,
make_pngs=True,
plotdir=timing_plotdir,
units=units)
except:
print('*** Error making timing plots')
otherfigure = plotdata.new_otherfigure(name='timing plots',
fname='_timing_figures/timing.html')
otherfigure.makefig = make_timing_plots
#-----------------------------------------
# Parameters used only when creating html and/or latex hardcopy
# e.g., via clawpack.visclaw.frametools.printframes:
plotdata.printfigs = True # print figures
plotdata.print_format = 'png' # file format
plotdata.print_framenos = 'all' # list of frames to print
plotdata.print_gaugenos = 'all' # list of gauges to print
plotdata.print_fignos = [1,2,3,300,301] # list of figures to print
plotdata.html = True # create html files of plots?
plotdata.html_homelink = '../README.html' # pointer for top of index
plotdata.latex = False # create latex file of plots?
plotdata.latex_figsperline = 2 # layout of plots
plotdata.latex_framesperline = 1 # layout of plots
plotdata.latex_makepdf = False # also run pdflatex?
plotdata.parallel = False
return plotdata
if __name__=="__main__":
from clawpack.visclaw.plotclaw import plotclaw
plotclaw(outdir='.',setplot=setplot,plotdir='_plots',format='forestclaw')
|
bsd-2-clause
| -9,101,027,784,213,857,000
| 32.621387
| 81
| 0.600963
| false
| 3.181893
| false
| false
| false
|
better-dem/box_classify
|
specify_rect.py
|
1
|
2299
|
#!/usr/local/bin/python3.6
import tkinter as tk
from tkinter import messagebox as mb
from PIL import Image, ImageTk
class SelectRegionApp(tk.Tk):
def __init__(self, image_filename, image_resize, result):
tk.Tk.__init__(self)
self.result_dict = result
self.x = self.y = 0
im = Image.open(image_filename)
        if image_resize is not None:
im = im.resize(image_resize)
self.tk_im = ImageTk.PhotoImage(im)
self.label = tk.Label(self, text="Select a Rectangle To Extract")
self.label.pack(side="top")
self.canvas = tk.Canvas(self, width=self.tk_im.width(), height=self.tk_im.height(), cursor="cross")
self.canvas.pack(side="top", fill="both", expand=True)
self.canvas.bind("<ButtonPress-1>", self.on_button_press)
self.canvas.bind("<B1-Motion>", self.on_move_press)
self.canvas.bind("<ButtonRelease-1>", self.on_button_release)
self.rect = None
self.start_x = None
self.start_y = None
self.canvas.create_image(0,0,anchor="nw",image=self.tk_im)
self.button = tk.Button(self, text="DONE", command=self.done)
self.button.pack(side="bottom")
def done(self):
if self.start_x is None:
mb.showwarning("warning","you need to drag a rectangle over the region you want to extract before continuing")
else:
self.result_dict["rect"] = self.canvas.coords(self.rect)
self.destroy()
def on_button_press(self, event):
        if self.rect is not None:
self.canvas.delete(self.rect)
# save mouse drag start position
self.start_x = event.x
self.start_y = event.y
        # create the rectangle if it does not exist yet
#if not self.rect:
self.rect = self.canvas.create_rectangle(self.x, self.y, 1, 1, fill="")
def on_move_press(self, event):
curX, curY = (event.x, event.y)
# expand rectangle as you drag the mouse
self.canvas.coords(self.rect, self.start_x, self.start_y, curX, curY)
def on_button_release(self, event):
pass
def select_rectangle(image_filename, image_resize=None):
ans = dict()
app = SelectRegionApp(image_filename, image_resize, ans)
app.mainloop()
return ans['rect']
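# Hedged usage sketch (the file name is an assumption; not part of the
# original script):
#   coords = select_rectangle('screenshot.png', image_resize=(800, 600))
#   x0, y0, x1, y1 = coords  # canvas coordinates of the dragged rectangle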
|
gpl-3.0
| -4,937,644,953,297,175,000
| 32.808824
| 122
| 0.618965
| false
| 3.499239
| false
| false
| false
|
Vykstorm/Othello
|
bots.py
|
1
|
3184
|
#!/usr/bin/python
# -*- coding: iso8859-1 -*-
# Author: Víctor Ruiz Gómez
# Description: This script defines several bots that act as players
# of the game Othello.
from game2 import Player
from random import choice
from minmax import MinMax, MinMaxAlphaBeta
from othello import OthelloEval, OthelloEvalDiffPiezas, OthelloEvalComplex
# The following bot selects a move at random from the set of
# possible moves it can make.
class BotPlayerRandom(Player):
def play(self, game, opp_move):
		# Get the set of possible moves.
moves = game.next_moves()
if len(moves) == 0:
return None
		# Pick one at random.
return choice(moves)
def __repr__(self):
return 'Bot Aleatorio'
# The following bot selects the move that captures the most pieces.
class BotPlayerMaxFeed(Player):
def play(self, game, opp_move):
moves = game.next_moves()
if len(moves) == 0:
return None
best_move = moves[0]
max_pieces_eat = abs(game.transform(best_move).score() - game.score())
for i in range(1,len(moves)):
move = moves[i]
pieces_eat = abs(game.transform(move).score() - game.score())
if pieces_eat > max_pieces_eat:
max_pieces_eat = pieces_eat
best_move = move
return best_move
def __repr__(self):
return 'Bot mejor dif. Piezas'
# The following bot uses the MinMax algorithm to select the next move,
# using the piece difference between MIN and MAX as the static evaluation function.
class BotPlayerMinMax(Player):
	# Initialises the instance. The maximum search depth for the MinMax
	# algorithm can be given as a parameter.
def __init__(self, max_deep, static_eval = None):
if static_eval is None:
static_eval = OthelloEvalDiffPiezas()
self.max_deep = max_deep
self.static_eval = static_eval
def get_static_eval(self):
return self.static_eval
def play(self, game, opp_move):
if len(game.next_moves()) == 0:
return None
minmax = MinMax(game, self.get_static_eval(), self.max_deep)
best_move = minmax()
return best_move
def __repr__(self):
return 'Bot min-max sin poda'
# Same as the previous one, except that it uses the Min-Max algorithm with alpha-beta pruning.
class BotPlayerMinMaxAlphaBeta(BotPlayerMinMax):
def __init__(self, max_deep):
BotPlayerMinMax.__init__(self, max_deep)
def play(self, game, opp_move):
if len(game.next_moves()) == 0:
return None
minmax = MinMaxAlphaBeta(game, self.get_static_eval(), self.max_deep)
best_move = minmax()
return best_move
def __repr__(self):
return 'Bot min-max con poda'
# This last bot uses the MinMax algorithm with alpha-beta pruning, with a
# static evaluation function that takes stable board positions (edges and
# corners) into account.
class BotPlayerComplex(BotPlayerMinMax):
def __init__(self, max_deep):
BotPlayerMinMax.__init__(self, max_deep, OthelloEvalComplex())
def play(self, game, opp_move):
if len(game.next_moves()) == 0:
return None
minmax = MinMaxAlphaBeta(game, self.get_static_eval(), self.max_deep)
best_move = minmax()
return best_move
def __repr__(self):
return 'Bot min-max con poda y mejorado'
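# Hedged usage sketch (the game2/othello APIs are assumed from the imports
# above; not part of the original file):
#   bot_a = BotPlayerRandom()
#   bot_b = BotPlayerMinMaxAlphaBeta(max_deep=4)
#   # Each bot's play(game, opp_move) returns a move for the current position,
#   # or None when no legal move is available.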
|
mit
| -6,708,081,805,361,755,000
| 28.915094
| 85
| 0.712709
| false
| 2.776708
| false
| false
| false
|
NetApp/manila
|
manila/tests/api/views/test_share_networks.py
|
1
|
8974
|
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
from manila.api.views import share_networks
from manila import test
from manila.tests.api import fakes
@ddt.ddt
class ViewBuilderTestCase(test.TestCase):
def setUp(self):
super(ViewBuilderTestCase, self).setUp()
self.builder = share_networks.ViewBuilder()
def test__collection_name(self):
self.assertEqual('share_networks', self.builder._collection_name)
@ddt.data(
{'id': 'fake_sn_id', 'name': 'fake_sn_name'},
{'id': 'fake_sn_id', 'name': 'fake_sn_name', 'fake_extra_key': 'foo'},
)
def test_build_share_network_v_2_18(self, sn):
req = fakes.HTTPRequest.blank('/share-networks', version="2.18")
expected_keys = (
'id', 'name', 'project_id', 'created_at', 'updated_at',
'neutron_net_id', 'neutron_subnet_id', 'nova_net_id',
'network_type', 'segmentation_id', 'cidr', 'ip_version',
'gateway', 'description')
result = self.builder.build_share_network(req, sn)
self.assertEqual(1, len(result))
self.assertIn('share_network', result)
self.assertEqual(sn['id'], result['share_network']['id'])
self.assertEqual(sn['name'], result['share_network']['name'])
self.assertEqual(len(expected_keys), len(result['share_network']))
for key in expected_keys:
self.assertIn(key, result['share_network'])
@ddt.data(
[],
[dict(id='fake_id',
name='fake_name',
project_id='fake_project_id',
created_at='fake_created_at',
updated_at='fake_updated_at',
neutron_net_id='fake_neutron_net_id',
neutron_subnet_id='fake_neutron_subnet_id',
nova_net_id='fake_nova_net_id',
network_type='fake_network_type',
segmentation_id='fake_segmentation_id',
cidr='fake_cidr',
ip_version='fake_ip_version',
gateway='fake_gateway',
description='fake_description'),
dict(id='fake_id2', name='fake_name2')],
)
def test_build_share_networks_with_details_v_2_18(self, share_networks):
req = fakes.HTTPRequest.blank('/share-networks', version="2.18")
expected = []
for share_network in share_networks:
expected.append(dict(
id=share_network.get('id'),
name=share_network.get('name'),
project_id=share_network.get('project_id'),
created_at=share_network.get('created_at'),
updated_at=share_network.get('updated_at'),
neutron_net_id=share_network.get('neutron_net_id'),
neutron_subnet_id=share_network.get('neutron_subnet_id'),
nova_net_id=share_network.get('nova_net_id'),
network_type=share_network.get('network_type'),
segmentation_id=share_network.get('segmentation_id'),
cidr=share_network.get('cidr'),
ip_version=share_network.get('ip_version'),
gateway=share_network.get('gateway'),
description=share_network.get('description')))
expected = {'share_networks': expected}
result = self.builder.build_share_networks(
req, share_networks, True)
self.assertEqual(expected, result)
@ddt.data(
[],
[{'id': 'foo', 'name': 'bar'}],
[{'id': 'id1', 'name': 'name1'}, {'id': 'id2', 'name': 'name2'}],
[{'id': 'id1', 'name': 'name1'},
{'id': 'id2', 'name': 'name2', 'fake': 'I should not be returned'}],
)
def test_build_share_networks_without_details_v_2_18(self,
share_networks):
req = fakes.HTTPRequest.blank('/share-networks', version="2.18")
expected = []
for share_network in share_networks:
expected.append(dict(
id=share_network.get('id'), name=share_network.get('name')))
expected = {'share_networks': expected}
result = self.builder.build_share_networks(
req, share_networks, False)
self.assertEqual(expected, result)
@ddt.data(
{'id': 'fake_sn_id', 'name': 'fake_sn_name'},
{'id': 'fake_sn_id', 'name': 'fake_sn_name', 'fake_extra_key': 'foo'},
)
def test_build_share_network_v_2_20(self, sn):
req = fakes.HTTPRequest.blank('/share-networks', version="2.20")
expected_keys = (
'id', 'name', 'project_id', 'created_at', 'updated_at',
'neutron_net_id', 'neutron_subnet_id', 'nova_net_id',
'network_type', 'segmentation_id', 'cidr', 'ip_version',
'gateway', 'description', 'mtu')
result = self.builder.build_share_network(req, sn)
self.assertEqual(1, len(result))
self.assertIn('share_network', result)
self.assertEqual(sn['id'], result['share_network']['id'])
self.assertEqual(sn['name'], result['share_network']['name'])
self.assertEqual(len(expected_keys), len(result['share_network']))
for key in expected_keys:
self.assertIn(key, result['share_network'])
for key in result['share_network']:
self.assertIn(key, expected_keys)
@ddt.data(
[], [{
'id': 'fake_id',
'name': 'fake_name',
'project_id': 'fake_project_id',
'created_at': 'fake_created_at',
'updated_at': 'fake_updated_at',
'neutron_net_id': 'fake_neutron_net_id',
'neutron_subnet_id': 'fake_neutron_subnet_id',
'nova_net_id': 'fake_nova_net_id',
'network_type': 'fake_network_type',
'segmentation_id': 'fake_segmentation_id',
'cidr': 'fake_cidr',
'ip_version': 'fake_ip_version',
'gateway': 'fake_gateway',
'description': 'fake_description',
'mtu': 1509
},
{
'id': 'fake_id2',
'name': 'fake_name2'
}],
)
def test_build_share_networks_with_details_v_2_20(self, share_networks):
req = fakes.HTTPRequest.blank('/share-networks', version="2.20")
expected = []
for share_network in share_networks:
expected.append({
'id': share_network.get('id'),
'name': share_network.get('name'),
'project_id': share_network.get('project_id'),
'created_at': share_network.get('created_at'),
'updated_at': share_network.get('updated_at'),
'neutron_net_id': share_network.get('neutron_net_id'),
'neutron_subnet_id': share_network.get('neutron_subnet_id'),
'nova_net_id': share_network.get('nova_net_id'),
'network_type': share_network.get('network_type'),
'segmentation_id': share_network.get('segmentation_id'),
'cidr': share_network.get('cidr'),
'ip_version': share_network.get('ip_version'),
'gateway': share_network.get('gateway'),
'description': share_network.get('description'),
'mtu': share_network.get('mtu'),
})
expected = {'share_networks': expected}
result = self.builder.build_share_networks(
req, share_networks, True)
self.assertEqual(expected, result)
@ddt.data(
[],
[{'id': 'foo', 'name': 'bar'}],
[{'id': 'id1', 'name': 'name1'}, {'id': 'id2', 'name': 'name2'}],
[{'id': 'id1', 'name': 'name1'},
{'id': 'id2', 'name': 'name2', 'fake': 'I should not be returned'}],
)
def test_build_share_networks_without_details_v_2_20(self,
share_networks):
req = fakes.HTTPRequest.blank('/share-networks', version="2.20")
expected = []
for share_network in share_networks:
expected.append({
'id': share_network.get('id'),
'name': share_network.get('name')
})
expected = {'share_networks': expected}
result = self.builder.build_share_networks(
req, share_networks, False)
self.assertEqual(expected, result)
|
apache-2.0
| -7,470,150,884,323,348,000
| 40.546296
| 78
| 0.551036
| false
| 3.823605
| true
| false
| false
|
googleapis/googleapis-gen
|
google/cloud/osconfig/agentendpoint/v1/osconfig-agentendpoint-v1-py/scripts/fixup_agentendpoint_v1_keywords.py
|
1
|
6593
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
predicate: Callable[[Any], bool],
iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
"""A stable, out-of-place partition."""
results = ([], [])
for i in iterator:
results[int(predicate(i))].append(i)
# Returns trueList, falseList
return results[1], results[0]
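# Example (a sketch, not part of the original script):
#   partition(lambda x: x % 2 == 0, [1, 2, 3, 4])  # -> ([2, 4], [1, 3])
# The "true" list comes first and the input order is preserved.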
class agentendpointCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
'receive_task_notification': ('instance_id_token', 'agent_version', ),
'register_agent': ('instance_id_token', 'agent_version', 'supported_capabilities', ),
'report_inventory': ('instance_id_token', 'inventory_checksum', 'inventory', ),
'report_task_complete': ('instance_id_token', 'task_id', 'task_type', 'error_message', 'apply_patches_task_output', 'exec_step_task_output', 'apply_config_task_output', ),
'report_task_progress': ('instance_id_token', 'task_id', 'task_type', 'apply_patches_task_progress', 'exec_step_task_progress', 'apply_config_task_progress', ),
'start_next_task': ('instance_id_token', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
try:
key = original.func.attr.value
kword_params = self.METHOD_TO_PARAMS[key]
except (AttributeError, KeyError):
# Either not a method from the API or too convoluted to be sure.
return updated
# If the existing code is valid, keyword args come after positional args.
# Therefore, all positional args must map to the first parameters.
args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
if any(k.keyword.value == "request" for k in kwargs):
# We've already fixed this file, don't fix it again.
return updated
kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
kwargs
)
args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
request_arg = cst.Arg(
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
)
# Note: the args + kwargs looks silly, but keep in mind that
# the control parameters had to be stripped out, and that
# those could have been passed positionally or by keyword.
for name, arg in zip(kword_params, args + kwargs)]),
keyword=cst.Name("request")
)
return updated.with_changes(
args=[request_arg] + ctrl_kwargs
)
def fix_files(
in_dir: pathlib.Path,
out_dir: pathlib.Path,
*,
transformer=agentendpointCallTransformer(),
):
"""Duplicate the input dir to the output dir, fixing file method calls.
Preconditions:
* in_dir is a real directory
* out_dir is a real, empty directory
"""
pyfile_gen = (
pathlib.Path(os.path.join(root, f))
for root, _, files in os.walk(in_dir)
for f in files if os.path.splitext(f)[1] == ".py"
)
for fpath in pyfile_gen:
with open(fpath, 'r') as f:
src = f.read()
# Parse the code and insert method call fixes.
tree = cst.parse_module(src)
updated = tree.visit(transformer)
# Create the path and directory structure for the new file.
updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
updated_path.parent.mkdir(parents=True, exist_ok=True)
# Generate the updated source file at the corresponding path.
with open(updated_path, 'w') as f:
f.write(updated.code)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="""Fix up source that uses the agentendpoint client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
parser.add_argument(
'-d',
'--input-directory',
required=True,
dest='input_dir',
help='the input directory to walk for python files to fix up',
)
parser.add_argument(
'-o',
'--output-directory',
required=True,
dest='output_dir',
help='the directory to output files fixed via un-flattening',
)
args = parser.parse_args()
input_dir = pathlib.Path(args.input_dir)
output_dir = pathlib.Path(args.output_dir)
if not input_dir.is_dir():
print(
f"input directory '{input_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if not output_dir.is_dir():
print(
f"output directory '{output_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if os.listdir(output_dir):
print(
f"output directory '{output_dir}' is not empty",
file=sys.stderr,
)
sys.exit(-1)
fix_files(input_dir, output_dir)
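# Example invocation (paths are hypothetical):
#   python fixup_agentendpoint_v1_keywords.py -d ./old_src -o ./new_src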
|
apache-2.0
| -4,318,478,684,794,081,000
| 35.425414
| 181
| 0.62263
| false
| 3.878235
| false
| false
| false
|
SmartDeveloperHub/sdh-curator
|
sdh/curator/actions/ext/enrichment.py
|
1
|
10998
|
"""
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
This file is part of the Smart Developer Hub Project:
http://www.smartdeveloperhub.org
Center for Open Middleware
http://www.centeropenmiddleware.com/
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Copyright (C) 2015 Center for Open Middleware.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
"""
import logging
import uuid
from datetime import datetime
import base64
from agora.client.execution import AGORA
from sdh.curator.actions.core.fragment import FragmentRequest, FragmentAction, FragmentResponse, FragmentSink
from sdh.curator.actions.core import CURATOR, TYPES, RDF, XSD, FOAF
from sdh.curator.actions.core.utils import CGraph
from rdflib import BNode, Literal, URIRef, RDFS
from sdh.curator.store import r
from sdh.curator.actions.core.delivery import CURATOR_UUID
from sdh.curator.daemons.fragment import FragmentPlugin
from sdh.curator.store.triples import cache
import shortuuid
__author__ = 'Fernando Serena'
log = logging.getLogger('sdh.curator.actions.enrichment')
def get_fragment_enrichments(fid):
return [EnrichmentData(eid) for eid in r.smembers('fragments:{}:enrichments'.format(fid))]
def generate_enrichment_hash(target, links):
links = '|'.join(sorted([str(pr) for (pr, _) in links]))
eid = base64.b64encode('~'.join([target, links]))
return eid
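# Worked example (illustrative values only, not part of the original module):
# for target 'a' and links [('b', v)] the joined string is 'a~b', and
# base64.b64encode('a~b') == 'YX5i'.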
def register_enrichment(pipe, fid, target, links):
e_hash = generate_enrichment_hash(target, links)
if not r.sismember('enrichments', e_hash):
eid = shortuuid.uuid()
enrichment_data = EnrichmentData(eid, fid, target, links)
enrichment_data.save(pipe)
pipe.sadd('enrichments', e_hash)
pipe.set('map:enrichments:{}'.format(e_hash), eid)
else:
eid = r.get('map:enrichments:{}'.format(e_hash))
return eid
class EnrichmentData(object):
def __init__(self, eid, fid=None, target=None, links=None):
if eid is None:
raise ValueError('Cannot create an enrichment data object without an identifier')
self.links = links
self.target = target
self.fragment_id = fid
self.enrichment_id = eid
self._enrichment_key = 'enrichments:{}'.format(self.enrichment_id)
if not any([fid, target, links]):
self.load()
def save(self, pipe):
pipe.hset('{}'.format(self._enrichment_key), 'target', self.target)
pipe.hset('{}'.format(self._enrichment_key), 'fragment_id', self.fragment_id)
pipe.sadd('fragments:{}:enrichments'.format(self.fragment_id), self.enrichment_id)
pipe.sadd('{}:links'.format(self._enrichment_key), *self.links)
pipe.hmset('{}:links:status'.format(self._enrichment_key),
dict((pr, False) for (pr, _) in self.links))
def load(self):
dict_fields = r.hgetall(self._enrichment_key)
self.target = URIRef(dict_fields.get('target', None))
self.fragment_id = dict_fields.get('fragment_id', None)
self.links = map(lambda (link, v): (URIRef(link), v), [eval(pair_str) for pair_str in
r.smembers('{}:links'.format(
self._enrichment_key))])
def set_link(self, link):
with r.pipeline(transaction=True) as p:
p.multi()
p.hset('{}:links:status'.format(self._enrichment_key), str(link), True)
p.execute()
@property
def completed(self):
return all([eval(value) for value in r.hgetall('{}:links:status'.format(self._enrichment_key)).values()])
class EnrichmentPlugin(FragmentPlugin):
@property
def sink_class(self):
return EnrichmentSink
def sink_aware(self):
return False
def consume(self, fid, (c, s, p, o), graph, *args):
enrichments = get_fragment_enrichments(fid)
for e in enrichments:
var_candidate = list(graph.objects(c, AGORA.subject))[0]
if (var_candidate, RDF.type, AGORA.Variable) in graph:
target = e.target
links = dict(map(lambda (l, v): (v, l), e.links))
var_label = str(list(graph.objects(var_candidate, RDFS.label))[0])
if var_label in links:
link = links[var_label]
if (target, link, s) not in cache.get_context('#enrichment'):
e.set_link(link)
cache.get_context('#enrichment').add((target, link, s))
print u'{} {} {} .'.format(target.n3(), link.n3(graph.namespace_manager), s.n3())
def complete(self, fid, *args):
# TODO: check if all links are set
pass
FragmentPlugin.register(EnrichmentPlugin)
class EnrichmentRequest(FragmentRequest):
def __init__(self):
super(EnrichmentRequest, self).__init__()
self._target_resource = None
self._target_links = set([])
def _extract_content(self):
super(EnrichmentRequest, self)._extract_content()
q_res = self._graph.query("""SELECT ?node ?t WHERE {
?node a curator:EnrichmentRequest;
curator:targetResource ?t
}""")
q_res = list(q_res)
if len(q_res) != 1:
raise SyntaxError('Invalid enrichment request')
request_fields = q_res.pop()
if not all(request_fields):
raise ValueError('Missing fields for enrichment request')
if request_fields[0] != self._request_node:
raise SyntaxError('Request node does not match')
(self._target_resource,) = request_fields[1:]
log.debug("""Parsed attributes of an enrichment request:
-target resource: {}""".format(self._target_resource))
target_pattern = self._graph.predicate_objects(self._target_resource)
for (pr, req_object) in target_pattern:
if (req_object, RDF.type, CURATOR.Variable) in self._graph:
self._target_links.add((pr, req_object))
enrich_properties = set([pr for (pr, _) in self._target_links])
if not enrich_properties:
raise ValueError('There is nothing to enrich')
log.debug(
'<{}> is requested to be enriched with values for the following properties:\n{}'.format(
self._target_resource,
'\n'.join(enrich_properties)))
@property
def target_resource(self):
return self._target_resource
@property
def target_links(self):
return self._target_links.copy()
class EnrichmentAction(FragmentAction):
def __init__(self, message):
self.__request = EnrichmentRequest()
self.__sink = EnrichmentSink()
super(EnrichmentAction, self).__init__(message)
@property
def sink(self):
return self.__sink
@classmethod
def response_class(cls):
return EnrichmentResponse
@property
def request(self):
return self.__request
def submit(self):
try:
super(EnrichmentAction, self).submit()
except Exception as e:
log.debug('Bad request: {}'.format(e.message))
self._reply_failure(e.message)
class EnrichmentSink(FragmentSink):
def _remove(self, pipe):
pipe.srem('enrichments', self._request_id)
super(FragmentSink, self)._remove(pipe)
def __init__(self):
super(EnrichmentSink, self).__init__()
self.__target_links = None
self.__target_resource = None
self._enrichment_id = None
self._enrichment_data = None
def _save(self, action):
super(EnrichmentSink, self)._save(action)
variable_links = [(str(pr), self.map(self._variables_dict[v])) for (pr, v) in action.request.target_links]
enrichment_id = register_enrichment(self._pipe, self._fragment_id, action.request.target_resource,
variable_links)
self._pipe.hset('{}'.format(self._request_key), 'enrichment_id', enrichment_id)
self._dict_fields['enrichment_id'] = enrichment_id
def _load(self):
super(EnrichmentSink, self)._load()
@property
def enrichment_data(self):
if self._enrichment_data is None:
self._enrichment_data = EnrichmentData(self.enrichment_id)
return self._enrichment_data
@property
def backed(self):
return self.fragment_updated_on is not None and EnrichmentData(
self.enrichment_id).completed
class EnrichmentResponse(FragmentResponse):
def __init__(self, rid):
self.__sink = EnrichmentSink()
self.__sink.load(rid)
super(EnrichmentResponse, self).__init__(rid)
@property
def sink(self):
return self.__sink
def _build(self):
log.debug('Building a response to request number {}'.format(self._request_id))
graph = CGraph()
resp_node = BNode('#response')
graph.add((resp_node, RDF.type, CURATOR.EnrichmentResponse))
graph.add((resp_node, CURATOR.messageId, Literal(str(uuid.uuid4()), datatype=TYPES.UUID)))
graph.add((resp_node, CURATOR.responseTo, Literal(self.sink.message_id, datatype=TYPES.UUID)))
graph.add((resp_node, CURATOR.responseNumber, Literal("1", datatype=XSD.unsignedLong)))
graph.add((resp_node, CURATOR.targetResource, self.sink.enrichment_data.target))
graph.add((resp_node, CURATOR.submittedOn, Literal(datetime.now())))
curator_node = BNode('#curator')
graph.add((resp_node, CURATOR.submittedBy, curator_node))
graph.add((curator_node, RDF.type, FOAF.Agent))
graph.add((curator_node, CURATOR.agentId, CURATOR_UUID))
addition_node = BNode('#addition')
graph.add((resp_node, CURATOR.additionTarget, addition_node))
graph.add((addition_node, RDF.type, CURATOR.Variable))
for link, v in self.sink.enrichment_data.links:
trs = self.graph().triples((self.sink.enrichment_data.target, link, None))
for (_, _, o) in trs:
graph.add((addition_node, link, o))
yield graph.serialize(format='turtle'), {}
|
apache-2.0
| 7,141,783,065,942,430,000
| 37.725352
| 114
| 0.601655
| false
| 3.715541
| false
| false
| false
|
Ziqi-Li/bknqgis
|
bokeh/bokeh/plotting/helpers.py
|
1
|
24268
|
from __future__ import absolute_import
from collections import Iterable, OrderedDict, Sequence
import difflib
import itertools
import re
import textwrap
import warnings
import numpy as np
import sys
from six import string_types, reraise
from ..models import (
BoxSelectTool, BoxZoomTool, CategoricalAxis,
TapTool, CrosshairTool, DataRange1d, DatetimeAxis,
FactorRange, Grid, HelpTool, HoverTool, LassoSelectTool, Legend, LegendItem, LinearAxis,
LogAxis, PanTool, ZoomInTool, ZoomOutTool, PolySelectTool, ContinuousTicker,
SaveTool, Range, Range1d, UndoTool, RedoTool, ResetTool, ResizeTool, Tool,
WheelPanTool, WheelZoomTool, ColumnarDataSource, ColumnDataSource, GlyphRenderer,
LogScale, LinearScale, CategoricalScale)
from ..core.properties import ColorSpec, Datetime, value, field
from ..transform import stack
from ..util.dependencies import import_optional
from ..util.deprecation import deprecated
from ..util.string import nice_join
pd = import_optional('pandas')
DEFAULT_PALETTE = ["#f22c40", "#5ab738", "#407ee7", "#df5320", "#00ad9c", "#c33ff3"]
def _stack(stackers, spec0, spec1, **kw):
for name in (spec0, spec1):
if name in kw:
raise ValueError("Stack property '%s' cannot appear in keyword args" % name)
lengths = { len(x) for x in kw.values() if isinstance(x, (list, tuple)) }
# lengths will be empty if there are no kwargs supplied at all
if len(lengths) > 0:
if len(lengths) != 1:
raise ValueError("Keyword argument sequences for broadcasting must all be the same lengths. Got lengths: %r" % sorted(list(lengths)))
if lengths.pop() != len(stackers):
raise ValueError("Keyword argument sequences for broadcasting must be the same length as stackers")
s0 = []
s1 = []
_kw = []
for i, val in enumerate(stackers):
d = {}
s0 = list(s1)
s1.append(val)
d[spec0] = stack(*s0)
d[spec1] = stack(*s1)
for k, v in kw.items():
if isinstance(v, (list, tuple)):
d[k] = v[i]
else:
d[k] = v
_kw.append(d)
return _kw
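# Illustrative example (not part of the original module; values hypothetical):
# _stack(["y1", "y2"], "bottom", "top", color=["red", "blue"]) returns one
# kwarg dict per stacker. In each dict "bottom" is a stack() expression over
# the preceding fields and "top" adds the current field, while the color list
# is broadcast element-wise:
#
#   >>> kws = _stack(["y1", "y2"], "bottom", "top", color=["red", "blue"])
#   >>> kws[0]["color"], kws[1]["color"]
#   ('red', 'blue')
#   >>> # kws[0]["top"] stacks ("y1",); kws[1]["top"] stacks ("y1", "y2")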
def get_default_color(plot=None):
colors = [
"#1f77b4",
"#ff7f0e", "#ffbb78",
"#2ca02c", "#98df8a",
"#d62728", "#ff9896",
"#9467bd", "#c5b0d5",
"#8c564b", "#c49c94",
"#e377c2", "#f7b6d2",
"#7f7f7f",
"#bcbd22", "#dbdb8d",
"#17becf", "#9edae5"
]
if plot:
renderers = plot.renderers
renderers = [x for x in renderers if x.__view_model__ == "GlyphRenderer"]
num_renderers = len(renderers)
return colors[num_renderers]
else:
return colors[0]
def get_default_alpha(plot=None):
return 1.0
def _pop_renderer_args(kwargs):
result = dict(data_source=kwargs.pop('source', ColumnDataSource()))
for attr in ['name', 'x_range_name', 'y_range_name', 'level', 'view', 'visible', 'muted']:
val = kwargs.pop(attr, None)
if val:
result[attr] = val
return result
def _pop_colors_and_alpha(glyphclass, kwargs, prefix="", default_alpha=1.0):
"""
Given a kwargs dict, a prefix, and a default value, looks for different
color and alpha fields of the given prefix, and fills in the default value
if it doesn't exist.
"""
result = dict()
# TODO: The need to do this and the complexity of managing this kind of
# thing throughout the codebase really suggests that we need to have
# a real stylesheet class, where defaults and Types can declaratively
# substitute for this kind of imperative logic.
color = kwargs.pop(prefix + "color", get_default_color())
for argname in ("fill_color", "line_color"):
if argname not in glyphclass.properties():
continue
result[argname] = kwargs.pop(prefix + argname, color)
# NOTE: text fill color should really always default to black, hard coding
# this here now until the stylesheet solution exists
if "text_color" in glyphclass.properties():
result["text_color"] = kwargs.pop(prefix + "text_color", "black")
alpha = kwargs.pop(prefix + "alpha", default_alpha)
for argname in ("fill_alpha", "line_alpha", "text_alpha"):
if argname not in glyphclass.properties():
continue
result[argname] = kwargs.pop(prefix + argname, alpha)
return result
def _get_legend_item_label(kwargs):
legend = kwargs.pop('legend', None)
source = kwargs.get('source')
legend_item_label = None
if legend:
if isinstance(legend, string_types):
# Do the simple thing first
legend_item_label = value(legend)
# But if there's a source - try and do something smart
if source and hasattr(source, 'column_names'):
if legend in source.column_names:
legend_item_label = field(legend)
else:
legend_item_label = legend
return legend_item_label
_GLYPH_SOURCE_MSG = """
Supplying a user-defined data source AND iterable values to glyph methods is deprecated.
See https://github.com/bokeh/bokeh/issues/2056 for more information.
"""
def _process_sequence_literals(glyphclass, kwargs, source, is_user_source):
dataspecs = glyphclass.dataspecs_with_props()
for var, val in kwargs.items():
# ignore things that are not iterable
if not isinstance(val, Iterable):
continue
# pass dicts (i.e., values or fields) on as-is
if isinstance(val, dict):
continue
# let any non-dataspecs do their own validation (e.g., line_dash properties)
if var not in dataspecs:
continue
# strings sequences are handled by the dataspec as-is
if isinstance(val, string_types):
continue
# similarly colorspecs handle color tuple sequences as-is
if (isinstance(dataspecs[var].property, ColorSpec) and isinstance(val, tuple)):
continue
if isinstance(val, np.ndarray) and val.ndim != 1:
raise RuntimeError("Columns need to be 1D (%s is not)" % var)
if is_user_source:
deprecated(_GLYPH_SOURCE_MSG)
source.add(val, name=var)
kwargs[var] = var
def _make_glyph(glyphclass, kws, extra):
if extra is None:
return None
kws = kws.copy()
kws.update(extra)
return glyphclass(**kws)
def _update_legend(plot, legend_item_label, glyph_renderer):
# Get the plot's legend
legends = plot.select(type=Legend)
if not legends:
legend = Legend()
plot.add_layout(legend)
elif len(legends) == 1:
legend = legends[0]
else:
raise RuntimeError("Plot %s configured with more than one legend renderer" % plot)
# If there is an existing legend with a matching label, then put the
# renderer on that (if the source matches). Otherwise add a new one.
added = False
for item in legend.items:
if item.label == legend_item_label:
if item.label.get('value'):
item.renderers.append(glyph_renderer)
added = True
break
if item.label.get('field') and \
glyph_renderer.data_source is item.renderers[0].data_source:
item.renderers.append(glyph_renderer)
added = True
break
if not added:
new_item = LegendItem(label=legend_item_label, renderers=[glyph_renderer])
legend.items.append(new_item)
def _get_range(range_input):
if range_input is None:
return DataRange1d()
if pd and isinstance(range_input, pd.core.groupby.GroupBy):
return FactorRange(factors=sorted(list(range_input.groups.keys())))
if isinstance(range_input, Range):
return range_input
if isinstance(range_input, Sequence):
if all(isinstance(x, string_types) for x in range_input):
return FactorRange(factors=list(range_input))
if len(range_input) == 2:
try:
return Range1d(start=range_input[0], end=range_input[1])
except ValueError: # @mattpap suggests ValidationError instead
pass
raise ValueError("Unrecognized range input: '%s'" % str(range_input))
def _get_scale(range_input, axis_type):
if isinstance(range_input, (DataRange1d, Range1d)) and axis_type in ["linear", "datetime", "auto", None]:
return LinearScale()
elif isinstance(range_input, (DataRange1d, Range1d)) and axis_type == "log":
return LogScale()
elif isinstance(range_input, FactorRange):
return CategoricalScale()
else:
raise ValueError("Unable to determine proper scale for: '%s'" % str(range_input))
def _get_axis_class(axis_type, range_input):
if axis_type is None:
return None
elif axis_type == "linear":
return LinearAxis
elif axis_type == "log":
return LogAxis
elif axis_type == "datetime":
return DatetimeAxis
elif axis_type == "auto":
if isinstance(range_input, FactorRange):
return CategoricalAxis
elif isinstance(range_input, Range1d):
try:
# Easier way to validate type of Range1d parameters
Datetime.validate(Datetime(), range_input.start)
return DatetimeAxis
except ValueError:
pass
return LinearAxis
else:
raise ValueError("Unrecognized axis_type: '%r'" % axis_type)
def _get_num_minor_ticks(axis_class, num_minor_ticks):
if isinstance(num_minor_ticks, int):
if num_minor_ticks <= 1:
raise ValueError("num_minor_ticks must be > 1")
return num_minor_ticks
if num_minor_ticks is None:
return 0
if num_minor_ticks == 'auto':
if axis_class is LogAxis:
return 10
return 5
_known_tools = {
"pan": lambda: PanTool(dimensions='both'),
"xpan": lambda: PanTool(dimensions='width'),
"ypan": lambda: PanTool(dimensions='height'),
"wheel_zoom": lambda: WheelZoomTool(dimensions='both'),
"xwheel_zoom": lambda: WheelZoomTool(dimensions='width'),
"ywheel_zoom": lambda: WheelZoomTool(dimensions='height'),
"zoom_in": lambda: ZoomInTool(dimensions='both'),
"xzoom_in": lambda: ZoomInTool(dimensions='width'),
"yzoom_in": lambda: ZoomInTool(dimensions='height'),
"zoom_out": lambda: ZoomOutTool(dimensions='both'),
"xzoom_out": lambda: ZoomOutTool(dimensions='width'),
"yzoom_out": lambda: ZoomOutTool(dimensions='height'),
"xwheel_pan": lambda: WheelPanTool(dimension="width"),
"ywheel_pan": lambda: WheelPanTool(dimension="height"),
"resize": lambda: ResizeTool(),
"click": lambda: TapTool(behavior="inspect"),
"tap": lambda: TapTool(),
"crosshair": lambda: CrosshairTool(),
"box_select": lambda: BoxSelectTool(),
"xbox_select": lambda: BoxSelectTool(dimensions='width'),
"ybox_select": lambda: BoxSelectTool(dimensions='height'),
"poly_select": lambda: PolySelectTool(),
"lasso_select": lambda: LassoSelectTool(),
"box_zoom": lambda: BoxZoomTool(dimensions='both'),
"xbox_zoom": lambda: BoxZoomTool(dimensions='width'),
"ybox_zoom": lambda: BoxZoomTool(dimensions='height'),
"hover": lambda: HoverTool(tooltips=[
("index", "$index"),
("data (x, y)", "($x, $y)"),
("canvas (x, y)", "($sx, $sy)"),
]),
"save": lambda: SaveTool(),
"previewsave": "save",
"undo": lambda: UndoTool(),
"redo": lambda: RedoTool(),
"reset": lambda: ResetTool(),
"help": lambda: HelpTool(),
}
def _tool_from_string(name):
""" Takes a string and returns a corresponding `Tool` instance. """
known_tools = sorted(_known_tools.keys())
if name in known_tools:
tool_fn = _known_tools[name]
if isinstance(tool_fn, string_types):
tool_fn = _known_tools[tool_fn]
return tool_fn()
else:
matches, text = difflib.get_close_matches(name.lower(), known_tools), "similar"
if not matches:
matches, text = known_tools, "possible"
raise ValueError("unexpected tool name '%s', %s tools are %s" % (name, text, nice_join(matches)))
def _process_axis_and_grid(plot, axis_type, axis_location, minor_ticks, axis_label, rng, dim):
axiscls = _get_axis_class(axis_type, rng)
if axiscls:
if axiscls is LogAxis:
if dim == 0:
plot.x_scale = LogScale()
elif dim == 1:
plot.y_scale = LogScale()
else:
raise ValueError("received invalid dimension value: %r" % dim)
# this is so we can get a ticker off the axis, even if we discard it
axis = axiscls(plot=plot if axis_location else None)
if isinstance(axis.ticker, ContinuousTicker):
axis.ticker.num_minor_ticks = _get_num_minor_ticks(axiscls, minor_ticks)
if axis_label:
axis.axis_label = axis_label
Grid(plot=plot, dimension=dim, ticker=axis.ticker)
if axis_location is not None:
getattr(plot, axis_location).append(axis)
def _process_tools_arg(plot, tools):
""" Adds tools to the plot object
Args:
plot (Plot): instance of a plot object
tools (seq[Tool or str]|str): list of tool types or string listing the
tool names. Those are converted using the _tool_from_string
function, e.g. `wheel_zoom,box_zoom,reset`.
Returns:
list of Tools objects added to plot, map of supplied string names to tools
"""
tool_objs = []
tool_map = {}
temp_tool_str = ""
repeated_tools = []
if isinstance(tools, (list, tuple)):
for tool in tools:
if isinstance(tool, Tool):
tool_objs.append(tool)
elif isinstance(tool, string_types):
temp_tool_str += tool + ','
else:
raise ValueError("tool should be a string or an instance of Tool class")
tools = temp_tool_str
for tool in re.split(r"\s*,\s*", tools.strip()):
# re.split will return empty strings; ignore them.
if tool == "":
continue
tool_obj = _tool_from_string(tool)
tool_objs.append(tool_obj)
tool_map[tool] = tool_obj
for typename, group in itertools.groupby(
sorted([tool.__class__.__name__ for tool in tool_objs])):
if len(list(group)) > 1:
repeated_tools.append(typename)
if repeated_tools:
warnings.warn("%s are being repeated" % ",".join(repeated_tools))
return tool_objs, tool_map
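# Illustrative example (hypothetical plot object): given a Figure `fig`,
#
#   tool_objs, tool_map = _process_tools_arg(fig, "pan,wheel_zoom,reset")
#
# returns three Tool instances plus {"pan": <PanTool>, ...}; a mixed list
# such as [PanTool(), "reset"] is also accepted, and repeated tool types
# trigger the "... are being repeated" warning.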
def _process_active_tools(toolbar, tool_map, active_drag, active_inspect, active_scroll, active_tap):
""" Adds tools to the plot object
Args:
toolbar (Toolbar): instance of a Toolbar object
tool_map (dict[str, Tool]): tool_map from _process_tools_arg
active_drag (str or Tool): the tool to set active for drag
active_inspect (str or Tool): the tool to set active for inspect
active_scroll (str or Tool): the tool to set active for scroll
active_tap (str or Tool): the tool to set active for tap
Returns:
None
Note:
This function sets properties on Toolbar
"""
if active_drag in ['auto', None] or isinstance(active_drag, Tool):
toolbar.active_drag = active_drag
elif active_drag in tool_map:
toolbar.active_drag = tool_map[active_drag]
else:
raise ValueError("Got unknown %r for 'active_drag', which was not a string supplied in 'tools' argument" % active_drag)
if active_inspect in ['auto', None] or isinstance(active_inspect, Tool) or all([isinstance(t, Tool) for t in active_inspect]):
toolbar.active_inspect = active_inspect
elif active_inspect in tool_map:
toolbar.active_inspect = tool_map[active_inspect]
else:
raise ValueError("Got unknown %r for 'active_inspect', which was not a string supplied in 'tools' argument" % active_scroll)
if active_scroll in ['auto', None] or isinstance(active_scroll, Tool):
toolbar.active_scroll = active_scroll
elif active_scroll in tool_map:
toolbar.active_scroll = tool_map[active_scroll]
else:
raise ValueError("Got unknown %r for 'active_scroll', which was not a string supplied in 'tools' argument" % active_scroll)
if active_tap in ['auto', None] or isinstance(active_tap, Tool):
toolbar.active_tap = active_tap
elif active_tap in tool_map:
toolbar.active_tap = tool_map[active_tap]
else:
raise ValueError("Got unknown %r for 'active_tap', which was not a string supplied in 'tools' argument" % active_tap)
def _get_argspecs(glyphclass):
argspecs = OrderedDict()
for arg in glyphclass._args:
spec = {}
descriptor = getattr(glyphclass, arg)
# running python with -OO will discard docstrings -> __doc__ is None
if descriptor.__doc__:
spec['desc'] = "\n ".join(textwrap.dedent(descriptor.__doc__).split("\n"))
else:
spec['desc'] = ""
spec['default'] = descriptor.class_default(glyphclass)
spec['type'] = descriptor.property._sphinx_type()
argspecs[arg] = spec
return argspecs
# This template generates the following:
#
# def foo(self, x, y=10, kwargs):
# kwargs['x'] = x
# kwargs['y'] = y
# return func(self, **kwargs)
_sigfunc_template = """
def %s(self, %s, **kwargs):
%s
return func(self, **kwargs)
"""
def _get_sigfunc(func_name, func, argspecs):
# This code is to wrap the generic func(*args, **kw) glyph method so that
# a much better signature is available to users. E.g., for ``square`` we have:
#
# Signature: p.square(x, y, size=4, angle=0.0, **kwargs)
#
# which provides descriptive names for positional args, as well as any defaults
func_args_with_defaults = []
for arg, spec in argspecs.items():
if spec['default'] is None:
func_args_with_defaults.append(arg)
else:
func_args_with_defaults.append("%s=%r" % (arg, spec['default']))
args_text = ", ".join(func_args_with_defaults)
kwargs_assign_text = "\n".join(" kwargs[%r] = %s" % (x, x) for x in argspecs)
func_text = _sigfunc_template % (func_name, args_text, kwargs_assign_text)
func_code = compile(func_text, "fakesource", "exec")
func_globals = {}
eval(func_code, {"func": func}, func_globals)
return func_globals[func_name]
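# Illustrative sketch (hypothetical argspecs, not part of the original
# module): for argspecs {"x": {"default": None}, "y": {"default": None},
# "size": {"default": 4}} and func_name "circle", the generated source is
# roughly
#
#   def circle(self, x, y, size=4, **kwargs):
#       kwargs['x'] = x
#       kwargs['y'] = y
#       kwargs['size'] = size
#       return func(self, **kwargs)
#
# compiled with `func` bound to the generic glyph method via eval() above.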
_arg_template = """ %s (%s) : %s
(default: %r)
"""
_doc_template = """ Configure and add %s glyphs to this Figure.
Args:
%s
Keyword Args:
%s
Other Parameters:
alpha (float) : an alias to set all alpha keyword args at once
color (Color) : an alias to set all color keyword args at once
source (ColumnDataSource) : a user supplied data source
legend (str) : a legend tag for this glyph
x_range_name (str) : name an extra range to use for mapping x-coordinates
y_range_name (str) : name an extra range to use for mapping y-coordinates
level (Enum) : control the render level order for this glyph
It is also possible to set the color and alpha parameters of a "nonselection"
glyph. To do so, prefix any visual parameter with ``'nonselection_'``.
For example, pass ``nonselection_alpha`` or ``nonselection_fill_alpha``.
Returns:
GlyphRenderer
"""
def _add_sigfunc_info(func, argspecs, glyphclass, extra_docs):
func.__name__ = glyphclass.__name__.lower()
omissions = {'js_event_callbacks', 'js_property_callbacks', 'subscribed_events'}
kwlines = []
kws = glyphclass.properties() - set(argspecs)
for kw in kws:
# these are not really useful, and should also really be private, just skip them
if kw in omissions: continue
descriptor = getattr(glyphclass, kw)
typ = descriptor.property._sphinx_type()
if descriptor.__doc__:
desc = "\n ".join(textwrap.dedent(descriptor.__doc__).split("\n"))
else:
desc = ""
kwlines.append(_arg_template % (kw, typ, desc, descriptor.class_default(glyphclass)))
extra_kws = getattr(glyphclass, '_extra_kws', {})
for kw, (typ, desc) in extra_kws.items():
kwlines.append(" %s (%s) : %s" % (kw, typ, desc))
kwlines.sort()
arglines = []
for arg, spec in argspecs.items():
arglines.append(_arg_template % (arg, spec['type'], spec['desc'], spec['default']))
func.__doc__ = _doc_template % (func.__name__, "\n".join(arglines), "\n".join(kwlines))
if extra_docs:
func.__doc__ += extra_docs
def _glyph_function(glyphclass, extra_docs=None):
def func(self, **kwargs):
# Process legend kwargs and remove legend before we get going
legend_item_label = _get_legend_item_label(kwargs)
# Need to check if user source is present before _pop_renderer_args
is_user_source = kwargs.get('source', None) is not None
renderer_kws = _pop_renderer_args(kwargs)
source = renderer_kws['data_source']
if not isinstance(source, ColumnarDataSource):
try:
# try converting the source to a ColumnDataSource
source = ColumnDataSource(source)
except ValueError as err:
msg = "Failed to auto-convert {curr_type} to ColumnDataSource.\n Original error: {err}".format(
curr_type=str(type(source)),
err=str(err)
)
reraise(ValueError, ValueError(msg), sys.exc_info()[2])
# update renderer_kws so that others can use the new source
renderer_kws['data_source'] = source
# handle the main glyph, need to process literals
glyph_ca = _pop_colors_and_alpha(glyphclass, kwargs)
_process_sequence_literals(glyphclass, kwargs, source, is_user_source)
_process_sequence_literals(glyphclass, glyph_ca, source, is_user_source)
# handle the nonselection glyph, we always set one
nsglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='nonselection_', default_alpha=0.1)
# handle the selection glyph, if any properties were given
if any(x.startswith('selection_') for x in kwargs):
sglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='selection_')
else:
sglyph_ca = None
# handle the hover glyph, if any properties were given
if any(x.startswith('hover_') for x in kwargs):
hglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='hover_')
else:
hglyph_ca = None
# handle the mute glyph, if any properties were given
if any(x.startswith('muted_') for x in kwargs):
mglyph_ca = _pop_colors_and_alpha(glyphclass, kwargs, prefix='muted_')
else:
mglyph_ca = None
glyph = _make_glyph(glyphclass, kwargs, glyph_ca)
nsglyph = _make_glyph(glyphclass, kwargs, nsglyph_ca)
sglyph = _make_glyph(glyphclass, kwargs, sglyph_ca)
hglyph = _make_glyph(glyphclass, kwargs, hglyph_ca)
mglyph = _make_glyph(glyphclass, kwargs, mglyph_ca)
glyph_renderer = GlyphRenderer(glyph=glyph,
nonselection_glyph=nsglyph,
selection_glyph=sglyph,
hover_glyph=hglyph,
muted_glyph=mglyph,
**renderer_kws)
if legend_item_label:
_update_legend(self, legend_item_label, glyph_renderer)
for tool in self.select(type=BoxSelectTool):
tool.renderers.append(glyph_renderer)
self.renderers.append(glyph_renderer)
return glyph_renderer
argspecs = _get_argspecs(glyphclass)
sigfunc = _get_sigfunc(glyphclass.__name__.lower(), func, argspecs)
sigfunc.glyph_method = True
_add_sigfunc_info(sigfunc, argspecs, glyphclass, extra_docs)
return sigfunc
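# Usage sketch (an assumption about the call site, which lives outside this
# module): Figure builds its glyph methods by applying _glyph_function to
# each glyph model, roughly
#
#   circle = _glyph_function(markers.Circle)
#
# so that fig.circle(x, y, size=4) creates the main glyph plus its
# nonselection/selection/hover/muted variants and appends a GlyphRenderer
# to fig.renderers.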
|
gpl-2.0
| -7,109,149,979,966,638,000
| 35.438438
| 145
| 0.618963
| false
| 3.794246
| false
| false
| false
|
asajeffrey/servo
|
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/executors/executorwebdriver.py
|
4
|
24283
|
from __future__ import absolute_import
import json
import os
import socket
import threading
import time
import traceback
import uuid
from six.moves.urllib.parse import urljoin
from .base import (CallbackHandler,
CrashtestExecutor,
RefTestExecutor,
RefTestImplementation,
TestharnessExecutor,
TimedRunner,
strip_server)
from .protocol import (BaseProtocolPart,
TestharnessProtocolPart,
Protocol,
SelectorProtocolPart,
ClickProtocolPart,
SendKeysProtocolPart,
ActionSequenceProtocolPart,
TestDriverProtocolPart,
GenerateTestReportProtocolPart,
SetPermissionProtocolPart,
VirtualAuthenticatorProtocolPart)
from ..testrunner import Stop
import webdriver as client
from webdriver import error
here = os.path.dirname(__file__)
class WebDriverCallbackHandler(CallbackHandler):
unimplemented_exc = (NotImplementedError, client.UnknownCommandException)
class WebDriverBaseProtocolPart(BaseProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def execute_script(self, script, asynchronous=False):
method = self.webdriver.execute_async_script if asynchronous else self.webdriver.execute_script
return method(script)
def set_timeout(self, timeout):
try:
self.webdriver.timeouts.script = timeout
except client.WebDriverException:
# workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=2057
body = {"type": "script", "ms": timeout * 1000}
self.webdriver.send_session_command("POST", "timeouts", body)
@property
def current_window(self):
return self.webdriver.window_handle
def set_window(self, handle):
self.webdriver.window_handle = handle
def window_handles(self):
return self.webdriver.handles
def load(self, url):
self.webdriver.url = url
def wait(self):
while True:
try:
self.webdriver.execute_async_script("")
except (client.TimeoutException,
client.ScriptTimeoutException,
client.JavascriptErrorException):
# A JavascriptErrorException will happen when we navigate;
# by ignoring it, it's possible to reload the test whilst the
# harness remains paused
pass
except (socket.timeout,
client.NoSuchWindowException,
client.UnknownErrorException,
IOError):
break
except Exception:
self.logger.error(traceback.format_exc())
break
class WebDriverTestharnessProtocolPart(TestharnessProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
self.runner_handle = None
with open(os.path.join(here, "runner.js")) as f:
self.runner_script = f.read()
with open(os.path.join(here, "window-loaded.js")) as f:
self.window_loaded_script = f.read()
def load_runner(self, url_protocol):
if self.runner_handle:
self.webdriver.window_handle = self.runner_handle
url = urljoin(self.parent.executor.server_url(url_protocol),
"/testharness_runner.html")
self.logger.debug("Loading %s" % url)
self.webdriver.url = url
self.runner_handle = self.webdriver.window_handle
format_map = {"title": threading.current_thread().name.replace("'", '"')}
self.parent.base.execute_script(self.runner_script % format_map)
def close_old_windows(self):
self.webdriver.actions.release()
handles = [item for item in self.webdriver.handles if item != self.runner_handle]
for handle in handles:
try:
self.webdriver.window_handle = handle
self.webdriver.window.close()
except client.NoSuchWindowException:
pass
self.webdriver.window_handle = self.runner_handle
return self.runner_handle
def get_test_window(self, window_id, parent, timeout=5):
"""Find the test window amongst all the open windows.
This is assumed to be either the named window or the one after the parent in the list of
window handles
:param window_id: The DOM name of the Window
:param parent: The handle of the runner window
:param timeout: The time in seconds to wait for the window to appear. This is because in
some implementations there's a race between calling window.open and the
window being added to the list of WebDriver accessible windows."""
test_window = None
end_time = time.time() + timeout
while time.time() < end_time:
try:
# Try using the JSON serialization of the WindowProxy object,
# it's in Level 1 but nothing supports it yet
win_s = self.webdriver.execute_script("return window['%s'];" % window_id)
win_obj = json.loads(win_s)
test_window = win_obj["window-fcc6-11e5-b4f8-330a88ab9d7f"]
except Exception:
pass
if test_window is None:
after = self.webdriver.handles
if len(after) == 2:
test_window = next(iter(set(after) - {parent}))
elif after[0] == parent and len(after) > 2:
# Hope the first one here is the test window
test_window = after[1]
if test_window is not None:
assert test_window != parent
return test_window
time.sleep(0.1)
raise Exception("unable to find test window")
def test_window_loaded(self):
"""Wait until the page in the new window has been loaded.
Ignore JavaScript exceptions that are thrown when the
document has been unloaded due to a process change.
"""
while True:
try:
self.webdriver.execute_script(self.window_loaded_script, asynchronous=True)
break
except error.JavascriptErrorException:
pass
class WebDriverSelectorProtocolPart(SelectorProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def elements_by_selector(self, selector):
return self.webdriver.find.css(selector)
class WebDriverClickProtocolPart(ClickProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def element(self, element):
self.logger.info("click " + repr(element))
return element.click()
class WebDriverSendKeysProtocolPart(SendKeysProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_keys(self, element, keys):
try:
return element.send_keys(keys)
except client.UnknownErrorException as e:
# workaround https://bugs.chromium.org/p/chromedriver/issues/detail?id=1999
if (e.http_status != 500 or
e.status_code != "unknown error"):
raise
return element.send_element_command("POST", "value", {"value": list(keys)})
class WebDriverActionSequenceProtocolPart(ActionSequenceProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_actions(self, actions):
self.webdriver.actions.perform(actions['actions'])
class WebDriverTestDriverProtocolPart(TestDriverProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def send_message(self, cmd_id, message_type, status, message=None):
obj = {
"cmd_id": cmd_id,
"type": "testdriver-%s" % str(message_type),
"status": str(status)
}
if message:
obj["message"] = str(message)
self.webdriver.execute_script("window.postMessage(%s, '*')" % json.dumps(obj))
def _switch_to_frame(self, frame_number):
self.webdriver.switch_frame(frame_number)
def _switch_to_parent_frame(self):
self.webdriver.switch_frame("parent")
class WebDriverGenerateTestReportProtocolPart(GenerateTestReportProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def generate_test_report(self, message):
json_message = {"message": message}
self.webdriver.send_session_command("POST", "reporting/generate_test_report", json_message)
class WebDriverSetPermissionProtocolPart(SetPermissionProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def set_permission(self, descriptor, state, one_realm):
permission_params_dict = {
"descriptor": descriptor,
"state": state,
}
if one_realm is not None:
permission_params_dict["oneRealm"] = one_realm
self.webdriver.send_session_command("POST", "permissions", permission_params_dict)
class WebDriverVirtualAuthenticatorProtocolPart(VirtualAuthenticatorProtocolPart):
def setup(self):
self.webdriver = self.parent.webdriver
def add_virtual_authenticator(self, config):
return self.webdriver.send_session_command("POST", "webauthn/authenticator", config)
def remove_virtual_authenticator(self, authenticator_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s" % authenticator_id)
def add_credential(self, authenticator_id, credential):
return self.webdriver.send_session_command("POST", "webauthn/authenticator/%s/credential" % authenticator_id, credential)
def get_credentials(self, authenticator_id):
return self.webdriver.send_session_command("GET", "webauthn/authenticator/%s/credentials" % authenticator_id)
def remove_credential(self, authenticator_id, credential_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s/credentials/%s" % (authenticator_id, credential_id))
def remove_all_credentials(self, authenticator_id):
return self.webdriver.send_session_command("DELETE", "webauthn/authenticator/%s/credentials" % authenticator_id)
def set_user_verified(self, authenticator_id, uv):
return self.webdriver.send_session_command("POST", "webauthn/authenticator/%s/uv" % authenticator_id, uv)
class WebDriverProtocol(Protocol):
implements = [WebDriverBaseProtocolPart,
WebDriverTestharnessProtocolPart,
WebDriverSelectorProtocolPart,
WebDriverClickProtocolPart,
WebDriverSendKeysProtocolPart,
WebDriverActionSequenceProtocolPart,
WebDriverTestDriverProtocolPart,
WebDriverGenerateTestReportProtocolPart,
WebDriverSetPermissionProtocolPart,
WebDriverVirtualAuthenticatorProtocolPart]
def __init__(self, executor, browser, capabilities, **kwargs):
super(WebDriverProtocol, self).__init__(executor, browser)
self.capabilities = capabilities
self.url = browser.webdriver_url
self.webdriver = None
def connect(self):
"""Connect to browser via WebDriver."""
self.logger.debug("Connecting to WebDriver on URL: %s" % self.url)
host, port = self.url.split(":")[1].strip("/"), self.url.split(':')[-1].strip("/")
capabilities = {"alwaysMatch": self.capabilities}
self.webdriver = client.Session(host, port, capabilities=capabilities)
self.webdriver.start()
def teardown(self):
self.logger.debug("Hanging up on WebDriver session")
try:
self.webdriver.end()
except Exception as e:
message = str(getattr(e, "message", ""))
if message:
message += "\n"
message += traceback.format_exc()
self.logger.debug(message)
self.webdriver = None
def is_alive(self):
try:
# Get a simple property over the connection, with a 2 second
# timeout. That should be more than enough to check whether the
# WebDriver session is still alive, and it lets the check complete
# within the 5 seconds of extra_timeout the testrunner allows at
# most before its external timeout triggers.
self.webdriver.send_session_command("GET", "window", timeout=2)
except (socket.timeout, client.UnknownErrorException, client.InvalidSessionIdException):
return False
return True
def after_connect(self):
self.testharness.load_runner(self.executor.last_environment["protocol"])
class WebDriverRun(TimedRunner):
def set_timeout(self):
try:
self.protocol.base.set_timeout(self.timeout + self.extra_timeout)
except client.UnknownErrorException:
self.logger.error("Lost WebDriver connection")
return Stop
def run_func(self):
try:
self.result = True, self.func(self.protocol, self.url, self.timeout)
except (client.TimeoutException, client.ScriptTimeoutException):
self.result = False, ("EXTERNAL-TIMEOUT", None)
except (socket.timeout, client.UnknownErrorException):
self.result = False, ("CRASH", None)
except Exception as e:
if (isinstance(e, client.WebDriverException) and
e.http_status == 408 and
e.status_code == "asynchronous script timeout"):
# workaround for https://bugs.chromium.org/p/chromedriver/issues/detail?id=2001
self.result = False, ("EXTERNAL-TIMEOUT", None)
else:
message = str(getattr(e, "message", ""))
if message:
message += "\n"
message += traceback.format_exc()
self.result = False, ("INTERNAL-ERROR", message)
finally:
self.result_flag.set()
class WebDriverTestharnessExecutor(TestharnessExecutor):
supports_testdriver = True
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
close_after_done=True, capabilities=None, debug_info=None,
supports_eager_pageload=True, cleanup_after_test=True,
**kwargs):
"""WebDriver-based executor for testharness.js tests"""
TestharnessExecutor.__init__(self, logger, browser, server_config,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self, browser, capabilities)
with open(os.path.join(here, "testharness_webdriver_resume.js")) as f:
self.script_resume = f.read()
with open(os.path.join(here, "window-loaded.js")) as f:
self.window_loaded_script = f.read()
self.close_after_done = close_after_done
self.window_id = str(uuid.uuid4())
self.supports_eager_pageload = supports_eager_pageload
self.cleanup_after_test = cleanup_after_test
def is_alive(self):
return self.protocol.is_alive()
def on_environment_change(self, new_environment):
if new_environment["protocol"] != self.last_environment["protocol"]:
self.protocol.testharness.load_runner(new_environment["protocol"])
def do_test(self, test):
url = self.test_url(test)
success, data = WebDriverRun(self.logger,
self.do_testharness,
self.protocol,
url,
test.timeout * self.timeout_multiplier,
self.extra_timeout).run()
if success:
return self.convert_result(test, data)
return (test.result_cls(*data), [])
def do_testharness(self, protocol, url, timeout):
format_map = {"url": strip_server(url)}
# The previous test may not have closed its old windows (if something
# went wrong or if cleanup_after_test was False), so clean up here.
parent_window = protocol.testharness.close_old_windows()
# Now start the test harness
protocol.base.execute_script("window.open('about:blank', '%s', 'noopener')" % self.window_id)
test_window = protocol.testharness.get_test_window(self.window_id,
parent_window,
timeout=5*self.timeout_multiplier)
self.protocol.base.set_window(test_window)
# Wait until about:blank has been loaded
protocol.base.execute_script(self.window_loaded_script, asynchronous=True)
handler = WebDriverCallbackHandler(self.logger, protocol, test_window)
protocol.webdriver.url = url
if not self.supports_eager_pageload:
self.wait_for_load(protocol)
while True:
result = protocol.base.execute_script(
self.script_resume % format_map, asynchronous=True)
# As of 2019-03-29, WebDriver does not define expected behavior for
# cases where the browser crashes during script execution:
#
# https://github.com/w3c/webdriver/issues/1308
if not isinstance(result, list) or len(result) != 2:
try:
is_alive = self.is_alive()
except client.WebDriverException:
is_alive = False
if not is_alive:
raise Exception("Browser crashed during script execution.")
done, rv = handler(result)
if done:
break
# Attempt to cleanup any leftover windows, if allowed. This is
# preferable as it will blame the correct test if something goes wrong
# closing windows, but if the user wants to see the test results we
# have to leave the window(s) open.
if self.cleanup_after_test:
protocol.testharness.close_old_windows()
return rv
def wait_for_load(self, protocol):
# pageLoadStrategy=eager doesn't work in Chrome, so emulate it in a user script
loaded = False
seen_error = False
while not loaded:
try:
loaded = protocol.base.execute_script("""
var callback = arguments[arguments.length - 1];
if (location.href === "about:blank") {
callback(false);
} else if (document.readyState !== "loading") {
callback(true);
} else {
document.addEventListener("readystatechange", () => {if (document.readyState !== "loading") {callback(true)}});
}""", asynchronous=True)
except client.JavascriptErrorException:
# We can get an error here if the script runs in the initial about:blank
# document before it has navigated, with the driver returning an error
# indicating that the document was unloaded
if seen_error:
raise
seen_error = True
class WebDriverRefTestExecutor(RefTestExecutor):
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
screenshot_cache=None, close_after_done=True,
debug_info=None, capabilities=None, **kwargs):
"""WebDriver-based executor for reftests"""
RefTestExecutor.__init__(self,
logger,
browser,
server_config,
screenshot_cache=screenshot_cache,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self,
browser,
capabilities=capabilities)
self.implementation = RefTestImplementation(self)
self.close_after_done = close_after_done
self.has_window = False
with open(os.path.join(here, "test-wait.js")) as f:
self.wait_script = f.read() % {"classname": "reftest-wait"}
def reset(self):
self.implementation.reset()
def is_alive(self):
return self.protocol.is_alive()
def do_test(self, test):
width_offset, height_offset = self.protocol.webdriver.execute_script(
"""return [window.outerWidth - window.innerWidth,
window.outerHeight - window.innerHeight];"""
)
try:
self.protocol.webdriver.window.position = (0, 0)
except client.InvalidArgumentException:
# Safari 12 throws with 0 or 1, treating them as bools; fixed in STP
self.protocol.webdriver.window.position = (2, 2)
self.protocol.webdriver.window.size = (800 + width_offset, 600 + height_offset)
result = self.implementation.run_test(test)
return self.convert_result(test, result)
def screenshot(self, test, viewport_size, dpi, page_ranges):
# https://github.com/web-platform-tests/wpt/issues/7135
assert viewport_size is None
assert dpi is None
return WebDriverRun(self.logger,
self._screenshot,
self.protocol,
self.test_url(test),
test.timeout,
self.extra_timeout).run()
def _screenshot(self, protocol, url, timeout):
self.protocol.base.load(url)
self.protocol.base.execute_script(self.wait_script, True)
screenshot = self.protocol.webdriver.screenshot()
# strip off the "data:image/png;base64," prefix of the data URL
if screenshot.startswith("data:image/png;base64,"):
screenshot = screenshot.split(",", 1)[1]
return screenshot
class WebDriverCrashtestExecutor(CrashtestExecutor):
protocol_cls = WebDriverProtocol
def __init__(self, logger, browser, server_config, timeout_multiplier=1,
screenshot_cache=None, close_after_done=True,
debug_info=None, capabilities=None, **kwargs):
"""WebDriver-based executor for reftests"""
CrashtestExecutor.__init__(self,
logger,
browser,
server_config,
screenshot_cache=screenshot_cache,
timeout_multiplier=timeout_multiplier,
debug_info=debug_info)
self.protocol = self.protocol_cls(self,
browser,
capabilities=capabilities)
with open(os.path.join(here, "test-wait.js")) as f:
self.wait_script = f.read() % {"classname": "test-wait"}
def do_test(self, test):
timeout = (test.timeout * self.timeout_multiplier if self.debug_info is None
else None)
success, data = WebDriverRun(self.logger,
self.do_crashtest,
self.protocol,
self.test_url(test),
timeout,
self.extra_timeout).run()
if success:
return self.convert_result(test, data)
return (test.result_cls(*data), [])
def do_crashtest(self, protocol, url, timeout):
protocol.base.load(url)
protocol.base.execute_script(self.wait_script, asynchronous=True)
return {"status": "PASS",
"message": None}
|
mpl-2.0
| 4,533,225,023,122,860,000
| 38.873563
| 140
| 0.59692
| false
| 4.585159
| true
| false
| false
|
google/makani
|
config/m600/control/hover_controllers.py
|
1
|
15735
|
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Automatically generated hover controllers.
This file was generated by:
analysis/control/generate_hover_controllers.m
"""
from makani.control import control_types as m
def GetHoverControllers(wing_serial):
"""Returns the hover controller gains."""
if wing_serial == m.kWingSerial01:
low_altitude = {
'kp': 1.79e+03,
'ki': 127.,
'kd': 6.33e+03
}
high_altitude = {
'kp': 687.,
'ki': 40.6,
'kd': 2.91e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.52e+04,
'ki': 1.07e+03,
'kd': 2.06e+04
}
pitch = {
'kp': 5.23e+04,
'ki': 3.31e+03,
'kd': 3.09e+04
}
yaw = {
'kp': 3.42e+05,
'ki': 2.70e+04,
'kd': 1.73e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0581
}
tension_hard = {
'kp': 0.00,
'ki': 1.08e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.08e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.06e+04,
'ki': 536.,
'kd': 0.00
}
int_yaw = {
'kp': 4.65e+04,
'ki': 9.25e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial04Hover:
low_altitude = {
'kp': 1.90e+03,
'ki': 134.,
'kd': 6.68e+03
}
high_altitude = {
'kp': 729.,
'ki': 43.1,
'kd': 3.08e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.74e+04,
'ki': 1.16e+03,
'kd': 2.23e+04
}
pitch = {
'kp': 5.71e+04,
'ki': 3.62e+03,
'kd': 3.38e+04
}
yaw = {
'kp': 3.33e+05,
'ki': 2.63e+04,
'kd': 1.69e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0574
}
tension_hard = {
'kp': 0.00,
'ki': 1.04e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.04e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.46e+04,
'ki': 588.,
'kd': 0.00
}
int_yaw = {
'kp': 4.52e+04,
'ki': 9.01e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial04Crosswind:
low_altitude = {
'kp': 1.81e+03,
'ki': 128.,
'kd': 6.39e+03
}
high_altitude = {
'kp': 694.,
'ki': 41.0,
'kd': 2.94e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.84e+04,
'ki': 1.21e+03,
'kd': 2.32e+04
}
pitch = {
'kp': 5.91e+04,
'ki': 3.75e+03,
'kd': 3.50e+04
}
yaw = {
'kp': 3.45e+05,
'ki': 2.72e+04,
'kd': 1.75e+05
}
tangential_short_tether = {
'kp': 0.00937,
'ki': 0.000135,
'kd': 0.0710
}
tangential_low_altitude_long_tether = {
'kp': 0.0382,
'ki': 0.00138,
'kd': 0.157
}
tangential_high_altitude_long_tether = {
'kp': 0.00582,
'ki': 3.72e-05,
'kd': 0.0270
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0498
}
tension_hard = {
'kp': 0.00,
'ki': 1.08e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.08e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.59e+04,
'ki': 606.,
'kd': 0.00
}
int_yaw = {
'kp': 4.68e+04,
'ki': 9.32e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial05Hover:
low_altitude = {
'kp': 1.86e+03,
'ki': 132.,
'kd': 6.55e+03
}
high_altitude = {
'kp': 713.,
'ki': 42.2,
'kd': 3.02e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.69e+04,
'ki': 1.14e+03,
'kd': 2.19e+04
}
pitch = {
'kp': 5.60e+04,
'ki': 3.55e+03,
'kd': 3.31e+04
}
yaw = {
'kp': 3.27e+05,
'ki': 2.58e+04,
'kd': 1.65e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0577
}
tension_hard = {
'kp': 0.00,
'ki': 1.06e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.06e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.38e+04,
'ki': 577.,
'kd': 0.00
}
int_yaw = {
'kp': 4.44e+04,
'ki': 8.83e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial05Crosswind:
low_altitude = {
'kp': 1.77e+03,
'ki': 126.,
'kd': 6.24e+03
}
high_altitude = {
'kp': 677.,
'ki': 40.0,
'kd': 2.86e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.78e+04,
'ki': 1.18e+03,
'kd': 2.27e+04
}
pitch = {
'kp': 5.78e+04,
'ki': 3.67e+03,
'kd': 3.42e+04
}
yaw = {
'kp': 3.37e+05,
'ki': 2.66e+04,
'kd': 1.71e+05
}
tangential_short_tether = {
'kp': 0.00933,
'ki': 0.000135,
'kd': 0.0707
}
tangential_low_altitude_long_tether = {
'kp': 0.0381,
'ki': 0.00137,
'kd': 0.157
}
tangential_high_altitude_long_tether = {
'kp': 0.00579,
'ki': 3.71e-05,
'kd': 0.0269
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0500
}
tension_hard = {
'kp': 0.00,
'ki': 1.10e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.10e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.49e+04,
'ki': 593.,
'kd': 0.00
}
int_yaw = {
'kp': 4.58e+04,
'ki': 9.11e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial06Hover:
low_altitude = {
'kp': 1.90e+03,
'ki': 135.,
'kd': 6.70e+03
}
high_altitude = {
'kp': 730.,
'ki': 43.2,
'kd': 3.09e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.74e+04,
'ki': 1.16e+03,
'kd': 2.24e+04
}
pitch = {
'kp': 5.71e+04,
'ki': 3.62e+03,
'kd': 3.38e+04
}
yaw = {
'kp': 3.34e+05,
'ki': 2.64e+04,
'kd': 1.69e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0574
}
tension_hard = {
'kp': 0.00,
'ki': 1.04e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.04e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.47e+04,
'ki': 590.,
'kd': 0.00
}
int_yaw = {
'kp': 4.53e+04,
'ki': 9.02e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial06Crosswind:
low_altitude = {
'kp': 1.81e+03,
'ki': 128.,
'kd': 6.39e+03
}
high_altitude = {
'kp': 694.,
'ki': 41.0,
'kd': 2.94e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.84e+04,
'ki': 1.21e+03,
'kd': 2.32e+04
}
pitch = {
'kp': 5.91e+04,
'ki': 3.75e+03,
'kd': 3.50e+04
}
yaw = {
'kp': 3.45e+05,
'ki': 2.72e+04,
'kd': 1.75e+05
}
tangential_short_tether = {
'kp': 0.00937,
'ki': 0.000135,
'kd': 0.0709
}
tangential_low_altitude_long_tether = {
'kp': 0.0382,
'ki': 0.00138,
'kd': 0.157
}
tangential_high_altitude_long_tether = {
'kp': 0.00582,
'ki': 3.72e-05,
'kd': 0.0270
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0498
}
tension_hard = {
'kp': 0.00,
'ki': 1.08e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.08e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.59e+04,
'ki': 606.,
'kd': 0.00
}
int_yaw = {
'kp': 4.68e+04,
'ki': 9.32e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial07Hover:
low_altitude = {
'kp': 1.90e+03,
'ki': 134.,
'kd': 6.68e+03
}
high_altitude = {
'kp': 729.,
'ki': 43.1,
'kd': 3.08e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.74e+04,
'ki': 1.16e+03,
'kd': 2.23e+04
}
pitch = {
'kp': 5.71e+04,
'ki': 3.62e+03,
'kd': 3.38e+04
}
yaw = {
'kp': 3.33e+05,
'ki': 2.63e+04,
'kd': 1.69e+05
}
tangential_short_tether = {
'kp': 0.0115,
'ki': 0.000166,
'kd': 0.0870
}
tangential_low_altitude_long_tether = {
'kp': 0.0469,
'ki': 0.00169,
'kd': 0.193
}
tangential_high_altitude_long_tether = {
'kp': 0.00713,
'ki': 4.56e-05,
'kd': 0.0331
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0574
}
tension_hard = {
'kp': 0.00,
'ki': 1.04e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.04e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.46e+04,
'ki': 588.,
'kd': 0.00
}
int_yaw = {
'kp': 4.52e+04,
'ki': 9.01e+03,
'kd': 0.00
}
elif wing_serial == m.kWingSerial07Crosswind:
low_altitude = {
'kp': 1.81e+03,
'ki': 128.,
'kd': 6.39e+03
}
high_altitude = {
'kp': 694.,
'ki': 41.0,
'kd': 2.94e+03
}
transform_tether_elevation = {
'kp': 0.00,
'ki': -16.0,
'kd': 0.00
}
reel_tether_elevation = {
'kp': 0.00,
'ki': 0.0375,
'kd': 0.00
}
roll = {
'kp': 0.00,
'ki': 0.00,
'kd': 0.00
}
low_thrust_pitch = {
'kp': 2.84e+04,
'ki': 1.21e+03,
'kd': 2.32e+04
}
pitch = {
'kp': 5.91e+04,
'ki': 3.75e+03,
'kd': 3.50e+04
}
yaw = {
'kp': 3.45e+05,
'ki': 2.72e+04,
'kd': 1.75e+05
}
tangential_short_tether = {
'kp': 0.00937,
'ki': 0.000135,
'kd': 0.0710
}
tangential_low_altitude_long_tether = {
'kp': 0.0382,
'ki': 0.00138,
'kd': 0.157
}
tangential_high_altitude_long_tether = {
'kp': 0.00582,
'ki': 3.72e-05,
'kd': 0.0270
}
radial = {
'kp': 0.00,
'ki': 0.00,
'kd': -0.0498
}
tension_hard = {
'kp': 0.00,
'ki': 1.08e-05,
'kd': 0.00
}
tension_soft = {
'kp': 0.00,
'ki': 1.08e-06,
'kd': 0.00
}
int_pitch = {
'kp': 4.59e+04,
'ki': 606.,
'kd': 0.00
}
int_yaw = {
'kp': 4.68e+04,
'ki': 9.32e+03,
'kd': 0.00
}
else:
assert False, 'wing_serial %d was not recognized' % wing_serial
return {
'low_altitude': low_altitude,
'high_altitude': high_altitude,
'transform_tether_elevation': transform_tether_elevation,
'reel_tether_elevation': reel_tether_elevation,
'roll': roll,
'low_thrust_pitch': low_thrust_pitch,
'pitch': pitch,
'yaw': yaw,
'tangential_short_tether': tangential_short_tether,
'tangential_low_altitude_long_tether': (
tangential_low_altitude_long_tether),
'tangential_high_altitude_long_tether': (
tangential_high_altitude_long_tether),
'radial': radial,
'tension_hard': tension_hard,
'tension_soft': tension_soft,
'int_pitch': int_pitch,
'int_yaw': int_yaw,
}
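# Usage sketch (illustrative): the return value is a plain dict of PID gain
# dicts keyed by controller name, e.g.
#
#   gains = GetHoverControllers(m.kWingSerial01)
#   pitch_kp = gains['pitch']['kp']          # 5.23e+04
#   assert set(gains['yaw']) == {'kp', 'ki', 'kd'}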
|
apache-2.0
| -702,106,154,424,924,200
| 19.250965
| 74
| 0.398411
| false
| 2.654352
| false
| false
| false
|
codeforamerica/typeseam
|
typeseam/form_filler/front.py
|
1
|
3914
|
from datetime import datetime, timezone
from pytz import timezone as ptimezone
import re
import json
import requests
class Front:
def __init__(self, token):
self.headers = {
'Authorization': 'Bearer {}'.format(token),
'Accept': 'application/json'
}
self.event_types = 'q[types][]=inbound&q[types][]=outbound'
self.root_url = 'https://api2.frontapp.com/events?'
self.payload = []
def get_events(self, after=None):
self.payload = []
request_url = self.root_url + self.event_types
if after:
request_url += '&q[after]={}'.format(after)
self.pull_payload(request_url)
return self.parse_events()
def pull_payload(self, url):
next_page = url
while next_page:
response = requests.get(
next_page, headers=self.headers)
data = response.json()
self.payload.extend(data['_results'])
next_page = data["_pagination"]["next"]
def parse_events(self):
events = []
for event in self.payload:
data = event["conversation"]
message = data["last_message"]
if message["type"] == "email":
message["subject"] = data["subject"]
if is_referral(message):
events.append(get_referral_info(message))
elif is_submission(message):
events.append(get_submission_info(message))
elif is_opening(message):
events.append(get_opening_info(message))
return events
def get_opening_info(msg):
return {
"type": "opened",
"time": get_datetime(msg),
"by": get_opener(msg),
"key": is_opening(msg)
}
def is_from_cmr(msg):
for entity in msg["recipients"]:
if entity["handle"] == "clearmyrecord@codeforamerica.org":
return entity["role"] == "from"
return False
def is_to_louise(msg):
for entity in msg["recipients"]:
if entity["handle"] == "louise.winterstein@sfgov.org":
return entity["role"] == "to"
return False
def is_from_server(msg):
for entity in msg["recipients"]:
if entity["handle"] == "no-reply@codeforamerica.org":
return entity["role"] == "from"
return False
def get_referral_author(msg):
return msg["author"]["username"]
def get_datetime(msg):
return msg["created_at"]
def get_referral_key(msg):
pattern = re.compile(
"\.org/sanfrancisco/(?P<key>[0-9a-f]+)/"
)
results = pattern.findall(msg["text"])
if results and len(results) == 1:
return results[0]
else:
raise Exception(
"Couldn't find a uuid in {}".format(
json.dumps(msg, indent=2)
))
def utc_to_cali(timestamp, fmt="%c"):
PDT = ptimezone('US/Pacific')
dt = datetime.fromtimestamp(timestamp, timezone.utc)
return dt.astimezone(PDT).strftime(fmt)
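# Illustrative example: utc_to_cali(0) renders the Unix epoch in US/Pacific
# time, e.g. 'Wed Dec 31 16:00:00 1969'; a custom strftime format can be
# passed, e.g. utc_to_cali(0, "%Y-%m-%d") -> '1969-12-31'.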
def is_referral(msg):
return is_from_cmr(msg) and is_to_louise(msg)
def get_referral_info(msg):
return {
"type": "referred",
"by": get_referral_author(msg),
"time": get_datetime(msg),
"key": get_referral_key(msg)
}
def is_submission(msg):
srch = "New application to http://clearmyrecord.codeforamerica.org/"
return srch in msg["subject"]
def get_submission_info(msg):
return {
"type": "received",
"time": get_datetime(msg),
"key": get_referral_key(msg)
}
def get_opener(msg):
srch = "viewed by "
idx = msg["subject"].rfind(srch)
email = msg["subject"][idx + len(srch):]
return email
def is_opening(msg):
pattern = re.compile("Application (?P<key>[0-9a-f]+) viewed by")
results = pattern.findall(msg["subject"])
if results:
return results[0]
return False
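# Usage sketch (hypothetical token and timestamp):
#
#   front = Front("YOUR_FRONT_API_TOKEN")
#   events = front.get_events(after=1451606400)  # unix seconds
#   for e in events:
#       print(e["type"], utc_to_cali(e["time"]), e.get("key"))
#
# get_events walks the paginated /events endpoint and reduces each inbound
# or outbound email to a small dict tagged "referred", "received", or
# "opened".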
|
bsd-3-clause
| 1,931,358,864,134,680,600
| 26.180556
| 72
| 0.569239
| false
| 3.620722
| false
| false
| false
|
pgodel/rdiff-backup
|
rdiff_backup/Globals.py
|
1
|
11031
|
# Copyright 2002 Ben Escoto
#
# This file is part of rdiff-backup.
#
# rdiff-backup is free software; you can redistribute it and/or modify
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# rdiff-backup is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with rdiff-backup; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
"""Hold a variety of constants usually set at initialization."""
import re, os
# The current version of rdiff-backup
version = "1.3.3"
# If this is set, use this value in seconds as the current time
# instead of reading it from the clock.
current_time = None
# This determines how many bytes to read at a time when copying
blocksize = 131072
# This is used by the BufferedRead class to determine how many
# bytes to request from the underlying file per read(). Larger
# values may save on connection overhead and latency.
conn_bufsize = 393216
# This is used in the CacheCollatedPostProcess and MiscIterToFile
# classes. The number represents the number of rpaths which may be
# stuck in buffers when moving over a remote connection.
pipeline_max_length = 500
# True if script is running as a server
server = None
# uid and gid of the owner of the rdiff-backup process. This can
# vary depending on the connection.
try:
process_uid = os.getuid()
process_gid = os.getgid()
process_groups = [process_gid] + os.getgroups()
except AttributeError:
process_uid = 0
process_gid = 0
process_groups = [0]
# If true, when copying attributes, also change target's uid/gid
change_ownership = None
# If true, change the permissions of unwriteable mirror files
# (such as directories) so that they can be written, and then
# change them back. This defaults to 1 just in case the process
# is not running as root (root doesn't need to change
# permissions).
change_mirror_perms = (process_uid != 0)
# If true, try to reset the atimes of the source partition.
preserve_atime = None
# The following three attributes represent whether extended attributes
# are supported. If eas_active is true, then the current session
# supports them. If eas_write is true, then the extended attributes
# should also be written to the destination side. Finally, eas_conn
# is relative to the current connection, and should be true iff that
# particular connection supports extended attributes.
eas_active = None
eas_write = None
eas_conn = None
# The following settings are like the extended attribute settings, but
# apply to access control lists instead.
acls_active = None
acls_write = None
acls_conn = None
# Like the above, but applies to support of Windows
# access control lists.
win_acls_active = None
win_acls_write = None
win_acls_conn = None
# Like above two setting groups, but applies to support of Mac OS X
# style resource forks.
resource_forks_active = None
resource_forks_write = None
resource_forks_conn = None
# Like the above, but applies to MacOS Carbon Finder creator/type info.
# As of 1.0.2 this has defaulted to off because of bugs
carbonfile_active = None
carbonfile_write = None
carbonfile_conn = None
# This will be set as soon as the LocalConnection class loads
local_connection = None
# All connections should be added to the following list, so
# further global changes can be propagated to the remote systems.
# The first element should be Globals.local_connection. For a
# server, the second is the connection to the client.
connections = []
# Each process should have a connection number unique to the
# session. The client has connection number 0.
connection_number = 0
# Dictionary pairing connection numbers with connections. Set in
# SetConnections for all connections.
connection_dict = {}
# True if the script is the end that reads the source directory
# for backups. It is true for purely local sessions.
isbackup_reader = None
# Connection of the real backup reader (for which isbackup_reader
# is true)
backup_reader = None
# True if the script is the end that writes to the increment and
# mirror directories. True for purely local sessions.
isbackup_writer = None
# Connection of the backup writer
backup_writer = None
# Connection of the client
client_conn = None
# When backing up, issource should be true on the reader and isdest on
# the writer. When restoring, issource should be true on the mirror
# and isdest should be true on the target.
issource = None
isdest = None
# This list is used by the set function below. When a new
# connection is created with init_connection, its Globals class
# will match this one for all the variables mentioned in this
# list.
changed_settings = []
# The RPath or QuotedRPath of the rdiff-backup-data directory.
rbdir = None
# chars_to_quote is a string whose characters should be quoted. It
# should be true if certain characters in filenames on the source side
# should be escaped (see FilenameMapping for more info).
chars_to_quote = None
quoting_char = ';'
# If true, the timestamps use the following format: "2008-09-01T04-49-04-07-00"
# (instead of "2008-09-01T04:49:04-07:00"). This creates timestamps which
# don't need to be escaped on Windows.
use_compatible_timestamps = 0
# If true, emit output intended to be easily readable by a
# computer. False means output is intended for humans.
parsable_output = None
# If true, then hardlinks will be preserved to mirror and recorded
# in the increments directory. There is also a difference here
# between None and 0. When restoring, None or 1 means to preserve
# hardlinks iff can find a hardlink dictionary. 0 means ignore
# hardlink information regardless.
preserve_hardlinks = 1
# If this is false, then rdiff-backup will not compress any
# increments. Default is to compress based on regexp below.
compression = 1
# Increments based on files whose names match this
# case-insensitive regular expression won't be compressed (applies
# to .snapshots and .diffs). The second below will be the
# compiled version of the first.
no_compression_regexp_string = ("(?i).*\\.(gz|z|bz|bz2|tgz|zip|rpm|deb|"
"jpg|jpeg|gif|png|jp2|mp3|ogg|avi|wmv|mpeg|mpg|rm|mov|flac|shn|pgp|"
"gpg|rz|lzh|zoo|lharc|rar|arj|asc)$")
no_compression_regexp = None
# If true, filelists and directory statistics will be split on
# nulls instead of newlines.
null_separator = None
# Determines whether or not ssh will be run with the -C switch
ssh_compression = 1
# If true, print statistics after successful backup
print_statistics = None
# Controls whether file_statistics file is written in
# rdiff-backup-data dir. These can sometimes take up a lot of space.
file_statistics = 1
# On the writer connection, the following will be set to the mirror
# Select iterator.
select_mirror = None
# On the backup writer connection, holds the root incrementing branch
# object. Access is provided to increment error counts.
ITRB = None
# security_level has 4 values and controls which requests from remote
# systems will be honored. "all" means anything goes. "read-only"
# means that the requests must not write to disk. "update-only" means
# that requests shouldn't destructively update the disk (but normal
# incremental updates are OK). "minimal" means only listen to a few
# basic requests.
security_level = "all"
# If this is set, it indicates that the remote connection should only
# deal with paths inside of restrict_path.
restrict_path = None
# If set, a file will be marked as changed if its inode changes. See
# the man page under --no-compare-inode for more information.
compare_inode = 1
# If set, directories can be fsync'd just like normal files, to
# guarantee that any changes have been committed to disk.
fsync_directories = None
# If set, exit with error instead of dropping ACLs or ACL entries.
never_drop_acls = None
# Apply this mask to permissions before chmoding. (Set to 0777 to
# prevent highbit permissions on systems which don't support them.)
permission_mask = 07777
# If true, symlinks permissions are affected by the process umask, and
# we should change the umask when creating them in order to preserve
# the original permissions
symlink_perms = None
# If set, the path that should be used instead of the default Python
# tempfile.tempdir value on remote connections
remote_tempdir = None
def get(name):
"""Return the value of something in this module"""
return globals()[name]
def is_not_None(name):
"""Returns true if value is not None"""
return globals()[name] is not None
def set(name, val):
"""Set the value of something in this module
Use this instead of writing the values directly if the setting
matters to remote sides. This function updates the
changed_settings list, so other connections know to copy the
changes.
"""
changed_settings.append(name)
globals()[name] = val
def set_local(name, val):
"""Like set above, but only set current connection"""
globals()[name] = val
def set_integer(name, val):
	"""Like set, but make sure val is an integer"""
	# Log comes from rdiff-backup's log module; imported lazily here because
	# log itself imports Globals (avoids a circular import).
	from log import Log
	try: intval = int(val)
	except ValueError:
		Log.FatalError("Variable %s must be set to an integer -\n"
					   "received %s instead." % (name, val))
	set(name, intval)
def set_float(name, val, min = None, max = None, inclusive = 1):
	"""Like set, but make sure val is float within given bounds"""
	# See set_integer above: the lazy import avoids a circular import with log.
	from log import Log
	def error():
		s = "Variable %s must be set to a float" % (name,)
		if min is not None and max is not None:
			s += " between %s and %s " % (min, max)
			if inclusive: s += "inclusive"
			else: s += "not inclusive"
		elif min is not None or max is not None:
			if inclusive: inclusive_string = "or equal to "
			else: inclusive_string = ""
			if min is not None:
				s += " greater than %s%s" % (inclusive_string, min)
			else: s += " less than %s%s" % (inclusive_string, max)
		Log.FatalError(s)
	try: f = float(val)
	except ValueError: error()
	if min is not None:
		if inclusive and f < min: error()
		elif not inclusive and f <= min: error()
	if max is not None:
		if inclusive and f > max: error()
		elif not inclusive and f >= max: error()
	set(name, f)
def get_dict_val(name, key):
"""Return val from dictionary in this class"""
return globals()[name][key]
def set_dict_val(name, key, val):
"""Set value for dictionary in this class"""
globals()[name][key] = val
def postset_regexp(name, re_string, flags = None):
"""Compile re_string on all existing connections, set to name"""
for conn in connections:
conn.Globals.postset_regexp_local(name, re_string, flags)
def postset_regexp_local(name, re_string, flags):
"""Set name to compiled re_string locally"""
if flags: globals()[name] = re.compile(re_string, flags)
else: globals()[name] = re.compile(re_string)
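
# Illustrative usage (hypothetical calls, not part of the original module):
# other rdiff-backup modules adjust these globals through the setters so the
# change is recorded in changed_settings and replayed on new connections:
#
#   import Globals
#   Globals.set('blocksize', 65536)
#   Globals.set_integer('pipeline_max_length', '500')
#   Globals.postset_regexp('no_compression_regexp',
#                          Globals.no_compression_regexp_string)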
|
gpl-2.0
| -367,777,364,650,201,340
| 32.941538
| 79
| 0.741728
| false
| 3.60962
| false
| false
| false
|
jtoppins/beaker
|
Client/src/bkr/client/commands/cmd_user_modify.py
|
1
|
2506
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"""
bkr user-modify: Modify Beaker users
====================================
.. program:: bkr user-modify
Synopsis
--------
| :program:`bkr user-modify` [*options*] [:option:`--add-submission-delegate` <user>]
| [:option:`--remove-submission-delegate` <user>]
Description
-----------
Modify a Beaker user.
Allows the adding or removing of submission delegates of the currently
logged in user.
.. _user-modify-options:
Options
-------
.. option:: --add-submission-delegate=<user>
Adds a new submission delegate
.. option:: --remove-submission-delegate=<user>
Removes an existing submission delegate
Common :program:`bkr` options are described in the :ref:`Options
<common-options>` section of :manpage:`bkr(1)`.
Exit status
-----------
Non-zero on error, otherwise zero.
Examples
--------
Add a new submission delegate::

    bkr user-modify --add-submission-delegate=mydelegate

Remove an existing delegate::

    bkr user-modify --remove-submission-delegate=mydelegate
See also
--------
:manpage:`bkr(1)`
"""
from bkr.client import BeakerCommand
from xmlrpclib import Fault
from sys import exit
class User_Modify(BeakerCommand):
"""Modify certain user properties"""
enabled=True
def options(self):
self.parser.usage = "%%prog %s [options]" % self.normalized_name
self.parser.add_option(
"-a",
"--add-submission-delegate",
help="Add a new submission delegate"
)
self.parser.add_option(
"-r",
"--remove-submission-delegate",
help="Remove an existing submission delegate"
)
def run(self, *args, **kwargs):
delegate_to_add = kwargs.get('add_submission_delegate', None)
delegate_to_remove = kwargs.get('remove_submission_delegate', None)
self.set_hub(**kwargs)
if delegate_to_remove:
self.hub.prefs. \
remove_submission_delegate_by_name(delegate_to_remove)
print 'Removed submission delegate %s' % delegate_to_remove
if delegate_to_add:
self.hub.prefs. \
add_submission_delegate_by_name(delegate_to_add)
print 'Added submission delegate %s' % delegate_to_add
exit(0)
|
gpl-2.0
| 2,265,340,603,310,640,000
| 24.06
| 85
| 0.638867
| false
| 3.921753
| false
| false
| false
|
lanhel/viperaccept
|
setup.py
|
1
|
1637
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
#----------------------------------------------------------------------------
"""HTTP content negotiation application."""
__author__ = ('Lance Finn Helsten',)
__version__ = '0.0'
__copyright__ = """Copyright (C) 2014 Lance Finn Helsten"""
__license__ = """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import setuptools
setuptools.setup(
name = "viperaccept",
version = __version__,
author = 'Lance Finn Helsten',
author_email = 'lanhel@flyingtitans.com',
description = __doc__,
long_description = open('README.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
],
packages = [
'viperaccept'
],
# scripts = [
# ],
)
|
apache-2.0
| -5,658,288,544,415,330,000
| 31.098039
| 77
| 0.609041
| false
| 4.307895
| false
| false
| false
|
FlorisHoogenboom/sklearn-helpers
|
tests/test_preprocessing.py
|
1
|
3095
|
import unittest
import numpy as np
import pandas as pd
from sklearn_helpers.preprocessing import \
EnhancedLabelEncoder, MultiColumnLabelEncoder
class EnhancedLabelEncoderTest(unittest.TestCase):
def test_accepts_only_1d(self):
"""It should only accept only a 1d array"""
ehe = EnhancedLabelEncoder()
train = np.array([
[1,2],
[2,1]
])
self.assertRaises(ValueError, lambda: ehe.fit(train))
# If it is flattened, it should not raise.
train = train.flatten()
ehe.fit(train)
def test_handle_unknown_error(self):
"""If handle_unkown is 'error' it should throw on unseen labels"""
ehe = EnhancedLabelEncoder(handle_unknown='error')
train = np.array(['a', 'b', 'a'])
test = np.array(['a','c'])
ehe.fit(train)
# Check that a ValueError is raised on transform
self.assertRaises(ValueError, lambda: ehe.transform(test))
def test_handle_unknown_ignore(self):
"""If handle_unknown is 'ignore' it should map unseen labels to a new value"""
ehe = EnhancedLabelEncoder(handle_unknown='ignore')
train = np.array(['a', 'b', 'a'])
test = np.array(['a','c'])
ehe.fit(train)
# Check that the new label is mapped to the next value
self.assertTrue(
(np.array([0,2]) == ehe.transform(test)).all()
)
class MultiColumnLabelEncoderTest(unittest.TestCase):
def test_handle_ignore(self):
"""If handle_unknown is 'ignore' it should map unseen labels to a new value"""
mce = MultiColumnLabelEncoder(handle_unknown='ignore')
train = np.array([
['a', 'b'],
['c', 'a']
])
test = np.array([
['a', 'd'],
['c', 'd']
])
mce.fit(train)
test_transformed = np.array([
[0.,2.],
[1.,2.]
])
self.assertTrue(
(mce.transform(test) == test_transformed).all()
)
def test_accepts_pandas(self):
"""It shouold accept a Pandas dataframe"""
mce = MultiColumnLabelEncoder(handle_unknown='ignore')
train = pd.DataFrame(
np.array([
['a', 'b'],
['c', 'a']
]),
columns=['col1', 'col2']
)
# This should not throw
mce.fit_transform(train, np.array([1,2]))
    def test_classes(self):
        """It should return classes for each column"""
        mce = MultiColumnLabelEncoder(
            handle_unknown='ignore'
        )
        train = pd.DataFrame(
            np.array([
                ['a', 'b'],
                ['c', 'a']
            ]),
            columns=['col1', 'col2']
        )
        mce.fit(train, np.array([1,2]))
        self.assertEqual(
            mce.classes_[0][0],
            'a'
        )
        self.assertEqual(
            mce.classes_[1][1],
            'b'
        )
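
# Hypothetical convenience runner (not part of the original file): lets these
# tests be executed directly with `python test_preprocessing.py`.
if __name__ == '__main__':
    unittest.main()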
|
mit
| 4,735,568,165,528,348,000
| 26.149123
| 86
| 0.519871
| false
| 3.932656
| true
| false
| false
|
jkibele/benthic_photo_survey
|
bps_package/ui_pref_help.py
|
1
|
1950
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pref_help.ui'
#
# Created: Sun Mar 8 18:17:55 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_PrefHelpDialog(object):
def setupUi(self, PrefHelpDialog):
PrefHelpDialog.setObjectName(_fromUtf8("PrefHelpDialog"))
PrefHelpDialog.resize(447, 326)
self.verticalLayout = QtGui.QVBoxLayout(PrefHelpDialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.textBrowser = QtGui.QTextBrowser(PrefHelpDialog)
self.textBrowser.setObjectName(_fromUtf8("textBrowser"))
self.verticalLayout.addWidget(self.textBrowser)
self.buttonBox = QtGui.QDialogButtonBox(PrefHelpDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(PrefHelpDialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), PrefHelpDialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), PrefHelpDialog.reject)
QtCore.QMetaObject.connectSlotsByName(PrefHelpDialog)
def retranslateUi(self, PrefHelpDialog):
PrefHelpDialog.setWindowTitle(_translate("PrefHelpDialog", "BPS Help", None))
|
bsd-3-clause
| 8,490,062,574,565,492,000
| 39.625
| 109
| 0.726667
| false
| 3.9
| false
| false
| false
|
zabracks/sshuttle
|
src/server.py
|
1
|
10287
|
import re
import struct
import socket
import traceback
import time
import sys
import os
if not globals().get('skip_imports'):
import ssnet
import helpers
import hostwatch
import compat.ssubprocess as ssubprocess
from ssnet import Handler, Proxy, Mux, MuxWrapper
from helpers import log, debug1, debug2, debug3, Fatal, \
resolvconf_random_nameserver
if not globals().get('latency_control'):
latency_control = None
def _ipmatch(ipstr):
if ipstr == 'default':
ipstr = '0.0.0.0/0'
m = re.match(r'^(\d+(\.\d+(\.\d+(\.\d+)?)?)?)(?:/(\d+))?$', ipstr)
if m:
g = m.groups()
ips = g[0]
width = int(g[4] or 32)
if g[1] is None:
ips += '.0.0.0'
width = min(width, 8)
elif g[2] is None:
ips += '.0.0'
width = min(width, 16)
elif g[3] is None:
ips += '.0'
width = min(width, 24)
return (struct.unpack('!I', socket.inet_aton(ips))[0], width)
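
# Illustrative examples (not executed):
#   _ipmatch('default')      -> (0, 0)           i.e. 0.0.0.0/0
#   _ipmatch('10/8')         -> (167772160, 8)   i.e. 10.0.0.0/8
#   _ipmatch('192.168.1.1')  -> (3232235777, 32)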
def _ipstr(ip, width):
if width >= 32:
return ip
else:
return "%s/%d" % (ip, width)
def _maskbits(netmask):
if not netmask:
return 32
for i in range(32):
if netmask[0] & _shl(1, i):
return 32 - i
return 0
def _shl(n, bits):
return n * int(2 ** bits)
def _list_routes():
argv = ['netstat', '-rn']
p = ssubprocess.Popen(argv, stdout=ssubprocess.PIPE)
routes = []
for line in p.stdout:
cols = re.split(r'\s+', line)
ipw = _ipmatch(cols[0])
if not ipw:
continue # some lines won't be parseable; never mind
maskw = _ipmatch(cols[2]) # linux only
mask = _maskbits(maskw) # returns 32 if maskw is null
width = min(ipw[1], mask)
ip = ipw[0] & _shl(_shl(1, width) - 1, 32 - width)
routes.append(
(socket.AF_INET, socket.inet_ntoa(struct.pack('!I', ip)), width))
rv = p.wait()
if rv != 0:
log('WARNING: %r returned %d\n' % (argv, rv))
log('WARNING: That prevents --auto-nets from working.\n')
return routes
def list_routes():
for (family, ip, width) in _list_routes():
if not ip.startswith('0.') and not ip.startswith('127.'):
yield (family, ip, width)
def _exc_dump():
exc_info = sys.exc_info()
return ''.join(traceback.format_exception(*exc_info))
def start_hostwatch(seed_hosts):
s1, s2 = socket.socketpair()
pid = os.fork()
if not pid:
# child
rv = 99
try:
try:
s2.close()
os.dup2(s1.fileno(), 1)
os.dup2(s1.fileno(), 0)
s1.close()
rv = hostwatch.hw_main(seed_hosts) or 0
except Exception:
log('%s\n' % _exc_dump())
rv = 98
finally:
os._exit(rv)
s1.close()
return pid, s2
class Hostwatch:
def __init__(self):
self.pid = 0
self.sock = None
class DnsProxy(Handler):
def __init__(self, mux, chan, request):
# FIXME! IPv4 specific
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
Handler.__init__(self, [sock])
self.timeout = time.time() + 30
self.mux = mux
self.chan = chan
self.tries = 0
self.peer = None
self.request = request
self.sock = sock
# FIXME! IPv4 specific
self.sock.setsockopt(socket.SOL_IP, socket.IP_TTL, 42)
self.try_send()
def try_send(self):
if self.tries >= 3:
return
self.tries += 1
# FIXME! Support IPv6 nameservers
self.peer = resolvconf_random_nameserver()[1]
self.sock.connect((self.peer, 53))
debug2('DNS: sending to %r\n' % self.peer)
try:
self.sock.send(self.request)
except socket.error, e:
if e.args[0] in ssnet.NET_ERRS:
# might have been spurious; try again.
# Note: these errors sometimes are reported by recv(),
# and sometimes by send(). We have to catch both.
debug2('DNS send to %r: %s\n' % (self.peer, e))
self.try_send()
return
else:
log('DNS send to %r: %s\n' % (self.peer, e))
return
def callback(self):
try:
data = self.sock.recv(4096)
except socket.error, e:
if e.args[0] in ssnet.NET_ERRS:
# might have been spurious; try again.
# Note: these errors sometimes are reported by recv(),
# and sometimes by send(). We have to catch both.
debug2('DNS recv from %r: %s\n' % (self.peer, e))
self.try_send()
return
else:
log('DNS recv from %r: %s\n' % (self.peer, e))
return
debug2('DNS response: %d bytes\n' % len(data))
self.mux.send(self.chan, ssnet.CMD_DNS_RESPONSE, data)
self.ok = False
class UdpProxy(Handler):
def __init__(self, mux, chan, family):
sock = socket.socket(family, socket.SOCK_DGRAM)
Handler.__init__(self, [sock])
self.timeout = time.time() + 30
self.mux = mux
self.chan = chan
self.sock = sock
if family == socket.AF_INET:
self.sock.setsockopt(socket.SOL_IP, socket.IP_TTL, 42)
def send(self, dstip, data):
debug2('UDP: sending to %r port %d\n' % dstip)
try:
self.sock.sendto(data, dstip)
except socket.error, e:
log('UDP send to %r port %d: %s\n' % (dstip[0], dstip[1], e))
return
    def callback(self):
        try:
            data, peer = self.sock.recvfrom(4096)
        except socket.error, e:
            # 'peer' is unbound when recvfrom() itself fails, so it cannot be
            # included in the message without masking the real error.
            log('UDP recv: %s\n' % e)
            return
debug2('UDP response: %d bytes\n' % len(data))
hdr = "%s,%r," % (peer[0], peer[1])
self.mux.send(self.chan, ssnet.CMD_UDP_DATA, hdr + data)
def main():
if helpers.verbose >= 1:
helpers.logprefix = ' s: '
else:
helpers.logprefix = 'server: '
assert latency_control is not None
debug1('latency control setting = %r\n' % latency_control)
routes = list(list_routes())
debug1('available routes:\n')
for r in routes:
debug1(' %d/%s/%d\n' % r)
# synchronization header
sys.stdout.write('\0\0SSHUTTLE0001')
sys.stdout.flush()
handlers = []
mux = Mux(socket.fromfd(sys.stdin.fileno(),
socket.AF_INET, socket.SOCK_STREAM),
socket.fromfd(sys.stdout.fileno(),
socket.AF_INET, socket.SOCK_STREAM))
handlers.append(mux)
routepkt = ''
for r in routes:
routepkt += '%d,%s,%d\n' % r
mux.send(0, ssnet.CMD_ROUTES, routepkt)
hw = Hostwatch()
hw.leftover = ''
def hostwatch_ready():
assert(hw.pid)
content = hw.sock.recv(4096)
if content:
lines = (hw.leftover + content).split('\n')
if lines[-1]:
# no terminating newline: entry isn't complete yet!
hw.leftover = lines.pop()
lines.append('')
else:
hw.leftover = ''
mux.send(0, ssnet.CMD_HOST_LIST, '\n'.join(lines))
else:
raise Fatal('hostwatch process died')
def got_host_req(data):
if not hw.pid:
(hw.pid, hw.sock) = start_hostwatch(data.strip().split())
handlers.append(Handler(socks=[hw.sock],
callback=hostwatch_ready))
mux.got_host_req = got_host_req
def new_channel(channel, data):
(family, dstip, dstport) = data.split(',', 2)
family = int(family)
dstport = int(dstport)
outwrap = ssnet.connect_dst(family, dstip, dstport)
handlers.append(Proxy(MuxWrapper(mux, channel), outwrap))
mux.new_channel = new_channel
dnshandlers = {}
def dns_req(channel, data):
debug2('Incoming DNS request channel=%d.\n' % channel)
h = DnsProxy(mux, channel, data)
handlers.append(h)
dnshandlers[channel] = h
mux.got_dns_req = dns_req
udphandlers = {}
def udp_req(channel, cmd, data):
debug2('Incoming UDP request channel=%d, cmd=%d\n' % (channel, cmd))
if cmd == ssnet.CMD_UDP_DATA:
(dstip, dstport, data) = data.split(",", 2)
dstport = int(dstport)
debug2('is incoming UDP data. %r %d.\n' % (dstip, dstport))
h = udphandlers[channel]
h.send((dstip, dstport), data)
elif cmd == ssnet.CMD_UDP_CLOSE:
debug2('is incoming UDP close\n')
h = udphandlers[channel]
h.ok = False
del mux.channels[channel]
def udp_open(channel, data):
debug2('Incoming UDP open.\n')
family = int(data)
mux.channels[channel] = lambda cmd, data: udp_req(channel, cmd, data)
if channel in udphandlers:
raise Fatal('UDP connection channel %d already open' % channel)
else:
h = UdpProxy(mux, channel, family)
handlers.append(h)
udphandlers[channel] = h
mux.got_udp_open = udp_open
while mux.ok:
if hw.pid:
assert(hw.pid > 0)
(rpid, rv) = os.waitpid(hw.pid, os.WNOHANG)
if rpid:
raise Fatal(
'hostwatch exited unexpectedly: code 0x%04x\n' % rv)
ssnet.runonce(handlers, mux)
if latency_control:
mux.check_fullness()
mux.callback()
if dnshandlers:
now = time.time()
for channel, h in dnshandlers.items():
if h.timeout < now or not h.ok:
debug3('expiring dnsreqs channel=%d\n' % channel)
del dnshandlers[channel]
h.ok = False
for channel, h in udphandlers.items():
if not h.ok:
debug3('expiring UDP channel=%d\n' % channel)
del udphandlers[channel]
h.ok = False
|
lgpl-2.1
| -5,990,303,340,717,896,000
| 29.707463
| 77
| 0.521532
| false
| 3.535052
| false
| false
| false
|
crazyskateface/LC
|
chat/admin.py
|
1
|
1664
|
from django.contrib import admin
from django import forms  # needed by MyUserCreationForm.clean_username below
from chat.models import UserProfile, Comments, Roll, Emblem
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
# Register your models here.
class UserProfileAdmin(admin.StackedInline):
model = UserProfile
can_delete = False
verbose_name_plural = 'User'
fields = ('user','ign','isMod','banned','verified','primRole','secRole','tier','division')
class MyUserCreationForm(UserCreationForm):
def clean_username(self):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
username = self.cleaned_data["username"]
try:
User._default_manager.get(username=username)
except User.DoesNotExist:
return username
raise forms.ValidationError(self.error_messages['duplicate_username'])
class Meta(UserCreationForm.Meta):
model = User
class UserAdmin(UserAdmin):
add_form = MyUserCreationForm
inlines = (UserProfileAdmin, )
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
class CommentsAdmin(admin.ModelAdmin):
fields = ('user','text','datetime')
admin.site.register(Comments,CommentsAdmin)
class RollAdmin(admin.ModelAdmin):
fields = ('name',)
admin.site.register(Roll, RollAdmin)
class EmblemAdmin(admin.ModelAdmin):
fields = ('name', 'url',)
admin.site.register(Emblem, EmblemAdmin)
# class MyUserAdmin(UserAdmin):
# add_form = MyUserCreationForm
#
# admin.site.register(UserProfile, MyUserAdmin)
|
mit
| -309,975,475,201,732,700
| 28.714286
| 94
| 0.709135
| false
| 3.834101
| false
| false
| false
|
g-goessel/mathdoku_solve
|
fonctions.py
|
1
|
4674
|
"""
fonctions
"""
from itertools import permutations, product
from functools import reduce
import numpy as np
def combi_possibles(val_tot,nbr_cases,nbr_max):
    """
    Return the list of possible combinations.
    """
    # A single-cell cage leaves no choice: its value is certain.
    if nbr_cases==1:
        return [(val_tot,)]
combi=list()
list_div=[i for i in range(1,nbr_max+1) if val_tot/i==int(val_tot/i)]
combi_max=list(product([i for i in range(1,nbr_max+1)], repeat=nbr_cases))
combi_max_multipli=list(product(list_div, repeat=nbr_cases))
if val_tot <= nbr_max**2:
        # the total is small enough for an addition to be possible
for i in combi_max:
soustraction = reduce(lambda x,y: x-y, i)
somme = sum(i)
division = reduce(lambda x,y: x/y, i)
if somme == val_tot:
combi.append(i)
if soustraction == val_tot:
for j in list(permutations(i)):
combi.append(j)
if division == val_tot:
for j in list(permutations(i)):
combi.append(j)
for i in combi_max_multipli:
produit = reduce(lambda x,y: x*y, i)
if produit == val_tot:
combi.append(i)
return combi
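
# Illustrative example (hypothetical values): for a two-cell cage totalling 3
# in a 2x2 grid, only the additions 1+2 and 2+1 qualify:
#   combi_possibles(3, 2, 2)  ->  [(1, 2), (2, 1)]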
def bonoupas(matrice):
    """
    Test whether `matrice` is valid by checking that each digit appears
    only once per row and per column.
    Return True if the grid is valid, False otherwise.
    """
    size = len(matrice)
    # (i_ref, j_ref) are the coordinates of the cell whose uniqueness we want
    # to verify on its row and column.
for i_ref in range(size):
for j_ref in range(size):
            # Check uniqueness down the column
for i in range(size):
if (matrice[i][j_ref]==matrice[i_ref][j_ref] and i != i_ref) and matrice[i][j_ref]!=0: return False
            # Then along the row
for j in range(size):
if matrice[i_ref][j]==matrice[i_ref][j_ref] and j != j_ref: return False
return True
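
# Illustrative example (hypothetical values): a 2x2 Latin square passes, while
# a grid repeating a digit on a row fails:
#   bonoupas([[1, 2], [2, 1]])  ->  True
#   bonoupas([[1, 1], [2, 1]])  ->  False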
# Miscellaneous optimizations
def optimize(user_data):
    """
    Prune the candidate combinations stored for each cage.
    """
    # Duplicate combinations can be dropped.
    for i in user_data:
        user_data[i][2]=list(set(user_data[i][2]))
    # Use the cages with a single remaining possibility to eliminate a large
    # number of certainly impossible candidates elsewhere.
    # collect the list of single-cell cages
blocs_solo=list()
for i in user_data:
if len(user_data[i][2])==1:
blocs_solo.append(i)
for bloc_solo in blocs_solo:
coord_bloc_solo=user_data[bloc_solo][1][0]
for bloc_to_clean in user_data:
if bloc_to_clean==bloc_solo: pass
else :
                # build the list of cell indices in bloc_to_clean that share a
                # row or a column with the solo cage
cases_to_clean=[i for i,x in enumerate(user_data[bloc_to_clean][1]) if x[0]==coord_bloc_solo[0] or x[1]==coord_bloc_solo[1]]
for case_to_clean in cases_to_clean:
for i,coord in enumerate(user_data[bloc_to_clean][2]):
if user_data[bloc_to_clean][2][i][case_to_clean] == user_data[bloc_solo][0]:
del(user_data[bloc_to_clean][2][i])
"""
On efface des combinaisons qui ne sont pas possibles car le meme chiffre apparait plusieurs fois sur la meme ligne/colonne
"""
for bloc in user_data:
        # Within each cage, list all the position groups that cannot hold the
        # same digit (same row or same column).
emplacements=[]
liste_x=[i[0] for i in user_data[bloc][1]]
liste_x_small=list(set(liste_x))
for key,x in enumerate(liste_x_small):
if liste_x.count(x)>1:
emplacements.append([i for i,j in enumerate(liste_x) if j == x and i != key])
liste_y=[i[1] for i in user_data[bloc][1]]
liste_y_small=list(set(liste_y))
for key,y in enumerate(liste_y_small):
if liste_y.count(y)>1:
emplacements.append([i for i,j in enumerate(liste_y) if j == y and i != key])
        # Then eliminate the combinations that violate this constraint.
        # Iterate over a copy: popping from the list while enumerating it
        # would skip elements and reuse stale indices.
        for combinaison in list(user_data[bloc][2]):
            for combinaison_limitante in emplacements:
                coord_interessantes=[combinaison[i] for i in list(combinaison_limitante)]
                if len(coord_interessantes)!=len(set(coord_interessantes)):
                    user_data[bloc][2].remove(combinaison)
                    break
    return user_data
|
mpl-2.0
| -8,464,025,482,168,778,000
| 34.124031
| 140
| 0.574893
| false
| 3.123324
| false
| false
| false
|
Microsoft/PTVS
|
Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/conda/core/portability.py
|
1
|
7021
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from logging import getLogger
from os.path import realpath
import re
import struct
from ..base.constants import PREFIX_PLACEHOLDER
from ..common.compat import on_win
from ..exceptions import CondaIOError, BinaryPrefixReplacementError
from ..gateways.disk.update import CancelOperation, update_file_in_place_as_binary
from ..models.enums import FileMode
log = getLogger(__name__)
# three capture groups: whole_shebang, executable, options
SHEBANG_REGEX = (br'^(#!' # pretty much the whole match string
br'(?:[ ]*)' # allow spaces between #! and beginning of the executable path
br'(/(?:\\ |[^ \n\r\t])*)' # the executable is the next text block without an escaped space or non-space whitespace character # NOQA
br'(.*)' # the rest of the line can contain option flags
br')$') # end whole_shebang group
class _PaddingError(Exception):
pass
def update_prefix(path, new_prefix, placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text):
if on_win and mode == FileMode.text:
# force all prefix replacements to forward slashes to simplify need to escape backslashes
# replace with unix-style path separators
new_prefix = new_prefix.replace('\\', '/')
def _update_prefix(original_data):
# Step 1. do all prefix replacement
data = replace_prefix(mode, original_data, placeholder, new_prefix)
# Step 2. if the shebang is too long, shorten it using /usr/bin/env trick
if not on_win:
data = replace_long_shebang(mode, data)
# Step 3. if the before and after content is the same, skip writing
if data == original_data:
raise CancelOperation()
# Step 4. if we have a binary file, make sure the byte size is the same before
# and after the update
if mode == FileMode.binary and len(data) != len(original_data):
raise BinaryPrefixReplacementError(path, placeholder, new_prefix,
len(original_data), len(data))
return data
update_file_in_place_as_binary(realpath(path), _update_prefix)
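
# Illustrative call (hypothetical paths): rewrite the build-time placeholder
# prefix recorded in an installed text file to the live environment prefix:
#   update_prefix('/envs/myenv/bin/script', '/envs/myenv',
#                 placeholder=PREFIX_PLACEHOLDER, mode=FileMode.text)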
def replace_prefix(mode, data, placeholder, new_prefix):
if mode == FileMode.text:
data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
elif mode == FileMode.binary:
data = binary_replace(data, placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
else:
raise CondaIOError("Invalid mode: %r" % mode)
return data
def binary_replace(data, a, b):
"""
Perform a binary replacement of `data`, where the placeholder `a` is
replaced with `b` and the remaining string is padded with null characters.
All input arguments are expected to be bytes objects.
"""
if on_win:
# on Windows for binary files, we currently only replace a pyzzer-type entry point
# we skip all other prefix replacement
if has_pyzzer_entry_point(data):
return replace_pyzzer_entry_point_shebang(data, a, b)
else:
return data
    def replace(match):
        occurrences = match.group().count(a)
        padding = (len(a) - len(b)) * occurrences
        if padding < 0:
            raise _PaddingError
        return match.group().replace(a, b) + b'\0' * padding
original_data_len = len(data)
pat = re.compile(re.escape(a) + b'([^\0]*?)\0')
data = pat.sub(replace, data)
assert len(data) == original_data_len
return data
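
# Illustrative example (hypothetical byte strings): replacing a 10-byte
# placeholder with a 4-byte prefix pads each match with NULs so the total
# length, and therefore every offset in the binary, is preserved:
#   binary_replace(b'/tmp/older/bin\0', b'/tmp/older', b'/opt')
#   -> b'/opt/bin' + b'\0' * 7   (still 15 bytes)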
def has_pyzzer_entry_point(data):
pos = data.rfind(b'PK\x05\x06')
return pos >= 0
def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix):
"""Code adapted from pyzzer. This is meant to deal with entry point exe's created by distlib,
which consist of a launcher, then a shebang, then a zip archive of the entry point code to run.
We need to change the shebang.
https://bitbucket.org/vinay.sajip/pyzzer/src/5d5740cb04308f067d5844a56fbe91e7a27efccc/pyzzer/__init__.py?at=default&fileviewer=file-view-default#__init__.py-112 # NOQA
"""
# Copyright (c) 2013 Vinay Sajip.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
launcher = shebang = None
pos = all_data.rfind(b'PK\x05\x06')
if pos >= 0:
end_cdr = all_data[pos + 12:pos + 20]
cdr_size, cdr_offset = struct.unpack('<LL', end_cdr)
arc_pos = pos - cdr_size - cdr_offset
data = all_data[arc_pos:]
if arc_pos > 0:
pos = all_data.rfind(b'#!', 0, arc_pos)
if pos >= 0:
shebang = all_data[pos:arc_pos]
if pos > 0:
launcher = all_data[:pos]
if data and shebang and launcher:
if hasattr(placeholder, 'encode'):
placeholder = placeholder.encode('utf-8')
if hasattr(new_prefix, 'encode'):
new_prefix = new_prefix.encode('utf-8')
shebang = shebang.replace(placeholder, new_prefix)
all_data = b"".join([launcher, shebang, data])
return all_data
def replace_long_shebang(mode, data):
# this function only changes a shebang line if it exists and is greater than 127 characters
if mode == FileMode.text:
shebang_match = re.match(SHEBANG_REGEX, data, re.MULTILINE)
if shebang_match:
whole_shebang, executable, options = shebang_match.groups()
if len(whole_shebang) > 127:
executable_name = executable.decode('utf-8').split('/')[-1]
new_shebang = '#!/usr/bin/env %s%s' % (executable_name, options.decode('utf-8'))
data = data.replace(whole_shebang, new_shebang.encode('utf-8'))
else:
# TODO: binary shebangs exist; figure this out in the future if text works well
pass
return data
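
# Illustrative example (hypothetical path): a shebang longer than 127 bytes,
# such as '#!/some/very/deep/.../env/bin/python -u', becomes
# '#!/usr/bin/env python -u', staying within the kernel's shebang length limit.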
|
apache-2.0
| -6,387,582,113,646,030,000
| 41.041916
| 172
| 0.64478
| false
| 3.920156
| false
| false
| false
|
sudhaMR/Django-Perception
|
imgpage/urls.py
|
1
|
1204
|
"""perception URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url,patterns
from imgpage import views
from perception.settings import STATIC_PATH, DEBUG
urlpatterns = patterns('',
url(r'^$',views.add_category,name='add_category'),
url(r'^about/',views.about,name='about'),
url(r'^taginfo/',views.taginfo,name='taginfo'),
url(r'^static/(.*)$', 'django.views.static.serve', {'document_root': STATIC_PATH, 'show_indexes': True}),
url(r'^static/', 'django.views.static.serve', {'document_root': STATIC_PATH, 'show_indexes': True}),
url(r'^add_category/$', views.add_category, name='add_category'))
|
mit
| 2,675,888,157,375,171,000
| 45.307692
| 109
| 0.689369
| false
| 3.410765
| false
| false
| false
|
mila/django-noticebox
|
setup.py
|
1
|
1110
|
#!/usr/bin/env python
import codecs
from setuptools import setup, find_packages
url='http://github.com/mila/django-noticebox/tree/master'
try:
long_description = codecs.open('README.rst', "r", "utf-8").read()
except IOError:
long_description = "See %s" % url
setup(
name='django-noticebox',
version=__import__("noticebox").__version__,
description='Django-noticebox is a reusable Django application which '
'provides functionality for sending notices to site users. '
'The notices can be displayed when user signs in, '
'sent by email or both.',
long_description=long_description,
author='Miloslav Pojman',
author_email='miloslav.pojman@gmail.com',
url=url,
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
include_package_data=True,
zip_safe=False,
)
|
bsd-3-clause
| -6,995,972,845,359,806,000
| 29
| 76
| 0.637838
| false
| 4.021739
| false
| false
| false
|