repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
Johnetordoff/osf.io | api_tests/files/serializers/test_file_serializer.py | Python | apache-2.0 | 8,118 | 0.003449 | from datetime import datetime
import pytest
from pytz import utc
from addons.base.utils import get_mfr_url
from api.files.serializers import FileSerializer, get_file_download_link, get_file_render_link
from api_tests import utils
from osf_tests.factories import (
UserFactory,
PreprintFactory,
NodeFactory,
DraftNodeFactory
)
from tests.utils import make_drf_request_with_version
from website import settings
@pytest.fixture()
def user():
return UserFactory()
def build_expected_render_link(mfr_url, download_url, with_version=True):
if with_version:
return '{}/render?url={}%26direct%26mode=render'.format(mfr_url, download_url)
else:
return '{}/render?url={}?direct%26mode=render'.format(mfr_url, download_url)
@pytest.mark.django_db
class TestFileSerializer:
@pytest.fixture()
def node(self, user):
return NodeFactory(creator=user)
@pytest.fixture()
def draft_node(self, user):
return DraftNodeFactory(creator=user)
@pytest.fixture()
def file_one(self, node, user):
return utils.create_test_file(node, user, create_guid=False)
@pytest.fixture()
def node_folder(self, node):
return node.get_addon('osfstorage').get_root()
@pytest.fixture()
def draft_node_folder(self, draft_node):
return draft_node.get_addon('osfstorage').get_root()
@pytest.fixture()
def preprint(self, user):
return PreprintFactory(creator=user)
@pytest.fixture()
def primary_file(self, preprint):
return preprint.primary_file
def test_file_serializer(self, file_one, node, node_folder):
created = file_one.versions.last().created
modified = file_one.versions.first().created
created_tz_aware = created.replace(tzinfo=utc)
modified_tz_aware = modified.replace(tzinfo=utc)
new_format = '%Y-%m-%dT%H:%M:%S.%fZ'
download_base = '/download/{}'
path = file_one._id
mfr_url = get_mfr_url(file_one, 'osfstorage')
# test_date_modified_formats_to_old_format
| req = make_drf_request_with_version(version='2.0')
data = FileSerializer(file_one, context={'request': req}).data['data']
assert modified_tz_aware == data['attributes']['date_modified']
# test_date_modified_formats_to_new_format
req = make_drf_request_with_ | version(version='2.2')
data = FileSerializer(file_one, context={'request': req}).data['data']
assert datetime.strftime(
modified, new_format
) == data['attributes']['date_modified']
# test_date_created_formats_to_old_format
req = make_drf_request_with_version(version='2.0')
data = FileSerializer(file_one, context={'request': req}).data['data']
assert created_tz_aware == data['attributes']['date_created']
# test_date_created_formats_to_new_format
req = make_drf_request_with_version(version='2.2')
data = FileSerializer(file_one, context={'request': req}).data['data']
assert datetime.strftime(
created, new_format
) == data['attributes']['date_created']
# check download file link with path
assert download_base.format(path) in data['links']['download']
# check render file link with path
assert download_base.format(path) in data['links']['render']
assert mfr_url in data['links']['render']
# check download file link with guid
guid = file_one.get_guid(create=True)._id
req = make_drf_request_with_version()
data = FileSerializer(file_one, context={'request': req}).data['data']
assert download_base.format(guid) in data['links']['download']
# check render file link with guid
assert download_base.format(guid) in data['links']['render']
assert mfr_url in data['links']['render']
# check html link in file serializer
assert data['links']['html'] == '{}{}/files/osfstorage/{}'.format(settings.DOMAIN, node._id, file_one._id)
# check download/render/html link for folder
folder = node.get_addon('osfstorage').get_root().append_folder('Test_folder')
folder.save()
req = make_drf_request_with_version(version='2.2')
data = FileSerializer(folder, context={'request': req}).data['data']
assert 'render' not in data['links']
assert 'download' not in data['links']
assert 'html' not in data['links']
# Ensure that the files relationship link is pointing to the correct root endpoint
data = FileSerializer(node_folder, context={'request': req}).data['data']
assert 'draft_nodes' not in data['relationships']['files']['links']['related']['href']
def test_serialize_preprint_file(self, preprint, primary_file):
req = make_drf_request_with_version(version='2.2')
data = FileSerializer(primary_file, context={'request': req}).data['data']
mfr_url = get_mfr_url(preprint, 'osfstorage')
# Check render file link with path
download_link = data['links']['download']
assert data['links']['render'] == build_expected_render_link(mfr_url, download_link, with_version=False)
# Check render file link with guid
primary_file.get_guid(create=True)._id
req = make_drf_request_with_version()
data = FileSerializer(primary_file, context={'request': req}).data['data']
download_link = data['links']['download']
assert data['links']['render'] == build_expected_render_link(mfr_url, download_link, with_version=False)
# Check html link
assert data['links']['html'] == '{}{}/files/osfstorage/{}'.format(settings.DOMAIN, preprint._id, primary_file._id)
def test_get_file_download_and_render_links(self, file_one, node):
mfr_link = get_mfr_url(file_one.target, 'osfstorage')
# file links with path
download_link = get_file_download_link(file_one)
assert download_link == '{}download/{}/'.format(settings.DOMAIN, file_one._id)
assert get_file_render_link(mfr_link, download_link) == build_expected_render_link(mfr_link, download_link, with_version=False)
# file versions link with path
download_link = get_file_download_link(file_one, version=2)
assert download_link == '{}download/{}/?revision=2'.format(settings.DOMAIN, file_one._id)
assert get_file_render_link(mfr_link, download_link, version=2) == build_expected_render_link(mfr_link, download_link)
# file links with guid
file_one.get_guid(create=True)
download_link = get_file_download_link(file_one)
assert download_link == '{}download/{}/'.format(settings.DOMAIN, file_one.get_guid()._id)
assert get_file_render_link(mfr_link, download_link) == build_expected_render_link(mfr_link, download_link, with_version=False)
# file version links with guid
download_link = get_file_download_link(file_one, version=2)
assert download_link == '{}download/{}/?revision=2'.format(settings.DOMAIN, file_one.get_guid()._id)
assert get_file_render_link(mfr_link, download_link, version=2) == build_expected_render_link(mfr_link, download_link)
def test_no_node_relationship_after_version_2_7(self, file_one):
req_2_7 = make_drf_request_with_version(version='2.7')
data_2_7 = FileSerializer(file_one, context={'request': req_2_7}).data['data']
assert 'node' in data_2_7['relationships'].keys()
req_2_8 = make_drf_request_with_version(version='2.8')
data_2_8 = FileSerializer(file_one, context={'request': req_2_8}).data['data']
assert 'node' not in data_2_8['relationships'].keys()
def test_draft_node_relationships(self, draft_node, draft_node_folder):
# Ensure that the files relationship link is pointing to the correct root endpoint
req = make_drf_request_with_version()
data = FileSerializer(draft_node_folder, context={'request': req}).data['data']
assert 'draft_nodes' in data['relationships']['files']['links']['related']['href']
|
rdorado79/chatbotlib | chatbot/loader.py | Python | mit | 1,057 | 0.012299 | from lxml import etree
import sys
from chatbot.core import Chatbot
class ReadChatbotDefinitionException(Exception):
def __init__(self, message):
self.message = message
def load(filename,context={}):
c = Chatbot(context=context)
c.load(filename)
return c
'''
try:
parser = etree.XMLParser()
tree = etree.parse(filename, parser)
root = tree.getroot()
# TODO: validate with an Schema or a DTD
start=root.attrib["start"]
if "bot-prompt" in root.attrib:
botPrompt=root.attrib["bot-prompt"]
else:
botPrompt="chatbot> "
if "user-prompt" in root.attrib:
userPrompt=root.attrib["user-prompt"]
else:
userPrompt="you> "
bot = chatbot.core.Chatbot(start, botPrompt, userPrompt)
for el in root.getchildren():
if el.tag == "state":
readState(bot, el)
elif el.tag == "function":
readFunction(bot,el)
bot.setReady()
return bot
except FileNotFoundError | as err:
print("File n | ot found: '"+filename+"'")
sys.exit(2)
'''
|
keithfancher/Todo-Indicator | todotxt/test_list.py | Python | gpl-3.0 | 11,017 | 0.00118 | #!/usr/bin/env python
# Copyright 2012-2014 Keith Fancher
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import tempfile
import unittest
from list import TodoTxtList
class TestTodoTxtList(unittest.TestCase):
def test_init_from_text(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
self.assertEqual(3, test_list.num_items())
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item two', test_list.items[1].text)
self.assertEqual('Z', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
self.assertEqual('Item three', test_list.items[2].text)
self.assertEqual(None, test_list.items[2].priority)
self.assertTrue(test_list.items[2].is_completed)
def test_init_from_file(self):
file_name = 'sample-todo.txt'
test_list = TodoTxtList(file_name)
self.assertEqual(8, test_list.num_items())
self.assertEqual('Do that really important thing', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Summon AppIndicator documentation from my ass', test_list.items[1].text)
self.assertEqual('D', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
self.assertEqual('This other important thing', test_list.items[2].text)
self.assertEqual('A', test_list.items[2].priority)
self.assertFalse(test_list.items[2].is_completed)
self.assertEqual('Walk the cat', test_list.items[3].text)
self.assertEqual('B', test_list.items[3].priority)
self.assertFalse(test_list.items[3].is_completed)
self.assertEqual('Something with no priority!', test_list.items[4].text)
self.assertEqual(None, test_list.items[4].priority)
self.assertFalse(test_list.items[4].is_completed)
self.assertEqual('Cook the dog', test_list.items[5].text)
self.assertEqual('C', test_list.items[5].priority)
self.assertFalse(test_list.items[5].is_completed)
self.assertEqual('Be annoyed at GTK3 docs', test_list.items[6].text)
self.assertEqual(None, test_list.items[6].priority)
self.assertTrue(test_list.items[6].is_completed)
self.assertEqual('Something I already did', test_list.items[7].text)
self.assertEqual(None, test_list.items[7].priority)
self.assertTrue(test_list.items[7].is_completed)
def test_reload_from_file(self):
test_list = TodoTxtList() # Start with an empty list
test_list.reload_from_file() # Should do nothing
test_list.todo_filename = 'sample-todo.txt'
test_list.reload_from_file()
self.assertEqual(8, test_list.num_items())
self.assertEqual('Do that really important thing', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Summon AppIndicator documentation from my ass', test_list.items[1].text)
self.assertEqual('D', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
self.assertEqual('This other important thing', test_list.items[2].text)
self.assertEqual('A', test_list.items[2].priority)
self.assertFalse(test_list.items[2].is_completed)
self.assertEqual('Walk the cat', test_list.items[3].text)
self.assertEqual('B', test_list.items[3].priority)
self.assertFalse(test_list.items[3].is_completed)
self.assertEqual('Something with no priority!', test_list.items[4].text)
self.assertEqual(None, test_list.items[4].priority)
self.assertFalse(test_list.items[4].is_completed)
self.assertEqual('Cook the dog', test_list.items[5].text)
self.assertEqual('C', test_list.items[5].priority)
self.assertFalse(test_list.items[5].is_completed)
self.assertEqual('Be annoyed at GTK3 docs', test_list.items[6].text)
self.assertEqual(None, test_list.items[6].priority)
self.assertTrue(test_list.items[6].is_completed)
self.assertEqual('Something I already did', test_list.items[7].text)
self.assertEqual(None, test_list.items[7].priority)
self.assertTrue(test_list.items[7].is_completed)
def test_has_items(self):
test_list = TodoTxtList()
self.assertFalse(test_list.has_items())
test_list = TodoTxtList(None, 'An item')
self.assertTrue(test_list.has_items())
def test_remove_item(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
self.assertEqual(3, test_list.num_items())
test_list.remove_item('Item two')
self.assertEqual(2, test_list.num_items())
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item three', test_list.items[1].text)
self.assertEqual(None, test_list.items[1].priority)
self.assertTrue(test_list.items[1].is_completed)
def test_remove_completed_items(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
self.assertEqual(3, test_list.num_items())
test_list.remove_completed_items()
self.assertEqual(2, test_list.num_items())
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item two', test_list.items[1].text)
self.assertEqual('Z', test_list.items[1].priority)
self.assertFalse(test_list.items[1].is_completed)
def test_mark_item_completed(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
test_list.mark_item_completed('Item two')
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item two', test_list.items[1].text)
self.assertEqual('Z', test_list.items[1].priority)
| self.assertTrue(test_list.items[1].is_completed)
self.assertEqual | ('Item three', test_list.items[2].text)
self.assertEqual(None, test_list.items[2].priority)
self.assertTrue(test_list.items[2].is_completed)
def test_mark_item_completed_with_full_text(self):
todo_text = "(A) Item one\n(Z) Item two\nx Item three\n\n \n"
test_list = TodoTxtList(None, todo_text)
test_list.mark_item_completed_with_full_text('(Z) Item two')
self.assertEqual('Item one', test_list.items[0].text)
self.assertEqual('A', test_list.items[0].priority)
self.assertFalse(test_list.items[0].is_completed)
self.assertEqual('Item two', test_list.items[1].text)
self.assertEqual('Z', test_list.items[1].priority)
self.assertTrue(test_list.items[1].is_completed)
self.assertEqual('Item three', test_list.items[2].text)
self.assertEqual(None, test_list.items[2].p |
openstack/cinder | cinder/volume/drivers/dell_emc/powermax/provision.py | Python | apache-2.0 | 32,178 | 0 | # Copyright (c) 2020 Dell Inc. or its subsidiaries.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import units
from cinder import coordination
from cinder import exception
from cinder.i18n import _
from cinder.volume.drivers.dell_emc.powermax import utils
LOG = logging.getLogger(__name__)
WRITE_DISABLED = "Write Disabled"
UNLINK_INTERVAL = 15
UNLINK_RETRIES = 30
class PowerMaxProvision(object):
"""Provisioning Class for Dell EMC PowerMax volume drivers.
It supports VMAX 3, All Flash and PowerMax arrays.
"""
def __init__(self, rest):
self.utils = utils.PowerMaxUtils()
self.rest = rest
def create_storage_group(
self, array, storagegroup_name, srp, slo, workload,
extra_specs, do_disable_compression=False):
"""Create a new storage group.
:param array: the array serial number
:param storagegroup_name: the group name (String)
:param srp: the SRP (String)
:param slo: the SLO (String)
:param workload: the workload (String)
:param extra_specs: additional info
:param do_disable_compression: disable compression flag
:returns: storagegroup - storage group object
"""
start_time = time.time()
@coordination.synchronized("emc-sg-{storage_group}-{array}")
def do_create_storage_group(storage_group, array):
# Check if storage group has been recently created
storagegroup = self.rest.get_storage_group(
array, storagegroup_name)
if storagegroup is None:
storagegroup = self.rest.create_storage_group( |
array, storage_group, srp, slo, workload, extra_specs,
do_disable_compression)
LOG.debug("Create storage group took: %(delta)s H:MM:SS.",
{'delta': self.utils.get_time_delta(start_time,
| time.time())})
LOG.info("Storage group %(sg)s created successfully.",
{'sg': storagegroup_name})
else:
LOG.info("Storage group %(sg)s already exists.",
{'sg': storagegroup_name})
return storagegroup
return do_create_storage_group(storagegroup_name, array)
def create_volume_from_sg(self, array, volume_name, storagegroup_name,
volume_size, extra_specs, rep_info=None):
"""Create a new volume in the given storage group.
:param array: the array serial number
:param volume_name: the volume name -- string
:param storagegroup_name: the storage group name
:param volume_size: volume size -- string
:param extra_specs: extra specifications
:param rep_info: replication session info dict -- optional
:returns: volume info -- dict
"""
@coordination.synchronized("emc-sg-{storage_group}-{array}")
def do_create_volume_from_sg(storage_group, array):
start_time = time.time()
if rep_info and rep_info.get('initial_device_list', False):
local_device_list = self.rest.get_volume_list(
extra_specs['array'],
{'storageGroupId': storagegroup_name})
rep_info['initial_device_list'] = local_device_list
volume_dict = self.rest.create_volume_from_sg(
array, volume_name, storage_group,
volume_size, extra_specs, rep_info)
LOG.debug("Create volume from storage group "
"took: %(delta)s H:MM:SS.",
{'delta': self.utils.get_time_delta(start_time,
time.time())})
return volume_dict
return do_create_volume_from_sg(storagegroup_name, array)
def delete_volume_from_srp(self, array, device_id, volume_name):
"""Delete a volume from the srp.
:param array: the array serial number
:param device_id: the volume device id
:param volume_name: the volume name
"""
start_time = time.time()
LOG.debug("Delete volume %(volume_name)s from srp.",
{'volume_name': volume_name})
self.rest.delete_volume(array, device_id)
LOG.debug("Delete volume took: %(delta)s H:MM:SS.",
{'delta': self.utils.get_time_delta(
start_time, time.time())})
def create_volume_snapvx(self, array, source_device_id,
snap_name, extra_specs, ttl=0):
"""Create a snapVx of a volume.
:param array: the array serial number
:param source_device_id: source volume device id
:param snap_name: the snapshot name
:param extra_specs: the extra specifications
:param ttl: time to live in hours, defaults to 0
"""
@coordination.synchronized("emc-snapvx-{src_device_id}")
def do_create_volume_snap(src_device_id):
start_time = time.time()
LOG.debug("Create Snap Vx snapshot of: %(source)s.",
{'source': src_device_id})
self.rest.create_volume_snap(
array, snap_name, src_device_id, extra_specs, ttl)
LOG.debug("Create volume snapVx took: %(delta)s H:MM:SS.",
{'delta': self.utils.get_time_delta(start_time,
time.time())})
do_create_volume_snap(source_device_id)
def create_volume_replica(
self, array, source_device_id, target_device_id,
snap_name, extra_specs, create_snap=False, copy_mode=False):
"""Create a snap vx of a source and copy to a target.
:param array: the array serial number
:param source_device_id: source volume device id
:param target_device_id: target volume device id
:param snap_name: the name for the snap shot
:param extra_specs: extra specifications
:param create_snap: Flag for create snapvx
:param copy_mode: If copy mode should be used for SnapVX target links
"""
start_time = time.time()
if create_snap:
# We are creating a temporary snapshot. Specify a ttl of 1 hour
self.create_volume_snapvx(array, source_device_id,
snap_name, extra_specs, ttl=1)
# Link source to target
@coordination.synchronized("emc-snapvx-{src_device_id}")
def do_modify_volume_snap(src_device_id):
self.rest.modify_volume_snap(
array, src_device_id, target_device_id, snap_name,
extra_specs, link=True, copy=copy_mode)
do_modify_volume_snap(source_device_id)
LOG.debug("Create element replica took: %(delta)s H:MM:SS.",
{'delta': self.utils.get_time_delta(start_time,
time.time())})
def unlink_snapvx_tgt_volume(
self, array, target_device_id, source_device_id, snap_name,
extra_specs, snap_id, loop=True):
"""Unlink a snapshot from its target volume.
:param array: the array serial number
:param source_device_id: source volume device id
:param target_device_id: target volume device id
:param snap_name: the name for the snap shot
:param extra_specs: extra specificati |
egcodes/haberbus | aristotle/tests/test_util.py | Python | gpl-3.0 | 63 | 0 | from unittest im | port Te | stCase
class Test(TestCase):
pass
|
Fusion-Data-Platform/fdp | fdp/lib/datasources.py | Python | mit | 1,353 | 0 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 24 12:49:36 2017
@author: drsmith
"""
import os
from .globals import FdpError
def canonicalMachineName(machine=''):
aliases = {'nstxu': ['nstx', 'nstxu', 'nstx-u'],
'diiid': ['diiid', 'diii-d', 'd3d'],
'cmod': ['cmod', 'c-mod']}
for key, value in aliases.items():
if machine.lower() in value:
return key
# invalid machine name
raise FdpError('"{}" is not a valid machine name\n'.format(machine))
MDS_SERVERS = {
'nstxu': {'hostname': 'skylark.pppl.gov | ',
'port': '8000'},
'diiid': {'hostname': 'atlas.gat.com',
| 'port': '8000'}
}
EVENT_SERVERS = {
'nstxu': {'hostname': 'skylark.pppl.gov',
'port': '8000'},
'diiid': {'hostname': 'atlas.gat.com',
'port': '8000'},
'ltx': {'hostname': 'lithos.pppl.gov',
'port': '8000'}
}
LOGBOOK_CREDENTIALS = {
'nstxu': {'server': 'sql2008.pppl.gov',
'instance': None,
'username': None,
'password': None,
'database': None,
'port': '62917',
'table': 'entries',
'loginfile': os.path.join(os.getenv('HOME'),
'nstxlogs.sybase_login')
}
}
|
brain-tec/server-tools | html_text/models/ir_fields_converter.py | Python | agpl-3.0 | 2,350 | 0 | # Copyright 2016-2017 Jairo Llopis <jairo.llopis@tecnativa.com>
# Copyright 2016 Tecnativa - Vicent Cubells
# License AGPL-3.0 or later (https://www.gnu.org/ | licenses/agpl).
import logging
from lxml import etree, html
from odoo import api, models
_logger = logging.getLogger(__name__)
class IrFieldsConverter(models.AbstractModel):
_inherit = "ir.fields.conv | erter"
@api.model
def text_from_html(self, html_content, max_words=None, max_chars=None,
ellipsis=u"…", fail=False):
"""Extract text from an HTML field in a generator.
:param str html_content:
HTML contents from where to extract the text.
:param int max_words:
Maximum amount of words allowed in the resulting string.
:param int max_chars:
Maximum amount of characters allowed in the resulting string. If
you apply this limit, beware that the last word could get cut in an
unexpected place.
:param str ellipsis:
Character(s) to be appended to the end of the resulting string if
it gets truncated after applying limits set in :param:`max_words`
or :param:`max_chars`. If you want nothing applied, just set an
empty string.
:param bool fail:
If ``True``, exceptions will be raised. Otherwise, an empty string
will be returned on failure.
"""
# Parse HTML
try:
doc = html.fromstring(html_content)
except (TypeError, etree.XMLSyntaxError, etree.ParserError):
if fail:
raise
else:
_logger.exception("Failure parsing this HTML:\n%s",
html_content)
return ""
# Get words
words = u"".join(doc.xpath("//text()")).split()
# Truncate words
suffix = max_words and len(words) > max_words
if max_words:
words = words[:max_words]
# Get text
text = u" ".join(words)
# Truncate text
suffix = suffix or max_chars and len(text) > max_chars
if max_chars:
text = text[:max_chars - (len(ellipsis) if suffix else 0)].strip()
# Append ellipsis if needed
if suffix:
text += ellipsis
return text
|
enthought/etsproxy | enthought/enable/base_tool.py | Python | bsd-3-clause | 85 | 0 | # proxy | module
from __future__ import absolute_import
from enable.base_tool impor | t *
|
adfinis-sygroup/freedomvote | app/core/widgets.py | Python | gpl-3.0 | 1,773 | 0.007332 | from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
from django.forms.widgets import ClearableFileInput, CheckboxInput
from easy_thumbnails.files import get_thumbnailer
from django.templatetags.static import static
from django.utils.encoding import force_text
class ImagePreviewFileInput(ClearableFileInput):
def __init__(self, *args, **kwargs):
super(ClearableFileInput, self).__init__(*args, **kwargs)
self.template_name = "core/custom_file_input.html"
def render(self, name, value, attrs=None,):
substitutions = {
'clear_checkbox_label': self.clear_checkbox_label,
'initial' : '<img class="img-responsive img-thumbnail" width="%s" src="%s">' % (
force_text('100%'),
force_text(get_thumbnailer(value)['medium'].url i | f value and hasattr(value, 'url') else static('images/placeholder.svg'))
)
}
template = '%(initial)s%(input)s'
substitutions['input'] = super(ClearableFileInput, self).render(name, value, attrs)
if not self.is_required:
template = '%(initial)s%(clear_template)s%(input)s'
checkbox_name = self.clear_checkbox_name(name)
checkbox_id = self.clear_checkbox_id(checkbox_name)
s | ubstitutions['clear_checkbox_name'] = conditional_escape(checkbox_name)
substitutions['clear_checkbox_id'] = conditional_escape(checkbox_id)
substitutions['clear'] = CheckboxInput().render(checkbox_name, False, attrs={'id': checkbox_id})
substitutions['clear_template'] = self.clear_checkbox_name(checkbox_name)
return mark_safe(template % substitutions)
def clear_checkbox_name(self, name):
return "" |
JeanOlivier/Laveqed | gui_laveqed.py | Python | gpl-3.0 | 20,280 | 0.015927 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from Tkinter import *
from ttk import *
from ScrolledText import ScrolledText as Text
from PIL import Image, ImageTk
import tkFileDialog,os,cairo,tempfile,time,shutil,tkFont
from laveqed import laveqed
from rsvg_windows import rsvg_windows
try:
import rsvg
except ImportError:
rsvg=rsvg_windows() # Untested
TITLE = 'laveqed GUI'
APP_WIN_WIDTH = 800
APP_WIN_HEIGHT = 400
FONTNAME='Ubuntu Mono'
LOGOFILENAME='laveqed_logo.svg'
CONFIGFILE='laveqed_config.xml'
class laveqed_gui(object):
def __init__(self, title):
print('Starting \t\t:\tWelcome in laveqed\'s GUI!')
os.environ['XMODIFIERS'] = "@im=none" # Fix for non-working ^ after a while
self.win=Tk()
self.win.title(title)
self.center(self.win)
self.win.protocol("WM_DELETE_WINDOW", self.close)
self.previewSize=(713,45)
self._topLevelOpened = False
self.displayScale = 1
self.buildGUI()
self._set_vars() # Sets variables for use by laveqed, also creates temp folder and cd into it
self._makelogo() # Either loads pre-calculated logo or generates it.
self.text_widget.focus() # So we can type right away!
def _buildFrames(self):
self.main_frame=Frame(self.win)
self.main_frame.pack(fill='both',expand=True)
# So the picture isn't too much to the top
self.space_frame=Label(self.main_frame)
self.space_frame.pack(side='top',fill='both', expand=False, padx=4, pady=10)
self.top_frame=LabelFrame(self.main_frame,relief=FLAT)
self.top_frame.pack(side='top',fill='both', expand=True, padx=4, pady=4)
self.png_frame=Label(self.top_frame,anchor='center')
self.png_frame.pack(fill='both', expand=True, padx=4, pady=4)
self.bottom_frame=LabelFrame(self.main_frame,relief=FLAT)
self.bottom_frame.pack(side='bottom', fill=X, expand=False, padx=4, pady=4)
self.text_frame=LabelFrame(self.bottom_frame,relief=FLAT)
self.text_frame.pack(side='left', fill=X, expand=True, padx=4, pady=4)
def _tag_configure(self, text):
# Color tags for syntax highli | ght
text.tag_configure('red',foreground='red')
text.tag_configure('green',foreground='green')
text.tag_configure('purple',foreground='purple')
text.tag_configure('blue',foreground='blue')
| # Bold baby!
text.tag_configure('bold',font=self.bold_font)
def _buildWidgets(self):
self.text_widget=Text(self.text_frame,bd=2,padx=4,pady=4,\
wrap=WORD,font=(FONTNAME,14),undo=True)
self.text_widget.pack(fill='both',expand=True,padx=4,pady=4)
self.bold_font = tkFont.Font(self.text_widget, self.text_widget.cget("font"))
self.bold_font.configure(weight="bold")
self._tag_configure(self.text_widget)
# self.text_widget=Text(self.text_frame,bd=2,padx=4,pady=4,\
# wrap=WORD,font=(FONTNAME,14),undo=True)
# self.text_widget.pack(fill='both',expand=True,padx=4,pady=4)
# # Color tags for syntax highlight
# self.text_widget.tag_configure('red',foreground='red')
# self.text_widget.tag_configure('green',foreground='green')
# self.text_widget.tag_configure('purple',foreground='purple')
# self.text_widget.tag_configure('blue',foreground='blue')
# # Bold baby!
# #self.orig_font = tkFont.Font(self.text_widget, self.text_widget.cget("font"))
# self.bold_font = tkFont.Font(self.text_widget, self.text_widget.cget("font"))
# self.bold_font.configure(weight="bold")
# self.text_widget.tag_configure('bold',font=self.bold_font)
# #self.text_widget.tag_configure('plain',font=self.orig_font,foreground='black',background='white')
def _buildMenus(self):
self.menubar=Menu(self.win)
# File menu
filemenu=Menu(self.menubar,tearoff=0)
filemenu.add_command(label="Open", command=self.open_svg,accelerator='Ctrl+O')
filemenu.add_command(label="Save as...", command=self.save_svg,accelerator='Ctrl+S')
filemenu.add_separator()
filemenu.add_command(label="Exit", command=self.close,accelerator='Ctrl+Q')
self.menubar.add_cascade(label="File", menu=filemenu)
# laveqed menu
laveqedmenu=Menu(self.menubar,tearoff=0)
laveqedmenu.add_command(label='Run',command=self.build_svg,accelerator='Ctrl-Enter')
laveqedmenu.add_command(label='Preferences',command=self.preferences,accelerator='Ctrl-P')
self.menubar.add_cascade(label="laveqed", menu=laveqedmenu)
self.win.config(menu=self.menubar)
def _set_vars(self):
if os.path.isfile(CONFIGFILE):
pass # Parse the xml and set vars accordingly
else: # No config file? -> Get defaults laveqed -ambles and scale
tmp=laveqed()
self.preamble=tmp.preamble
self.postamble=tmp.postamble
self.scale=tmp.scale
self.eqonly=False # Loads -ambles by default if eqonly == False
# Creating a temporary folder to work inside of
self.owd=os.getcwd() # Original Working Directory, for friendly fileOpenDialog
self.cwd=tempfile.mkdtemp()
print('Making temp folder\t:\t'+self.cwd)
os.chdir(self.cwd)
try :
shutil.copy2(self.owd+'/'+LOGOFILENAME,self.cwd+'/'+LOGOFILENAME)
except:
pass
def _binding(self):
# Allows select all in Text Widget
self.win.bind_class("Text","<Control-a>", self.selectall)
# Main window binds
self.win.bind('<Control-Return>',self.build_svg_fixCtrlReturn)
self.win.bind('<Control-s>',self.save_svg)
self.win.bind('<Control-o>',self.open_svg_fixCtrlO)
self.win.bind('<Control-p>', self.preferences)
self.win.bind('<Control-q>',self.close)
# Text widget binds
self.text_widget.bind('<Control-h>',self.hat)
self.text_widget.bind('<KeyRelease>',self.set_syntax)
# SVG binds
self.win.bind('<Control-plus>', self.ZoomInSVG)
self.win.bind('<Control-minus>', self.ZoomOutSVG)
self.win.bind('<Control-0>', self.ZoomResetSVG)
    def _makelogo(self):
        """Render the application logo SVG (generating it on first run)."""
        # Logo base name = file name minus its 4-character extension.
        self.name=LOGOFILENAME[:-4]
        if not os.path.isfile(LOGOFILENAME):
            # LaTeX source that typesets the stylised application logo.
            equation=r'\text{L\hspace{-3.5pt}\raisebox{2pt}{\scriptsize A}\!}{\color{gray!68}\text{\TeX}}\text{ V{\color{gray!80}ectorial} Eq{\color{gray!80}uation} Ed{\color{gray!80}itor}}'
            # Type the equation into the editor, build it, then clear it.
            self.text_widget.insert('1.0',equation)
            self.build_svg()
            self.text_widget.delete('1.0',END)
        self.load_svg()
def buildGUI(self):
# Style
#self.win.style=Style()
#self.win.style.theme_use('clam')
#Order matters for some elements; e.g. better build frames before widgets
self._buildFrames()
self._buildWidgets()
self._buildMenus()
self._binding()
def center(self, win, x=APP_WIN_WIDTH, y=APP_WIN_HEIGHT):
win.update_idletasks()
width = x
height = y
x = (win.winfo_screenwidth() // 2) - (width // 2)
y = (win.winfo_screenheight() // 2) - (height // 2)
win.geometry('{}x{}+{}+{}'.format(width, height, x, y))
def load_svg(self,event=None):
filename=self.name+'.svg'
if os.path.isfile(filename):
#self.tk_image=self.svgPhotoImage(filename, scale=self.displayScale)
self.openDisplaySVG(filename)
print('Loading svg file\t:\t'+filename+' (Success!)')
# If opening failed, put a blank image the same size as SVGLOGOFILE
else: # Note, this should never occurs now... Left here because
print('Loading svg file\t:\t'+filename+' (Failed!)')
self.tk_image = ImageTk.PhotoImage('RGBA')
self.image=Image.new('RGB',self.previewSize,self.win['background'])
self.tk_image.paste(self.image)
self.png_frame.config(image=self.tk_image)
def close(self,var=None):
|
AusDTO/dto-digitalmarketplace-buyer-frontend | app/main/forms/brief_forms.py | Python | mit | 3,546 | 0.003384 | from wtforms import IntegerField, SelectMultipleField
from wtforms.validators import NumberRange
from dmutils.forms import DmForm
import flask_featureflags
class BriefSearchForm(DmForm):
    """Search/filter form for the brief (opportunity) listing page."""

    page = IntegerField(default=1, validators=(NumberRange(min=1),))
    status = SelectMultipleField("Status", choices=(
        ("live", "Open",),
        ("closed", "Closed",)
    ))
    # lot choices expected to be set at runtime
    lot = SelectMultipleField("Category")

    def __init__(self, *args, **kwargs):
        """
        Requires extra keyword arguments:
         - `framework` - information on the target framework as returned by the api
         - `data_api_client` - a data api client (should be able to remove the need for this arg at some point)
        """
        super(BriefSearchForm, self).__init__(*args, **kwargs)
        try:
            # popping this kwarg so we don't risk it getting fed to wtforms default implementation which might use it
            # as a data field if there were a name collision
            framework = kwargs.pop("framework")
            self._framework_slug = framework["slug"]
            self.lot.choices = tuple(
                (lot["slug"], lot["name"],)
                for lot in framework["lots"] if lot["allowsBrief"]
            )
        except KeyError:
            raise TypeError("Expected keyword argument 'framework' with framework information")
        try:
            # data_api_client argument only needed so we can fit in with the current way the tests mock.patch the
            # the data_api_client directly on the view. would be nice to able to use the global reference to this
            self._data_api_client = kwargs.pop("data_api_client")
        except KeyError:
            raise TypeError("Expected keyword argument 'data_api_client'")

    def get_briefs(self):
        """Run the API search using the form's current (validated) values."""
        if not self.validate():
            raise ValueError("Invalid form")
        # An empty multi-select means "no filter": fall back to every choice.
        statuses = self.status.data or tuple(id for id, label in self.status.choices)
        lots = self.lot.data or tuple(id for id, label in self.lot.choices)
        # disable framework filtering when digital marketplace framework is live
        kwargs = {} if flask_featureflags.is_active('DM_FRAMEWORK') else {"framework": self._framework_slug}
        return self._data_api_client.find_briefs(
            status=",".join(statuses),
            lot=",".join(lots),
            page=self.page.data,
            per_page=75,
            human=True,
            **kwargs
        )

    def get_filters(self):
        """
        generate the same "filters" structure as expected by search page templates
        """
        if not self.validate():
            raise ValueError("Invalid form")
        return [
            {
                "label": field.label,
                "filters": [
                    {
                        "label": choice_label,
                        "name": field.name,
                        "id": "{}-{}".format(field.id, choice_id),
                        "value": choice_id,
                        "checked": field.data and choice_id in field.data,
                    }
                    for choice_id, choice_label in field.choices
                ],
            }
            for field in (self.lot, self.status,)
        ]

    def filters_applied(self):
        """
        returns boolean indicating whether the results are actually filtered at all
        """
        if not self.validate():
            raise ValueError("Invalid form")
        return bool(self.lot.data or self.status.data)
|
mikel-egana-aranguren/SADI-Galaxy-Docker | galaxy-dist/eggs/nose-0.11.1-py2.7.egg/nose/result.py | Python | gpl-3.0 | 5,943 | 0.001514 | """
Test Result
-----------
Provides a TextTestResult that extends unittest._TextTestResult to
provide support for error cl | asses (such as the builtin skip and
deprecated classes), and hooks for plugins to take over or extend
reporting.
"""
import logging
from unittest import _TextTestResult
from nose.config import Config
from nose.util import isclass, ln as _ln # backwards compat
log = logging.getLogger('nose.result')
def _exception_detail(exc):
# this is what stdlib module traceback does
try:
return str(exc)
except:
return '<unprintable %s object>' % type(exc).__name__
class TextTestResult(_TextTestResult):
    """Text test result that extends unittest's default test result
    support for a configurable set of errorClasses (eg, Skip,
    Deprecated, TODO) that extend the errors/failures/success triad.
    """
    def __init__(self, stream, descriptions, verbosity, config=None,
                 errorClasses=None):
        if errorClasses is None:
            errorClasses = {}
        self.errorClasses = errorClasses
        if config is None:
            config = Config()
        self.config = config
        _TextTestResult.__init__(self, stream, descriptions, verbosity)

    def addError(self, test, err):
        """Overrides normal addError to add support for
        errorClasses. If the exception is a registered class, the
        error will be added to the list for that class, not errors.
        """
        stream = getattr(self, 'stream', None)
        ec, ev, tb = err
        try:
            exc_info = self._exc_info_to_string(err, test)
        except TypeError:
            # 2.3 compat
            exc_info = self._exc_info_to_string(err)
        for cls, (storage, label, isfail) in self.errorClasses.items():
            if isclass(ec) and issubclass(ec, cls):
                if isfail:
                    test.passed = False
                storage.append((test, exc_info))
                # Might get patched into a streamless result
                if stream is not None:
                    if self.showAll:
                        message = [label]
                        detail = _exception_detail(err[1])
                        if detail:
                            message.append(detail)
                        stream.writeln(": ".join(message))
                    elif self.dots:
                        stream.write(label[:1])
                return
        self.errors.append((test, exc_info))
        test.passed = False
        if stream is not None:
            if self.showAll:
                self.stream.writeln('ERROR')
            elif self.dots:
                stream.write('E')

    def printErrors(self):
        """Overrides to print all errorClasses errors as well.
        """
        _TextTestResult.printErrors(self)
        for cls in self.errorClasses.keys():
            storage, label, isfail = self.errorClasses[cls]
            if isfail:
                self.printErrorList(label, storage)
        # Might get patched into a result with no config
        if hasattr(self, 'config'):
            self.config.plugins.report(self.stream)

    def printSummary(self, start, stop):
        """Called by the test runner to print the final summary of test
        run results.
        """
        write = self.stream.write
        writeln = self.stream.writeln
        taken = float(stop - start)
        run = self.testsRun
        plural = "s" if run != 1 else ""

        writeln(self.separator2)
        writeln("Ran %s test%s in %.3fs" % (run, plural, taken))
        writeln()

        summary = {}
        # FIX: dict.keys() returns a view in Python 3 with no .sort();
        # sorted() works on both Python 2 and 3.
        eckeys = sorted(self.errorClasses.keys())
        for cls in eckeys:
            storage, label, isfail = self.errorClasses[cls]
            count = len(storage)
            if not count:
                continue
            summary[label] = count
        if len(self.failures):
            summary['failures'] = len(self.failures)
        if len(self.errors):
            summary['errors'] = len(self.errors)

        if not self.wasSuccessful():
            write("FAILED")
        else:
            write("OK")
        # Same py2-only .sort() fix as above.
        items = sorted(summary.items())
        if items:
            write(" (")
            write(", ".join(["%s=%s" % (label, count) for
                             label, count in items]))
            writeln(")")
        else:
            writeln()

    def wasSuccessful(self):
        """Overrides to check that there are no errors in errorClasses
        lists that are marked as errors and should cause a run to
        fail.
        """
        if self.errors or self.failures:
            return False
        for cls in self.errorClasses.keys():
            storage, label, isfail = self.errorClasses[cls]
            if not isfail:
                continue
            if storage:
                return False
        return True

    def _addError(self, test, err):
        # Minimal variant of addError used when patched into other results.
        try:
            exc_info = self._exc_info_to_string(err, test)
        except TypeError:
            # 2.3: does not take test arg
            exc_info = self._exc_info_to_string(err)
        self.errors.append((test, exc_info))
        if self.showAll:
            self.stream.write('ERROR')
        elif self.dots:
            self.stream.write('E')

    def _exc_info_to_string(self, err, test=None):
        # 2.3/2.4 -- 2.4 passes test, 2.3 does not
        try:
            return _TextTestResult._exc_info_to_string(self, err, test)
        except TypeError:
            # 2.3: does not take test arg
            return _TextTestResult._exc_info_to_string(self, err)
def ln(*arg, **kw):
    """Deprecated shim: forwards to nose.util.ln with a DeprecationWarning."""
    import warnings
    message = ("ln() has moved to nose.util from nose.result and will be removed "
               "from nose.result in a future release. Please update your imports ")
    warnings.warn(message, DeprecationWarning)
    return _ln(*arg, **kw)
|
ramineni/myironic | ironic/db/sqlalchemy/api.py | Python | apache-2.0 | 22,173 | 0.000316 | # -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SQLAlchemy storage backend."""
import collections
import datetime
from oslo.utils import timeutils
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import utils as db_utils
from sqlalchemy.orm.exc import NoResultFound
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LW
from ironic.common import states
from ironic.common import utils
from ironic.db import api
from ironic.db.sqlalchemy import models
from ironic.openstack.common import log
CONF = cfg.CON | F
CONF.import_opt('heartbeat_timeout',
'ironic.conductor.manager', |
group='conductor')
LOG = log.getLogger(__name__)
_FACADE = None
def _create_facade_lazily():
    """Return the process-wide EngineFacade, building it on first call."""
    global _FACADE
    if _FACADE is not None:
        return _FACADE
    _FACADE = db_session.EngineFacade.from_config(CONF)
    return _FACADE
def get_engine():
    """Return the SQLAlchemy engine from the shared facade."""
    return _create_facade_lazily().get_engine()
def get_session(**kwargs):
    """Return a new SQLAlchemy session from the shared facade."""
    return _create_facade_lazily().get_session(**kwargs)
def get_backend():
    """The backend is this module itself."""
    # Returns a fresh Connection; Connection holds no state (see __init__).
    return Connection()
def model_query(model, *args, **kwargs):
    """Query helper for simpler session usage.

    :param session: if present, the session to use
    """
    # NOTE(review): only the 'session' kwarg is honoured here; callers that
    # pass e.g. base_model= (see get_nodeinfo_list) have it silently
    # ignored -- confirm that is intentional.
    session = kwargs.get('session') or get_session()
    query = session.query(model, *args)
    return query
def add_identity_filter(query, value):
    """Adds an identity filter to a query.

    Filters results by ID, if supplied value is a valid integer.
    Otherwise attempts to filter results by UUID.

    :param query: Initial query to add filter to.
    :param value: Value for filtering results by.
    :return: Modified query.
    :raises: InvalidIdentity if value is neither int-like nor uuid-like.
    """
    if utils.is_int_like(value):
        return query.filter_by(id=value)
    if utils.is_uuid_like(value):
        return query.filter_by(uuid=value)
    raise exception.InvalidIdentity(identity=value)
def add_port_filter(query, value):
    """Adds a port-specific filter to a query.

    Filters results by address, if supplied value is a valid MAC
    address. Otherwise attempts to filter results by identity.

    :param query: Initial query to add filter to.
    :param value: Value for filtering results by.
    :return: Modified query.
    """
    if not utils.is_valid_mac(value):
        return add_identity_filter(query, value)
    return query.filter_by(address=value)
def add_port_filter_by_node(query, value):
    """Filter ports by node id (integer) or node uuid (string)."""
    if utils.is_int_like(value):
        return query.filter_by(node_id=value)
    # Non-integer value: join to Node and match on its UUID.
    joined = query.join(models.Node,
                        models.Port.node_id == models.Node.id)
    return joined.filter(models.Node.uuid == value)
def add_node_filter_by_chassis(query, value):
    """Filter nodes by chassis id (integer) or chassis uuid (string)."""
    if utils.is_int_like(value):
        return query.filter_by(chassis_id=value)
    # Non-integer value: join to Chassis and match on its UUID.
    joined = query.join(models.Chassis,
                        models.Node.chassis_id == models.Chassis.id)
    return joined.filter(models.Chassis.uuid == value)
def _check_port_change_forbidden(port, session):
    """Raise NodeLocked when the port's node is currently reserved.

    Ports attached to a reserved (locked) node must not be modified.
    """
    node_id = port['node_id']
    if node_id is not None:
        query = model_query(models.Node, session=session)
        query = query.filter_by(id=node_id)
        node_ref = query.one()
        # A non-null reservation means another host holds the node lock.
        if node_ref['reservation'] is not None:
            raise exception.NodeLocked(node=node_ref['uuid'],
                                       host=node_ref['reservation'])
def _paginate_query(model, limit=None, marker=None, sort_key=None,
                    sort_dir=None, query=None):
    """Apply sorting/pagination to *query* (or a fresh query on *model*)."""
    if not query:
        query = model_query(model)
    # Always include 'id' as a deterministic tie-breaker sort key.
    sort_keys = ['id']
    if sort_key and sort_key not in sort_keys:
        sort_keys.insert(0, sort_key)
    paginated = db_utils.paginate_query(query, model, limit, sort_keys,
                                        marker=marker, sort_dir=sort_dir)
    return paginated.all()
class Connection(api.Connection):
"""SqlAlchemy connection."""
    def __init__(self):
        # Stateless: each public method opens its own session as needed.
        pass
    def _add_nodes_filters(self, query, filters):
        """Apply the supported node filters to *query* and return it."""
        if filters is None:
            filters = []

        if 'chassis_uuid' in filters:
            # get_chassis_by_uuid() to raise an exception if the chassis
            # is not found
            chassis_obj = self.get_chassis_by_uuid(filters['chassis_uuid'])
            query = query.filter_by(chassis_id=chassis_obj.id)
        # NOTE: the '!= None' / '== None' comparisons below are intentional:
        # they build SQL 'IS [NOT] NULL' expressions on the column objects;
        # 'is not None' would compare the column object itself and always
        # evaluate to True.
        if 'associated' in filters:
            if filters['associated']:
                query = query.filter(models.Node.instance_uuid != None)
            else:
                query = query.filter(models.Node.instance_uuid == None)
        if 'reserved' in filters:
            if filters['reserved']:
                query = query.filter(models.Node.reservation != None)
            else:
                query = query.filter(models.Node.reservation == None)
        if 'maintenance' in filters:
            query = query.filter_by(maintenance=filters['maintenance'])
        if 'driver' in filters:
            query = query.filter_by(driver=filters['driver'])
        if 'provision_state' in filters:
            query = query.filter_by(provision_state=filters['provision_state'])
        if 'provisioned_before' in filters:
            # Nodes whose provisioning last changed before the cutoff.
            limit = timeutils.utcnow() - datetime.timedelta(
                seconds=filters['provisioned_before'])
            query = query.filter(models.Node.provision_updated_at < limit)

        return query
    def get_nodeinfo_list(self, columns=None, filters=None, limit=None,
                          marker=None, sort_key=None, sort_dir=None):
        """Return a page of selected Node columns matching *filters*."""
        # list-ify columns default values because it is bad form
        # to include a mutable list in function definitions.
        if columns is None:
            columns = [models.Node.id]
        else:
            columns = [getattr(models.Node, c) for c in columns]

        # NOTE(review): model_query() in this module ignores the base_model
        # kwarg -- verify the intended base-model handling actually happens.
        query = model_query(*columns, base_model=models.Node)
        query = self._add_nodes_filters(query, filters)
        return _paginate_query(models.Node, limit, marker,
                               sort_key, sort_dir, query)
def get_node_list(self, filters=None, limit=None, marker=None,
sort_key=None, sort_dir=None):
query = model_query(models.Node)
query = self._add_nodes_filters(query, filters)
return _paginate_query(models.Node, limit, marker,
sort_key, sort_dir, query)
    def reserve_node(self, tag, node_id):
        """Atomically reserve (lock) a node on behalf of *tag*.

        Uses a conditional UPDATE (reservation IS NULL) so that only one
        contender can win the lock.

        :raises: NodeLocked if another host already holds the reservation.
        :raises: NodeNotFound if the node does not exist.
        """
        session = get_session()
        with session.begin():
            query = model_query(models.Node, session=session)
            query = add_identity_filter(query, node_id)
            # be optimistic and assume we usually create a reservation
            count = query.filter_by(reservation=None).update(
                {'reservation': tag}, synchronize_session=False)
            try:
                node = query.one()
                if count != 1:
                    # Nothing updated and node exists. Must already be
                    # locked.
                    raise exception.NodeLocked(node=node_id,
                                               host=node['reservation'])
                return node
            except NoResultFound:
                raise exception.NodeNotFound(node_id)
def release_node(self, tag, node_id):
session = get_session()
with session.begin():
query = model_query(models.Node, session=session)
|
2014c2g5/2014cadp | wsgi/local_data/brython_programs/brython_fourbar1.py | Python | gpl-3.0 | 11,960 | 0.012758 | #要注意 javascript 轉 python 語法差異
#document.getElementById -> doc[]
#module Math -> math
#Math.PI -> math.pi
#abs -> fabs
#array 可用 list代替
import math
import time
from browser import doc
import browser.timer
# Point class
class Point(object):
    """A 2D point that can draw and label itself on a canvas context."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def drawMe(self, g, r):
        """Draw the point on context *g* as a circle of radius *r* with cross hairs."""
        self.g = g
        self.r = r
        self.g.save()
        self.g.moveTo(self.x,self.y)
        self.g.beginPath()
        # Circle of radius r marking the point's position.  BUG FIX: the
        # final arc() argument (the anticlockwise flag) must be Python's
        # True -- the original used the JavaScript literal 'true', which
        # is a NameError in Python/Brython.
        self.g.arc(self.x, self.y, self.r, 0, 2*math.pi, True)
        # Cross hairs through the centre.
        self.g.moveTo(self.x,self.y)
        self.g.lineTo(self.x+self.r, self.y)
        self.g.moveTo(self.x, self.y)
        self.g.lineTo(self.x-self.r, self.y)
        self.g.moveTo(self.x, self.y)
        self.g.lineTo(self.x, self.y+self.r)
        self.g.moveTo(self.x, self.y)
        self.g.lineTo(self.x, self.y-self.r)
        self.g.restore()
        self.g.stroke()

    def Eq(self, pt):
        """Copy the coordinates of *pt* into this point."""
        self.x = pt.x
        self.y = pt.y

    def setPoint(self, px, py):
        """Set the coordinates directly."""
        self.x = px
        self.y = py

    def distance(self, pt):
        """Return the Euclidean distance from this point to *pt*."""
        self.pt = pt
        x = self.x - self.pt.x
        y = self.y - self.pt.y
        return math.sqrt(x * x + y * y)

    def tag(self, g):
        """Write the point's coordinates as text at its location."""
        self.g = g
        self.g.beginPath()
        self.g.fillText("%d, %d"%(self.x, self.y),self.x, self.y)
        self.g.stroke()
# Line class
class Line(object):
    """A directed line segment from Tail (= p1) to Head (= p2)."""

    def __init__(self, p1, p2):
        self.p1 = p1
        self.p2 = p2
        # First point is the tail, second point is the head.
        self.Tail = self.p1
        self.Head = self.p2
        # Cached length of the segment at construction time.
        self.length = math.sqrt(math.pow(self.p2.x-self.p1.x, 2)+math.pow(self.p2.y-self.p1.y,2))

    def setPP(self, p1, p2):
        """Redefine the line by its two end points."""
        self.p1 = p1
        self.p2 = p2
        self.Tail = self.p1
        self.Head = self.p2
        self.length = math.sqrt(math.pow(self.p2.x-self.p1.x, 2)+math.pow(self.p2.y-self.p1.y,2))

    def setRT(self, r, t):
        """Keep the tail fixed and place the head at polar offset (r, t)."""
        self.r = r
        self.t = t
        x = self.r * math.cos(self.t)
        y = self.r * math.sin(self.t)
        self.Tail.Eq(self.p1)
        self.Head.setPoint(self.Tail.x + x,self.Tail.y + y)

    def getR(self):
        """Return the current distance between the two end points."""
        # x and y components of the segment.
        x = self.p1.x - self.p2.x
        y = self.p1.y - self.p2.y
        return math.sqrt(x * x + y * y)

    def getT(self):
        """Return the angle of the head relative to the tail, in (-pi, pi]."""
        # By definition atan2(y, x) is the angle between (x, y) and the
        # positive x axis, in the range (-pi, pi].
        x = self.p2.x - self.p1.x
        y = self.p2.y - self.p1.y
        if (math.fabs(x) < math.pow(10,-100)):
            # Effectively vertical: return +/- pi/2 explicitly.
            if(y < 0.0):
                return (-math.pi/2)
            else:
                return (math.pi/2)
        else:
            return math.atan2(y, x)

    def setTail(self, pt):
        """Translate the line so its tail sits at *pt*, preserving the offset.

        BUG FIX: the original referenced undefined attributes self.x and
        self.y; the head offset is now computed from the current geometry
        before the tail is moved.
        """
        dx = self.Head.x - self.Tail.x
        dy = self.Head.y - self.Tail.y
        self.pt = pt
        self.Tail.Eq(pt)
        self.Head.setPoint(pt.x + dx, pt.y + dy)

    def getHead(self):
        """Return the head point."""
        return self.Head

    def getTail(self):
        """Return the tail point."""
        return self.Tail

    def drawMe(self, g):
        """Stroke the segment on canvas context *g*."""
        self.g = g
        self.g.beginPath()
        self.g.moveTo(self.p1.x,self.p1.y)
        self.g.lineTo(self.p2.x,self.p2.y)
        self.g.stroke()

    def test(self):
        return ("this is pure test to Inherit")
class Link(Line):
    """A mechanical link drawn as a bar with a pin hole at each end."""

    def __init__(self, p1, p2):
        self.p1 = p1
        self.p2 = p2
        self.length = math.sqrt(math.pow((self.p2.x - self.p1.x), 2) + math.pow((self.p2.y - self.p1.y), 2))

    def drawMe(self, g):
        """Draw the link on canvas context *g*: two pin holes plus the bar.

        The link is drawn along the local y axis and rotated into place.
        BUG FIX: all arc() calls used the JavaScript literal 'true' for the
        anticlockwise flag, which is a NameError in Python/Brython; it is
        now the Python constant True.
        """
        self.g = g
        hole = 5
        radius = 10
        length = self.getR()
        # Save the current drawing state before transforming.
        self.g.save()
        self.g.translate(self.p1.x,self.p1.y)
        # Rotate so the link (drawn on the y axis) points at p2.
        self.g.rotate(-math.pi*0.5 + self.getT())
        # Pin hole at the tail end.
        self.g.beginPath()
        self.g.moveTo(0,0)
        self.g.arc(0, 0, hole, 0, 2*math.pi, True)
        self.g.stroke()
        # Pin hole at the head end.
        self.g.moveTo(0,length)
        self.g.beginPath()
        self.g.arc(0,length, hole, 0, 2*math.pi, True)
        self.g.stroke()
        # Rounded cap and right edge of the bar.
        self.g.moveTo(0,0)
        self.g.beginPath()
        self.g.arc(0,0, radius, 0, math.pi, True)
        self.g.moveTo(0+radius,0)
        self.g.lineTo(0+radius,0+length)
        self.g.stroke()
        # Rounded cap and left edge of the bar.
        self.g.moveTo(0,0+length)
        self.g.beginPath()
        self.g.arc(0, 0+length, radius, math.pi, 0, True)
        self.g.moveTo(0-radius,0+length)
        self.g.lineTo(0-radius,0)
        self.g.stroke()
        self.g.restore()
        # Label the head coordinates in red.
        self.g.beginPath()
        self.g.fillStyle = "red"
        self.g.font = "bold 18px sans-serif"
        self.g.fillText("%d, %d"%(self.p2.x, self.p2.y),self.p2.x, self.p2.y)
        self.g.stroke()
        # NOTE(review): this second restore() has no matching save();
        # a no-op on HTML canvas but looks unintended -- confirm.
        self.g.restore()
class Triangle(object):
    def __init__(self, p1, p2, p3):
        # Vertices of the triangle; side "pN" refers to the side opposite
        # vertex pN throughout this class.
        self.p1 = p1
        self.p2 = p2
        self.p3 = p3
def getLenp3(self):
p1 = self.p1
ret = p1.distance(self.p2)
return ret
def getLenp1(self):
p2 = self.p2
ret = p2.distance(self.p3)
return ret
def getLenp2(self):
p1 = self.p1
ret = p1.distance(self.p3)
return ret
    # Angles
def getAp1(self):
ret = math.acos(((self.getLenp2() * self.getLenp2() + self.getLenp3() * self.getLenp3()) - self.getLenp1() * self.getLenp1()) / (2* self.getLenp2() * self.getLenp3()))
return ret
#
def getAp2(self):
ret =math.acos(((self.getLenp1() * self.getLenp1() + self.getLenp3() * self.getLenp3()) - self.getLenp2() * self.getLenp2()) / (2* self.getLenp1() * self.getLenp3()))
return ret
def getAp3(self):
ret = math.acos(((self.getLenp1() * self.getLenp1() + self.getLenp2() * self.getLenp2()) - self.getLenp3() * self.getLenp3()) / (2* self.getLenp1() * self.getLenp2()))
return ret
    def drawMe(self, g):
        """Draw the triangle's three vertices and three edges on context *g*."""
        self.g = g
        r = 5
        # Draw the three vertices
        self.p1.drawMe(self.g,r)
        self.p2.drawMe(self.g,r)
        self.p3.drawMe(self.g,r)
        line1 = Line(self.p1,self.p2)
        line2 = Line(self.p1,self.p3)
        line3 = Line(self.p2,self.p3)
        # Draw the three edges
        line1.drawMe(self.g)
        line2.drawMe(self.g)
        line3.drawMe(self.g)
# ends Triangle def
    # Define the triangle by its three side lengths
def setSSS(self, lenp3, lenp1, lenp2):
self.lenp3 = lenp3
self.lenp1 = lenp1
self.lenp2 = lenp2
self.ap1 = math.acos(((self.lenp2 * self.lenp2 + self.lenp3 * self.lenp3) - self.lenp1 * self.lenp1) / (2* self.lenp2 * self.lenp3))
self.ap2 = math.acos(((self.lenp1 * self.lenp1 + self.lenp3 * self.lenp3) - self.lenp2 * self.lenp2) / (2* self.lenp1 * self.lenp3))
self.ap3 = math.acos(((self.lenp1 * self.lenp1 + self.lenp2 * self.lenp2) - self.lenp3 * self.lenp3) / (2* self.lenp1 * self.lenp2))
    # Define the triangle by two side lengths and the included angle
def setSAS(self, lenp3, ap2, lenp1):
self.lenp3 = lenp3
self.ap2 = ap2
self.lenp1 = lenp1
self.lenp2 = math.sqrt((self.lenp3 * self.lenp3 + self.lenp1 * self.lenp1) - 2* self.lenp3 * self.lenp1 * math.cos(self.ap2))
    # Equivalent to SSS(AB, BC, CA)
def setSaSS(self, lenp2, lenp3, lenp1):
self.lenp2 = lenp2
self.lenp3 = lenp3
self.lenp1 = lenp1
if(self.lenp1 > (self.lenp2 + self.lenp3)):
#<CAB 夾角為 180 度, 三點共線且 A 介於 BC 之間
ret = math.pi
else :
# <CAB 夾角為 0, 三點共線且 A 不在 BC 之間
if((self.lenp1 < (self.lenp2 - self.lenp3)) or (self.lenp1 < (self.lenp3 - self.lenp2))):
ret = 0.0
else :
# 透過餘絃定理求出夾角 <CAB
ret = math.acos(((self.lenp2 * self.lenp2 + self.lenp3 * self.lenp3) - self.lenp1 * self.lenp1) / (2 * self.lenp2 * |
jamielennox/tempest | tempest/tests/test_auth.py | Python | apache-2.0 | 15,978 | 0 | # Copyright 2014 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
from oslotest import mockpatch
from tempest import auth
from tempest import config
from tempest import exceptions
from tempest.services.identity.json import token_client as v2_client
from tempest.services.identity.v3.json import token_client as v3_client
from tempest.tests import base
from tempest.tests import fake_config
from tempest.tests import fake_credentials
from tempest.tests import fake_http
from tempest.tests import fake_identity
def fake_get_default_credentials(credential_type, fill_in=True):
    # Test stub: ignores its arguments and always returns a fake credential.
    return fake_credentials.FakeCredentials()
def fake_get_credentials(credential_type=None, fill_in=True, **kwargs):
    # Test stub: ignores its arguments and always returns a fake credential.
    return fake_credentials.FakeCredentials()
class BaseAuthTestsSetUp(base.TestCase):
    """Shared fixture wiring for the auth provider test classes."""

    # Subclasses set this to the auth.AuthProvider subclass under test.
    _auth_provider_class = None
    credentials = fake_credentials.FakeCredentials()

    def _auth(self, credentials, **params):
        """
        returns auth method according to keystone
        """
        return self._auth_provider_class(credentials, **params)

    def setUp(self):
        super(BaseAuthTestsSetUp, self).setUp()
        self.useFixture(fake_config.ConfigFixture())
        self.stubs.Set(config, 'TempestConfigPrivate', fake_config.FakePrivate)
        self.fake_http = fake_http.fake_httplib2(return_type=200)
        # Replace the credential factories so no real identity service
        # is ever contacted during these tests.
        self.stubs.Set(auth, 'get_credentials', fake_get_credentials)
        self.stubs.Set(auth, 'get_default_credentials',
                       fake_get_default_credentials)
        self.auth_provider = self._auth(self.credentials)
class TestBaseAuthProvider(BaseAuthTestsSetUp):
    """
    This tests auth.AuthProvider class which is base for the other so we
    obviously don't test not implemented method or the ones which strongly
    depends on them.
    """

    class FakeAuthProviderImpl(auth.AuthProvider):
        # Minimal concrete subclass: the abstract hooks are stubbed out so
        # the base class can be instantiated.
        # NOTE(review): these stubs omit 'self'; they are never invoked via
        # an instance in these tests, but would raise TypeError if they were.
        def _decorate_request():
            pass

        def _fill_credentials():
            pass

        def _get_auth():
            pass

        def base_url():
            pass

        def is_expired():
            pass

    _auth_provider_class = FakeAuthProviderImpl

    def test_check_credentials_bad_type(self):
        self.assertFalse(self.auth_provider.check_credentials([]))

    def test_instantiate_with_dict(self):
        # Dict credentials are only supported for backward compatibility
        auth_provider = self._auth(credentials={})
        self.assertIsInstance(auth_provider.credentials, auth.Credentials)

    def test_auth_data_property_when_cache_exists(self):
        self.auth_provider.cache = 'foo'
        # Pretend the cached token is still valid so the cache is returned.
        self.useFixture(mockpatch.PatchObject(self.auth_provider,
                                              'is_expired',
                                              return_value=False))
        self.assertEqual('foo', getattr(self.auth_provider, 'auth_data'))

    def test_delete_auth_data_property_through_deleter(self):
        self.auth_provider.cache = 'foo'
        del self.auth_provider.auth_data
        self.assertIsNone(self.auth_provider.cache)

    def test_delete_auth_data_property_through_clear_auth(self):
        self.auth_provider.cache = 'foo'
        self.auth_provider.clear_auth()
        self.assertIsNone(self.auth_provider.cache)

    def test_set_and_reset_alt_auth_data(self):
        self.auth_provider.set_alt_auth_data('foo', 'bar')
        self.assertEqual(self.auth_provider.alt_part, 'foo')
        self.assertEqual(self.auth_provider.alt_auth_data, 'bar')

        self.auth_provider.reset_alt_auth_data()
        self.assertIsNone(self.auth_provider.alt_part)
        self.assertIsNone(self.auth_provider.alt_auth_data)

    def test_auth_class(self):
        # AuthProvider itself is abstract and must not be instantiable.
        self.assertRaises(TypeError,
                          auth.AuthProvider,
                          fake_credentials.FakeCredentials)
class TestKeystoneV2AuthProvider(BaseAuthTestsSetUp):
_endpoints = fake_identity.IDENTITY_V2_RESPONSE['access']['serviceCatalog']
_auth_provider_class = auth.KeystoneV2AuthProvider
credentials = fake_credentials.FakeKeystoneV2Credentials()
    def setUp(self):
        super(TestKeystoneV2AuthProvider, self).setUp()
        # Route identity v2 token requests to the canned fake response.
        self.stubs.Set(v2_client.TokenClientJSON, 'raw_request',
                       fake_identity._fake_v2_response)
        self.target_url = 'test_api'
def _get_fake_alt_identity(self):
return fake_identity.ALT_IDENTITY_V2_RESPONSE['access']
def _get_result_url_from_endpoint(self, ep, endpoint_type='publicURL',
replacement=None):
if replacement:
return ep[endpoint_type].replace('v2', replacement)
return ep[endpoint_type]
    def _get_token_from_fake_identity(self):
        # Token embedded in the canned fake identity responses.
        return fake_identity.TOKEN
def _get_from_fake_identity(self, attr):
access = fake_identity.IDENTITY_V2_RESPONSE['access']
if attr == 'user_id':
return access['user']['id']
elif attr == 'tenant_id':
return access['token']['tenant']['id']
    def _test_request_helper(self, filters, expected):
        # Issue an authenticated request and verify the resolved URL,
        # the X-Auth-Token header, and the (unchanged) body.
        url, headers, body = self.auth_provider.auth_request('GET',
                                                             self.target_url,
                                                             filters=filters)

        self.assertEqual(expected['url'], url)
        self.assertEqual(expected['token'], headers['X-Auth-Token'])
        self.assertEqual(expected['body'], body)
    def _auth_data_with_expiry(self, date_as_string):
        # Return the current auth data with the token expiry overridden.
        # Note this mutates the cached 'access' dict in place.
        token, access = self.auth_provider.auth_data
        access['token']['expires'] = date_as_string
        return token, access
    def test_request(self):
        # Happy path: the filters resolve to the second endpoint of the
        # first catalog entry, and the fake token is attached.
        filters = {
            'service': 'compute',
            'endpoint_type': 'publicURL',
            'region': 'FakeRegion'
        }

        url = self._get_result_url_from_endpoint(
            self._endpoints[0]['endpoints'][1]) + '/' + self.target_url

        expected = {
            'body': None,
            'url': url,
            'token': self._get_token_from_fake_identity(),
        }
        self._test_request_helper(filters, expected)
    def test_request_with_alt_auth_cleans_alt(self):
        # Install alternate auth data, run a normal request, then check
        # the alternate data was consumed and cleared.
        self.auth_provider.set_alt_auth_data(
            'body',
            (fake_identity.ALT_TOKEN, self._get_fake_alt_identity()))
        self.test_request()
        # Assert alt auth data is clear after it
        self.assertIsNone(self.auth_provider.alt_part)
        self.assertIsNone(self.auth_provider.alt_auth_data)
    def test_request_with_alt_part_without_alt_data(self):
        """
        Assert that when alt_part is defined, the corresponding original
        request element is kept the same.
        """
        filters = {
            'service': 'compute',
            'endpoint_type': 'publicURL',
            'region': 'fakeRegion'
        }
        # alt_part set with no alt data: the 'url' element must pass
        # through unmodified.
        self.auth_provider.set_alt_auth_data('url', None)
        url, headers, body = self.auth_provider.auth_request('GET',
                                                             self.target_url,
                                                             filters=filters)

        self.assertEqual(url, self.target_url)
        self.assertEqual(self._get_token_from_fake_identity(),
                         headers['X-Auth-Token'])
        self.assertEqual(body, None)
def test_request_with_bad_service(self):
filters = {
'service': 'BAD_SERVICE',
'endpoint_type': 'publicURL',
'region': 'fakeRegion'
}
self.assertRaises(exceptions.EndpointNotFound,
|
rbramwell/pulp | server/pulp/server/managers/auth/user/cud.py | Python | gpl-2.0 | 7,971 | 0.001756 | """
Contains the manager class and exceptions for operations surrounding the creation,
update, and deletion on a Pulp user.
"""
from gettext import gettext as _
import re
from celery import task
from pulp.server import config
from pulp.server.async.tasks import Task
from pulp.server.db.model.auth import User
from pulp.server.exceptions import (PulpDataException, DuplicateResource, InvalidValue,
MissingResource)
from pulp.server.managers import factory
from pulp.server.managers.auth.role.cud import SUPER_USER_ROLE
# letters, numbers, underscore, hyphen, period
_USER_LOGIN_REGEX = re.compile(r'^[.\-_A-Za-z0-9]+$')
class UserManager(object):
"""
Performs user related functions relating to CRUD operations.
"""
    @staticmethod
    def create_user(login, password=None, name=None, roles=None):
        """
        Creates a new Pulp user and adds it to specified to roles.

        @param login: login name / unique identifier for the user
        @type  login: str

        @param password: password for login credentials
        @type  password: str

        @param name: user's full name
        @type  name: str

        @param roles: list of roles user will belong to
        @type  roles: list

        @raise DuplicateResource: if there is already a user with the requested login
        @raise InvalidValue: if any of the fields are unacceptable
        """
        existing_user = User.get_collection().find_one({'login': login})
        if existing_user is not None:
            raise DuplicateResource(login)

        # Validate all inputs up front so the error reports every bad field.
        invalid_values = []
        if login is None or _USER_LOGIN_REGEX.match(login) is None:
            invalid_values.append('login')
        if invalid_type(name, basestring):
            invalid_values.append('name')
        if invalid_type(roles, list):
            invalid_values.append('roles')
        if invalid_values:
            raise InvalidValue(invalid_values)

        # Use the login for name of the user if one was not specified
        name = name or login
        roles = roles or None

        # Encode plain-text password
        hashed_password = None
        if password:
            hashed_password = factory.password_manager().hash_password(password)

        # Creation
        # NOTE(review): find_one + save is not atomic; two concurrent
        # creates with the same login could race -- confirm a unique
        # index exists on 'login'.
        create_me = User(login=login, password=hashed_password, name=name, roles=roles)
        User.get_collection().save(create_me, safe=True)

        # Grant permissions
        permission_manager = factory.permission_manager()
        permission_manager.grant_automatic_permissions_for_user(create_me['login'])

        # Retrieve the user to return the SON object
        created = User.get_collection().find_one({'login': login})
        created.pop('password')

        return created
@staticmethod
def update_user(login, delta):
"""
Updates the user. Following fields may be updated through this call:
* password
* name
* roles
Other fields found in delta will be ignored.
@param login: identifies the user
@type login: str
@param delta: list of attributes and their new values to change
@type delta: dict
@raise MissingResource: if there is no user with login
"""
user = User.get_collection().find_one({'login': login})
if user is None:
raise MissingResource(login)
# Check invalid values
invalid_values = []
if 'password' in delta:
password = delta.pop('password')
if password is None or invalid_type(password, basestring):
invalid_values.append('password')
else:
user['password'] = factory.password_manager().hash_password(password)
if 'name' in delta:
name = delta.pop('name')
if name is None or invalid_type(name, basestring):
invalid_values.append('name')
else:
user['name'] = name
if 'roles' in delta:
roles = delta.pop('roles')
if roles is None or invalid_type(roles, list):
invalid_values.append('roles')
else:
# Add new roles to the user and remove deleted roles from the user according to
# delta
role_manager = factory.role_manager()
old_roles = user['roles']
for new_role in roles:
if new_role not in old_roles:
role_manager.add_user_to_role(new_role, login)
for old_role in old_roles:
if old_role not in roles:
role_manager.remove_user_from_role(old_role, login)
user['roles'] = roles
if invalid_values:
raise InvalidValue(invalid_values)
if delta:
raise InvalidValue(delta.keys())
User.get_collection().save(user, safe=True)
# Retrieve the user to return the SON object
updated = User.get_collection().find_one({'login': login})
updated.pop('password')
return updated
@staticmethod
def delete_user(login):
"""
Deletes the given user. Deletion of last superuser is not permitted.
@param login: identifies the user being deleted
@type login: str
@raise MissingResource: if the given user does not exist
@raise InvalidValue: if login value is invalid
"""
# Raise exception if login is invalid
if login is None or invalid_type(login, basestring):
raise InvalidValue(['login'])
# Check whether user exists
found = User.get_collection().find_one({'login': login})
if found is None:
raise MissingResource(login)
# Make sure user is not t | he last super user
if factory.user_query_manager().is_last_super_user(login):
raise PulpDataException(_("The last superuser [%s] cannot be deleted" % login))
# Revoke all permissions from the user
permission_manager = factory.permission_ma | nager()
permission_manager.revoke_all_permissions_from_user(login)
User.get_collection().remove({'login': login}, safe=True)
def ensure_admin(self):
"""
This function ensures that there is at least one super user for the system.
If no super users are found, the default admin user (from the pulp config)
is looked up or created and added to the super users role.
"""
role_manager = factory.role_manager()
if self.get_admins():
return
default_login = config.config.get('server', 'default_login')
admin = User.get_collection().find_one({'login': default_login})
if admin is None:
default_password = config.config.get('server', 'default_password')
admin = UserManager.create_user(login=default_login,
password=default_password)
role_manager.add_user_to_role(SUPER_USER_ROLE, default_login)
@staticmethod
def get_admins():
"""
Get a list of users with the super-user role.
:return: list of users who are admins.
:rtype: list of User
"""
user_query_manager = factory.user_query_manager()
try:
super_users = user_query_manager.find_users_belonging_to_role(SUPER_USER_ROLE)
return super_users
except MissingResource:
return None
# Task bindings: expose the static manager methods as asynchronously
# dispatchable tasks.  `task` and `Task` are presumably imported near the top
# of this module (not visible in this chunk) -- TODO confirm.
create_user = task(UserManager.create_user, base=Task)
delete_user = task(UserManager.delete_user, base=Task, ignore_result=True)
update_user = task(UserManager.update_user, base=Task)
def invalid_type(input_value, valid_type):
    """
    Check whether a non-None value fails an isinstance test; None is always
    considered acceptable (i.e. never "invalid").

    @return: true if input_value is not of valid_type
    @rtype: bool
    """
    if input_value is None:
        return False
    return not isinstance(input_value, valid_type)
|
bentiss/hid-replay | tools/capture_usbmon.py | Python | gpl-2.0 | 13,959 | 0.02479 | #!/bin/env python
# -*- coding: utf-8 -*-
#
# Hid replay / capture_usbmon.py
#
# must be run as root.
#
# This program is useful to capture both the raw usb events from an input
# device and its kernel generated events.
#
# Requires several tools to be installed: usbmon, evemu and pyudev
#
# Copyright (c) 2014 Benjamin Tissoires <benjamin.tissoires@gmail.com>
# Copyright (c) 2014 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import threading
import subprocess
import shlex
import time
from optparse import OptionParser
import pyudev
import inspect
module_path = os.path.abspath(inspect.getsourcefile(lambda _: None))
module_dirname = os.path.dirname(module_path)
usbmon2hid_replay = module_dirname + "/usbmon2hid-replay.py"
class UDevObject(object):
    """ Abstract class for an udev tree element.

    Each node wraps a pyudev device, keeps a reference to its parent and a
    dict of children (keyed by sys_path).  Subclasses declare which udev
    devices count as their children via is_child_type()/children_class.
    """
    def __init__(self, device, parent, children_class):
        "device being an udev device"
        self.device = device
        self.parent = parent
        # class used to instantiate direct children (None for leaf nodes)
        self.children_class = children_class
        self.childrens = {}
        # file handle kept open between unbind() and rebind()
        self.bind_file = None
    def is_child_type(self, other):
        "is the other UDevObject from the correct child type"
        # abstract method, has to be overwritten in the subclasses
        return False
    def is_parent(self, other):
        "true if the current UDevObject is a parent of the given UDevObject"
        return self.device.sys_path in other.sys_path
    def add_child(self, device):
        """add a child to the hierarchy: instanciate a new subclass of UDevObject
        stored in self.children_class.
        """
        if not self.children_class:
            return
        child = self.children_class(device, self)
        self.childrens[device.sys_path] = child
    def removed(self):
        "called when the item is removed from the parent"
        # abstract method, has to be overwritten in the subclasses
        pass
    def clean(self):
        "remove all of the children of the UDevObject"
        for child in self.childrens.values():
            child.removed()
            child.clean()
        self.childrens = {}
    def udev_event(self, action, device):
        "called when a udev event is processed"
        if self.is_child_type(device):
            # the device is our direct child, add/remove it to the hierarchy
            if action == "add":
                self.add_child(device)
            else:
                if device.sys_path in self.childrens.keys():
                    # be sure to notify the "removed" call before deleting it
                    self.childrens[device.sys_path].removed()
                    del(self.childrens[device.sys_path])
        else:
            # maybe our children know how to handle it
            for child in self.childrens.values():
                if child.is_parent(device):
                    child.udev_event(action, device)
    def get_name(self):
        "return a more convenient name for the current object"
        return self.device.sys_path
    def print_tree(self, level = 0):
        "convenient function to print a tree of the current known devices"
        print self.get_name()
        for child in self.childrens.values():
            print (" " * level) + u' └',
            child.print_tree(level + 1)
    def unbind(self):
        "unbind the device from its current driver"
        path = self.device.sys_path
        unbind_path = "{0}/driver/unbind".format(path)
        bind_path = "{0}/driver/bind".format(path)
        if not os.path.exists(unbind_path):
            # no driver currently bound, nothing to do
            return False
        self.unbind_file = open(unbind_path, "w")
        self.bind_file = open(bind_path, "w")
        self.unbind_file.write(self.device.sys_name)
        self.unbind_file.close()
        return True
    def rebind(self):
        "rebind the device to its driver (unbind has to be called first)"
        if not self.bind_file:
            raise Exception, "trying to rebind an unbind device"
        self.bind_file.write(self.device.sys_name)
        self.bind_file.close()
        self.bind_file = None
class EventNode(UDevObject):
def __init__(self, device, parent):
# no children devices for this one
UDevObject.__init__(self, device, parent, None)
self.index = int(self.device.sys_name.replace("event", ""))
self.start_evemu()
def get_name(self):
return "{0}_{1}.ev".format(self.parent.get_name(), self.index)
def removed(self):
# close the underlying evemu process when the device is removed
self.p.terminate()
self.p.wait()
self.output.close()
def | start_evemu(self):
# start an evemu-record of the event node
print "dumping evdev events in", self.get_name()
self.output = open(self.get_name(), 'w')
evemu_command = "evemu-record /dev/input/{0}".format(self.device.sys_name)
print evemu_command
self.p = | subprocess.Popen(shlex.split(evemu_command), stdout=self.output)
class USBInterface(UDevObject):
    """A USB interface of a captured device.

    Its children are the evdev event nodes created by the kernel for this
    interface.  At creation, the report descriptors are forced to be read
    through `lsusb -v` while the driver is unbound.
    """
    def __init__(self, device, parent):
        UDevObject.__init__(self, device, parent, EventNode)
        # sys_name looks like "<bus>-<port>:<config>.<intf>"; keep the tail
        self.intf_number = device.sys_name.split(':')[-1]
        self.lsusb()
    def is_child_type(self, other):
        return other.subsystem == u'input' and u'event' in other.sys_name
    def get_name(self):
        return "{0}_{1}".format(self.parent.get_name(), self.intf_number)
    def write_hid_file(self):
        "convert the usbmon recording into a hid recording"
        intf = self.intf_number.split(".")[-1]
        usbmon = self.parent.get_usbmon_filename()
        usbmon_command = "python {0} {1} --intf {2}".format(usbmon2hid_replay, usbmon, intf)
        f = open(self.get_name() + ".hid", "w")
        p = subprocess.Popen(shlex.split(usbmon_command), stdout=f)
        p.wait()
        print "written", self.get_name() + ".hid"
    def removed(self):
        # keep a record in the parent so it can emit the .hid file at unplug
        self.parent.remove_interface(self)
    def lsusb(self):
        """when the usb driver does not checks for the report descriptors, we have
        to ask them ourself: call `lsusb -v' when the driver is not bound."""
        # unbind the device first
        if not self.unbind():
            return
        # call lsusb -v
        lsusbcall = "lsusb -v -d {0}:{1}".format(self.parent.vid, self.parent.pid)
        subprocess.call(shlex.split(lsusbcall), stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        #rebind the device
        self.rebind()
class USBDev(UDevObject):
    """ A USB device object:
    - will keep the interface hierarchy
    - at unplug, convert the usb recording into the various hid files (one
    per known interface)
    """
    def __init__(self, device):
        UDevObject.__init__(self, device, None, USBInterface)
        self.vid = device.get("ID_VENDOR_ID")
        self.pid = device.get("ID_MODEL_ID")
        # strip dots so the vendor string is filesystem-friendly
        self.vendor = device.get("ID_VENDOR").replace(".", "")
        self.start_usbmon()
        # interfaces that have been unplugged; flushed in terminate()
        self.removed_intf = []
    def is_child_type(self, other):
        return other.device_type == u'usb_interface'
    def get_name(self):
        return "{0}_{1}_{2}".format(self.vendor, self.vid, self.pid)
    def get_usbmon_filename(self):
        "return the usbmon file name were the events are recorded"
        return self.get_name() + ".usbmon"
    def start_usbmon(self):
        "start the usbmon subprocess"
        # device_node is ".../<bus>/<number>"
        number = self.device.device_node.split('/')[-1]
        bus = int(self.device.device_node.split('/')[-2])
        # start usbmon
        print "dumping usb events in", self.get_usbmon_filename()
        self.usbmon_file = open(self.get_usbmon_filename(), 'w')
        USBMon.add_listener(bus, number, self.usbmon_file)
    def remove_interface(self, intf):
        "when an interface is removed, this method is called"
        self.removed_intf.append(intf)
    def terminate(self):
        """clean up and terminate the usb device:
        - stop the usbmon capture for this device
        - remove any zombi child
        - ask for each known interface to translate the usbmon capture into a hid one
        """
        number = self.device.device_node.split('/')[-1]
        bus = int(self.device.device_node.split('/')[-2])
        USBMon.remove_listener(bus, number)
        self.usbmon_file.close()
        self.clean()
        for intf in self.removed_intf:
            intf.write_hid_file()
class USBMon(threading.Thread):
"""usbmon recorder class:
- calling a new object USBMon(bus) starts recording usb events on this bus
- each device gets buffered in its own queue of event |
story645/hpcc | set_partition/sets/gensets.py | Python | mit | 414 | 0.004831 | """
Hannah Aizenman
10/13/2013
Generates a random subset of size 10^P for p in [1,MAX_P) from [0, 10^8)
"""
import random
MAX_P = 8
max_value = | 10**MAX_P
large_set = range(max_value)
for p in xrange(1,MAX_P):
print "list of size: 10^{0}".format(p)
f = open("p{0}.txt".format(p), 'w')
sample = random.sample(large_set, 10**p)
f.wri | te("\n".join(map(lambda x: str(x), sample)))
f.close()
|
dek-odoo/python-samples | python exercises/dek_program078.py | Python | apache-2.0 | 584 | 0.003425 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#- Author : (DEK) Devendra Kavthekar
# program078:
# Please write a program to generate a list with 5 random numbers between
# 100 and 200 inclusive.
# Hints:
# Use random.sample() to generate a list of random values.
import random
def main(startLimit, endLimit):
    """Print 5 distinct random integers drawn from [startLimit, endLimit]."""
    print random.sample(range(startLimit, endLimit + 1), 5)
if __name__ == '__main__':
    # startLimit = int(raw_input("Input Start Value: "))
    # endLimit = int(raw_input("Input Stop Value: "))
    # main(startLimit, endLimit)
    main(100, 200)
|
Laurawly/tvm-1 | python/tvm/topi/unique.py | Python | apache-2.0 | 12,249 | 0.003102 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name
"""Unique operator"""
from tvm import te, tir
from ..te import hybrid
from .scan import cumsum
from .sort import sort, argsort
def _calc_adjacent_diff_ir(data, output, binop=tir.Sub):
    """Emit low-level TIR computing the adjacent difference of a 1-D buffer.

    The generated IR writes ``output[0] = 0`` and, for 0 < i < len(data),
    ``output[i] = binop(data[i], data[i-1])``.

    Parameters
    ----------
    data : Buffer
        Input 1-D Buffer.

    output : Buffer
        Destination buffer of the same shape as ``data``.

    binop : function, optional
        Binary associative op combining two TIR expressions into a new one;
        defaults to ``tvm.tir.Sub``.
    """
    builder = tir.ir_builder.create()
    src = builder.buffer_ptr(data)
    dst = builder.buffer_ptr(output)
    with builder.for_range(0, data.shape[0], kind="parallel") as i:
        with builder.if_scope(i == 0):
            dst[0] = 0
        with builder.else_scope():
            dst[i] = tir.Cast(output.dtype, binop(src[i], src[i - 1]))
    return builder.get()
def _calc_adjacent_diff(data, out_dtype="int32", binop=tir.Sub):
    """Function calculate adjacent difference in an 1-D array.

    Parameters
    ----------
    data : tvm.te.Tensor
        Input 1-D tensor.

    out_dtype : str
        The output tensor data type.

    binop: function, optional
        A binary associative op to use for calculating difference. The function takes two
        TIR expressions and produce a new TIR expression. By default it uses tvm.tir.Sub to
        compute the adjacent difference.

    Returns
    -------
    output : tvm.te.Tensor
        1-D tensor storing the adjacent difference of the input tensor. The adjacent difference
        is defined as: output[0] = 0, output[i] = binop(data[i], data[i-1])
        where i > 0 and i < len(data).
    """
    return te.extern(
        [data.shape],
        [data],
        lambda ins, outs: _calc_adjacent_diff_ir(ins[0], outs[0], binop=binop),
        dtype=[out_dtype],
        name="_calc_adjacent_diff",
        tag="_calc_adjacent_diff_cpu",
    )
@hybrid.script
def _calc_num_unique(inc_scan):
    """Return the number of unique elements from the *inc_scan* tensor.

    The last element of the inclusive scan of the adjacent-difference mask is
    ``num_unique - 1``, so adding one yields the count. Result is a
    1-element int32 tensor.
    """
    output = output_tensor((1,), "int32")
    output[0] = inc_scan[inc_scan.shape[0] - 1] + int32(1)
    return output
def _calc_unique_ir(
    data, argsorted_indices, inc_scan, index_converter, unique_elements, inverse_indices, counts
):
    """Low level IR to calculate unique elements, inverse indices, and counts (optional) of
    unique elements of 1-D array.

    Parameters
    ----------
    data : Buffer
        Input 1-D Buffer.

    argsorted_indices : Buffer
        A buffer that stores the argsorted indices of the input data.

    inc_scan : Buffer
        A buffer that stores the inclusive scan of the binary tir.NE adjacent difference
        of the sorted data.

    index_converter (optional) : Buffer
        An optional index converter that transforms the unique element index
        such that new_idx = index_converter[old_idx].

    unique_elements : Buffer
        A buffer that stores the unique elements.

    inverse_indices : Buffer
        A buffer that stores the index of each input data element in the unique element array.

    counts (optional) : Buffer
        A buffer that stores the count of each unique element.
    """
    ib = tir.ir_builder.create()
    data_ptr = ib.buffer_ptr(data)
    argsorted_indices_ptr = ib.buffer_ptr(argsorted_indices)
    inc_scan_ptr = ib.buffer_ptr(inc_scan)
    unique_elements_ptr = ib.buffer_ptr(unique_elements)
    inverse_indices_ptr = ib.buffer_ptr(inverse_indices)
    index_converter_ptr = None
    if isinstance(index_converter, tir.Buffer):
        index_converter_ptr = ib.buffer_ptr(index_converter)
    if isinstance(counts, tir.Buffer):
        counts_ptr = ib.buffer_ptr(counts)
        # use indices_ptr as a tmp buffer to store tids with inc_scan[tid] != inc_scan[tid-1]
        unique_seq_indices_ptr = ib.buffer_ptr(inverse_indices)
    data_length = data.shape[0]
    # if need to return counts
    if isinstance(counts, tir.Buffer):
        num_unique = inc_scan_ptr[inc_scan.shape[0] - 1] + 1
        num_elements = data.shape[0]
        # sentinel: position one past the last run's start
        unique_seq_indices_ptr[num_unique - 1] = num_elements
        with ib.new_scope():
            # record the starting position (in sorted order) of each run
            with ib.for_range(0, data_length, kind="parallel") as i:
                with ib.if_scope(i > 0):
                    with ib.if_scope(inc_scan_ptr[i] != inc_scan_ptr[i - 1]):
                        unique_seq_indices_ptr[inc_scan_ptr[i] - 1] = i
        with ib.new_scope():
            # count of a run = distance between consecutive run starts
            with ib.for_range(0, num_unique, kind="parallel") as i:
                unique_idx = i if not index_converter_ptr else index_converter_ptr[i]
                with ib.if_scope(i == 0):
                    counts_ptr[unique_idx] = unique_seq_indices_ptr[i]
                with ib.else_scope():
                    counts_ptr[unique_idx] = (
                        unique_seq_indices_ptr[i] - unique_seq_indices_ptr[i - 1]
                    )
    # calculate unique elements and inverse indices
    with ib.new_scope():
        with ib.for_range(0, data_length, kind="parallel") as i:
            data_idx = argsorted_indices_ptr[i]
            unique_idx = (
                inc_scan_ptr[i] if not index_converter_ptr else index_converter_ptr[inc_scan_ptr[i]]
            )
            inverse_indices_ptr[data_idx] = unique_idx
            # only the first element of each run writes the unique value
            with ib.if_scope(i == 0):
                unique_elements_ptr[unique_idx] = data_ptr[data_idx]
            with ib.else_scope():
                with ib.if_scope(inc_scan_ptr[i] != inc_scan_ptr[i - 1]):
                    unique_elements_ptr[unique_idx] = data_ptr[data_idx]
    return ib.get()
@hybrid.script
def _calc_first_occurence(argsorted_indices, inc_scan):
    """Hybrid script to calculate the first occurence of each unique element in the input data.

    Parameters
    ----------
    argsorted_indices : tvm.te.Tensor
        A tensor that stores the argsorted indices of the input data.

    inc_scan : tvm.te.Tensor
        A tensor that stores the inclusive scan of the binary tir.NE adjacent difference
        of the sorted data.

    Returns
    -------
    first_occurence : tvm.te.Tensor
        A tensor that stores the first occurence of each unique element in the input data.
    """
    first_occurence = output_tensor(argsorted_indices.shape, "int32")
    # initialize every slot with an out-of-range sentinel (the input length)
    for i in parallel(argsorted_indices.shape[0]):
        first_occurence[i] = argsorted_indices.shape[0]
    # the first element of each sorted run is that unique value's first occurrence
    for i in parallel(argsorted_indices.shape[0]):
        if i == 0 or inc_scan[i] != inc_scan[i - 1]:
            first_occurence[inc_scan[i]] = argsorted_indices[i]
    return first_occurence
def unique(data, is_sorted=True, return_counts=False):
"""
Find the unique elements of a 1-D tensor. Please note `output` and `counts` are all padded to
have the same length of `data` and element with index >= num_unique[0] has undefined value.
Parameters
----------
data : tvm.te.Tensor
A 1-D tensor of integers.
sorted : bool
Whether to sort the unique elements in ascending order before returni |
lcpt/xc | verif/tests/elements/crd_transf/test_element_axis_03.py | Python | gpl-3.0 | 3,424 | 0.034765 | # -*- coding: utf-8 -*-
__author__= "Luis C. Pérez Tato (LCPT) and Ana Ortega (AOO)"
__copyright__= "Copyright 2015, LCPT and AOO"
__license__= "GPL"
__version__= "3.0"
__email__= "l.pereztato@gmail.com"
import xc_base
import geom
import xc
from solution import predefined_solutions
from model import predefined_spaces
from materials import typical_materials
import math
b= 0.4
h= 0.8
A= b*h
E= 200000*9.81/1e-4 # Estimated concrete elastic modulus.
nu= 0.3 # Poisson's ratio
G= E/(2*(1+nu)) # Shear modulus
Iy= (1/12.0*h*b**3) # Cross section moment of inertia (m4)
Iz= (1/12.0*b*h**3) # Cross section moment of inertia (m4)
J= 0.721e-8 # Cross section torsion constant (m4)
L= 1 # Element length expressed in meters.
F= 1.0e3 # Load magnitude (kN)
# Problem type
feProblem= xc.FEProblem()
preprocessor= feProblem.getPreprocessor
nodes= preprocessor.getNodeHandler
modelSpace= predefined_spaces.StructuralMechanics3D(nodes)
nodes.defaultTag= 1 #First node number.
nod= nodes.newNodeXYZ(0.0,0.0,0.0)
nod= nodes.newNodeXYZ(L,0.0,0)
# Materials
sectionProperties= xc.CrossSectionProperties3d()
sectionProperties.A= A; sectionProperties.E= E; sectionProperties.G= G
sectionProperties.Iz= Iz; sectionProperties.Iy= Iy; sectionProperties.J= J
sectionProperties.rotate(math.radians(90))
section= typical_materials.defElasticSectionFromMechProp3d(preprocessor, "section",sectionProperties)
lin= modelSpace.newLinearCrdTransf("lin",xc.Vector([0,1,0]))
# Elements definition
elements= preprocessor.getElementHandler
elements.defaultTransformation= "lin"
elements.defaultMaterial= "section"
elements.defaultTag= 1 #Tag for the next element.
# Single 3D elastic beam element between nodes 1 and 2
beam3d= elements.newElement("ElasticBeam3d",xc.ID([1,2]))
sectionAngle= 0
# strong/weak axis directions expressed in global coordinates
fuerte= beam3d.getVDirStrongAxisGlobalCoord(True) # initialGeometry= True
debil= beam3d.getVDirWeakAxisGlobalCoord(True) # initialGeometry= True
sectionAngle= beam3d.getStrongAxisAngle()
ratio1= ((debil[0])**2+(debil[2])**2)
ratio2= ((fuerte[0])**2+(fuerte[1])**2)
# Constraints
modelSpace.fixNode000_000(1)
# Loads definition
loadHandler= preprocessor.getLoadHandler
lPatterns= loadHandler.getLoadPatterns
#Load modulation.
ts= lPatterns.newTimeSeries("constant_ts","ts")
lPatterns.currentTimeSeries= "ts"
lp0= lPatterns.newLoadPattern("default","0")
lp0.newNodalLoad(2,xc.Vector([0,-F,F,0,0,0]))
#We add the load case to domain.
lPatterns.addToDomain("0")
# Solution
analisis= predefined_solutions.simple_static_linear(feProblem)
result= analisis.analyze(1)
deltaYTeor= (-F*L**3/(3*E*Iz))
deltaZTeor= (F*L**3/(3*E*Iy))
nodes= preprocessor.getNodeHandler
nod2= nodes.getNode(2)
deltaY= nod2.getDisp[1]
deltaZ= nod2.getDisp[2] # Node 2 yAxis displacement
ratio3= (deltaY-deltaYTeor)/deltaYTeor
ratio4= (deltaZ-deltaZTeor)/deltaZTeor
ratio5= (deltaY/deltaZ)+(Iy/Iz)
'''
print "deltaY/deltaZ= ",deltaY/deltaZ
print "Iy/Iz= ",(Iy/Iz)
print "fuerte: ",fuerte
print "ratio1= ",ratio1
print "debil: ",debil
print "ratio2= ",ratio2
print "deltaY= ",deltaY
print "deltaYTeor= ",deltaYTeor
print "ratio3= ",ratio3
print "deltaZ= ",deltaZ
print "deltaZTeor= ",deltaZTeor
print "ratio4= ",ratio4
print "ratio5= ",ratio5
'''
import os
from miscUtils import LogMessages as lmsg
fname= os.path.basename(__file__)
if (ratio1 < 1e-15) & (ratio2 < 1e-15) & (abs(sectionAngle) < 1e-12) & (ratio3 < 1e-5) & (ratio4 < 1e-6) & (ratio5 < 1e-6):
print "test ",fname,": ok."
else:
lmsg.error(fname+' ERROR.')
|
plotly/python-api | packages/python/plotly/plotly/validators/barpolar/hoverlabel/font/_familysrc.py | Python | mit | 475 | 0.002105 | import _plotly_utils.basevalidators
class FamilysrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``familysrc`` property of barpolar.hoverlabel.font.

    A "src" property references a column of pre-uploaded values, hence the
    SrcValidator base class.  This class follows plotly's generated-validator
    pattern.
    """

    def __init__(
        self, plotly_name="familysrc", parent_name="barpolar.hoverlabel.font", **kwargs
    ):
        super(FamilysrcValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "none"),
            role=kwargs.pop("role", "info"),
            **kwargs
        )
|
valentin-krasontovitsch/ansible | lib/ansible/modules/messaging/rabbitmq/rabbitmq_exchange.py | Python | gpl-3.0 | 6,655 | 0.002705 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Manuel Sousa <manuel.sousa@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rabbitmq_exchange
author: Manuel Sousa (@manuel-sousa)
version_added: "2.0"
short_description: Manage rabbitMQ exchanges
description:
  - This module uses rabbitMQ Rest API to create/delete exchanges
requirements: [ "requests >= 1.0.0" ]
options:
    name:
        description:
            - Name of the exchange to create
        required: true
    state:
        description:
            - Whether the exchange should be present or absent
        choices: [ "present", "absent" ]
        required: false
        default: present
    durable:
        description:
            - whether exchange is durable or not
        required: false
        type: bool
        default: yes
    exchange_type:
        description:
            - type for the exchange
        required: false
        choices: [ "fanout", "direct", "headers", "topic" ]
        aliases: [ "type" ]
        default: direct
    auto_delete:
        description:
            - if the exchange should delete itself after all queues/exchanges unbound from it
        required: false
        type: bool
        default: no
    internal:
        description:
            - exchange is available only for other exchanges
        required: false
        type: bool
        default: no
    arguments:
        description:
            - extra arguments for exchange. If defined this argument is a key/value dictionary
        required: false
        default: {}
extends_documentation_fragment:
    - rabbitmq
'''

EXAMPLES = '''
# Create direct exchange
- rabbitmq_exchange:
    name: directExchange

# Create topic exchange on vhost
- rabbitmq_exchange:
    name: topicExchange
    type: topic
    vhost: myVhost
'''
import json
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib import parse as urllib_parse
from ansible.module_utils.rabbitmq import rabbitmq_argument_spec
def main():
    """Entry point: reconcile a RabbitMQ exchange with the requested state
    via the management HTTP API (create, verify, or delete)."""
    argument_spec = rabbitmq_argument_spec()
    argument_spec.update(
        dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            name=dict(required=True, type='str'),
            durable=dict(default=True, type='bool'),
            auto_delete=dict(default=False, type='bool'),
            internal=dict(default=False, type='bool'),
            exchange_type=dict(default='direct', aliases=['type'], type='str'),
            arguments=dict(default=dict(), type='dict')
        )
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    # management API endpoint for this exchange (vhost and name URL-quoted)
    url = "%s://%s:%s/api/exchanges/%s/%s" % (
        module.params['login_protocol'],
        module.params['login_host'],
        module.params['login_port'],
        urllib_parse.quote(module.params['vhost'], ''),
        urllib_parse.quote(module.params['name'], '')
    )
    if not HAS_REQUESTS:
        module.fail_json(msg="requests library is required for this module. To install, use `pip install requests`")
    result = dict(changed=False, name=module.params['name'])
    # Check if exchange already exists
    r = requests.get(url, auth=(module.params['login_user'], module.params['login_password']),
                     verify=module.params['cacert'], cert=(module.params['cert'], module.params['key']))
    if r.status_code == 200:
        exchange_exists = True
        response = r.json()
    elif r.status_code == 404:
        exchange_exists = False
        response = r.text
    else:
        module.fail_json(
            msg="Invalid response from RESTAPI when trying to check if exchange exists",
            details=r.text
        )
    if module.params['state'] == 'present':
        change_required = not exchange_exists
    else:
        change_required = exchange_exists
    # Check if attributes change on existing exchange
    if not change_required and r.status_code == 200 and module.params['state'] == 'present':
        if not (
            response['durable'] == module.params['durable'] and
            response['auto_delete'] == module.params['auto_delete'] and
            response['internal'] == module.params['internal'] and
            response['type'] == module.params['exchange_type']
        ):
            module.fail_json(
                msg="RabbitMQ RESTAPI doesn't support attribute changes for existing exchanges"
            )
    # Exit if check_mode
    if module.check_mode:
        result['changed'] = change_required
        result['details'] = response
        result['arguments'] = module.params['arguments']
        module.exit_json(**result)
    # Do changes
    if change_required:
        if module.params['state'] == 'present':
            r = requests.put(
                url,
                auth=(module.params['login_user'], module.params['login_password']),
                headers={"content-type": "application/json"},
                data=json.dumps({
                    "durable": module.params['durable'],
                    "auto_delete": module.params['auto_delete'],
                    "internal": module.params['internal'],
                    "type": module.params['exchange_type'],
                    "arguments": module.params['arguments']
                }),
                verify=module.params['cacert'],
                cert=(module.params['cert'], module.params['key'])
            )
        elif module.params['state'] == 'absent':
            r = requests.delete(url, auth=(module.params['login_user'], module.params['login_password']),
                                verify=module.params['cacert'], cert=(module.params['cert'], module.params['key']))
        # RabbitMQ 3.6.7 changed this response code from 204 to 201
        if r.status_code == 204 or r.status_code == 201:
            result['changed'] = True
            module.exit_json(**result)
        else:
            module.fail_json(
                msg="Error creating exchange",
                status=r.status_code,
                details=r.text
            )
    else:
        # already in the desired state
        module.exit_json(
            changed=False,
            name=module.params['name']
        )
if __name__ == '__main__':
    main()
|
WangWenjun559/Weiss | classifier/liblinear.py | Python | apache-2.0 | 9,368 | 0.028288 | #!/usr/bin/env python
from ctypes import *
from ctypes.util import find_library
from os import path
import sys
__all__ = ['liblinear', 'feature_node', 'gen_feature_nodearray', 'problem',
'parameter', 'model', 'toPyModel', 'L2R_LR', 'L2R_L2LOSS_SVC_DUAL',
'L2R_L2LOSS_SVC', 'L2R_L1LOSS_SVC_DUAL', 'MCSVM_CS',
'L1R_L2LOSS_SVC', 'L1R_LR', 'L2R_LR_DUAL', 'L2R_L2LOSS_SVR',
'L2R_L2LOSS_SVR_DUAL', 'L2R_L1LOSS_SVR_DUAL', 'print_null']
try:
dirname = path.dirname(path.abspath(__file__))
if sys.platform == 'win32':
liblinear = CDLL(path.join(dirname, r'..\windows\liblinear.dll'))
else:
liblinear = CDLL(path.join(dirname, 'liblinear.so.2'))
except:
# For unix the prefix 'lib' is not considered.
if find_library('linear'):
liblinear = CDLL(find_library('linear'))
elif find_library('liblinear'):
liblinear = CDLL(find_library('liblinear'))
else:
raise Exception('LIBLINEAR library not found.')
L2R_LR = 0
L2R_L2LOSS_SVC_DUAL = 1
L2R_L2LOSS_SVC = 2
L2R_L1LOSS_SVC_DUAL = 3
MCSVM_CS = 4
L1R_L2LOSS_SVC = 5
L1R_LR = 6
L2R_LR_DUAL = 7
L2R_L2LOSS_SVR = 11
L2R_L2LOSS_SVR_DUAL = 12
L2R_L1LOSS_SVR_DUAL = 13
PRINT_STRING_FUN = CFUNCTYPE(None, c_char_p)
def print_null(s):
    """No-op print callback used to silence liblinear's console output."""
    pass
def genFields(names, types):
    """Pair field names with ctypes types, as needed by Structure._fields_."""
    return [(field_name, field_type) for field_name, field_type in zip(names, types)]
def fillprototype(f, restype, argtypes):
    """Declare the C signature (return type and argument types) of a ctypes function."""
    f.argtypes = argtypes
    f.restype = restype
class feature_node(Structure):
    """ctypes mirror of liblinear's ``struct feature_node``: one (index, value)
    pair of a sparse feature vector."""
    _names = ["index", "value"]
    _types = [c_int, c_double]
    _fields_ = list(zip(_names, _types))

    def __str__(self):
        return '%d:%g' % (self.index, self.value)
def gen_feature_nodearray(xi, feature_max=None, issparse=True):
    """Convert one instance into a ctypes feature_node array.

    xi is either a {index: value} dict with 1-based indices, or a list/tuple
    of values (shifted so indices start from 1).  The array is terminated by
    two index==-1 sentinels (bias slot + terminator).  Returns the tuple
    (node_array, max_feature_index).
    """
    if isinstance(xi, dict):
        index_range = xi.keys()
    elif isinstance(xi, (list, tuple)):
        xi = [0] + xi # idx should start from 1
        index_range = range(1, len(xi))
    else:
        raise TypeError('xi should be a dictionary, list or tuple')
    if feature_max:
        assert(isinstance(feature_max, int))
        index_range = filter(lambda j: j <= feature_max, index_range)
    if issparse:
        # drop zero-valued features
        index_range = filter(lambda j:xi[j] != 0, index_range)
    index_range = sorted(index_range)
    ret = (feature_node * (len(index_range)+2))()
    ret[-1].index = -1 # for bias term
    ret[-2].index = -1
    for idx, j in enumerate(index_range):
        ret[idx].index = j
        ret[idx].value = xi[j]
    max_idx = 0
    if index_range :
        max_idx = index_range[-1]
    return ret, max_idx
class problem(Structure):
	"""Mirror of LIBLINEAR's C 'problem' struct: l instances, n features, labels y, data x."""
	_names = ["l", "n", "y", "x", "bias"]
	_types = [c_int, c_int, POINTER(c_double), POINTER(POINTER(feature_node)), c_double]
	_fields_ = genFields(_names, _types)
	def __init__(self, y, x, bias=-1):
		"""Build a problem from labels *y* and feature vectors *x* (dicts/lists/tuples)."""
		if len(y) != len(x) :
			raise ValueError("len(y) != len(x)")
		self.l = l = len(y)
		self.bias = -1
		max_idx = 0
		# Keep the converted ctypes arrays alive on self so the C side
		# keeps seeing valid memory.
		x_space = self.x_space = []
		for i, xi in enumerate(x):
			tmp_xi, tmp_idx = gen_feature_nodearray(xi)
			x_space += [tmp_xi]
			max_idx = max(max_idx, tmp_idx)
		self.n = max_idx
		self.y = (c_double * l)()
		for i, yi in enumerate(y): self.y[i] = y[i]
		self.x = (POINTER(feature_node) * l)()
		for i, xi in enumerate(self.x_space): self.x[i] = xi
		self.set_bias(bias)
	def set_bias(self, bias):
		"""Set the bias term, rewriting the reserved second-to-last node of every row."""
		if self.bias == bias:
			return
		# NOTE(review): 'node' is only assigned when bias crosses the
		# sign boundary; changing one non-negative bias to a different
		# non-negative value would raise NameError below -- confirm
		# callers never do that.
		if bias >= 0 and self.bias < 0:
			self.n += 1
			node = feature_node(self.n, bias)
		if bias < 0 and self.bias >= 0:
			self.n -= 1
			node = feature_node(-1, bias)
		for xi in self.x_space:
			xi[-2] = node
		self.bias = bias
class parameter(Structure):
	"""Mirror of LIBLINEAR's C 'parameter' struct plus Python-only training options."""
	_names = ["solver_type", "eps", "C", "nr_weight", "weight_label", "weight", "p"]
	_types = [c_int, c_double, c_double, c_int, POINTER(c_int), POINTER(c_double), c_double]
	_fields_ = genFields(_names, _types)
	def __init__(self, options = None):
		"""Initialize from a 'train'-style option string or argv-like list."""
		if options == None:
			options = ''
		self.parse_options(options)
	def __str__(self):
		# Dump both the C struct fields and any extra Python-side attributes.
		s = ''
		attrs = parameter._names + list(self.__dict__.keys())
		values = map(lambda attr: getattr(self, attr), attrs)
		for attr, val in zip(attrs, values):
			s += (' %s: %s\n' % (attr, val))
		s = s.strip()
		return s
	def set_to_default_values(self):
		"""Reset every option to its default value."""
		self.solver_type = L2R_L2LOSS_SVC_DUAL
		# 'inf' marks eps as unset; parse_options substitutes a
		# solver-specific default at the end.
		self.eps = float('inf')
		self.C = 1
		self.p = 0.1
		self.nr_weight = 0
		self.weight_label = (c_int * 0)()
		self.weight = (c_double * 0)()
		self.bias = -1
		self.cross_validation = False
		self.nr_fold = 0
		self.print_func = cast(None, PRINT_STRING_FUN)
	def parse_options(self, options):
		"""Parse command-line-style options into this struct.

		Accepts either a string ('-s 0 -c 1') or a pre-split list.
		Raises ValueError on unknown flags or a bad -v fold count.
		"""
		if isinstance(options, list):
			argv = options
		elif isinstance(options, str):
			argv = options.split()
		else:
			raise TypeError("arg 1 should be a list or a str.")
		self.set_to_default_values()
		self.print_func = cast(None, PRINT_STRING_FUN)
		weight_label = []
		weight = []
		i = 0
		while i < len(argv) :
			if argv[i] == "-s":
				i = i + 1
				self.solver_type = int(argv[i])
			elif argv[i] == "-c":
				i = i + 1
				self.C = float(argv[i])
			elif argv[i] == "-p":
				i = i + 1
				self.p = float(argv[i])
			elif argv[i] == "-e":
				i = i + 1
				self.eps = float(argv[i])
			elif argv[i] == "-B":
				i = i + 1
				self.bias = float(argv[i])
			elif argv[i] == "-v":
				i = i + 1
				self.cross_validation = 1
				self.nr_fold = int(argv[i])
				if self.nr_fold < 2 :
					raise ValueError("n-fold cross validation: n must >= 2")
			elif argv[i].startswith("-w"):
				# '-wLABEL VALUE': per-class penalty weight; the label is
				# embedded in the flag itself, the value follows it.
				i = i + 1
				self.nr_weight += 1
				nr_weight = self.nr_weight
				weight_label += [int(argv[i-1][2:])]
				weight += [float(argv[i])]
			elif argv[i] == "-q":
				self.print_func = PRINT_STRING_FUN(print_null)
			else :
				raise ValueError("Wrong options")
			i += 1
		liblinear.set_print_string_function(self.print_func)
		# Copy the collected per-class weights into ctypes arrays.
		self.weight_label = (c_int*self.nr_weight)()
		self.weight = (c_double*self.nr_weight)()
		for i in range(self.nr_weight):
			self.weight[i] = weight[i]
			self.weight_label[i] = weight_label[i]
		# eps still 'inf' means the user did not pass -e: pick the
		# solver-specific default.
		if self.eps == float('inf'):
			if self.solver_type in [L2R_LR, L2R_L2LOSS_SVC]:
				self.eps = 0.01
			elif self.solver_type in [L2R_L2LOSS_SVR]:
				self.eps = 0.001
			elif self.solver_type in [L2R_L2LOSS_SVC_DUAL, L2R_L1LOSS_SVC_DUAL, MCSVM_CS, L2R_LR_DUAL]:
				self.eps = 0.1
			elif self.solver_type in [L1R_L2LOSS_SVC, L1R_LR]:
				self.eps = 0.01
			elif self.solver_type in [L2R_L2LOSS_SVR_DUAL, L2R_L1LOSS_SVR_DUAL]:
				self.eps = 0.1
class model(Structure):
	"""Mirror of LIBLINEAR's C 'model' struct with convenience accessors."""
	_names = ["param", "nr_class", "nr_feature", "w", "label", "bias"]
	_types = [parameter, c_int, c_int, POINTER(c_double), POINTER(c_int), c_double]
	_fields_ = genFields(_names, _types)
	def __init__(self):
		# Instances built from Python own their memory; toPyModel() flips
		# this marker to 'C' for models allocated by the C library.
		self.__createfrom__ = 'python'
	def __del__(self):
		# free memory created by C to avoid memory leak
		if hasattr(self, '__createfrom__') and self.__createfrom__ == 'C':
			liblinear.free_and_destroy_model(pointer(self))
	def get_nr_feature(self):
		"""Number of features of the trained model."""
		return liblinear.get_nr_feature(self)
	def get_nr_class(self):
		"""Number of classes of the trained model."""
		return liblinear.get_nr_class(self)
	def get_labels(self):
		"""Class labels as a Python list."""
		nr_class = self.get_nr_class()
		labels = (c_int * nr_class)()
		liblinear.get_labels(self, labels)
		return labels[:nr_class]
	def get_decfun_coef(self, feat_idx, label_idx=0):
		"""Decision-function coefficient for a (1-based) feature index."""
		return liblinear.get_decfun_coef(self, feat_idx, label_idx)
	def get_decfun_bias(self, label_idx=0):
		"""Bias term of the decision function for the given label index."""
		return liblinear.get_decfun_bias(self, label_idx)
	def get_decfun(self, label_idx=0):
		"""Return (weight_vector, bias) of the decision function."""
		w = [liblinear.get_decfun_coef(self, feat_idx, label_idx) for feat_idx in range(1, self.nr_feature+1)]
		b = liblinear.get_decfun_bias(self, label_idx)
		return (w, b)
	def is_probability_model(self):
		"""True if the model supports probability estimates."""
		return (liblinear.check_probability_model(self) == 1)
	def is_regression_model(self):
		"""True if the model is a regression (SVR) model."""
		return (liblinear.check_regression_model(self) == 1)
def toPyModel(model_ptr):
	"""
	toPyModel(model_ptr) -> model

	Convert a ctypes POINTER(model) to a Python model.

	Marks the result as C-owned so that model.__del__ frees the
	underlying C allocation exactly once.

	Raises ValueError if the pointer is NULL.
	"""
	# A NULL ctypes pointer is falsy; 'bool(x) == False' was redundant.
	if not model_ptr:
		raise ValueError("Null pointer")
	m = model_ptr.contents
	m.__createfrom__ = 'C'
	return m
fillprototype(liblinear.train, POINTER(model), [POINTER(problem), POINTER(parameter)])
fillprototype(liblinear.cross_validation, None, [POINTER(problem), POINTER(parameter), c_int, POINTER(c_double)])
fillprototype(liblinear.predict_values, c_double, [POINTER(model), POINTER(feature_node), POINTER(c_double)])
fillprototype(liblinear.predict, c_double, [POINTER( |
BastienFaure/jarvis | src/pentest/tests/__main__.py | Python | mit | 1,036 | 0 | import os
import argparse
from pentest import __version__
def get_parser():
    """
    Creates a new argument parser.
    """
    parser = argparse.ArgumentParser('jarvis')
    parser.add_argument(
        '--version', '-v',
        action='version',
        version='%(prog)s ' + __version__,
    )
    return parser
def main(args=None):
    """
    Called with ``python -m jarvis.tests``: run main test suite.

    :param args: optional list of command-line arguments; defaults to
        ``sys.argv`` via argparse.
    """
    parser = get_parser()
    args = parser.parse_args(args)
    # Check if pytest is available
    try:
        import pytest
    except ImportError:
        raise SystemExit(
            'You need py.test to run the test suite.\n'
            'You can install it using your distribution package manager or\n'
            ' $ python -m pip install pytest --user'
        )
    # Get data from test_module
    import pentest.tests as test_module
    test_path = os.path.abspath(os.path.dirname(test_module.__file__))
    # Run the suite, excluding tests marked 'documentation'.
    pytest.main([test_path, '-m', 'not documentation'])
if __name__ == '__main__':
main()
|
TraMZzz/GoGreen | go_green/users/serializers.py | Python | mit | 1,359 | 0.002208 | # -*- coding: ut | f-8 -*-
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
from .models import User
from go_green.badge.serializers import BadgeViewSetSerializer
class UserViewSetSerializer(serializers.ModelSerializer):
    """Serializer for User objects exposed through the user viewset."""

    badges = BadgeViewSetSerializer(many=True, read_only=True)
    email = serializers.EmailField(validators=[UniqueValidator(queryset=User.objects.all())])

    class Meta:
        model = User
        fields = (u'id', u'username', u'email', u'first_name',
                  u'last_name', u'clean_count',
                  u'status', u'is_volunteer', u'volunteer_group_name',
                  u'show_contact', u'contact_email', u'contact_phone',
                  u'contact_url', u'badges', u'uid', u'extra_data',
                  u'token', u'expires_at')

    def validate_contact_email(self, attrs):
        """Reject contact e-mails already used by another user.

        BUG FIX: DRF field validators must return the validated value;
        the original implementation returned it only for empty input and
        fell through to ``None`` for a unique non-empty address, silently
        blanking the field.
        """
        contact_email = attrs
        if contact_email:
            # NOTE(review): __icontains is a substring match, so 'a@b.com'
            # also blocks 'xa@b.com'; __iexact is likely intended --
            # confirm before changing the lookup.
            emailset = Q(contact_email__icontains=contact_email)
            emailres = User.objects.filter(emailset)
            if emailres:
                msg = _('The email address is already taken')
                raise serializers.ValidationError(msg)
        return attrs
|
trangel/OPTpy | examples/data/__init__.py | Python | gpl-3.0 | 391 | 0.005115 |
import os
# Directory holding the pseudopotential files bundled with the examples.
pseudo_dir = os.path.join(os.path.dirname(__file__), 'pseudos')
# TODO
# Gotta handle the pseudos a bit better.
# GaAs: structure file plus the pseudopotentials for each species.
structure_GaAs = os.path.join(os.path.dirname(__file__), 'structures', 'GaAs.json')
pseudos_GaAs = ['31-Ga.PBE.UPF', '33-As.PBE.UPF']
# Si: structure file plus its pseudopotential.
structure_Si = os.path.join(os.path.dirname(__file__), 'structures', 'Si.json')
pseudos_Si = ['14-Si.pspnc']
# Keep the module namespace clean: only the path constants are public.
del os
|
lthurlow/Network-Grapher | proj/external/numpy-1.7.0/numpy/core/tests/test_getlimits.py | Python | mit | 2,703 | 0.005179 | """ Test functions for limits module.
"""
from numpy.testing import *
from numpy.core import finfo, iinfo
from numpy import half, single, double, longdouble
import numpy as np
##################################################
class TestPythonFloat(TestCase):
    """finfo(float) must hand back a cached singleton instance."""
    def test_singleton(self):
        first = finfo(float)
        second = finfo(float)
        assert_equal(id(first), id(second))
class TestHalf(TestCase):
    """finfo(half) must hand back a cached singleton instance."""
    def test_singleton(self):
        first = finfo(half)
        second = finfo(half)
        assert_equal(id(first), id(second))
|
class TestSingle(TestCase):
    """finfo(single) must hand back a cached singleton instance."""
    def test_singleton(self):
        first = finfo(single)
        second = finfo(single)
        assert_equal(id(first), id(second))
class TestDouble(TestCase):
    """finfo(double) must hand back a cached singleton instance."""
    def test_singleton(self):
        first = finfo(double)
        second = finfo(double)
        assert_equal(id(first), id(second))
class TestLongdouble(TestCase):
    """finfo(longdouble) must hand back a cached singleton instance."""
    def test_singleton(self, level=2):
        first = finfo(longdouble)
        second = finfo(longdouble)
        assert_equal(id(first), id(second))
class TestIinfo(TestCase):
    """iinfo must agree between dtype codes and scalar types, and reject floats."""
    def test_basic(self):
        pairs = [('i1', np.int8), ('i2', np.int16),
                 ('i4', np.int32), ('i8', np.int64),
                 ('u1', np.uint8), ('u2', np.uint16),
                 ('u4', np.uint32), ('u8', np.uint64)]
        for code, scalar_type in pairs:
            assert_equal(iinfo(code).min, iinfo(scalar_type).min)
            assert_equal(iinfo(code).max, iinfo(scalar_type).max)
        # Float dtypes are rejected outright.
        self.assertRaises(ValueError, iinfo, 'f4')
    def test_unsigned_max(self):
        for T in np.sctypes['uint']:
            assert_equal(iinfo(T).max, T(-1))
class TestRepr(TestCase):
    """Check the repr() output of iinfo/finfo objects."""
    def test_iinfo_repr(self):
        expected = "iinfo(min=-32768, max=32767, dtype=int16)"
        assert_equal(repr(np.iinfo(np.int16)), expected)
    def test_finfo_repr(self):
        expected = "finfo(resolution=1e-06, min=-3.4028235e+38," + \
                   " max=3.4028235e+38, dtype=float32)"
        # Python 2.5 float formatting on Windows adds an extra 0 to the
        # exponent, so accept that variant as well. Once 2.5 compatibility
        # is dropped, this can simply use assert_equal against `expected`.
        expected_win25 = "finfo(resolution=1e-006, min=-3.4028235e+038," + \
                         " max=3.4028235e+038, dtype=float32)"
        actual = repr(np.finfo(np.float32))
        if actual != expected and actual != expected_win25:
            # BUG FIX: the original failure path referenced an undefined
            # name ('desired'), raising NameError instead of a useful
            # assertion message.
            raise AssertionError(
                "repr mismatch: got %r, expected %r" % (actual, expected))
def test_instances():
    """Smoke test: iinfo/finfo must accept concrete values, not just types."""
    iinfo(10)
    finfo(3.0)
if __name__ == "__main__":
run_module_suite()
|
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/psutil/tests/test_linux.py | Python | gpl-3.0 | 58,553 | 0.000273 | #!/usr/bin/env python
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Linux specific tests."""
from __future__ import division
import collections
import contextlib
import errno
import io
import os
import pprint
import re
import shutil
import socket
import struct
import tempfile
import textwrap
import time
import warnings
import psutil
from psutil import LINUX
from psutil._compat import PY3
from psutil._compat import u
from psutil.tests import call_until
from psutil.tests import get_kernel_version
from psutil.tests import importlib
from psutil.tests import MEMORY_TOLERANCE
from psutil.tests import mock
from psutil.tests import PYPY
from psutil.tests import pyrun
from psutil.tests import reap_children
from psutil.tests import retry_before_failing
from psutil.tests import run_test_module_by_name
from psutil.tests import safe_rmpath
from psutil.tests import sh
from psutil.tests import skip_on_not_implemented
from psutil.tests import TESTFN
from psutil.tests import ThreadTask
from psutil.tests import TRAVIS
from psutil.tests import unittest
from psutil.tests import which
HERE = os.path.abspath(os.path.dirname(__file__))
SIOCGIFADDR = 0x8915
SIOCGIFCONF = 0x8912
SIOCGIFHWADDR = 0x8927
if LINUX:
SECTOR_SIZE = 512
# =====================================================================
# utils
# =====================================================================
def get_ipv4_address(ifname):
    """Return the IPv4 address assigned to *ifname* via the SIOCGIFADDR ioctl."""
    import fcntl
    name = ifname[:15]  # IFNAMSIZ limit
    if PY3:
        name = bytes(name, 'ascii')
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    with contextlib.closing(sock):
        packed = fcntl.ioctl(sock.fileno(),
                             SIOCGIFADDR,
                             struct.pack('256s', name))
        return socket.inet_ntoa(packed[20:24])
def get_mac_address(ifname):
    """Return the MAC address of *ifname* via the SIOCGIFHWADDR ioctl."""
    import fcntl
    name = ifname[:15]  # IFNAMSIZ limit
    if PY3:
        name = bytes(name, 'ascii')
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    with contextlib.closing(sock):
        info = fcntl.ioctl(sock.fileno(),
                           SIOCGIFHWADDR,
                           struct.pack('256s', name))
    # On Python 3 the bytes are already ints; shim ord accordingly.
    if PY3:
        def ord(x):
            return x
    else:
        import __builtin__
        ord = __builtin__.ord
    return ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1]
def free_swap():
    """Parse 'free' cmd and return swap memory's total, used and free values."""
    lines = sh('free -b').split('\n')
    for line in lines:
        if not line.startswith('Swap'):
            continue
        _, total, used, free = line.split()
        fields = collections.namedtuple('free', 'total used free')
        return fields(int(total), int(used), int(free))
    raise ValueError(
        "can't find 'Swap' in 'free' output:\n%s" % '\n'.join(lines))
def free_physmem():
    """Parse 'free' cmd and return physical memory's total, used and free values."""
    # Note: free can have 2 different formats, invalidating 'shared'
    # and 'cached' memory which may have different positions so we
    # do not return them.
    # https://github.com/giampaolo/psutil/issues/538#issuecomment-57059946
    out = sh('free -b')
    lines = out.split('\n')
    for line in lines:
        if not line.startswith('Mem'):
            continue
        total, used, free, shared = [int(x) for x in line.split()[1:5]]
        fields = collections.namedtuple(
            'free', 'total used free shared output')
        return fields(total, used, free, shared, out)
    raise ValueError(
        "can't find 'Mem' in 'free' output:\n%s" % '\n'.join(lines))
def vmstat(stat):
    """Return the integer value of the 'vmstat -s' row whose label contains *stat*."""
    for line in sh("vmstat -s").split("\n"):
        line = line.strip()
        if stat in line:
            return int(line.split(' ')[0])
    raise ValueError("can't find %r in 'vmstat' output" % stat)
def get_free_version_info():
    """Return the installed 'free' utility version as a tuple of ints."""
    version_str = sh("free -V").strip().split()[-1]
    return tuple(map(int, version_str.split('.')))
# =====================================================================
# system virtual memory
# =====================================================================
@unittest.skipUnless(LINUX, "LINUX only")
class TestSystemVirtualMemory(unittest.TestCase):
def test_total(self):
# free_value = free_physmem().total
# psutil_value = psutil.virtual_memory().total
# self.assertEqual(free_value, psutil_value)
vmstat_value = vmstat('total memory') * 1024
psutil_value = psutil.virtual_memory().total
self.assertAlmostEqual(vmstat_value, psutil_value)
# Older versions of procps used slab memory to calculate used memory.
# This got changed in:
# https://gitlab.com/procps-ng/procps/commit/
# 05d751c4f076a2f0118b914c5e51cfbb4762ad8e
@unittest.skipUnless(
LINUX and ge | t_free_version_info() >= (3, 3, 12), "old free version")
@retry_before_failing()
def test_used(self):
free = free_physmem()
free_value = free.used
psutil_value = psutil.virtual_memory().used
self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE,
msg='%s %s \n%s' % (free_value, psutil_value, free.output))
@retry_before_failing()
def test_free(self):
# _, _, free_v | alue, _ = free_physmem()
# psutil_value = psutil.virtual_memory().free
# self.assertAlmostEqual(
# free_value, psutil_value, delta=MEMORY_TOLERANCE)
vmstat_value = vmstat('free memory') * 1024
psutil_value = psutil.virtual_memory().free
self.assertAlmostEqual(
vmstat_value, psutil_value, delta=MEMORY_TOLERANCE)
@retry_before_failing()
def test_buffers(self):
vmstat_value = vmstat('buffer memory') * 1024
psutil_value = psutil.virtual_memory().buffers
self.assertAlmostEqual(
vmstat_value, psutil_value, delta=MEMORY_TOLERANCE)
@retry_before_failing()
def test_active(self):
vmstat_value = vmstat('active memory') * 1024
psutil_value = psutil.virtual_memory().active
self.assertAlmostEqual(
vmstat_value, psutil_value, delta=MEMORY_TOLERANCE)
@retry_before_failing()
def test_inactive(self):
vmstat_value = vmstat('inactive memory') * 1024
psutil_value = psutil.virtual_memory().inactive
self.assertAlmostEqual(
vmstat_value, psutil_value, delta=MEMORY_TOLERANCE)
@retry_before_failing()
def test_shared(self):
free = free_physmem()
free_value = free.shared
if free_value == 0:
raise unittest.SkipTest("free does not support 'shared' column")
psutil_value = psutil.virtual_memory().shared
self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE,
msg='%s %s \n%s' % (free_value, psutil_value, free.output))
@retry_before_failing()
def test_available(self):
# "free" output format has changed at some point:
# https://github.com/giampaolo/psutil/issues/538#issuecomment-147192098
out = sh("free -b")
lines = out.split('\n')
if 'available' not in lines[0]:
raise unittest.SkipTest("free does not support 'available' column")
else:
free_value = int(lines[1].split()[-1])
psutil_value = psutil.virtual_memory().available
self.assertAlmostEqual(
free_value, psutil_value, delta=MEMORY_TOLERANCE,
msg='%s %s \n%s' % (free_value, psutil_value, out))
def test_warnings_mocked(self):
def open_mock(name, *args, **kwargs):
if name == '/proc/meminfo':
return io.BytesIO(textwrap.dedent("""\
Active(anon): 6145416 kB
Active(file): 2950064 kB
Buffers: 287952 kB
Inactive(anon): 574764 kB
Inactive(file): 1567648 kB
MemAvailable: 6574984 kB
MemFree: 2057400 kB
|
google/grr | grr/core/grr_response_core/lib/util/statx.py | Python | apache-2.0 | 8,579 | 0.007227 | #!/usr/bin/env python
"""A module for working with extended file stat collection.
This module will try to collect as detailed stat information as possible
depending on platform capabilities (e.g. on Linux it will use `statx` [1] call.
[1]: https://www.man7.org/linux/man-pages/man2/statx.2.html
"""
import ctypes
import functools
import operator
import os
import platform
from typing import NamedTuple
# Indicates whether the call also collects information about the birth time.
BTIME_SUPPORT: bool
# TODO(hanuszczak): Migrate to data classes on support for 3.7 is available.
class Result(NamedTuple):
  """A result of extended stat collection."""

  # A bitmask with extra file attributes.
  attributes: int

  # A number of hard links.
  nlink: int

  # A user identifier of the owner.
  uid: int

  # A group identifier of the owner.
  gid: int

  # A bitmask indicating the file and mode of the file.
  mode: int

  # An inode number of the file.
  ino: int

  # The total size (in bytes) of the file.
  size: int

  # Last access time (in nanoseconds since epoch).
  atime_ns: int

  # Birth (creation) time (in nanoseconds since epoch).
  btime_ns: int

  # Last status change time (in nanoseconds since epoch).
  ctime_ns: int

  # Last modification time (in nanoseconds since epoch).
  mtime_ns: int

  # The device identifier (if file represents a device).
  rdev: int

  # The device identifier of the filesystem the file resides on.
  dev: int
def Get(path: bytes) -> Result:
  """Collects detailed stat information about the path.

  Args:
    path: A path to the file for which the information should be retrieved.

  Returns:
    An object with detailed stat information.
  """
  return _GetImpl(path)
class _StatxTimestampStruct(ctypes.Structure):
"""A low-level definition of Linux's stat timestamp type."""
# https://elixir.bootlin.com/linux/v5.6/source/include/uapi/linux/stat.h
_fields_ = [
("tv_sec", ctypes.c_int64),
("tv_nsec", ctypes.c_uint32),
("__reserved", ctypes.c_int32),
]
@property
def nanos(self):
"""A number of nanoseconds since epoch the timestamp represents."""
return self.tv_sec * 10**9 + self.tv_nsec
class _StatxStruct(ctypes.Structure):
  """A low-level definition of Linux's stat object type."""

  # https://elixir.bootlin.com/linux/v5.6/source/include/uapi/linux/stat.h
  _fields_ = [
      ("stx_mask", ctypes.c_uint32),
      ("stx_blksize", ctypes.c_uint32),
      ("stx_attributes", ctypes.c_uint64),
      ("stx_nlink", ctypes.c_uint32),
      ("stx_uid", ctypes.c_uint32),
      ("stx_gid", ctypes.c_uint32),
      ("stx_mode", ctypes.c_uint16),
      ("__spare0", ctypes.c_uint16 * 1),
      ("stx_ino", ctypes.c_uint64),
      ("stx_size", ctypes.c_uint64),
      ("stx_blocks", ctypes.c_uint64),
      ("stx_attributes_mask", ctypes.c_uint64),
      # File timestamps.
      ("stx_atime", _StatxTimestampStruct),
      ("stx_btime", _StatxTimestampStruct),
      ("stx_ctime", _StatxTimestampStruct),
      ("stx_mtime", _StatxTimestampStruct),
      # Device identifier (if the file represents a device).
      ("stx_rdev_major", ctypes.c_uint32),
      ("stx_rdev_minor", ctypes.c_uint32),
      # Device identifier of the filesystem the file resides on.
      ("stx_dev_major", ctypes.c_uint32),
      ("stx_dev_minor", ctypes.c_uint32),
      # Spare space for future extensions.
      ("__spare2", ctypes.c_uint64 * 14),
  ]

  @property
  def rdev(self) -> int:
    """Device identifier (if the file represents a device)."""
    # https://elixir.bootlin.com/linux/v5.6/source/tools/include/nolibc/nolibc.h
    major_bits = self.stx_rdev_major & 0xfff
    minor_bits = self.stx_rdev_minor & 0xff
    return (major_bits << 8) | minor_bits

  @property
  def dev(self) -> int:
    """Device identifier of the filesystem the file resides on."""
    # https://elixir.bootlin.com/linux/v5.6/source/tools/include/nolibc/nolibc.h
    major_bits = self.stx_dev_major & 0xfff
    minor_bits = self.stx_dev_minor & 0xff
    return (major_bits << 8) | minor_bits
# Flags for the `flags` argument of statx(2).
# https://elixir.bootlin.com/linux/v3.4/source/include/linux/fcntl.h
_AT_SYMLINK_NOFOLLOW = 0x100
_AT_STATX_SYNC_AS_STAT = 0x0000
# Bits of `stx_mask` selecting which result fields the kernel should fill.
# https://elixir.bootlin.com/linux/v5.8/source/include/uapi/linux/stat.h
_STATX_MODE = 0x00000002
_STATX_NLINK = 0x00000004
_STATX_UID = 0x00000008
_STATX_GID = 0x00000010
_STATX_ATIME = 0x00000020
_STATX_BTIME = 0x00000800
_STATX_MTIME = 0x00000040
_STATX_CTIME = 0x00000080
_STATX_INO = 0x00000100
_STATX_SIZE = 0x00000200
# Union of every field this module consumes.
_STATX_ALL = functools.reduce(operator.__or__, [
    _STATX_MODE,
    _STATX_NLINK,
    _STATX_UID,
    _STATX_GID,
    _STATX_ATIME,
    _STATX_BTIME,
    _STATX_MTIME,
    _STATX_CTIME,
    _STATX_INO,
    _STATX_SIZE,
], 0)
if platform.system() == "Linux":
_libc = ctypes.CDLL("libc.so.6")
try:
_statx = _libc.statx
except AttributeError:
# `statx` is available only since glibc 2.28.
_statx = None
if _statx is not None:
_statx.argtypes = [
# Input arguments.
ctypes.c_int,
ctypes.c_char_p,
ctypes.c_int,
ctypes.c_uint,
# Output arguments.
ctypes.POINTER(_StatxStruct),
]
_statx.restype = ctypes.c_int
def _GetImplLinuxStatx(path: bytes) -> Result:
  """A Linux-specific stat implementation through `statx`.

  Queries the symlink itself (AT_SYMLINK_NOFOLLOW) with the default sync
  policy and requests every field listed in _STATX_ALL.

  Raises:
    OSError: if the statx(2) call returns a non-zero status.
  """
  c_result = _StatxStruct()
  c_status = _statx(0, path, _AT_SYMLINK_NOFOLLOW | _AT_STATX_SYNC_AS_STAT,
                    _STATX_ALL, ctypes.pointer(c_result))
  if c_status != 0:
    raise OSError(f"Failed to stat '{path}', error code: {c_status}")
  return Result(
      attributes=c_result.stx_attributes,
      nlink=c_result.stx_nlink,
      uid=c_result.stx_uid,
      gid=c_result.stx_gid,
      mode=c_result.stx_mode,
      ino=c_result.stx_ino,
      size=c_result.stx_size,
      atime_ns=c_result.stx_atime.nanos,
      btime_ns=c_result.stx_btime.nanos,
      ctime_ns=c_result.stx_ctime.nanos,
      mtime_ns=c_result.stx_mtime.nanos,
      rdev=c_result.rdev,
      dev=c_result.dev)
_GetImpl = _GetImplLinuxStatx
BTIME_SUPPORT = True
else:
def _GetImplLinux(path: bytes) -> Result:
  """A generic Linux-specific stat implementation.

  Fallback used when glibc does not expose `statx` (available only since
  glibc 2.28): fields that only `statx` provides are reported as 0.
  """
  stat_obj = os.lstat(path)
  return Result(
      attributes=0,  # Not available.
      nlink=stat_obj.st_nlink,
      uid=stat_obj.st_uid,
      gid=stat_obj.st_gid,
      mode=stat_obj.st_mode,
      ino=stat_obj.st_ino,
      size=stat_obj.st_size,
      atime_ns=stat_obj.st_atime_ns,
      btime_ns=0,  # Not available.
      ctime_ns=stat_obj.st_ctime_ns,
      mtime_ns=stat_obj.st_mtime_ns,
      rdev=stat_obj.st_rdev,
      dev=stat_obj.st_dev)
_GetImpl = _GetImplLinux
BTIME_SUPPORT = False
elif platform.system() == "Darwin":
def _GetImplMacos(path: bytes) -> Result:
  """A macOS-specific stat implementation."""
  stat_obj = os.lstat(path)
  # Nanosecond-precision birth time is not available, so approximate it
  # from the float-precision st_birthtime value.
  st_birthtime_ns = int(stat_obj.st_birthtime * 10**9)
  return Result(
      attributes=stat_obj.st_flags,
      nlink=stat_obj.st_nlink,
      uid=stat_obj.st_uid,
      gid=stat_obj.st_gid,
      mode=stat_obj.st_mode,
      ino=stat_obj.st_ino,
      size=stat_obj.st_size,
      atime_ns=stat_obj.st_atime_ns,
      btime_ns=st_birthtime_ns,
      ctime_ns=stat_obj.st_ctime_ns,
      mtime_ns=stat_obj.st_mtime_ns,
      rdev=stat_obj.st_rdev,
      dev=stat_obj.st_dev)
_GetImpl = _GetImplMacos
BTIME_SUPPORT = True
elif platform.system() == "Windows":
def _GetImplWindows(path: bytes) -> Result:
"""A Windows-specific stat implementation."""
stat_obj = os.lstat(path)
# pylint: disable=line-too-long
# On Windows, the `st_ctime` field is the file birth time [1], so we just
# copy this value both to `btime` and `ctime`.
#
# [1]: https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/stat-functions
# pylint: enable=line-too-long
return Result(
attributes=stat_obj.st_file_attributes, # pytype: disable=attribute-error
nlink=stat_obj.st_nlink,
uid=stat_obj.s |
ESS-LLP/erpnext-healthcare | erpnext/hooks.py | Python | gpl-3.0 | 14,269 | 0.020324 | from __future__ import unicode_literals
from frappe import _
app_name = "erpnext"
app_title = "ERPNext"
app_publisher = "Frappe Technologies Pvt. Ltd."
app_description = """ERP made simple"""
app_icon = "fa fa-th"
app_color = "#e74c3c"
app_email = "info@erpnext.com"
app_license = "GNU General Public License (v3)"
source_link = "https://github.com/frappe/erpnext"
develop_version = '12.x.x-develop'
# error_report_email = "support@erpnext.com"
app_include_js = "assets/js/erpnext.min.js"
app_include_css = "assets/css/erpnext.css"
web_include_js = "assets/js/erpnext-web.min.js"
web_include_css = "assets/css/erpnext-web.css"
doctype_js = {
"Communication": "public/js/communication.js",
"Event": "public/js/event.js"
}
welcome_email = "erpnext.setup.utils.welcome_email"
# setup wizard
setup_wizard_requires = "assets/erpnext/js/setup_wizard.js"
setup_wizard_stages = "erpnext.setup.setup_wizard.setup_wizard.get_setup_stages"
setup_wizard_test = "erpnext.setup.setup_wizard.test_setup_wizard.run_setup_wizard_test"
before_install = "erpnext.setup.install.check_setup_wizard_not_completed"
after_install = "erpnext.setup.install.after_install"
boot_session = "erpnext.startup.boot.boot_session"
notification_config = "erpnext.startup.notifications.get_notification_config"
get_help_messages = "erpnext.utilities.activation.get_help_messages"
get_user_progress_slides = "erpnext.utilities.user_progress.get_user_progress_slides"
update_and_get_user_progress = "erpnext.utilities.user_progress_utils.update_default_domain_actions_and_get_state"
on_session_creation = "erpnext.shopping_cart.utils.set_cart_count"
on_logout = "erpnext.shopping_cart.utils.clear_cart_count"
treeviews = ['Account', 'Cost Center', 'Warehouse', 'Item Group', 'Customer Group', 'Sales Person', 'Territory', 'Assessment Group']
# website
update_website_context = "erpnext.shopping_cart.utils.update_website_context"
my_account_context = "erpnext.shopping_cart.utils.update_my_account_context"
email_append_to = ["Job Applicant", "Lead", "Opportunity", "Issue"]
calendars = ["Task", "Work Order", "Leave Application", "Sales Order", "Holiday List", "Course Schedule"]
domains = {
'Agriculture': 'erpnext.domains.agriculture',
'Distribution': 'erpnext.domains.distribution',
'Education': 'erpnext.domains.education',
'Healthcare': 'erpnext.domains.healthcare',
'Hospitality': 'erpnext.domains.hospitality',
'Manufacturing': 'erpnext.domains.manufacturing',
'Non Profit': 'erpnext.domains.non_profit',
'Retail': 'erpnext.domains.retail',
'Services': 'erpnext.domains.services',
}
website_generators = ["Item Group", "Item", "BOM", "Sales Partner",
"Job Opening", "Student Admission"]
website_context = {
"favicon": "/assets/erpnext/images/favicon.png",
"splash_image": "/assets/erpnext/images/erp-icon.svg"
}
website_route_rules = [
{"from_route": "/orders", "to_route": "Sales Order"},
{"from_route": "/orders/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Sales Order",
"parents": [{"label": _("Orders"), "route": "orders"}]
}
},
{"from_route": "/invoices", "to_route": "Sales Invoice"},
{"from_route": "/invoices/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Sales Invoice",
"parents": [{"label": _("Invoices"), "route": "invoices"}]
}
},
{"from_route": "/supplier-quotations", "to_route": "Supplier Quotation"},
{"from_route": "/supplier-quotations/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Supplier Quotation",
"parents": [{"label": _("Supplier Quotation"), "route": "supplier-quotations"}]
}
},
{"from_route": "/quotations", "to_route": "Quotation"},
{"from_route": "/quotations/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Quotation",
"parents": [{"label": _("Quotations"), "route": "quotations"}]
}
},
{"from_route": "/shipments", "to_route": "Delivery Note"},
{"from_route": "/shipments/<path:name>", "to_route": "order",
"defaults": {
"doctype": "Delivery Note",
"parents": [{"label": _("Shipments"), "route": "shipments"}]
}
},
{"from_route": "/rfq", "to_route": "Request for Quotation"},
{"from_route": "/rfq/<path:name>", "to_route": "rfq",
"defaults": {
"doctype": "Request for Quotation",
"parents": [{"label": _("Request for Quotation"), "route": "rfq"}]
}
},
{"from_route": "/addresses", "to_route": "Address"},
{"from_route": "/addresses/<path:name>", "to_route": "addresses",
"defaults": {
"doctype": "Address",
"parents": [{"label": _("Addresses"), "route": "addresses"}]
}
},
{"from_route": "/jobs", "to_route": "Job Opening"},
{"from_route": "/admissions", "to_route": "Student Admission"},
{"from_route": "/boms", "to_route": "BOM"},
{"from_route": "/timesheets", "to_route": "Timesheet"},
]
standard_portal_menu_items = [
{"title": _("Personal Details"), "route": "/personal-details", "reference_doctype": "Patient", "role": "Patient"},
{"title": _("Projects"), "route": "/project", "reference_doctype": "Project"},
{"title": _("Request for Quotations"), "route": "/rfq", "reference_doctype": "Request for Quotation", "role": "Supplier"},
{"title": _("Supplier Quotation"), "route": "/supplier-quotations", "reference_doctype": "Supplier Quotation", "role": "Supplier"},
{"title": _("Quotations"), "route": "/quotations", "reference_doctype": "Quotation", "role":"Customer"},
{"title": _("Orders"), "route": "/orders", "reference_doctype": "Sales Order", "role":"Customer"},
{"title": _("Invoices"), "route": "/invoices", "reference_doctype": "Sales Invoice", "role":"Customer"},
{"title": _("Shipments"), "route": "/shipments", "reference_doctype": "Delivery Note", "role":"Customer"},
{"title": _("Issues"), "route": "/issues", "reference_doctype": "Issue", "role":"Customer"},
{"title": _("Addresses"), "route": "/addresses", "reference_doctype": "Address"},
{"title": _("Timesheets"), "route": "/timesheets", "reference_doctype": "Timesheet", "role":"Customer"},
{"title": _("Timesheets"), "route": "/timesheets", "reference_doctype": "Timesheet", "role":"Customer"},
{"title": _("Lab Test"), "route": "/lab-test", "reference_doctype": "Lab Test", "role":"Patient"},
{"title": _("Prescription"), "route": "/prescription", "reference_doctype": "Patient Encounter", "role":"Patient"},
{"title": _("Patient Appointment"), "route": "/patient-appointments", "reference_doctype": "Patient Appointment", "role":"Patient"},
{"title": _("Fees"), "route": "/fees", "reference_doctype": "Fees", "role":"Student"},
{"title": _("Newsletter"), "route": "/newsletters", "reference_doctype": "Newsletter"},
{"title": _("Admission"), "route": "/admissions", "reference_doctype": "Student Admission"},
{"title": _("Certification"), "route": "/certification", "reference_doctype": "Certification Application"},
]
default_roles = [
{'role': 'Customer', 'doctype':'Contact', 'email_field': 'email_id'},
{'role': 'Supplier', 'doctype':'Contact', 'email_field': 'email_id'},
{'role': 'Student', 'doctype':'Student', 'email_field': 'student_email_id'},
]
has_website_permission = {
"Sales Order": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Quotation": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Sales Invoice": "erpnext.controllers.website_list_ | for_contact.has_website_permission",
"Supplier Quotation": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Delivery Note": "erpnext.controllers.website_lis | t_for_contact.has_website_permission",
"Issue": "erpnext.support.doctype.issue.issue.has_website_permission",
"Timesheet": "erpnext.controllers.website_list_for_contact.has_website_permission",
"Lab Test": "erpnext.healthcare.web_form.lab_test.lab_test.has_website_permission",
"Patient Encounter": "erpnext.healthcare.web_form.prescription.prescription.has_website_permission",
"Patient Appointment": "erpnext.healthcare.web_form.patient_appointments.patient_appointments.has_website_permission",
"Patient": "erpnext.healthcare.web_form.personal_details.personal_details.has_website_permission"
}
dump_report_map = "erpnext.startup.report_data_map.data_map"
before_tests = "erpnext.setup. |
em92/pickup-rating | qllr/blueprints/ratings/methods.py | Python | mit | 3,787 | 0.001056 | import json
from math import ceil
from asyncpg import Connection
from qllr.common import MATCH_LIST_ITEM_COUNT
from qllr.db import cache
from qllr.settings import PLAYER_COUNT_PER_PAGE
KEEPING_TIME = 60 * 60 * 24 * 30
SQL_TOP_PLAYERS_BY_GAMETYPE = """
SELECT
p.steam_id,
p.name,
p.model,
gr.rating,
gr.deviation,
gr.n,
count(*) OVER () AS count,
ROW_NUMBER() OVER (ORDER BY gr.rating DESC) AS rank
FROM
players p
LEFT JOIN (SUBQUERY) gr ON
gr.steam_id = p.steam_id
WHERE
gr.n >= 10 AND
gr.last_played_timestamp > LEAST( $1, (
SELECT timestamp
FROM matches
WHERE gametype_id = $2
ORDER BY timestamp DESC
LIMIT 1 OFFSET {OFFSET}
)) AND
gr.gametype_id = $2
ORDER BY gr.rating DESC
""".format(
OFFSET=int(MATCH_LIST_ITEM_COUNT)
).replace(
"(SUBQUERY)", "({SUBQUERY})"
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R1 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r1_mean AS rating,
r1_deviation AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
SQL_TOP_PLAYERS_BY_GAMETYPE_R2 = SQL_TOP_PLAYERS_BY_GAMETYPE.format(
SUBQUERY="""
SELECT
steam_id,
r2_value AS rating,
0 AS deviation,
last_played_timestamp,
gametype_id,
n
FROM
gametype_ratings
"""
)
def get_sql_top_players_query_by_gametype_id(gametype_id: int):
if cache.USE_AVG_PERF[gametype_id]:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R2
else:
return SQL_TOP_PLAYERS_BY_GAMETYPE_R1
async def get_list(con: Connection, gametype_id: int, page: int, show_inactive=False):
await con.set_type_codec(
"json", encoder=json.dumps, decoder=json.loads, schema="pg_catalog"
)
query = get_sql_top_players_query_by_gametype_id(
gametype_id
) + "LIMIT {LIMIT} OFFSET {OFFSET}".format(
LIMIT=int(PLAYER_COUNT_PER_PAGE), OFFSET=int(PLAYER_COUNT_PER_PAGE * page)
)
start_timestamp = 0
if show_ina | ctive is False:
start_timestamp = cach | e.LAST_GAME_TIMESTAMPS[gametype_id] - KEEPING_TIME
result = []
player_count = 0
async for row in con.cursor(query, start_timestamp, gametype_id):
if row[0] != None:
result.append(
{
"_id": str(row[0]),
"name": row[1],
"model": (
row[2] + ("/default" if row[2].find("/") == -1 else "")
).lower(),
"rating": round(row[3], 2),
"rd": round(row[4], 2),
"n": row[5],
"rank": row[7],
}
)
player_count = row[6]
steam_ids = list(map(lambda player: int(player["_id"]), result))
query = """
SELECT
s.steam_id,
CEIL(AVG(CASE
WHEN m.team1_score > m.team2_score AND s.team = 1 THEN 1
WHEN m.team2_score > m.team1_score AND s.team = 2 THEN 1
ELSE 0
END)*100)
FROM
matches m
LEFT JOIN scoreboards s ON s.match_id = m.match_id
WHERE
m.gametype_id = $1 AND s.steam_id = ANY($2)
GROUP BY s.steam_id;
"""
for row in await con.fetch(query, gametype_id, steam_ids):
try:
result_index = steam_ids.index(row[0])
result[result_index]["win_ratio"] = int(row[1])
except ValueError:
pass # must not happen
return {
"ok": True,
"response": result,
"page_count": ceil(player_count / PLAYER_COUNT_PER_PAGE),
}
|
olasitarska/django | tests/serializers/models.py | Python | bsd-3-clause | 3,125 | 0.00064 | # -*- coding: utf-8 -*-
"""
42. Serialization
``django.core.serializers`` provides interfaces to converting Django
``QuerySet`` objects to and from "flat" data (i.e. strings).
"""
from __future__ import unicode_literals
from decimal import Decimal
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Category(models.Model):
name = models.CharField(max_length=20)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=20)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Article(models.Model):
author = models.ForeignKey(Author)
headline = models.CharField(max_length=50)
pub_date = models.DateTimeField()
categories = models.ManyToManyField(Category)
class Meta:
ordering = ('pub_date',)
def __str__(self):
return self.headline
@python_2_unicode_compatible
class AuthorProfile(models.Model):
author = models.OneToOneField(Author, primary_key=True)
date_of_birth = models.DateField()
def __str__(self):
return "Profile of %s" % self.author
@python_2_unicode_compatible
class Actor(models.Model):
name = models.CharField(max_length=20, primary_key=True)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Movie(models.Model):
actor = models.ForeignKey(Actor)
title = models.CharField(max_length=50)
price = models.DecimalField(max_digits=6, decimal_places=2, default=Decimal('0.00'))
class Meta:
ordering = ('title',)
def __str__(self):
return self.title
class Score(models.Model):
score = models | .FloatField()
@python_2_unicode_compatible
class Team(object):
def __init__(self, title):
self.title = title
def __str__(self):
raise NotImplementedError("Not so simple")
def to_string(self):
return "%s" % self.title
class TeamField(models.CharField):
def __init__(self):
super(TeamField, self).__init__(max_length=100)
def get_db_prep_save(self, val | ue, connection):
return six.text_type(value.title)
def to_python(self, value):
if isinstance(value, Team):
return value
return Team(value)
def from_db_value(self, value, connection):
return Team(value)
def value_to_string(self, obj):
return self._get_val_from_obj(obj).to_string()
def deconstruct(self):
name, path, args, kwargs = super(TeamField, self).deconstruct()
del kwargs['max_length']
return name, path, args, kwargs
@python_2_unicode_compatible
class Player(models.Model):
name = models.CharField(max_length=50)
rank = models.IntegerField()
team = TeamField()
def __str__(self):
return '%s (%d) playing for %s' % (self.name, self.rank, self.team.to_string())
|
sailthru/mongo-connector | tests/test_oplog_manager.py | Python | apache-2.0 | 17,080 | 0.00041 | # Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test oplog manager methods
"""
import itertools
import re
import sys
import time
import bson
import gridfs
import pymongo
sys.path[0:0] = [""]
from mongo_connector.doc_managers.doc_manager_simulator import DocManager
from mongo_connector.locking_dict import LockingDict
from mongo_connector.namespace_config import NamespaceConfig
from mongo_connector.oplog_manager import OplogThread
from mongo_connector.test_utils import (assert_soon,
close_client,
ReplicaSetSingle)
from mongo_connector.util import bson_ts_to_long
from tests import unittest
class TestOplogManager(unittest.TestCase):
"""Defines all the testing methods, as well as a method that sets up the
cluster
"""
def setUp(self):
self.repl_set = ReplicaSetSingle().start()
self.primary_conn = self.repl_set.client()
self.oplog_coll = self.primary_conn.local['oplog.rs']
self.opman = OplogThread(
primary_client=self.primary_conn,
doc_managers=(DocManager(),),
oplog_progress_dict=LockingDict(),
namespace_config=NamespaceConfig(
namespace_options={
| 'test.*': True,
'gridfs.*': {'gridfs': True}
}
),
)
def tearDown(self):
try:
self.opman.join()
except RuntimeError:
pass # OplogThread may not have been started
self.primary_conn.drop_database("test")
close_client(self.primary_conn)
self.repl_set.stop()
def test_get_oplog_cursor(self):
"""Test the get_oplog_cur | sor method"""
# timestamp is None - all oplog entries excluding no-ops are returned.
cursor = self.opman.get_oplog_cursor(None)
self.assertEqual(cursor.count(),
self.primary_conn["local"]["oplog.rs"].find(
{'op': {'$ne': 'n'}}).count())
# earliest entry is the only one at/after timestamp
doc = {"ts": bson.Timestamp(1000, 0), "i": 1}
self.primary_conn["test"]["test"].insert_one(doc)
latest_timestamp = self.opman.get_last_oplog_timestamp()
cursor = self.opman.get_oplog_cursor(latest_timestamp)
self.assertNotEqual(cursor, None)
self.assertEqual(cursor.count(), 1)
next_entry_id = next(cursor)['o']['_id']
retrieved = self.primary_conn.test.test.find_one(next_entry_id)
self.assertEqual(retrieved, doc)
# many entries before and after timestamp
self.primary_conn["test"]["test"].insert_many(
[{"i": i} for i in range(2, 1002)])
oplog_cursor = self.oplog_coll.find(
{'op': {'$ne': 'n'},
'ns': {'$not': re.compile(r'\.(system|\$cmd)')}},
sort=[("ts", pymongo.ASCENDING)]
)
# initial insert + 1000 more inserts
self.assertEqual(oplog_cursor.count(), 1 + 1000)
pivot = oplog_cursor.skip(400).limit(-1)[0]
goc_cursor = self.opman.get_oplog_cursor(pivot["ts"])
self.assertEqual(goc_cursor.count(), 1 + 1000 - 400)
def test_get_last_oplog_timestamp(self):
"""Test the get_last_oplog_timestamp method"""
# "empty" the oplog
self.opman.oplog = self.primary_conn["test"]["emptycollection"]
self.assertEqual(self.opman.get_last_oplog_timestamp(), None)
# Test non-empty oplog
self.opman.oplog = self.primary_conn["local"]["oplog.rs"]
for i in range(1000):
self.primary_conn["test"]["test"].insert_one({
"i": i + 500
})
oplog = self.primary_conn["local"]["oplog.rs"]
oplog = oplog.find().sort("$natural", pymongo.DESCENDING).limit(-1)[0]
self.assertEqual(self.opman.get_last_oplog_timestamp(),
oplog["ts"])
def test_dump_collection(self):
"""Test the dump_collection method
Cases:
1. empty oplog
2. non-empty oplog, with gridfs collections
3. non-empty oplog, specified a namespace-set, none of the oplog
entries are for collections in the namespace-set
"""
# Test with empty oplog
self.opman.oplog = self.primary_conn["test"]["emptycollection"]
last_ts = self.opman.dump_collection()
self.assertEqual(last_ts, None)
# Test with non-empty oplog with gridfs collections
self.opman.oplog = self.primary_conn["local"]["oplog.rs"]
# Insert 10 gridfs files
for i in range(10):
fs = gridfs.GridFS(self.primary_conn["gridfs"],
collection="test" + str(i))
fs.put(b"hello world")
# Insert 1000 documents
for i in range(1000):
self.primary_conn["test"]["test"].insert_one({
"i": i + 500
})
last_ts = self.opman.get_last_oplog_timestamp()
self.assertEqual(last_ts, self.opman.dump_collection())
self.assertEqual(len(self.opman.doc_managers[0]._search()), 1010)
# Case 3
# 1MB oplog so that we can rollover quickly
repl_set = ReplicaSetSingle(oplogSize=1).start()
conn = repl_set.client()
opman = OplogThread(
primary_client=conn,
doc_managers=(DocManager(),),
oplog_progress_dict=LockingDict(),
namespace_config=NamespaceConfig(namespace_set=["test.test"]),
)
# Insert a document into an included collection
conn["test"]["test"].insert_one({"test": 1})
# Cause the oplog to rollover on a non-included collection
while conn["local"]["oplog.rs"].find_one({"ns": "test.test"}):
conn["test"]["ignored"].insert_many(
[{"test": "1" * 1024} for _ in range(1024)])
last_ts = opman.get_last_oplog_timestamp()
self.assertEqual(last_ts, opman.dump_collection())
self.assertEqual(len(opman.doc_managers[0]._search()), 1)
conn.close()
repl_set.stop()
def test_skipped_oplog_entry_updates_checkpoint(self):
repl_set = ReplicaSetSingle().start()
conn = repl_set.client()
opman = OplogThread(
primary_client=conn,
doc_managers=(DocManager(),),
oplog_progress_dict=LockingDict(),
namespace_config=NamespaceConfig(namespace_set=["test.test"]),
)
opman.start()
# Insert a document into an included collection
conn["test"]["test"].insert_one({"test": 1})
last_ts = opman.get_last_oplog_timestamp()
assert_soon(lambda: last_ts == opman.checkpoint,
"OplogThread never updated checkpoint to non-skipped "
"entry.")
self.assertEqual(len(opman.doc_managers[0]._search()), 1)
# Make sure that the oplog thread updates its checkpoint on every
# oplog entry.
conn["test"]["ignored"].insert_one({"test": 1})
last_ts = opman.get_last_oplog_timestamp()
assert_soon(lambda: last_ts == opman.checkpoint,
"OplogThread never updated checkpoint to skipped entry.")
opman.join()
conn.close()
repl_set.stop()
def test_dump_collection_with_error(self):
"""Test the dump_collection method with invalid documents.
Cases:
1. non-empty oplog, continue_on_error=True, invalid documents
"""
# non-empty oplog, continue_on_error=True, invalid documents
|
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2021_04_01/models/__init__.py | Python | mit | 28,223 | 0.000177 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._models_py3 import AccessUri
from ._models_py3 import AdditionalCapabilities
from ._models_py3 import AdditionalUnattendContent
from ._models_py3 import ApiEntityReference
from ._models_py3 import ApiError
from ._models_py3 import ApiErrorBase
from ._models_py3 import Auto | maticOSUpgradePolicy
from ._models_py3 import AutomaticOSUpgradeProperties
from ._models_py3 import AutomaticRepairsPolicy
from ._ | models_py3 import AvailabilitySet
from ._models_py3 import AvailabilitySetListResult
from ._models_py3 import AvailabilitySetUpdate
from ._models_py3 import AvailablePatchSummary
from ._models_py3 import BillingProfile
from ._models_py3 import BootDiagnostics
from ._models_py3 import BootDiagnosticsInstanceView
from ._models_py3 import CapacityReservation
from ._models_py3 import CapacityReservationGroup
from ._models_py3 import CapacityReservationGroupInstanceView
from ._models_py3 import CapacityReservationGroupListResult
from ._models_py3 import CapacityReservationGroupUpdate
from ._models_py3 import CapacityReservationInstanceView
from ._models_py3 import CapacityReservationInstanceViewWithName
from ._models_py3 import CapacityReservationListResult
from ._models_py3 import CapacityReservationProfile
from ._models_py3 import CapacityReservationUpdate
from ._models_py3 import CapacityReservationUtilization
from ._models_py3 import ComputeOperationListResult
from ._models_py3 import ComputeOperationValue
from ._models_py3 import CreationData
from ._models_py3 import DataDisk
from ._models_py3 import DataDiskImage
from ._models_py3 import DedicatedHost
from ._models_py3 import DedicatedHostAllocatableVM
from ._models_py3 import DedicatedHostAvailableCapacity
from ._models_py3 import DedicatedHostGroup
from ._models_py3 import DedicatedHostGroupInstanceView
from ._models_py3 import DedicatedHostGroupListResult
from ._models_py3 import DedicatedHostGroupUpdate
from ._models_py3 import DedicatedHostInstanceView
from ._models_py3 import DedicatedHostInstanceViewWithName
from ._models_py3 import DedicatedHostListResult
from ._models_py3 import DedicatedHostUpdate
from ._models_py3 import DiagnosticsProfile
from ._models_py3 import DiffDiskSettings
from ._models_py3 import DisallowedConfiguration
from ._models_py3 import Disk
from ._models_py3 import DiskAccess
from ._models_py3 import DiskAccessList
from ._models_py3 import DiskAccessUpdate
from ._models_py3 import DiskEncryptionSet
from ._models_py3 import DiskEncryptionSetList
from ._models_py3 import DiskEncryptionSetParameters
from ._models_py3 import DiskEncryptionSetUpdate
from ._models_py3 import DiskEncryptionSettings
from ._models_py3 import DiskInstanceView
from ._models_py3 import DiskList
from ._models_py3 import DiskRestorePoint
from ._models_py3 import DiskRestorePointList
from ._models_py3 import DiskSecurityProfile
from ._models_py3 import DiskSku
from ._models_py3 import DiskUpdate
from ._models_py3 import Encryption
from ._models_py3 import EncryptionSetIdentity
from ._models_py3 import EncryptionSettingsCollection
from ._models_py3 import EncryptionSettingsElement
from ._models_py3 import ExtendedLocation
from ._models_py3 import GrantAccessData
from ._models_py3 import HardwareProfile
from ._models_py3 import Image
from ._models_py3 import ImageDataDisk
from ._models_py3 import ImageDisk
from ._models_py3 import ImageDiskReference
from ._models_py3 import ImageListResult
from ._models_py3 import ImageOSDisk
from ._models_py3 import ImageReference
from ._models_py3 import ImageStorageProfile
from ._models_py3 import ImageUpdate
from ._models_py3 import InnerError
from ._models_py3 import InstanceViewStatus
from ._models_py3 import KeyForDiskEncryptionSet
from ._models_py3 import KeyVaultAndKeyReference
from ._models_py3 import KeyVaultAndSecretReference
from ._models_py3 import KeyVaultKeyReference
from ._models_py3 import KeyVaultSecretReference
from ._models_py3 import LastPatchInstallationSummary
from ._models_py3 import LinuxConfiguration
from ._models_py3 import LinuxParameters
from ._models_py3 import LinuxPatchSettings
from ._models_py3 import ListUsagesResult
from ._models_py3 import LogAnalyticsInputBase
from ._models_py3 import LogAnalyticsOperationResult
from ._models_py3 import LogAnalyticsOutput
from ._models_py3 import MaintenanceRedeployStatus
from ._models_py3 import ManagedDiskParameters
from ._models_py3 import NetworkInterfaceReference
from ._models_py3 import NetworkProfile
from ._models_py3 import OSDisk
from ._models_py3 import OSDiskImage
from ._models_py3 import OSProfile
from ._models_py3 import OrchestrationServiceStateInput
from ._models_py3 import OrchestrationServiceSummary
from ._models_py3 import PatchInstallationDetail
from ._models_py3 import PatchSettings
from ._models_py3 import Plan
from ._models_py3 import PrivateEndpoint
from ._models_py3 import PrivateEndpointConnection
from ._models_py3 import PrivateEndpointConnectionListResult
from ._models_py3 import PrivateLinkResource
from ._models_py3 import PrivateLinkResourceListResult
from ._models_py3 import PrivateLinkServiceConnectionState
from ._models_py3 import PropertyUpdatesInProgress
from ._models_py3 import ProximityPlacementGroup
from ._models_py3 import ProximityPlacementGroupListResult
from ._models_py3 import ProximityPlacementGroupUpdate
from ._models_py3 import ProxyOnlyResource
from ._models_py3 import ProxyResource
from ._models_py3 import PublicIPAddressSku
from ._models_py3 import PurchasePlan
from ._models_py3 import PurchasePlanAutoGenerated
from ._models_py3 import RecoveryWalkResponse
from ._models_py3 import RequestRateByIntervalInput
from ._models_py3 import Resource
from ._models_py3 import ResourceUriList
from ._models_py3 import RestorePoint
from ._models_py3 import RestorePointCollection
from ._models_py3 import RestorePointCollectionListResult
from ._models_py3 import RestorePointCollectionSourceProperties
from ._models_py3 import RestorePointCollectionUpdate
from ._models_py3 import RestorePointSourceMetadata
from ._models_py3 import RestorePointSourceVMDataDisk
from ._models_py3 import RestorePointSourceVMOSDisk
from ._models_py3 import RestorePointSourceVMStorageProfile
from ._models_py3 import RetrieveBootDiagnosticsDataResult
from ._models_py3 import RollbackStatusInfo
from ._models_py3 import RollingUpgradePolicy
from ._models_py3 import RollingUpgradeProgressInfo
from ._models_py3 import RollingUpgradeRunningStatus
from ._models_py3 import RollingUpgradeStatusInfo
from ._models_py3 import RunCommandDocument
from ._models_py3 import RunCommandDocumentBase
from ._models_py3 import RunCommandInput
from ._models_py3 import RunCommandInputParameter
from ._models_py3 import RunCommandListResult
from ._models_py3 import RunCommandParameterDefinition
from ._models_py3 import RunCommandResult
from ._models_py3 import ScaleInPolicy
from ._models_py3 import ScheduledEventsProfile
from ._models_py3 import SecurityProfile
from ._models_py3 import ShareInfoElement
from ._models_py3 import Sku
from ._models_py3 import Snapshot
from ._models_py3 import SnapshotList
from ._models_py3 import SnapshotSku
from ._models_py3 import SnapshotUpdate
from ._models_py3 import SourceVault
from ._models_py3 import SpotRestorePolicy
from ._models_py3 import SshConfiguration
from ._models_py3 import SshPublicKey
from ._models_py3 import SshPublicKeyGenerateKeyPairResult
from ._models_py3 import SshPublicKeyResource
from ._models_py3 import SshPublicKeyUpdateResource
from ._models_py3 import SshPublicKeysGroupListResult
from ._models_py3 import StorageProfile
from ._models_py3 import SubResource
from ._models_py3 import SubResourceReadOnly
from ._models_py3 import SubResourceWithColocationStatus
from ._models_py3 |
yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/botservice/_exception_handler.py | Python | mit | 1,308 | 0.003058 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.util import CLIError
def bot_exception_handler(ex):
from azure.mgmt.botservice.models import ErrorException
from msrestazure.azure_exceptions import CloudError
from msrest.exceptions import ClientRequestError # pylint: disable=import-error
if isinstance(ex, ErrorException):
message = 'An error occurred. {0}: {1}'.format(
ex.error.error.code,
ex.error.error.message
)
raise CL | IError | (message)
if isinstance(ex, CloudError) and ex.status_code == 404:
return None
if isinstance(ex, ClientRequestError):
message = 'Error occurred in sending request. Please file an issue on {0}'.format(
'https://github.com/microsoft/botframework-sdk'
)
raise CLIError(message)
message = 'Unknown error during execution. Please file an issue on {0}'.format(
'https://github.com/microsoft/botframework-sdk'
)
raise CLIError(message)
|
jakev/dtf | python-dtf/tests/unit/test_prop.py | Python | apache-2.0 | 5,430 | 0 | # Android Device Testing Framework ("dtf")
# Copyright 2013-2016 Jake Valletta (@jake_valletta)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""pytest for using dtf property manager"""
from __future__ import absolute_import
import pytest
import dtf.properties as prop
import dtf.testutils as testutils
# prop_set() tests
def test_set_new_property():
"""Attempt to set a new property (existing section)"""
value = '1'
contents = ("[info]\n"
"real = not_real")
testutils.deploy_config_raw(contents)
prop.set_prop('info', 'sdk', value)
assert prop.get_prop('info', 'sdk') == value
testutils.undeploy()
def test_set_new_section_property():
"""Set a property that has no section (yet)"""
value = '1'
testutils.deploy_config_raw("")
prop.set_prop('info', 'sdk', value)
assert prop.get_prop('info', 'sdk') == value
testutils.undeploy()
return 0
def test_set_existing_property():
"""Set a property that already exists"""
value = 'new'
contents = ("[Info]\n"
"sdk = old")
testutils.deploy_config_raw(contents)
prop.set_prop('info', 'sdk', value)
assert prop.get_prop('info', 'sdk') == value
testutils.undeploy()
return 0
def test_set_property_casing():
"""Set a prop and try to retrieve with casing"""
sdk = '1'
testutils.deploy_config_raw("")
prop.set_prop('INFO', 'sdk', sdk)
assert prop.get_prop('info', 'sdk') == sdk
assert prop.get_prop('Info', 'sdk') == sdk
assert prop.get_prop('INFO', 'sdk') == sdk
testutils.undeploy()
return 0
# prop_get() tests
def test_get_empty_config():
"""Attempts to get a property without a valid config"""
testutils.deploy_config_raw("")
with pytest.raises(prop.PropertyError):
prop.get_prop('info', 'sdk')
testutils.undeploy()
return 0
def test_get_property():
"""Attempts to get a valid property"""
sdk = '23'
contents = ("[Info]\n"
"sdk = %s" % sdk)
testutils.deploy_config_raw(contents)
assert prop.get_prop('info', 'sdk') == sdk
testutils.undeploy()
return 0
def test_get_property_no_option():
"""Attempt to get property that doesnt exist"""
contents = ("[Info]\n"
"vmtype = arm64")
testutils.deploy_config_raw(contents)
with pytest.raises(prop.PropertyError):
prop.get_prop('info', 'sdk')
testutils.undeploy()
return 0
def test_get_property_casing():
"""Get a prop with alternating casing"""
sdk = '23'
contents = ("[Info]\n"
"sdk = %s" % sdk)
testutils.deploy_config_raw(contents)
assert prop.get_prop('info', 'sdk') == sdk
assert prop.get_prop('Info', 'sdk') == | sdk
assert prop.get_prop('INFO', 'sdk') == sdk
testutils.undeploy()
return 0
# prop_del() tests
def test_del_empty_config():
"""Attempts to delete a property without a valid config"""
testutils.deploy_config_raw("")
assert prop.del_prop('info', 'sdk') != 0
testutils.undeploy()
return 0
def test_del_property():
"""Attempts to delete a valid property"" | "
contents = ("[Info]\n"
"sdk = 23")
testutils.deploy_config_raw(contents)
prop.del_prop('info', 'sdk')
testutils.undeploy()
return 0
def test_del_property_invalid():
"""Attempts to delete a property that doesnt exist"""
contents = ("[Info]\n"
"vmtype = 64")
testutils.deploy_config_raw(contents)
assert prop.del_prop('info', 'sdk') != 0
testutils.undeploy()
return 0
def test_del_property_casing():
"""Delete a prop with alternating casing"""
sdk = '23'
contents = ("[Info]\n"
"sdk = %s" % sdk)
testutils.deploy_config_raw(contents)
prop.del_prop('info', 'sdk')
testutils.undeploy()
return 0
# prop_test() tests
def test_test_empty_config():
"""Test a property without a valid config"""
testutils.deploy_config_raw("")
assert prop.test_prop('info', 'sdk') == 0
testutils.undeploy()
return 0
def test_test_property():
"""Test a valid property"""
contents = ("[Info]\n"
"sdk = 23")
testutils.deploy_config_raw(contents)
assert prop.test_prop('info', 'sdk') == 1
testutils.undeploy()
return 0
def test_test_invalid_property():
"""Test a missingproperty"""
contents = ("[Info]\n"
"vmtype = arm64")
testutils.deploy_config_raw(contents)
assert prop.test_prop('info', 'sdk') == 0
testutils.undeploy()
return 0
def test_test_property_casing():
"""Test a prop with alternating casing"""
sdk = '23'
contents = ("[Info]\n"
"sdk = %s" % sdk)
testutils.deploy_config_raw(contents)
assert prop.test_prop('info', 'sdk') == 1
testutils.undeploy()
return 0
|
modoboa/modoboa | modoboa/admin/migrations/0018_auto_20201204_0935.py | Python | isc | 357 | 0 | # Generated by Django 2.2.12 on 2020-12-04 08:35
from django.db import migrations, models
class Migration(migrations.Migration):
|
dependencies = [
('admin', '0017_alarm'),
]
operations = [
migrations.AlterField(
mo | del_name='alarm',
name='title',
field=models.TextField(),
),
]
|
benjello/openfisca-france | openfisca_france/model/caracteristiques_socio_demographiques/logement.py | Python | agpl-3.0 | 4,749 | 0.005925 | # -*- coding: utf-8 -*-
from numpy import logical_not as not_, logical_or as or_
from numpy.core.defchararray import startswith
from openfisca_france.model.base import * # noqa analysis:ignore
class coloc(Variable):
column = BoolCol
entity_class = Individus
label = u"Vie en colocation"
class logement_chambre(Variable):
column = BoolCol
entity_class = Individus
label = u"Le logement est considéré comme une chambre"
class loyer(Variable):
column = FloatCol()
entity_class = Menages
set_input = set_input_divide_by_period
label = u"Loyer ou mensualité d'emprunt pour un primo-accédant"
class loyer_individu(EntityToPersonColumn):
entity_class = Individus
label = u"Zone apl de la personne"
variable = loyer
class depcom(Variable):
column = FixedStrCol(max_length = 5)
entity_class = Menages
label = u"Code INSEE (depcom) du lieu de résidence"
class loyer_famille(PersonToEntityColumn):
entity_class = Familles
label = u"Zone apl de la famille"
role = CHEF
variable = loyer_individu
class charges_locatives(Variable):
column = FloatCol()
entity_class = Menages
set_input = set_input_divide_by_period
label = u'Charges locatives'
class proprietaire_proche_famille(Variable):
column = BoolCol
entity_class = Familles
label = u"Le | propriétaire du logement a un lien de parenté avec la personne de référence ou son conjoint"
class statut_occupation_logement(Variable):
column = EnumCol(
enum = Enum([
u"Non renseigné",
| u"Accédant à la propriété",
u"Propriétaire (non accédant) du logement",
u"Locataire d'un logement HLM",
u"Locataire ou sous-locataire d'un logement loué vide non-HLM",
u"Locataire ou sous-locataire d'un logement loué meublé ou d'une chambre d'hôtel",
u"Logé gratuitement par des parents, des amis ou l'employeur",
u"Locataire d'un foyer (résidence universitaire, maison de retraite, foyer de jeune travailleur, résidence sociale...)",
u"Sans domicile stable"])
)
entity_class = Menages
label = u"Statut d'occupation du logement"
set_input = set_input_dispatch_by_period
class residence_dom(Variable):
column = BoolCol
entity_class = Familles
def function(self, simulation, period):
residence_guadeloupe = simulation.calculate('residence_guadeloupe', period)
residence_martinique = simulation.calculate('residence_martinique', period)
residence_guyane = simulation.calculate('residence_guyane', period)
residence_reunion = simulation.calculate('residence_reunion', period)
residence_mayotte = simulation.calculate('residence_mayotte', period)
return period, or_(or_(residence_guadeloupe, residence_martinique), or_(or_(residence_reunion, residence_guyane), residence_mayotte))
class residence_guadeloupe(Variable):
column = BoolCol
entity_class = Familles
def function(self, simulation, period):
depcom_holder = simulation.compute('depcom', period)
depcom = self.cast_from_entity_to_roles(depcom_holder)
depcom = self.filter_role(depcom, role = CHEF)
return period, startswith(depcom, '971')
class residence_martinique(Variable):
column = BoolCol
entity_class = Familles
def function(self, simulation, period):
depcom_holder = simulation.compute('depcom', period)
depcom = self.cast_from_entity_to_roles(depcom_holder)
depcom = self.filter_role(depcom, role = CHEF)
return period, startswith(depcom, '972')
class residence_guyane(Variable):
column = BoolCol
entity_class = Familles
def function(self, simulation, period):
depcom_holder = simulation.compute('depcom', period)
depcom = self.cast_from_entity_to_roles(depcom_holder)
depcom = self.filter_role(depcom, role = CHEF)
return period, startswith(depcom, '973')
class residence_reunion(Variable):
column = BoolCol
entity_class = Familles
def function(self, simulation, period):
depcom_holder = simulation.compute('depcom', period)
depcom = self.cast_from_entity_to_roles(depcom_holder)
depcom = self.filter_role(depcom, role = CHEF)
return period, startswith(depcom, '974')
class residence_mayotte(Variable):
column = BoolCol
entity_class = Familles
def function(self, simulation, period):
depcom_holder = simulation.compute('depcom', period)
depcom = self.cast_from_entity_to_roles(depcom_holder)
depcom = self.filter_role(depcom, role = CHEF)
return period, startswith(depcom, '976')
|
bplower/legendary-waffle-lib | test/test_universe_create.py | Python | apache-2.0 | 2,904 | 0.007576 | #!/usr/bin/env python
"""
This pretty much just tests creating a user, a universe, a planet, a building type name, a building
type, and a building.
"""
import os
import sys
import sqlalchemy
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import legendary_waffle
# Database setup
db_engine = sqlalchemy.create_engine("sqlite://")
legendary_waffle.models.MODELBASE.metadata.create_all(db_engine)
legendary_waffle.models.MODELBASE.metadata.bind = db_engine
db_session = sqlalchemy.orm.sessionmaker(bind=db_engine)
db = db_session()
# Create the user
legendary_waffle.model_create(db, legendary_waffle.models.User, name='sk4ly')
print "Users: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.User))
# Create the universe
universe_config = {
"name": 'poopiverse',
"map_size": 1000,
"max_planets": 1000,
"max_players": 10
}
legendary_waffle.model_create(db, legendary_waffle.models.Universe, **universe_config)
print "Universe: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.Universe))
# Create the planet
planet_config = {
"universe": 1, # The pkid of the universe 'poopiverse'
"coordinate_x": 1,
"coordinate_y": 1,
"name": 'bloth',
"habitable": True,
"player_control": 1, # The pkid of user 'sk4ly'
"default_condition": 1000,
"default_resources": 1000,
"current_condition": 1000,
"current_resources": 1000
}
legendary_waffle.model_create(db, legendary_waffle.models.Planet, **planet_config)
print "Planet: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.Planet))
# Create building type name
legendary_waffle.model_create(db, legendary_waffle.models.BuildingTypeName, name="Control Center")
print "Building Type Name: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.BuildingTypeName))
# Create building type
building_type_config = {
"type | name": 1, # The pkid of the building type name 'Control Center'
"description": "This is the | control center",
"default_condition": 100,
"default_firepower": 0,
"default_storage": 100,
"rhr_passive": 0,
"rhr_active": 0,
"rhr_destructive": 0,
"build_resource_reqs": 500,
}
legendary_waffle.model_create(db, legendary_waffle.models.BuildingType, **building_type_config)
print "Building Type: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.BuildingType))
# Now create our new building
building_config = {
"building_type": 1, # The pkid of the building type with the name 'Control Center'
"universe": 1, # The pkid of the universe 'poopiverse'
"planet": 1, # The pkid of the planet 'bloth'
"player_control": 1, # The pkid of the user 'sk4ly'
}
legendary_waffle.model_create(db, legendary_waffle.models.Building, **building_config)
print "Building: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.Building))
|
unho/virtaal | virtaal/views/widgets/storecellrenderer.py | Python | gpl-2.0 | 10,840 | 0.00369 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008-2010 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import gtk
import gobject
import pango
from translate.lang import factory
from virtaal.common import pan_app
from virtaal.support.simplegeneric import generic
from virtaal.views import markup, rendering
from virtaal.views.theme import current_theme
@generic
def compute_optimal_height(widget, width):
raise NotImplementedError()
@compute_optimal_height.when_type(gtk.Widget)
def gtk_widget_compute_optimal_height(widget, width):
pass
@compute_optimal_height.when_type(gtk.Container)
def gtk_container_compute_optimal_height(widget, width):
if not widget.props.visible:
return
for child in widget.get_children():
compute_optimal_height(child, width)
@compute_optimal_height.when_type(gtk.Table)
def gtk_table_compute_optimal_height(widget, width):
for child in widget.get_children():
# width / 2 because we use half of the available width
compute_optimal_height(child, width / 2)
@compute_optimal_height.when_type(gtk.TextView)
def gtk_textview_compute_optimal_height(widget, width):
if not widget.props.visible:
return
buf = widget.get_buffer()
# For border calculations, see gtktextview.c:gtk_text_view_size_request in the GTK source
border = 2 * widget.border_width - 2 * widget.parent.border_width
if widget.style_get_property("interior-focus"):
border += 2 * widget.style_get_property("focus-line-width")
buftext = buf.props.text
# A good way to test height estimation is to use it for all units and
# compare the reserved space to the actual space needed to display a unit.
# To use height estimation for all units (not just empty units), use:
#if True:
if not buftext:
text = getattr(widget, '_source_text', u"")
if text:
lang = factory.getlanguage(pan_app.settings.language["targetlang"])
buftext = lang.alter_length(text)
buftext = markup.escape(buftext)
_w, h = rendering.make_pango_layout(widget, buftext, width - border).get_pixel_size()
if h == 0:
# No idea why this bug happens, but it often happens for the first unit
# directly after the file is opened. For now we try to guess a more
# useful default than 0. This should look much better than 0, at least.
h = 28
parent = widget.parent
if isinstance(parent, gtk.ScrolledWindow) and parent.get_shadow_type() != gtk.SHADOW_NONE:
border += 2 * parent.rc_get_style().ythickness
widget.parent.set_size_request(-1, h + border)
@compute_optimal_height.when_type(gtk.Label)
def gtk_label_compute_optimal_height(widget, width):
if widget.get_text().strip() == "":
widget.set_size_request(width, 0)
else:
_w, h = rendering.make_pango_layout(widget, widget.get_label(), width).get_pixel_size()
widget.set_size_request(width, h)
class StoreCellRenderer(gtk.GenericCellRenderer):
"""
Cell renderer for a unit based on the C{UnitRenderer} class from Virtaal's
pre-MVC days.
"""
__gtype_name__ = "StoreCellRenderer"
__gproperties__ = {
"unit": (
object,
"The unit",
"The unit that this renderer is currently handling",
gobject.PARAM_READWRITE
),
"editable": (
bool,
"editable",
"A boolean indicating whether this unit is currently editable",
False,
gobject.PARAM_READWRITE
),
}
__gsignals__ = {
"editing-done": (
gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
(gobject.TYPE_STRING, gobject.TYPE_BOOLEAN, gobject.TYPE_BOOLEAN)
),
"modified": (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE, ())
}
ROW_PADDING = 10
"""The number of pixels between rows."""
# INITIALIZERS #
def __init__(self, view):
gtk.GenericCellRenderer.__init__(self)
self.set_property('mode', gtk.CELL_RENDERER_MODE_EDITABLE)
self.view = view
self.__unit = None
self.editable = False
self.source_layout = None
self.target_layout = None
# ACCESSORS #
def _get_unit(self):
return self.__unit
def _set_unit(self, value):
if value.isfuzzy():
self.props.cell_background = current_theme['fuzzy_row_bg']
self.props.cell_background_set = True
else:
self.props.cell_background_set = False
self.__unit = value
unit = property(_get_unit, _set_unit, None, None)
# INTERFACE METHODS #
def do_set_property(self, pspec, value):
setattr(self, pspec.name, value)
def do_get_property(self, pspec):
return getattr(self, pspec.name)
def do_get_size(self, widget, _cell_area):
#TODO: store last unitid and computed dimensions
width = widget.get_toplevel().get_allocation().width - 32
if width < -1:
width = -1
if self.editable:
editor = self.view.get_unit_celleditor(self.unit)
editor.set_size_request(width, -1)
editor.show()
compute_optimal_height(editor, width)
parent_height = widget.get_allocation().height
if parent_height < -1:
parent_height = widget.size_request()[1]
if parent_height > 0:
self.check_editor_height(editor, width, parent_height)
_width, height = editor.size_request()
height += self.ROW_PADDING
else:
height = self.compute_cell_height(widget, width)
#height = min(height, 600)
y_offset = self.ROW_PADDING / 2
return 0, y_offset, width, height
def do_start_editing(self, _event, tree_view, path, _bg_area, cell_area, _flags):
"""Initialize and return the editor widget."""
editor = self.view.get_unit_celleditor(self.unit)
editor.set_size_request(cell_area.width, cell_area.height)
if not getattr(self, '_editor_editing_done_id', None):
self._editor_editing_done_id = editor.connect("editing-done", self._on_editor_done)
if not getattr(self, '_editor_modified_id', None):
self._editor_modified_id = editor.connect("modified", self._on_modified)
return editor
def on_render(self, window, widget, _background_area, cell_area, _expose_area, _flags):
if self.editable:
return True
x_offset, y_offset, width, _height = self.do_get_size(widget, cell_area)
x = cell_area.x + x_offset
y = cell_area.y + y_offset
source_x = x
target_x = x
if widget.get_direction() == gtk.TEXT_DIR_LTR:
target_x += width/2
else:
source_x += (width/2) + 10
widget.get_style().paint_layout(window, gtk.STATE_NORMAL, False,
cell_area, widget, | '', source_x, y, self.source_layout)
widget.get_style().paint_layout(window, gtk.STATE_NORMAL, False,
cell_area, widget, '', target_x, y, self.target_layout)
# METHODS #
def _get_pango_layout(self, widget, text, width, font_descri | ption):
'''Gets the Pango layout used in the cell in a TreeView widget.'''
# We can't use widget.get_pango_context() because we'll end up
# overwriting the language and font settings if we don't have a
# new one
layout = pango.Layout(widget.create_pang |
staranjeet/fjord | vendor/packages/translate-toolkit/translate/convert/test_po2sub.py | Python | bsd-3-clause | 2,446 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pytest import importorskip
from translate.convert import po2sub
from translate.convert import test_convert
from translate.misc import wStringIO
from translate.storage import po
# Technically subtitles can also use an older gaupol
importorskip("aeidon")
class TestPO2Sub:
def po2sub(self, posource):
"""helper that converts po source to subtitle source without requiring
files"""
inputfile = wStringIO.StringIO(posource)
inputpo = po.pofile(inputfile)
convertor = po2sub.resub()
outputsub = convertor.convertstore(inputpo)
return outputsub
def merge2sub(self, subsource, posource):
"""helper that merges po translations to subtitle source without
requiring files"""
inputfile = wStringIO.StringIO(posource)
inputpo = po.pofile(inputfile)
templatefile = wStringIO.StringIO(subsource)
convertor = po2sub.resub(templatefile, inputpo)
outputsub = convertor.convertstore()
print outputsub
return outputsub
def test_subrip(self):
"""test SubRip or .srt files."""
posource = u'''#: 00:00:20.000-->00:00:24.400
msgid "Altocumulus clouds occur between six thousand"
msgstr "Blah blah blah blah"
#: 00:00:24.600-->00:00:27.800
msgid "and twenty thousand feet above ground level."
msgstr "Koei koei koei koei"
'''
subtemplate = '''1
00:00:20,000 --> 00:00:24,400
Altocumulus clouds occur between six thousand
2
00:00:24,600 --> 00:00:27,800
and twenty thousand feet above ground level.
'''
subexpected = '''1
00:00:20,000 --> 00:00:24,400
Blah blah blah blah
2
00:00:24,600 --> 00:00:27,800
Koei koei koei koei
'''
subfile = self.merge2sub(subtemplate, posource)
print subexpected
assert subfile == subexpected
class TestPO2SubCommand(test_convert.TestConvertCommand, T | estPO2Sub):
"""Tests running actual po2sub commands on files"""
convertmodule = po2sub
defaultoptions = {"progress": "none"}
def test_help(self):
"""tests getting help"""
options = test_convert.TestConvertCommand.test_help(self)
options = self.help_check(options, "-t TEMPLATE, --template=TEMPLATE")
options = self.help_check(options, | "--threshold=PERCENT")
options = self.help_check(options, "--fuzzy")
options = self.help_check(options, "--nofuzzy", last=True)
|
pmichel31415/reddit-iambic-pentameter | rip/poet.py | Python | mit | 5,989 | 0.001002 | # -*- coding: utf-8 -*-
from __future__ import print_function, division
import sys
from collections import defaultdict
import numpy as np
import numpy.random as npr
import util
import poetry
import curse
import image
import title
class Poet(object):
"""Composes poems (duh...)"""
def __init__(self, config_file):
"""Constructor"""
# Load options
util.load_config(self, config_file)
# Set up the collection of verses
self.verses = load_verses(self.general.output_file)
self.n_verses = len(self.verses)
# Store the verses by rhyme
self.rhymes = defaultdict(lambda: set())
for i, verse in enumerate(self.verses):
self.rhymes[poetry.verse_rhyme(verse)].add(i)
# Total number of rhymes
self.n_rhymes = len(self.rhymes)
for k, v in self.rhymes.items():
self.rhymes[k] = list(v)
| # Probability of picking a rhyme
# This probability is proportional to the number of verses for each rhyme.
# In particular, for rhymes with only one verse, the probability is set to 0
self.p_rhymes = {r: (len(v) - 1) for r, v in self.rhymes.items()}
self.names_rhymes, self.p_rhymes = zip(*self.p_rhymes.items())
self.p_rhymes = np.asarray(self.p_rhymes, dtype=float)
self.p_rhymes /= self.p_rhymes.sum()
# Title generator
self.tg = | title.get_title_generator(self.title)
def add_period(self, line):
"""Adds a period at the end of line"""
if not line[-1] in '.,!?;':
line = line + '.'
elif line[-1] in ',:;':
line = line[:-1] + '.'
return line
def find_rhyming_verse(self, rhyme, verse=None):
"""Finds a random verse that rhymes with the input"""
# Get all rhyming verses
rhyming_verses = self.rhymes[rhyme]
# Until we have a different verse
# if verse is None this will make sure the rhyming verse is different
# TODO: this is not the best way to ensure the verses are different
# it is however relatively simple and works in most cases
# Sample 4 candidate verses
num_candidates = min(4, len(rhyming_verses))
candidate_ids = npr.choice(rhyming_verses, size=num_candidates, replace=False)
candidates = [self.verses[i] for i in candidate_ids]
if verse is not None:
# Select the first candidate with a different last word
for v in candidates:
if poetry.last_word(verse) != poetry.last_word(v):
return v
# If all the candidates have the same last word, just go with it
return candidates[-1]
def sample_rhyming_pair(self, rhyme):
"""Sample a pair of rhyming verses"""
first_verse = self.find_rhyming_verse(rhyme)
second_verse = self.find_rhyming_verse(rhyme, verse=first_verse)
return [first_verse, second_verse]
def generate_couplet(self):
"""Generates a couplet"""
# Sample rhymes
a = npr.choice(self.names_rhymes, p=self.p_rhymes)
# Get verses
couplet = self.sample_rhyming_pair(a)
# Add period at the end
couplet[-1] = self.add_period(couplet[-1])
# Package and ship
return '\n'.join(couplet)
def generate_quatrain(self):
"""Generate a quatrain"""
# Sample rhymes
a, b = npr.choice(self.names_rhymes, size=2, replace=False, p=self.p_rhymes)
# Get verses
quatrain = [""] * 4
quatrain[0], quatrain[2] = self.sample_rhyming_pair(a)
quatrain[1], quatrain[3] = self.sample_rhyming_pair(b)
# Add period at the end
quatrain[-1] = self.add_period(quatrain[-1])
# Package and ship
return '\n'.join(quatrain)
def generate_sonnet(self):
"""Generates a sonnet"""
# A sonnet is 3 quatrains and one couplet
sonnet = [""] * 4
sonnet[0] = self.generate_quatrain()
sonnet[1] = self.generate_quatrain()
sonnet[2] = self.generate_quatrain()
sonnet[3] = self.generate_couplet()
# Package and ship
return '\n\n'.join(sonnet)
def generate_title(self, poem):
"""Generate a title for the poem"""
return self.tg(poem)
def add_title(self, poem):
"""Generate a title and prepend it to the string"""
# Get the title
t = self.generate_title(poem)
# Add a nice separator
sep = "\n%s\n" % "".join(["-"] * len(t))
# Concatenate and ship
return t + sep + poem
def load_verses(filename):
"""Load verses from dump file created by the bot"""
verses = []
with open(filename, 'r') as f:
for l in f:
fields = l.strip().split('\t')
# Only select verses without curse words
if curse.is_clean(fields[-1]):
verses.append(fields[-2].strip())
return verses
def main():
config_file = sys.argv[1]
mode = sys.argv[2]
poet = Poet(config_file)
if mode == 'text':
# Print a sonnet to stdout (for debugging mainly)
sonnet = poet.generate_sonnet()
sonnet = poet.add_title(sonnet)
print(sonnet)
elif mode == 'image':
# Save quatrain in image form
quatrain = poet.generate_quatrain()
quatrain = poet.add_title(quatrain)
image.make_image(quatrain, output_file=sys.argv[3])
elif mode == 'image_text':
# Save a quatrain in imag and text form
quatrain = poet.generate_quatrain()
quatrain = poet.add_title(quatrain)
image.make_image(quatrain, output_file=sys.argv[3])
util.savetxt(sys.argv[4], quatrain.split('\n'))
else:
# Print a couplet and a warning
print('mode %s not recognized. Here, get a couplet for free:\n' % mode, file=sys.stderr)
print(poet.generate_couplet(), file=sys.stderr)
if __name__ == '__main__':
main()
|
wrwrwr/turtle-trans | turtletrans/translate.py | Python | mit | 444 | 0 | """
Some common utilities.
"""
from turtle import Turtle
def turtle_subclass(name):
"""
| Creates a subclass of Turtle with the given name.
"""
return type(name, (Turtle,), {})
def translate_methods(cls, translations):
"""
Creates aliases for method names.
"""
for method, aliases in translations.items():
func = getattr(cls, | method)
for alias in aliases:
setattr(cls, alias, func)
|
blackberry/ALF | alf/debug/_gdb.py | Python | apache-2.0 | 19,799 | 0.002879 | ################################################################################
# Name : GDB Wrapper
# Author : Jesse Schwartzentruber & Tyson Smith
#
# Copyright 2014 BlackBerry Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import distutils.spawn
import os
import platform
import re
import signal
import tempfile
import time
from . import _common
CLOSE_FDS = True
if platform.system() in ["Linux", "Darwin"]:
TOOL_GDB = distutils.spawn.find_executable('gdb', os.pathsep.join([os.environ['PATH'], _common.PATH_DBG]))
if platform.system() == "Linux":
TOOL_GDB_NTO = os.path.join(_common.PATH_DBG, "linux_x64-gdb-ntoarm")
TOOL_KDSRV = os.path.join(_common.PATH_DBG, "linux_x64-kdserver")
else:
TOOL_GDB_NTO = None
TOOL_KDSRV = None
elif platform.system() == "QNX":
TOOL_GDB = {"x86": os.path.join(_common.PATH_DBG, "n | tox86-gdb"),
"armle": os.path.join(_common.PATH_DBG, "ntoarm-gdb"),
| }[platform.processor()]
TOOL_GDB_NTO = TOOL_GDB
TOOL_KDSRV = None
assert os.access(TOOL_GDB, os.X_OK), "%s is not executable" % TOOL_GDB
elif platform.system() == "Windows":
TOOL_GDB = distutils.spawn.find_executable('gdb.exe', os.pathsep.join([os.environ['PATH'], _common.PATH_DBG]))
TOOL_GDB_NTO = os.path.join(_common.PATH_DBG, "gdb-ntoarm.exe")
TOOL_KDSRV = os.path.join(_common.PATH_DBG, "kdserver.exe")
CLOSE_FDS = False
GDB_CMDS = os.path.join(os.path.abspath(os.path.dirname(__file__)), "cmds.gdb")
# child sometimes doesn't die on SIGTERM in QNX
# wait this length of time before sending another SIGTERM, and finally SIGKILL
SLAY_TIMEOUT = 10
def _send_signal(signal, *args):
for pid in args:
if pid:
os.kill(pid, signal)
break
def _trim_disassembly(stdout):
if not stdout:
return stdout
start_loc = stdout.find("Dump of assembler code")
end_loc = stdout.find("End of assembler dump.", start_loc)
if start_loc == -1 or end_loc == -1:
return "%s\nError trimming assembler dump. start_loc = %d, end_loc = %d" % (stdout,
start_loc,
end_loc)
try:
a, b = stdout[start_loc:end_loc].split("\n=>")
except ValueError:
return "%s\nError trimming assembler dump. Could not find '=>'" % (stdout)
a = a.splitlines()
start_loc += len(a.pop(0))
return "%s\n%s\n=>%s\n%s" % (stdout[:start_loc],
"\n".join(a[-15:]),
"\n".join(b.splitlines()[:15]),
stdout[end_loc:])
def _gdb_cmd(target_exe, solib_search=None, run=True):
return [TOOL_GDB, "-nx", "-x", GDB_CMDS] + \
[i for sl in [("-ex", x) for x in
_gdb_cmd_gen(run=run, target=target_exe, solib_search=solib_search)] for i in sl] + \
["-return-child-result", "-batch", "--args"]
def run_with_gdb(target_cmd, symbols=None, solib_search=None, env=None, callback=None,
callback_args=None, timeout=_common.DEFAULT_TIMEOUT, memory_limit=None,
idle_limit=None):
"""
This function is similar to the :func:`run` function above,
except the target is executed under control of the GNU Debugger.
Symbols may be specified manually, otherwise they are expected
to be findable by GDB (usually included in the target itself).
:func:`run_with_gdb` returns a :class:`~alf.FuzzResult` instance.
If no crash was detected, the :attr:`~alf.FuzzResult.classification`
member of the :class:`~alf.FuzzResult` will be
:data:`~alf.debug.NOT_AN_EXCEPTION`.
Classifications: :data:`~alf.debug.NOT_AN_EXCEPTION`,
:data:`~alf.debug.TIMEOUT`, :data:`~alf.debug.UNKNOWN`.
Availability: Unix, Windows.
"""
classification = None
cpid = None
if platform.system() == "Windows":
_common._set_gflags(target_cmd[0])
if platform.system() == "QNX":
if not os.path.isfile("libc.so.3"):
if not os.path.isfile("/root/symbols/x86/lib/libc.so.3.sym"):
raise RuntimeError("Cannot find /root/symbols/x86/lib/libc.so.3.sym")
os.symlink("/root/symbols/x86/lib/libc.so.3.sym", "libc.so.3")
fd, temp_fn = tempfile.mkstemp(prefix="gdb", suffix=".log", dir=".")
os.close(fd)
nul = open(os.devnull, "w+")
try:
with open(temp_fn, "w+") as f:
if env is None:
env = dict(os.environ)
env["LIBC_FATAL_STDERR_"] = "1"
p = _common.subprocess.Popen(_gdb_cmd(target_cmd[0], solib_search) + target_cmd,
close_fds=CLOSE_FDS, stdout=f, stderr=f, stdin=nul,
creationflags=_common.POPEN_FLAGS, env=env)
try:
with open(temp_fn) as fr:
while p.poll() is None:
line = fr.readline()
m = re.match(r"^\*\s+1\s+Thread\s+\w+\s+\(LWP\s+(?P<pid>[0-9]+)\)", line)
if m is None:
m = re.match(r"^\*\s+1\s+(pid|process|Thread)\s+(?P<pid>[0-9]+)", line)
if m:
cpid = int(m.group("pid"))
break
cb_res = _common._call_callback(callback, callback_args, p.pid)
if cb_res == _common.CB_ERROR:
raise RuntimeError("callback() returned error")
target_mon = _common.TargetMonitor(cpid, idle_limit=idle_limit,
memory_limit=memory_limit, time_limit=timeout)
while p.poll() is None:
if target_mon.check_memory():
classification = _common.EXCESS_MEMORY_USAGE
break
if target_mon.check_idle():
break
if target_mon.check_timeout():
classification = _common.TIMEOUT
break
time.sleep(0.01)
finally:
while p.poll() is None:
try:
if platform.system() == "QNX":
attempt = -1
sigs = [signal.SIGTERM, signal.SIGKILL]
while p.poll() is None:
attempt += 1
assert attempt < len(sigs), "Failed to kill child process"
_send_signal(sigs[attempt], cpid, p.pid)
kill_time = _common.prof_timer()
while _common.prof_timer() - kill_time < SLAY_TIMEOUT:
if p.poll() is not None:
break
time.sleep(0.25)
elif platform.system() == "Windows":
_send_signal(signal.CTRL_BREAK_EVENT, cpid, p.pid)
else:
_send_signal(signal.SIGTERM, cpid, p.pid)
except OSError:
pass
exit_code = p.wait()
f.seek(0, os.SEEK_SET)
stdout = f.read()
finally:
_common.delete(temp_fn)
nul.close()
m = re.search(r"Traceback \(\D+\):.+Python command:", stdout, re.DOTALL)
if m:
tb = m.g |
kislyuk/cartographer | postproc_db.py | Python | agpl-3.0 | 1,920 | 0.005208 | #!/usr/bin/env python3
import os, sys, logging, urllib, time, string, json, argparse, collections, datetime, re, bz2, math
from concurrent.futures import ThreadPoolExecutor, wait
import lz4
pool = ThreadPoolExecutor(max_workers=16)
logging.basicConfig(level=logging.DEBUG)
sys. | path.append(os.path.join(os.path.dirname(__file__), "lib", "python"))
from carta import (logger, POI)
from mongoengine import *
connect('carta')
zoomspacing = [round(0.0001*(1.6**n), 4) for n in | range(21, 1, -1)]
def compute_occlusions(box):
SW, NE = box
points = list(POI.objects(at__geo_within_box=(SW, NE)))
print("Starting", SW, NE, len(points))
for i, p1 in enumerate(points):
for j, p2 in enumerate(points[i+1:]):
coords1, coords2 = p1.at['coordinates'], p2.at['coordinates']
dist = math.sqrt(abs(coords1[0]-coords2[0])**2 + abs(coords1[1]-coords2[1])**2)
occluded_point = p1 if p1.rank < p2.rank else p2
for zoom, spacing in enumerate(zoomspacing):
if dist < spacing:
continue
break
occluded_point.min_zoom = max(occluded_point.min_zoom, zoom)
p1.save()
print("Finished", SW, NE, len(points))
step = 2
boxes = []
for lat in range(-90, 90, step):
for lng in range(-180, 180, step):
boxes.append([(lng, lat), (lng+step, lat+step)])
for result in pool.map(compute_occlusions, boxes):
pass
# docs_by_rank = sorted(POI.objects(at__geo_within_center=(doc.at['coordinates'], spacing)),
# key=lambda point: point.rank or 0,
# reverse=True)
# for doc in POI.objects(at__geo_within_center=(doc.at['coordinates'], 1), min_zoom__gt=0).order_by('-rank'):
# for doc2 in POI.objects(at__geo_within_center=(doc.at['coordinates'], zoomspacing[doc.min_zoom]), min_zoom__lte=doc.min_zoom).order_by('-rank'):
|
orionblastar/K666 | freek666/urls.py | Python | mit | 299 | 0 | from django.conf.urls import include, url
from django.views.generic.base import RedirectView
from djan | go.views.generic.base import TemplateView
urlpatt | erns = [
url(r'^index.html$', TemplateView.as_view(template_name="index.html")),
# url(r'^$', RedirectView.as_view(url="/index.html")),
]
|
Upande/MaMaSe | apps/partners/forms.py | Python | apache-2.0 | 134 | 0.014925 | from django import fo | rms
class PartnerLogoForm(forms.Form):
partner_logo = fo | rms.ImageField(
label='Select a file',
) |
MWisBest/PyBot | Commands/synonym/__init__.py | Python | gpl-3.0 | 23 | 0 | fro | m .synonym im | port *
|
yepengxj/theano_exercises | 02_advanced/01_symbolic/03_energy_soln.py | Python | bsd-3-clause | 2,327 | 0.002149 | import numpy as np
import theano
from theano import function
from theano.sandbox.rng_mrg import MRG_RandomStreams
import theano.tensor as T
def energy(W, V, H):
"""
W : A theano matrix of RBM weights
num visible x num hidden
V : A theano matrix of assignments to visible units
Each row is another configuration
Each column corresponds to a different unit
H : A theano matrix of assignments to hidden units
Each row is another configuration
Each column corresponds to a different unit
Returns:
E: a theano vector
Element i gives the energy of configuration (V[i,:], H[i,:])
(This RBM has no biases, only weights)
"""
return -(T.dot(V, W) * H).sum(axis=1)
def grad_expected_energy(W, V, H):
"""
W : A theano matrix of RBM weights
num visible x num hidden
V : A theano matrix of samples of visible units
Each row is another samples
Each column corresponds to a different unit
H : A theano matrix of samples of hidden units
Each row is another samples
Each column corresponds to a different unit
Returns:
dW: a matrix of the derivatives of the expected gradient
of the energy
"""
return T.grad(energy(W, V, H).mean(), W, consider_constant=[V, H])
if __name__ == "__main__":
m = 2
nv = 3
nh = 4
v0 = T.alloc(1., m, nv)
h0 = T.alloc(1., m, nh)
rng_factory = MRG_RandomStreams(42)
W = rng_factory.normal(size=(nv, nh), dtype=v0.dtype)
pv = T.nnet.sigmoid(T.dot(h0, W.T))
v = rng_factory.binomial(p=pv, size=pv.shape, dtype=W.dtype)
ph = | T.nnet.sigmoid(T.dot(v, W))
h = rng_factory.binomial(p=ph, size=ph.shape, dtype=W.dtype)
c | lass _ElemwiseNoGradient(theano.tensor.Elemwise):
def grad(self, inputs, output_gradients):
raise TypeError("You shouldn't be differentiating through "
"the sampling process.")
return [ theano.gradient.DisconnectedType()() ]
block_gradient = _ElemwiseNoGradient(theano.scalar.identity)
v = block_gradient(v)
h = block_gradient(h)
g = grad_expected_energy(W, v, h)
stats = T.dot(v.T, h) / m
f = function([], [g, stats])
g, stats = f()
assert np.allclose(g, -stats)
print "SUCCESS!"
|
Linaro/squad | squad/core/migrations/0011_testrun_metadata_fields.py | Python | agpl-3.0 | 959 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-20 13:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0010_testrun_datetime'),
]
operations = [
migrations.AddField(
model_name='testrun',
name='build_url',
field=models.CharField(max_length=2048, null=True),
| ),
migrations.AddField(
model_name='testrun',
name='job_id',
field=models.CharField(max_length=128, null=True),
),
migrations.AddField(
model_name='testrun',
name='job_status',
field=models.CharField(max_length=128, null=True),
),
| migrations.AddField(
model_name='testrun',
name='job_url',
field=models.CharField(max_length=2048, null=True),
),
]
|
moodpulse/l2 | statistic/structure_sheet.py | Python | mit | 67,638 | 0.001698 | import json
from collections import OrderedDict
import openpyxl
from openpyxl.styles import Border, Side, Alignment, Font, NamedStyle
from openpyxl.utils.cell import get_column_letter
from directions.models import IstochnikiFinansirovaniya
from doctor_call.models import DoctorCall
from hospitals.tfoms_hospital import HOSPITAL_TITLE_BY_CODE_TFOMS
from utils.dates import normalize_dash_date
from dateutil.parser import parse as du_parse
from dateutil.relativedelta import relativedelta
month_dict = {1: 'Январь', 2: 'Февраль', 3: 'Март', 4: 'Апрель', 5: 'Май', 6: 'Июнь', 7: 'Июль', 8: 'Август', 9: 'Сентябрь', 10: 'Октябрь', 11: 'Ноябрь', 12: 'Декабрь'}
def job_total_base(ws1, month, type_fin):
"""
Основа(каркас) для итоговых данных
:return:
"""
ws1.column_dimensions[get_column_letter(1)].width = 22
for i in range(1, 32):
ws1.column_dimensions[get_column_letter(1 + i)].width = 4
ws1.cell(row=4, column=1 + i).value = str(i)
ws1.cell(row=1, column=1).value = 'Месяц'
ws1.cell(row=1, column=2).value = month_dict.get(month)
ws1.cell(row=4, column=1).value = 'Вид работы'
fin_obj = IstochnikiFinansirovaniya.objects.get(pk=type_fin)
ws1.cell(row=2, column=1).value = fin_obj.title
return ws1
def jot_total_titles(ws1, titles):
"""
Заговловки видов работ
:param ws1:
:param titles:
:return:
"""
cel_res = OrderedDict()
for i in range(len(titles)):
cell_row = 5 + i
ws1.cell(row=cell_row, column=1).value = titles[i]
cel_res[titles[i]] = cell_row
return ws1, cel_res
def job_total_data(ws1, titles, data):
for k, v in data.items():
for res, uet in v.items():
r = titles.get(res)
ws1.cell(row=r, column=k + 1).value = str(uet)
def passed_research_base(ws1, data_date):
"""
:param ws1:
:return:
"""
style_border = NamedStyle(name="style_border")
bd = Side(style='thin', color="000000")
style_border.border = Border(left=bd, top=bd, right=bd, bottom=bd)
style_border.font = Font(bold=True, size=11)
style_border.alignment = Alignment(wrap_text=True, horizontal='center', vertical='center')
ws1.merge_cells(start_row=1, start_column=1, end_row=1, end_column=19)
ws1.cell(row=1, column=1).value = 'ЖУРНАЛ учета приема и отказов в госпитализации за ' + data_date + 'г.(мед.документация Ф№001/У утв. МИНЗДРАВОМ СССР 04.10.1980г. №1030)'
ws1.cell(row=1, column=1).style = style_border
# габариты ячеек |
ws1.row_dimensions[2].height = 115
columns = [
('№ п/п', 5),
(' | Время поступления', 8),
('Услуга (дата-время подтверждения)', 14),
('Направление', 11),
('Фамилия, имя, отчество больного', 20),
('Дата рождения', 10),
('Постоянное место жительства или адрес родственников, близких и N телефона', 23),
('Каким учреждением был направлен или доставлен', 15),
('Отделение, в которое помещен больной', 12),
('N карты (стационарного) больного', 10),
('Диагноз направившего учреждения', 7),
('Диагноз при поступлении', 7),
('№ ДДУ', 16),
('Полис', 21),
('Примечания', 10),
('Выписан, переведен в другой стационар, умер (вписать и указать дату и название стационара, куда переведен', 20),
('Отметка о сообщении родственникам или учреждению', 11),
('Если не был госпитализирован указать причину и принятые меры', 11),
('отказ в приеме первичный, повторный (вписать)', 11),
]
for idx, column in enumerate(columns, 1):
ws1.cell(row=2, column=idx).value = column[0]
ws1.column_dimensions[get_column_letter(idx)].width = column[1]
ws1.cell(row=2, column=idx).style = style_border
return ws1
def passed_research_data(ws1, data):
    """Fill the admissions journal with one row per incoming record.

    Each element of *data* is a positionally-indexed DB row; the relevant
    positions are unpacked below and written into columns 1-19 starting
    at worksheet row 3.  Missing values become a single space so the
    bordered cells stay visually uniform.

    :param ws1: worksheet previously prepared by passed_research_base().
    :param data: iterable of row tuples/lists.
    :return: the same worksheet, for chaining.
    """
    r = 2                      # current worksheet row (first data row is 3)
    n = 0                      # running row counter written to column 1
    empty = ' '
    style_border1 = NamedStyle(name="style_border1")
    bd = Side(style='thin', color="000000")
    style_border1.border = Border(left=bd, top=bd, right=bd, bottom=bd)
    style_border1.font = Font(bold=False, size=11)
    style_border1.alignment = Alignment(wrap_text=True, horizontal='left', vertical='center')
    for i in data:
        # unpack the DB row by position; blanks become a single space
        current_research_title = i[1]
        current_polis_n = i[2] or empty
        current_polis_who_give = i[3] or empty
        current_napravlen = i[4]
        current_datatime_confirm = i[5]
        current_create_napr = i[6]
        current_diagnoz = i[7] or empty
        current_result = i[8] or empty
        # NOTE(review): index 19 is read here while i[9] is never used —
        # verify against the column order of the producing query.
        current_napr_time_at = i[19] or empty
        current_num_card = i[10]
        current_family = i[11] or empty
        current_name = i[12] or empty
        current_patronymic = i[13] or empty
        current_birthday = i[14] or empty
        current_main_address = i[15] if i[15] else ''
        current_fact_address = i[16] if i[16] else empty
        # prefer the registered address, fall back to the actual one
        current_address = current_main_address if current_main_address else current_fact_address
        current_work_place = i[17] or empty
        current_kem_napravlen = i[18] or empty
        r = r + 1
        n = n + 1
        ws1.cell(row=r, column=1).value = n
        ws1.cell(row=r, column=2).value = current_napr_time_at
        ws1.cell(row=r, column=3).value = f'{current_research_title},\n({current_datatime_confirm})'
        ws1.cell(row=r, column=4).value = f'{current_napravlen},\n({current_create_napr})'
        ws1.cell(row=r, column=5).value = current_family + ' ' + current_name + ' ' + current_patronymic
        ws1.cell(row=r, column=6).value = current_birthday
        ws1.cell(row=r, column=7).value = current_address
        ws1.cell(row=r, column=8).value = current_kem_napravlen
        ws1.cell(row=r, column=9).value = 'Приемное'
        ws1.cell(row=r, column=10).value = current_num_card
        ws1.cell(row=r, column=11).value = ' '
        ws1.cell(row=r, column=12).value = current_diagnoz
        ws1.cell(row=r, column=13).value = current_work_place
        ws1.cell(row=r, column=14).value = current_polis_n + ', ' + current_polis_who_give
        ws1.cell(row=r, column=15).value = ' '
        ws1.cell(row=r, column=16).value = current_result
        ws1.cell(row=r, column=17).value = ' '
        ws1.cell(row=r, column=18).value = ' '
        ws1.cell(row=r, column=19).value = ' '
        # apply the thin border style to the whole freshly written row
        for j in range(1, 20):
            ws1.cell(row=r, column=j).style = style_border1
    return ws1
def covid_call_patient_base(ws1):
    """Build the header of the COVID call-round sheet.

    Merges the title row, then writes the five column headings with a
    thin-bordered, bold, centered style and fixed column widths.

    :param ws1: openpyxl worksheet decorated in place.
    :return: the same worksheet, for chaining.
    """
    style_border = NamedStyle(name="style_border")
    bd = Side(style='thin', color="000000")
    style_border.border = Border(left=bd, top=bd, right=bd, bottom=bd)
    style_border.font = Font(bold=True, size=11)
    style_border.alignment = Alignment(wrap_text=True, horizontal='center', vertical='center')
    # NOTE(review): merge spans 19 columns although only 5 headings are
    # written — possibly copied from the admissions sheet; confirm.
    ws1.merge_cells(start_row=1, start_column=1, end_row=1, end_column=19)
    ws1.cell(row=1, column=1).value = 'Обзвон'
    ws1.cell(row=1, column=1).style = style_border
    # cell dimensions
    ws1.row_dimensions[2].height = 15
    # (heading text, column width) pairs for columns 1..5
    columns = [
        ('ФИО', 25),
        ('№ карты', 15),
        ('Телефон', 20),
        ('Оператор', 25),
        ('Дата', 25),
    ]
    for idx, column in enumerate(columns, 1):
        ws1.cell(row=2, column=idx).value = column[0]
        ws1.column_dimensions[get_column_letter(idx)].width = column[1]
        ws1.cell(row=2, column=idx).style = style_border
    return ws1
def covid_call_patient_data(ws1, data):
r = 3
style_border1 = NamedStyle(name="style_border1")
bd = Side(style='thin', color="000000")
style_border1.border = Border(left=bd, top=bd, right=bd, bottom=bd)
style_border1.font = Font(bold=False, size=11)
style_border1.alignment = Alignment(wrap_text=True, horizontal='left', vertical='center')
for i in data:
ws1.cell(row=r, column=1).value = i["fio_patient"]
ws1.cell(row=r, column=2).value = i["number"]
ws1.cell(row=r, column=3).value = i["Контактный телефон"]
ws1.cell(row=r, column=4).value = i["Оператор"]
ws1.cell(row=r, column=5).value = normalize_dash_date(i["Дата следующего звонка"])
for j in range(1, 6):
ws1.c |
bikash/h2o-dev | py2/testdir_single_jvm/test_parse_covtype_2.py | Python | apache-2.0 | 3,121 | 0.013137 | import unittest, sys
sys.path.extend(['.','..','../..','py'])
import h2o2 as h2o
import h2o_cmd, h2o_import as h2i, h2o_browse as h2b
from h2o_test import find_file, dump_json, verboseprint
expectedZeros = [0, 4914, 656, 24603, 38665, 124, 13, 5, 1338, 51, 320216, 551128, 327648, 544044, 577981,
573487, 576189, 568616, 579415, 574437, 580907, 580833, 579865, 548378, 568602, 551041,
563581, 580413, 581009, 578167, 577590, 579113, 576991, 571753, 580174, 547639, 523260,
559734, 580538, 578423, 579926, 580066, 465765, 550842, 555346, 528493, 535858, 579401,
579121, 580893, 580714, 565439, 567206, 572262, 0]
def assertEqualMsg(a, b):
    """Assert that *a* equals *b*, showing both values on failure."""
    message = "%s %s" % (a, b)
    assert a == b, message
def parseKeyIndexedCheck(frames_result, multiplyExpected):
    """Validate per-column metadata of a parsed covtype frame.

    Asserts that every column is an int column named C1..Cn with no
    missing values, no domain, and a zero count equal to the covtype
    baseline in ``expectedZeros`` scaled by *multiplyExpected* (the test
    files are concatenated copies of covtype).
    """
    # get the name of the frame?
    print ""
    frame = frames_result['frames'][0]
    rows = frame['rows']
    columns = frame['columns']
    for i, c in enumerate(columns):
        label = c['label']
        stype = c['type']
        missing = c['missing']
        zeros = c['zeros']
        domain = c['domain']
        print "column: %s label: %s type: %s missing: %s zeros: %s domain: %s" %\
            (i, label, stype, missing, zeros, domain)
        # files are concats of covtype. so multiply expected
        assertEqualMsg(zeros, expectedZeros[i] * multiplyExpected)
        assertEqualMsg(label, "C%s" % (i + 1))
        assertEqualMsg(stype, "int")
        assertEqualMsg(missing, 0)
        assertEqualMsg(domain, None)
class Basic(unittest.TestCase):
    """Parse concatenated covtype files and verify column metadata."""

    def tearDown(self):
        # fail the test if the h2o sandbox logged any errors
        h2o.check_sandbox_for_errors()

    @classmethod
    def setUpClass(cls):
        # bring up (or attach to) an h2o cloud for the whole test class
        h2o.init()

    @classmethod
    def tearDownClass(cls):
        h2o.tear_down_cloud()

    def test_parse_covtype_2(self):
        # (csv file, number of concatenated covtype copies, timeout secs)
        tryList = [
            ('covtype.data', 1, 30),
            ('covtype20x.data', 20, 120),
        ]
        for (csvFilename, multiplyExpected, timeoutSecs) in tryList:
            # import_result = a_node.import_files(path=find_file("smalldata/logreg/prostate.csv"))
            importFolderPath = "standard"
            hex_key = 'covtype.hex'
            csvPathname = importFolderPath + "/" + csvFilename
            parseResult = h2i.import_parse(bucket='home-0xdiag-datasets', path=csvPathname, schema='local',
                                           timeoutSecs=timeoutSecs, hex_key=hex_key,
                                           chunk_size=4194304*2, doSummary=False)
            pA = h2o_cmd.ParseObj(parseResult)
            iA = h2o_cmd.InspectObj(pA.parse_key)
            print iA.missingList, iA.labelList, iA.numRows, iA.numCols
            for i in range(1):
                print "Summary on column", i
                co = h2o_cmd.runSummary(key=hex_key, column=i)
            k = parseResult['frames'][0]['key']['name']
            # print "parseResult:", dump_json(parseResult)
            a_node = h2o.nodes[0]
            frames_result = a_node.frames(key=k, row_count=5)
            # print "frames_result from the first parseResult key", dump_json(frames_result)
            parseKeyIndexedCheck(frames_result, multiplyExpected)

if __name__ == '__main__':
    h2o.unit_main()
|
class Solution(object):
    def findPaths(self, m, n, N, i, j):
        """Count paths that move a ball out of an m x n grid.

        Starting at cell (i, j) and taking at most N unit moves
        (up/down/left/right), return the number of distinct paths that
        cross the grid boundary, modulo 10**9 + 7.

        :type m: int
        :type n: int
        :type N: int
        :type i: int
        :type j: int
        :rtype: int
        """
        if not N:
            return 0
        MOD = 10 ** 9 + 7
        # Two rolling DP layers: layers[c % 2][x][y] is the number of
        # boundary-crossing paths of length <= c starting from (x, y).
        layers = [[[0] * n for _ in range(m)] for _ in range(2)]
        for step in range(1, N + 1):
            prev = layers[(step - 1) % 2]
            curr = layers[step % 2]
            for x in range(m):
                for y in range(n):
                    # an edge neighbour contributes 1 (a direct exit),
                    # an interior neighbour contributes its previous count
                    up = prev[x - 1][y] if x > 0 else 1
                    down = prev[x + 1][y] if x < m - 1 else 1
                    left = prev[x][y - 1] if y > 0 else 1
                    right = prev[x][y + 1] if y < n - 1 else 1
                    curr[x][y] = (up + down + left + right) % MOD
        return layers[N % 2][i][j]
jimrybarski/fylm_critic | tests.py | Python | mit | 250 | 0 | """
Auto-discovers all unittests in the | tests directory and runs them
"""
import unittest
loader = unittest.TestLoader()
tests = loader.discover('tests', pattern='*.py', top_level_dir='.')
testRunner = unittest.TextTestRunner()
tes | tRunner.run(tests)
|
xmendez/wfuzz | src/wfuzz/ui/console/mvc.py | Python | gpl-2.0 | 10,785 | 0.000556 | import sys
from collections import defaultdict
import threading
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest
from wfuzz.fuzzobjects import FuzzWordType, FuzzType, FuzzPlugin
from .common import exec_banner, Term
from .getch import _Getch
from .output import getTerminalSize, wrap_always_list
usage = """\r\n
Interactive keyboard commands:\r\n
?: Show this help
p: Pause
s: Show stats
q: Cancel
"""
class SimpleEventDispatcher:
    """Minimal publish/subscribe hub keyed by event name."""

    def __init__(self):
        # event name -> list of subscriber callables
        self.publisher = defaultdict(list)

    def create_event(self, msg):
        """Register *msg* as a known event with no subscribers yet."""
        self.publisher[msg] = []

    def subscribe(self, func, msg, dynamic=False):
        """Attach *func* to *msg*.

        Unknown events are rejected with KeyError unless *dynamic* is
        true, in which case the event is created on the fly.
        """
        if dynamic or msg in self.publisher:
            self.publisher[msg].append(func)
            return
        raise KeyError("subscribe. No such event: %s" % (msg))

    def notify(self, msg, **event):
        """Invoke every subscriber of *msg* with the keyword payload."""
        if msg not in self.publisher:
            raise KeyError("notify. Event not subscribed: %s" % (msg,))
        for handler in self.publisher[msg]:
            handler(**event)
class KeyPress(threading.Thread):
    """Background thread that reads single keystrokes and dispatches them.

    Recognised keys (?, p, s, q — with Ctrl-C mapped to "q") are
    forwarded through a SimpleEventDispatcher so the controller can
    react without polling the terminal itself.
    """

    def __init__(self):
        threading.Thread.__init__(self)
        self.inkey = _Getch()
        self.setName("KeyPress")
        self.dispatcher = SimpleEventDispatcher()
        self.dispatcher.create_event("?")
        self.dispatcher.create_event("p")
        self.dispatcher.create_event("s")
        self.dispatcher.create_event("q")
        # polled by run(); cleared by cancel_job() to stop the loop
        self.do_job = True

    def cancel_job(self):
        """Ask the reader loop to terminate after the next keystroke."""
        self.do_job = False

    def run(self):
        """Blocking read loop: translate keystrokes into dispatcher events."""
        while self.do_job:
            k = self.inkey()
            if k and ord(k) == 3:
                # Ctrl-C (ETX) behaves like an explicit quit
                self.dispatcher.notify("q", key="q")
            elif k == "p":
                self.dispatcher.notify("p", key="p")
            elif k == "s":
                self.dispatcher.notify("s", key="s")
            elif k == "?":
                self.dispatcher.notify("?", key="?")
            elif k == "q":
                self.dispatcher.notify("q", key="q")
class Controller:
    """Glue between the fuzzer engine and the console view.

    Binds the interactive keyboard events (help/pause/stats/quit) coming
    from the view's dispatcher to actions on the running fuzzer session.
    """

    def __init__(self, fuzzer, view):
        self._debug = False
        self.fuzzer = fuzzer
        self.view = view
        self.__paused = False
        self.stats = fuzzer.options.get("compiled_stats")
        self.view.dispatcher.subscribe(self.on_help, "?")
        self.view.dispatcher.subscribe(self.on_pause, "p")
        self.view.dispatcher.subscribe(self.on_stats, "s")
        self.view.dispatcher.subscribe(self.on_exit, "q")

    # dynamic keyboard bindings
    def on_exit(self, **event):
        """Cancel the fuzzing job and release all resources."""
        self.fuzzer.cancel_job()
        self.view.cancel_job()
        self.fuzzer.options.close()

    def on_help(self, **event):
        """Print the interactive keyboard help text."""
        print(usage)

    def on_pause(self, **event):
        """Toggle the engine between paused and running states."""
        self.__paused = not self.__paused
        if self.__paused:
            self.fuzzer.pause_job()
        else:
            self.fuzzer.resume_job()

    def on_stats(self, **event):
        """Show progress statistics (raw engine stats in debug mode)."""
        if self._debug:
            self.show_debug_stats()
        else:
            self.show_stats()

    def show_debug_stats(self):
        """Dump the engine's raw stats dictionary."""
        print("\n=============== Paused ==================")
        stats = self.fuzzer.stats()
        for k, v in list(stats.items()):
            print("%s: %s" % (k, v))
        print("\n=========================================")

    def show_stats(self):
        """Print a human-readable progress summary with an ETA estimate."""
        pending = self.stats.total_req - self.stats.processed()
        summary = self.stats
        summary.mark_end()
        print("\nTotal requests: %s\r" % str(summary.total_req))
        print("Pending requests: %s\r" % str(pending))
        if summary.backfeed() > 0:
            # show how many requests were injected back into the queue
            print(
                "Processed Requests: %s (%d + %d)\r"
                % (
                    str(summary.processed())[:8],
                    (summary.processed() - summary.backfeed()),
                    summary.backfeed(),
                )
            )
        else:
            print("Processed Requests: %s\r" % (str(summary.processed())[:8]))
        print("Filtered Requests: %s\r" % (str(summary.filtered())[:8]))
        req_sec = (
            summary.processed() / summary.totaltime if summary.totaltime > 0 else 0
        )
        print("Total time: %s\r" % str(summary.totaltime)[:8])
        if req_sec > 0:
            print("Requests/sec.: %s\r" % str(req_sec)[:8])
            eta = pending / req_sec
            if eta > 60:
                print("ET left min.: %s\r\n" % str(eta / 60)[:8])
            else:
                print("ET left sec.: %s\r\n" % str(eta)[:8])
class View:
widths = [10, 8, 6, 8, 9, getTerminalSize()[0] - 65]
verbose_widths = [10, 10, 8, 8, 6, 9, 30, 30, getTerminalSize()[0] - 145]
def __init__(self, session_options):
self.colour = session_options["colour"]
self.verbose = session_options["verbose"]
self.previous = session_options["previous"]
| self.term = Term()
self.printed_lines = 0
def _print_verbose(self, res, print_nres=True):
txt_colour = (
Term.noColour if not res.is_baseline or not self.colour else Term.fgCyan
)
if self.previous and self.colour and not print_nres:
| txt_colour = Term.fgCyan
location = ""
if "Location" in res.history.headers.response:
location = res.history.headers.response["Location"]
elif res.history.url != res.history.redirect_url:
location = "(*) %s" % res.history.url
server = ""
if "Server" in res.history.headers.response:
server = res.history.headers.response["Server"]
rows = [
("%09d:" % res.nres if print_nres else " |_", txt_colour),
("%.3fs" % res.timer, txt_colour),
(
"%s" % "XXX" if res.exception else str(res.code),
self.term.get_colour(res.code) if self.colour else txt_colour,
),
("%d L" % res.lines, txt_colour),
("%d W" % res.words, txt_colour),
("%d Ch" % res.chars, txt_colour),
(server, txt_colour),
(location, txt_colour),
('"%s"' % res.description, txt_colour),
]
self.term.set_colour(txt_colour)
self.printed_lines = self._print_line(rows, self.verbose_widths)
def _print_header(self, rows, maxWidths):
print("=" * (3 * len(maxWidths) + sum(maxWidths[:-1]) + 10))
self._print_line(rows, maxWidths)
print("=" * (3 * len(maxWidths) + sum(maxWidths[:-1]) + 10))
print("")
def _print_line(self, rows, maxWidths):
def wrap_row(rows, maxWidths):
newRows = [
wrap_always_list(item[0], width) for item, width in zip(rows, maxWidths)
]
return [[substr or "" for substr in item] for item in zip_longest(*newRows)]
def print_row(row, rows):
sys.stdout.write(
" ".join(
[
colour + str.ljust(str(item), width) + Term.reset
for (item, width, colour) in zip(
row, maxWidths, [colour[1] for colour in rows]
)
]
)
)
new_rows = wrap_row(rows, maxWidths)
for row in new_rows:
print_row(row, rows)
sys.stdout.write("\n\r")
sys.stdout.flush()
return len(new_rows)
def _print(self, res, print_nres=True):
txt_colour = (
Term.noColour if not res.is_baseline or not self.colour else Term.fgCyan
)
if self.previous and self.colour and not print_nres:
txt_colour = Term.fgCyan
rows = [
("%09d:" % res.nres if print_nres else " |_", txt_colour),
(
"%s" % "XXX" if res.exception else str(res.code),
self.term.get_colour(res.code) if self.colour else txt_colour,
),
("%d L" % res.lines, txt_colour),
("%d W" % res.words, txt_colour),
("%d Ch" % res.chars, txt_colour),
('"%s"' % res.description, txt_colour),
]
|
guillaume-philippon/aquilon | lib/aquilon/consistency/checks/branch.py | Python | apache-2.0 | 3,427 | 0.000584 | #!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2013,2014 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from aquilon.consistency.checker import ConsistencyChecker
from aquilon.aqdb.model.branch import Branch
from aquilon.worker.processes import run_git
from aquilon.worker.dbwrappers.branch import merge_into_trash
class BranchChecker(ConsistencyChecker):
    """
    Branch Consistancy Checker

    This module performs validation that is common for all branches (both
    domains and sandboxes) in template-king.
    """

    def check(self, repair=False):
        """Cross-check branches between AQDB and the template-king repo.

        :param repair: when true, delete template-king branches that have
            no database counterpart (merging them into the trash branch
            first, when one is configured).  Database-only branches are
            never repaired automatically — AQDB is treated as canonical.
        """
        # Find all of the branchs that are listed in the database
        db_branches = {}
        for branch in self.session.query(Branch):
            db_branches[branch.name] = branch
        # Find all of the branches that are in the template king, this
        # includes both domains and sandbox's
        kingdir = self.config.get("broker", "kingdir")
        out = run_git(['for-each-ref', '--format=%(refname:short)',
                       'refs/heads'], path=kingdir, loglevel=logging.DEBUG)
        git_branches = set(out.splitlines())
        # The trash branch is special
        if self.config.has_option("broker", "trash_branch"):
            git_branches.remove(self.config.get("broker", "trash_branch"))
        # Branches in the database and not in the template-king
        for branch in set(db_branches.keys()).difference(git_branches):
            self.failure(branch, format(db_branches[branch]),
                         "found in the database but not in template-king")
            # No repair mode. We consider AQDB more canonical than
            # template-king, so we should not delete the DB object, and we don't
            # have any information how to restore the branch in template-king.
        # Branches in the template-king and not in the database
        for branch in git_branches.difference(db_branches.keys()):
            if repair:
                self.logger.info("Deleting branch %s", branch)
                merge_msg = []
                merge_msg.append("Delete orphaned branch %s" % branch)
                merge_msg.append("")
                merge_msg.append("The consistency checker found this branch to be ")
                merge_msg.append("orphaned.")
                # preserve the branch content in the trash branch before
                # actually deleting it, when a trash branch is configured
                if self.config.has_option("broker", "trash_branch"):
                    merge_into_trash(self.config, self.logger, branch,
                                     "\n".join(merge_msg),
                                     loglevel=logging.DEBUG)
                run_git(['branch', '-D', branch], path=kingdir,
                        loglevel=logging.DEBUG)
            else:
                self.failure(branch, "Branch %s" % branch,
                             "found in template-king but not in the database")
|
cameronmaske/skipper | skipper/utils.py | Python | bsd-2-clause | 1,012 | 0 | import re
def get_subset(a, keys):
    """Return a new dict holding only the entries of *a* keyed by *keys*."""
    return {key: a[key] for key in keys if key in a}
def find(array, properties):
    """Return the first item of *array* matching all of *properties*.

    Items that expose a ``__dict__`` are compared through their attribute
    dict; plain mappings are compared directly.  Returns None when no
    item matches.
    """
    wanted = properties.keys()
    for item in array:
        mapping = item.__dict__ if hasattr(item, '__dict__') else item
        candidate = {key: mapping[key] for key in wanted if key in mapping}
        if candidate == properties:
            return item
    return None
def missing_keys(a, b):
    """List the keys present in *a* but absent from *b*, in *a*'s order."""
    present = b.keys()
    return [key for key in a.keys() if key not in present]
def get_index(x, index):
    """
    Return the element at *index* of the list, or None when *index* is
    out of range.

    >>> example = [1, 2]
    >>> get_index(example, 1)
    2
    >>> get_index(example, 7) is None
    True
    """
    # Fix: the previous docstring claimed get_index([1, 2], 1) == 1 and
    # showed a bare "None" line, both of which would fail under doctest.
    if len(x) > index:
        return x[index]
    return None
def extract_version(event):
    """
    Parse the version number out of `docker --version` output, e.g.

    >>> extract_version("Docker version 0.11.1, build fb99f99")
    '0.11.1'

    Returns None when the text does not look like docker version output.
    """
    match = re.search(r'Docker version ([0-9\.]+),', event)
    return match.group(1) if match else None
|
clusterpy/clusterpy | clusterpy/core/toolboxes/cluster/componentsAlg/areacl.py | Python | bsd-3-clause | 1,990 | 0.00201 | # encoding: latin2
"""Algorithm utilities
G{packagetree core}
"""
__author__ = "Juan C. Duque"
__credits__ = "Copyright (c) 2009-11 Juan C. Duque"
__license__ = "New BSD License"
__version__ = "1.0.0"
__maintainer__ = "RiSE Group"
__email__ = "contacto@rise-group.org"
from clusterpy.core.toolboxes.cluster.componentsAlg.distanceFunctions import distMethods
import numpy as np
class AreaCl:
    """
    Area Class for Regional Clustering.

    Holds one polygon/area of the map: its id, its neighbours' ids and
    its attribute vector (optionally with a variance matrix packed at
    the tail of ``data``).
    """
    def __init__(self, id, neighs, data, variance="false"):
        """
        @type id: integer
        @param id: Id of the polygon/area
        @type neighs: list
        @param neighs: Neighborhood ids
        @type data: list.
        @param data: Data releated to the area.
        @type variance: string
        @keyword variance: "false" (default) when ``data`` holds plain
            attributes; any other value means the tail of ``data`` packs
            the lower triangle of a symmetric variance matrix.
        """
        self.id = id
        self.neighs = neighs
        if variance == "false":
            self.data = data
        else:
            # data holds n+1 attribute values followed by the packed lower
            # triangle of an (n x n) symmetric matrix; recover n from
            # len(data) = (n + 1) + n * (n + 1) / 2.
            # Fix: cast n to int once — modern numpy rejects a float size
            # in np.identity().
            n = int((np.sqrt(9 + 8 * (len(data) - 1)) - 3) / 2)
            self.var = np.matrix(np.identity(n))
            index = n + 1
            for i in range(n):
                for j in range(i + 1):
                    self.var[i, j] = data[index]
                    self.var[j, i] = data[index]
                    index += 1
            self.data = data[0:n + 1]

    def returnDistance2Area(self, otherArea, distanceType="EuclideanSquared", indexData=None):
        """
        Return the distance to `otherArea`.

        @param otherArea: AreaCl instance to measure against.
        @param distanceType: key into ``distMethods`` selecting the metric.
        @param indexData: optional list of attribute indices restricting
            the comparison; all attributes are used when empty or None.
            (Previously a mutable default ``[]``; None is equivalent.)
        """
        if indexData:
            y0 = [self.data[index] for index in indexData]
            y1 = [otherArea.data[index] for index in indexData]
        else:
            y0 = self.data
            y1 = otherArea.data
        areaDistance = distMethods[distanceType]([y0] + [y1])
        # some metrics return a nested [[d]] structure, others a flat [d];
        # Fix: the former bare "except:" also hid genuine errors.
        try:
            dist = areaDistance[0][0]
        except (TypeError, IndexError):
            dist = areaDistance[0]
        return dist
|
Danielhiversen/home-assistant | homeassistant/components/snmp/sensor.py | Python | apache-2.0 | 6,642 | 0.000602 | """Support for displaying collected data over SNMP."""
from datetime import timedelta
import logging
import pysnmp.hlapi.asyncio as hlapi
from pysnmp.hlapi.asyncio import (
CommunityData,
ContextData,
ObjectIdentity,
ObjectType,
SnmpEngine,
UdpTransportTarget,
UsmUserData,
getCmd,
)
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_UNIT_OF_MEASUREMENT,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
STATE_UNKNOWN,
)
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_ACCEPT_ERRORS,
CONF_AUTH_KEY,
CONF_AUTH_PROTOCOL,
CONF_BASEOID,
CONF_COMMUNITY,
CONF_DEFAULT_VALUE,
CONF_PRIV_KEY,
CONF_PRIV_PROTOCOL,
CONF_VERSION,
DEFAULT_AUTH_PROTOCOL,
DEFAULT_COMMUNITY,
DEFAULT_HOST,
DEFAULT_NAME,
DEFAULT_PORT,
DEFAULT_PRIV_PROTOCOL,
DEFAULT_VERSION,
MAP_AUTH_PROTOCOLS,
MAP_PRIV_PROTOCOLS,
SNMP_VERSIONS,
)
_LOGGER = logging.getLogger(__name__)
# Default poll cadence for this sensor platform.
SCAN_INTERVAL = timedelta(seconds=10)

# Extend the base sensor schema with SNMP connection and v1/v2c/v3
# authentication options.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_BASEOID): cv.string,
        vol.Optional(CONF_ACCEPT_ERRORS, default=False): cv.boolean,
        vol.Optional(CONF_COMMUNITY, default=DEFAULT_COMMUNITY): cv.string,
        vol.Optional(CONF_DEFAULT_VALUE): cv.string,
        vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
        vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
        vol.Optional(CONF_VERSION, default=DEFAULT_VERSION): vol.In(SNMP_VERSIONS),
        vol.Optional(CONF_USERNAME): cv.string,
        vol.Optional(CONF_AUTH_KEY): cv.string,
        vol.Optional(CONF_AUTH_PROTOCOL, default=DEFAULT_AUTH_PROTOCOL): vol.In(
            MAP_AUTH_PROTOCOLS
        ),
        vol.Optional(CONF_PRIV_KEY): cv.string,
        vol.Optional(CONF_PRIV_PROTOCOL, default=DEFAULT_PRIV_PROTOCOL): vol.In(
            MAP_PRIV_PROTOCOLS
        ),
    }
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the SNMP sensor.

    Builds the pysnmp request arguments from the validated config
    (community-based for v1/v2c, user-based for v3), probes the OID once
    so misconfiguration is reported at setup time, then registers one
    SnmpSensor entity backed by an SnmpData poller.
    """
    name = config.get(CONF_NAME)
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)
    community = config.get(CONF_COMMUNITY)
    baseoid = config.get(CONF_BASEOID)
    unit = config.get(CONF_UNIT_OF_MEASUREMENT)
    version = config.get(CONF_VERSION)
    username = config.get(CONF_USERNAME)
    authkey = config.get(CONF_AUTH_KEY)
    authproto = config.get(CONF_AUTH_PROTOCOL)
    privkey = config.get(CONF_PRIV_KEY)
    privproto = config.get(CONF_PRIV_PROTOCOL)
    accept_errors = config.get(CONF_ACCEPT_ERRORS)
    default_value = config.get(CONF_DEFAULT_VALUE)
    value_template = config.get(CONF_VALUE_TEMPLATE)

    if value_template is not None:
        value_template.hass = hass

    if version == "3":
        # SNMPv3: downgrade auth/priv protocol to "none" when the
        # corresponding key is absent
        if not authkey:
            authproto = "none"
        if not privkey:
            privproto = "none"

        request_args = [
            SnmpEngine(),
            UsmUserData(
                username,
                authKey=authkey or None,
                privKey=privkey or None,
                authProtocol=getattr(hlapi, MAP_AUTH_PROTOCOLS[authproto]),
                privProtocol=getattr(hlapi, MAP_PRIV_PROTOCOLS[privproto]),
            ),
            UdpTransportTarget((host, port)),
            ContextData(),
        ]
    else:
        # SNMPv1 / v2c: community-based security
        request_args = [
            SnmpEngine(),
            CommunityData(community, mpModel=SNMP_VERSIONS[version]),
            UdpTransportTarget((host, port)),
            ContextData(),
        ]

    # one-shot probe: abort setup on error unless errors are accepted
    errindication, _, _, _ = await getCmd(
        *request_args, ObjectType(ObjectIdentity(baseoid))
    )

    if errindication and not accept_errors:
        _LOGGER.error("Please check the details in the configuration file")
        return

    data = SnmpData(request_args, baseoid, accept_errors, default_value)
    async_add_entities([SnmpSensor(data, name, unit, value_template)], True)
class SnmpSensor(SensorEntity):
    """Representation of a SNMP sensor."""

    def __init__(self, data, name, unit_of_measurement, value_template):
        """Initialize the sensor.

        :param data: SnmpData helper that performs the actual polling.
        :param name: display name for the entity.
        :param unit_of_measurement: unit string, or None.
        :param value_template: optional template applied to the raw value.
        """
        self.data = data
        self._name = name
        self._state = None
        self._unit_of_measurement = unit_of_measurement
        self._value_template = value_template

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def native_value(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def native_unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement

    async def async_update(self):
        """Get the latest data and updates the states."""
        await self.data.async_update()
        value = self.data.value

        if value is None:
            value = STATE_UNKNOWN
        elif self._value_template is not None:
            # render the template against the raw value; fall back to
            # STATE_UNKNOWN when rendering fails
            value = self._value_template.async_render_with_possible_json_value(
                value, STATE_UNKNOWN
            )

        self._state = value
class SnmpData:
    """Poll a single SNMP OID and cache the most recent value."""

    def __init__(self, request_args, baseoid, accept_errors, default_value):
        """Store the prepared pysnmp request arguments and error policy."""
        self._request_args = request_args
        self._baseoid = baseoid
        self._accept_errors = accept_errors
        self._default_value = default_value
        self.value = None

    async def async_update(self):
        """Get the latest data from the remote SNMP capable host."""
        response = await getCmd(
            *self._request_args, ObjectType(ObjectIdentity(self._baseoid))
        )
        errindication, errstatus, errindex, restable = response
        had_error = errindication or errstatus

        if had_error and self._accept_errors:
            # errors are tolerated: fall back to the configured default
            self.value = self._default_value
        elif errindication:
            _LOGGER.error("SNMP error: %s", errindication)
        elif errstatus:
            _LOGGER.error(
                "SNMP error: %s at %s",
                errstatus.prettyPrint(),
                errindex and restable[-1][int(errindex) - 1] or "?",
            )
        else:
            # keep the last varbind's printable value
            for resrow in restable:
                self.value = resrow[-1].prettyPrint()
hunsteve/AIQGen | urls.py | Python | mit | 2,114 | 0.005676 | from django.conf.urls import url
from AIQGen.views.index import index
from AIQGen.views.astar import astar
from AIQGen.views.minimax import minimax
from AIQGen.views.printview import testPrintView
from AIQGen.views.problemprintview import problemPrintView
from AIQGen.views.test import testList, testCreate, testUpdate, testDelete, testProblemList, testProblemRemove, testProblemAdd
from AIQGen.views.problem import problemList, problemCreate, problemUpdate, problemDelete, problemSelect
from AIQGen.views.upload import upload
from AIQGen.whovisitedme import whovisitedme, listlog
urlpatterns = [
url(r'^$', index, name='index'),
url(r'astar', astar, name='astar'),
url(r'minimax', minimax, name='minimax'),
url(r'^testprintview/(?P<pk>\d+)$', testPrintView, name='test_printview'),
url(r'^problemprintview/(?P<pk>\d+)$', problemPrintView, name='problem_printview'),
url(r'^tests$', testList, name='test_list'),
url(r'^testnew$', testCreate, name='test_new'),
url(r'^testedit/(?P<pk>\d+)$', testUpdate, name='test_edit'),
url(r'^testdelete/(?P<pk>\d+)$', testDelete, name='test_d | elete'),
url(r' | ^testproblemlist/(?P<pk>\d+)$', testProblemList, name='test_problem_list'),
url(r'^testproblemremove/(?P<pk>\d+)$', testProblemRemove, name='test_problem_remove'),
url(r'^testproblemadd/(?P<test_key>\d+)/(?P<problem_key>\d+)$', testProblemAdd, name='test_problem_add'),
url(r'^problems$', problemList, name='problem_list'),
url(r'^problemselect/(?P<test_key>\d+)$', problemSelect, name='problem_select'),
url(r'^problemnew$', problemCreate, name='problem_new'),
url(r'^problemnew/(?P<test_key>\d+)$', problemCreate, name='problem_new'),
url(r'^problemedit/(?P<pk>\d+)$', problemUpdate, name='problem_edit'),
url(r'^problemedit/(?P<pk>\d+)/(?P<test_key>\d+)$', problemUpdate, name='problem_edit'),
url(r'^problemdelete/(?P<pk>\d+)$', problemDelete, name='problem_delete'),
url(r'^upload$', upload, name='upload'),
url(r'^log$', whovisitedme, name='whovisitedme'),
url(r'^listlog$', listlog, name='listlog'),
]
|
wholland/env | vim/runtime/bundle/ultisnips/plugin/UltiSnips/tests/test_diff.py | Python | mit | 4,789 | 0.006264 | #!/usr/bin/env python
# encoding: utf-8
import unittest
import os.path as p, sys; sys.path.append(p.join(p.dirname(__file__), ".."))
from _diff import diff, guess_edit
from geometry import Position
def transform(a, cmds):
    """Apply an edit script to the text *a* and return the edited text.

    Each command is a tuple ``(ctype, line, col, char)``: "D" deletes
    *char* at line/col (deleting a newline joins two physical lines,
    ignoring col) and "I" inserts *char* at line/col.  After every
    command the buffer is re-split so inserted newline characters
    become real line breaks.
    """
    lines = a.split("\n")
    for ctype, row, col, text in cmds:
        if ctype == "D":
            if text == '\n':
                # join the following physical line onto this one
                lines[row] += lines[row + 1]
                del lines[row + 1]
            else:
                current = lines[row]
                lines[row] = current[:col] + current[col + len(text):]
        elif ctype == "I":
            current = lines[row]
            lines[row] = current[:col] + text + current[col:]
        # normalise: turn any embedded newline characters into breaks
        lines = '\n'.join(lines).split('\n')
    return '\n'.join(lines)
import unittest
# Test Guessing {{{
class _BaseGuessing(object):
    """Mixin: run guess_edit() on the fixture attributes and compare.

    Subclasses define a/b (old and new buffer lines), initial_line, the
    previous and current cursor positions (ppos/pos) and the expected
    edit script (wanted).
    """
    def runTest(self):
        rv, es = guess_edit(self.initial_line, self.a, self.b, Position(*self.ppos), Position(*self.pos))
        self.assertEqual(rv, True)
        self.assertEqual(self.wanted, es)

class TestGuessing_Noop0(_BaseGuessing, unittest.TestCase):
    # cursor moved but buffers unchanged: empty edit script
    a, b = [], []
    initial_line = 0
    ppos, pos = (0, 6), (0, 7)
    wanted = ()

class TestGuessing_InsertOneChar(_BaseGuessing, unittest.TestCase):
    a, b = ["Hello World"], ["Hello World"]
    initial_line = 0
    ppos, pos = (0, 6), (0, 7)
    wanted = (
        ("I", 0, 6, " "),
    )

class TestGuessing_InsertOneChar1(_BaseGuessing, unittest.TestCase):
    a, b = ["Hello World"], ["Hello World"]
    initial_line = 0
    ppos, pos = (0, 7), (0, 8)
    wanted = (
        ("I", 0, 7, " "),
    )

class TestGuessing_BackspaceOneChar(_BaseGuessing, unittest.TestCase):
    a, b = ["Hello World"], ["Hello World"]
    initial_line = 0
    ppos, pos = (0, 7), (0, 6)
    wanted = (
        ("D", 0, 6, " "),
    )

class TestGuessing_DeleteOneChar(_BaseGuessing, unittest.TestCase):
    a, b = ["Hello World"], ["Hello World"]
    initial_line = 0
    ppos, pos = (0, 5), (0, 5)
    wanted = (
        ("D", 0, 5, " "),
    )
# End: Test Guessing }}}
class _Base(object):
    """Mixin: diff a -> b, replay the edit script, check both results.

    Subclasses define the input strings a and b and the exact edit
    script (wanted) that diff() is expected to produce.
    """
    def runTest(self):
        es = diff(self.a, self.b)
        tr = transform(self.a, es)
        self.assertEqual(self.b, tr)
        self.assertEqual(self.wanted, es)

class TestEmptyString(_Base, unittest.TestCase):
    a, b = "", ""
    wanted = ()

class TestAllMatch(_Base, unittest.TestCase):
    a, b = "abcdef", "abcdef"
    wanted = ()

class TestLotsaNewlines(_Base, unittest.TestCase):
    a, b = "Hello", "Hello\nWorld\nWorld\nWorld"
    wanted = (
        ("I", 0, 5, "\n"),
        ("I", 1, 0, "World"),
        ("I", 1, 5, "\n"),
        ("I", 2, 0, "World"),
        ("I", 2, 5, "\n"),
        ("I", 3, 0, "World"),
    )

class TestCrash(_Base, unittest.TestCase):
    a = 'hallo Blah mitte=sdfdsfsd\nhallo kjsdhfjksdhfkjhsdfkh mittekjshdkfhkhsdfdsf'
    b = 'hallo Blah mitte=sdfdsfsd\nhallo b mittekjshdkfhkhsdfdsf'
    wanted = (
        ("D", 1, 6, "kjsdhfjksdhfkjhsdfkh"),
        ("I", 1, 6, "b"),
    )

class TestRealLife(_Base, unittest.TestCase):
    a = 'hallo End Beginning'
    b = 'hallo End t'
    wanted = (
        ("D", 0, 10, "Beginning"),
        ("I", 0, 10, "t"),
    )

class TestRealLife1(_Base, unittest.TestCase):
    a = 'Vorne hallo Hinten'
    b = 'Vorne hallo Hinten'
    wanted = (
        ("I", 0, 11, " "),
    )

class TestWithNewline(_Base, unittest.TestCase):
    a = 'First Line\nSecond Line'
    b = 'n'
    wanted = (
        ("D", 0, 0, "First Line"),
        ("D", 0, 0, "\n"),
        ("D", 0, 0, "Second Line"),
        ("I", 0, 0, "n"),
    )

class TestCheapDelete(_Base, unittest.TestCase):
    a = 'Vorne hallo Hinten'
    b = 'Vorne Hinten'
    wanted = (
        ("D", 0, 5, " hallo"),
    )

class TestNoSubstring(_Base, unittest.TestCase):
    a, b = "abc", "def"
    wanted = (
        ("D", 0, 0, "abc"),
        ("I", 0, 0, "def"),
    )

class TestCommonCharacters(_Base, unittest.TestCase):
    a, b = "hasomelongertextbl", "hol"
    wanted = (
        ("D", 0, 1, "asomelongertextb"),
        ("I", 0, 1, "o"),
    )

class TestUltiSnipsProblem(_Base, unittest.TestCase):
    a = "this is it this is it this is it"
    b = "this is it a this is it"
    wanted = (
        ("D", 0, 11, "this is it"),
        ("I", 0, 11, "a"),
    )

class MatchIsTooCheap(_Base, unittest.TestCase):
    a = "stdin.h"
    b = "s"
    wanted = (
        ("D", 0, 1, "tdin.h"),
    )

class MultiLine(_Base, unittest.TestCase):
    a = "hi first line\nsecond line first line\nsecond line world"
    b = "hi first line\nsecond line k world"
    wanted = (
        ("D", 1, 12, "first line"),
        ("D", 1, 12, "\n"),
        ("D", 1, 12, "second line"),
        ("I", 1, 12, "k"),
    )

if __name__ == '__main__':
    unittest.main()
    # k = TestEditScript()
    # unittest.TextTestRunner().run(k)
|
UNINETT/nav | python/nav/web/portadmin/forms.py | Python | gpl-2.0 | 1,575 | 0 | #
# Copyright (C) 2014 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Forms for PortAdmin"""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms_foundation.layout import Layout, Row, Column, Submit
class SearchForm(forms.Form):
"""Form for searching for ip-devices and interfaces"""
query = forms.CharField(
label='',
widget=forms.TextInput(
attrs={'placeholder': 'Search for ip device or interface'}))
def __init__(self, *args, **kwargs):
super(SearchForm, self).__init__(*args, **kwargs)
self.helper = | FormHelper()
self.helper.fo | rm_action = 'portadmin-index'
self.helper.form_method = 'GET'
self.helper.layout = Layout(
Row(
Column('query', css_class='medium-9'),
Column(Submit('submit', 'Search', css_class='postfix'),
css_class='medium-3'),
css_class='collapse'
)
)
|
alexandonian/ptutils | tests/_test_config.py | Python | mit | 600 | 0.008333 | import re
import sys
import yaml
import pprint
import torch.nn as nn
import torch.optim as optim
sys.path.insert(0, '.')
sys.path.insert(0, '..')
# from ptutils.model import AlexNet
from ptutils.session import Session
from ptutils.utils import frozendict
# from ptutils.module import Mod | ule, State, Configuration
from ptutils.base import Module, Configurati | on
from ptutils.database import DBInterface, MongoInterface
from ptutils.data import ImageNetProvider, ImageNet, HDF5DataReader
CONFIG_FILE = 'resources/config.yml'
c = Configuration(CONFIG_FILE)
sess = Session(c)
# print(c)
# print(sess)
|
ShenggaoZhu/django-sortedone2many | sortedone2many/utils.py | Python | mit | 1,509 | 0.002651 | # -*- coding: utf-8 -*-
from django.utils import six
from sortedone2many.fields import SortedOneToManyField
def inject_extra_field_to_model(from_model, field_name, field):
if not isinstance(from_model, six.string_types):
field.contribute_to_class(from_model, field_name)
return
raise Exception('from_model must be a Model Class')
# app_label, model_name = from_model.split('.')
# from django.apps import apps
# try:
# from_model_cls = apps.get_registered_model(app_label, model_name)
# field.contribute_to_class(from_model_cls, field_name)
# except:
# from django.db.models.signals import class_prepared
# def add_field(sender, **kwargs):
# if sender.__name__ == model_name and sender._meta.app_label == app_label:
# | field.contribute_to_class(sender, field_name)
# # TODO: `add_field` is never called. `class_prepared` already fired or never fire??
# class_prepared.connect(add_field)
def add_sorted_one2many_relation(model_one,
model_many,
field_name_on_model_one=None,
related_name_on_model_many=No | ne):
field_name = field_name_on_model_one or model_many._meta.model_name + '_set'
related_name = related_name_on_model_many or model_one._meta.model_name
field = SortedOneToManyField(model_many, related_name=related_name)
field.contribute_to_class(model_one, field_name)
|
ESSolutions/ESSArch_Core | ESSArch_Core/essxml/ProfileMaker/migrations/0010_auto_20170808_1114.py | Python | gpl-3.0 | 573 | 0 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-08-08 09:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ProfileMaker', '0009_auto_20161020_0959'),
]
operations = [
| migrations.AddField(
model_name='extensionpackage',
name='nsmap',
field=models.JSONField(default={}),
),
migrations.AddField(
model_name='templatepackage',
name='nsmap',
field=models.JSO | NField(default={}),
),
]
|
reyoung/Paddle | python/paddle/fluid/tests/unittests/test_dropout_op.py | Python | apache-2.0 | 5,697 | 0 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid.core as core
from op_test import OpTest
class TestDropoutOp(OpTest):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
self.attrs = {'dropout_prob': 0.0, 'fix_seed': True, 'is_test': False}
self.outputs = {
'Out': self.inputs['X'],
'Mask': np.ones((32, 64)).astype('float32')
}
def test_check_output(self):
self.check_output()
def test_check_grad_normal(self):
self.check_grad(['X'], 'Out', max_relative_error=0.05)
class TestDropoutOp2(TestDropoutOp):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
self.attrs = {'dropout_prob': 1.0, 'fix_seed': True, 'is_test': False}
self.outputs = {
'Out': np.zeros((32, 64)).astype('float32'),
'Mask': np.zeros((32, 64)).astype('float32')
}
class TestDropoutOp3(TestDropoutOp):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64, 2)).astype("float32")}
self.attrs = {'dropout_prob': 0.0, 'fix_seed': True, 'is_test': False}
self.outputs = {
'Out': self.inputs['X'],
'Mask': np.ones((32, 64, 2)).astype('float32')
}
class TestDropoutOp4(OpTest):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
self.attrs = {'dropout_prob': 0.35, 'fix_seed': True, 'is_test': True}
self.outputs = {
'Out': self.inputs['X'] * (1.0 - self.attrs['dropout_prob'])
}
def test_check_output(self):
self.check_output()
class TestDropoutOp5(OpTest):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64, 3)).astype("float32")}
self.attrs = {'dropout_prob': 0.75, 'is_test': True}
self.outputs = {
'Out': self.inputs['X'] * (1.0 - self.attrs['dropout_prob'])
}
def test_check_output(self):
self.check_output()
class TestDropoutOp6(TestDropoutOp):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
self.attrs = {
'dropout_prob': 1.0,
'fix_seed': True,
'is_test': False,
'dropout_implementation': 'upscale_in_train'
}
self.outputs = {
'Out': np.zeros((32, 64)).astype('float32'),
'Mask': np.zeros((32, 64)).astype('float32')
}
class TestDropoutOp7(TestD | ropoutOp):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64, 2 | )).astype("float32")}
self.attrs = {
'dropout_prob': 0.0,
'fix_seed': True,
'is_test': False,
'dropout_implementation': 'upscale_in_train'
}
self.outputs = {
'Out': self.inputs['X'],
'Mask': np.ones((32, 64, 2)).astype('float32')
}
class TestDropoutOp8(OpTest):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64)).astype("float32")}
self.attrs = {
'dropout_prob': 0.35,
'fix_seed': True,
'is_test': True,
'dropout_implementation': 'upscale_in_train'
}
self.outputs = {'Out': self.inputs['X']}
def test_check_output(self):
self.check_output()
class TestDropoutOp9(OpTest):
def setUp(self):
self.op_type = "dropout"
self.inputs = {'X': np.random.random((32, 64, 3)).astype("float32")}
self.attrs = {
'dropout_prob': 0.75,
'is_test': True,
'dropout_implementation': 'upscale_in_train'
}
self.outputs = {'Out': self.inputs['X']}
def test_check_output(self):
self.check_output()
class TestFP16DropoutOp(OpTest):
def setUp(self):
self.op_type = "dropout"
self.init_test_case()
x = np.random.random(self.input_size).astype("float16")
out = x * (1.0 - self.prob)
self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(x)}
self.attrs = {
'dropout_prob': self.prob,
'fix_seed': self.fix_seed,
'is_test': True
}
self.outputs = {'Out': out}
def init_test_case(self):
self.input_size = [32, 64]
self.prob = 0.35
self.fix_seed = True
def test_check_output(self):
if core.is_compiled_with_cuda() and core.op_support_gpu("dropout"):
self.check_output_with_place(core.CUDAPlace(0), atol=1e-3)
class TestFP16DropoutOp2(TestFP16DropoutOp):
def init_test_case(self):
self.input_size = [32, 64, 3]
self.prob = 0.75
self.fix_seed = False
if __name__ == '__main__':
unittest.main()
|
sderenth/Scrapy-Draft-Data | RealGM/PlayerGameLogs.py | Python | mit | 3,667 | 0.00709 | import scrapy
import psycopg2
from bs4 import BeautifulSoup
try:
conn = psycopg2.connect(database='my_database', user='user_name', password='password', host='localhost')
print("Connected Foo!!")
except:
print("I am unable to connect to the database.")
cur = conn.cursor()
cur.execute("""SELECT playerid, season FROM prenbastats2003to2012 ORDER BY playerid, season;""")
player_seasons = cur.fetchall()
player_season_urls = []
for player_season in player_seasons:
name = ''.join([i for i in player_season[0] if not i.isdigit()])
num = ''.join([i for i in player_season[0] if i.isdigit()])
url = 'http://basketball.realgm.com/player/' + name + '/GameLogs/' + num + '/NCAA/' + str(player_season[1])
player_season_urls.append(url)
class RealGMSpider(scrapy.Spider):
name = "RealGMGameLogs"
start_urls = player_season_urls
def parse(self, response):
soup = BeautifulSoup(response.text, "lxml")
data_rows = soup.tbody.find_all('tr')
game_data = [[td.getText() for td in data_rows[i].findAll('td')]
for i in range(len(data_rows))]
season = int(response._url.split('/')[-1])
for game in game_data:
game.insert(0, season)
table_name = response._url.split('/')[-5] + response._url.split('/')[-3]
cur.execute("SELECT EXISTS(SELECT * FROM information_schema.tables WHERE table_name = '" + table_name + "');")
if cur.fetchone()[0] == False:
cur.execute("""CREATE TABLE \"""" + table_name + """\" (
"Season" int,
"Date" text,
"Team" text,
"Opponent" text,
"W/L" text,
"Status" text,
"Pos" text,
"MIN" text | ,
"FGM" int,
"FGA" int,
"FG%" float,
"3PM" int,
"3PA" int,
"3P%" float,
"FTM" int, |
"FTA" int,
"FT%" float,
"ORB" int,
"DRB" int,
"REB" int,
"AST" int,
"STL" int,
"BLK" int,
"PTS" int,
"FIC" float,
"PF" int,
"TOV" int,
id serial PRIMARY key);""")
for game in game_data:
gameTuple = tuple(game)
cur.execute("""INSERT INTO \"""" + table_name + """\" VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""", gameTuple)
conn.commit()
else:
for game in game_data:
gameTuple = tuple(game)
cur.execute("""INSERT INTO \"""" + table_name + """\" VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""", gameTuple)
conn.commit()
|
stianrh/askbot-nordic | askbot/migrations/0014_rename_schema_from_forum_to_askbot.py | Python | gpl-3.0 | 55,671 | 0.008083 | # encoding: utf-8
import os
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
app_dir_name = os.path.basename(os.path.dirname(os.path.dirname(__file__)))
class Migration(SchemaMigration):
def forwards(self, orm):
if app_dir_name == 'forum':
try:
db.rename_table('forum_anonymousanswer', 'askbot_anonymousanswer')
db.rename_table('forum_anonymousquestion', 'askbot_anonymousquestion')
db.rename_table('forum_emailfeedsetting', 'askbot_emailfeedsetting')
db.rename_table('forum_markedtag', 'askbot_markedtag')
db.rename_table('forum_questionview', 'askbot_questionview')
db.rename_table('forum_validationhash', 'askbot_validationhash')
except:
pass
def backwards(self, orm):
if app_dir_name == 'forum':
db.rename_table('askbot_anonymousanswer', 'forum_anonymousanswer')
db.rename_table('askbot_anonymousquestion', 'forum_anonymousquestion')
db.rename_table('askbot_emailfeedsetting', 'forum_emailfeedsetting')
db.rename_table('askbot_markedtag', 'forum_markedtag')
db.rename_table('askbot_questionview', 'forum_questionview')
db.rename_table('askbot_validationhash', 'forum_validationhash')
if app_dir_name == 'forum':
models = {
'forum.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'forum.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['forum.Question']"}),
'session_key': ('django.db.models.fields.Cha | rField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'forum.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
| 'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'forum.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['forum.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'forum.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['forum.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models |
mrquim/mrquimrepo | repo/plugin.video.salts/scrapers/treasureen_scraper.py | Python | gpl-2.0 | 3,727 | 0.004293 | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import urlparse
import kodi
import log_utils # @UnusedImport
import dom_parser2
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
from salts_lib.utils2 import i18n
import scraper
BASE_URL = 'http://treasureen.com'
class Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@class | method
def provides(cls):
return frozenset([VIDEO_TYPES.MOVIE])
@classmethod
def get_name(cls):
return 'treasureen'
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if not source_url or source_url == FORCE_NO_MATCH: return hosters
url = scraper_utils.urljoin(self.base_url, source_url)
html = self._http_get(url, require_debrid=False, cache_limit=.5)
title = dom_parser2.parse_dom(html, 'meta', {'prop | erty': 'og:title'}, req='content')
meta = scraper_utils.parse_movie_link(title[0].attrs['content']) if title else {}
fragment = dom_parser2.parse_dom(html, 'p', {'class': 'download_message'})
if fragment:
for attrs, _content in dom_parser2.parse_dom(fragment[0].content, 'a', req='href'):
source = attrs['href']
if scraper_utils.excluded_link(source): continue
host = urlparse.urlparse(source).hostname
quality = scraper_utils.height_get_quality(meta.get('height', 480))
hoster = {'multi-part': False, 'host': host, 'class': self, 'views': None, 'url': source, 'rating': None, 'quality': quality, 'direct': True}
if 'format' in meta: hoster['format'] = meta['format']
hosters.append(hoster)
return hosters
def get_url(self, video):
return self._blog_get_url(video)
@classmethod
def get_settings(cls):
settings = super(cls, cls).get_settings()
settings = scraper_utils.disable_sub_check(settings)
name = cls.get_name()
settings.append(' <setting id="%s-filter" type="slider" range="0,180" option="int" label=" %s" default="30" visible="eq(-3,true)"/>' % (name, i18n('filter_results_days')))
settings.append(' <setting id="%s-select" type="enum" label=" %s" lvalues="30636|30637" default="0" visible="eq(-4,true)"/>' % (name, i18n('auto_select')))
return settings
def search(self, video_type, title, year, season=''): # @UnusedVariable
html = self._http_get(self.base_url, params={'s': title}, require_debrid=False, cache_limit=8)
post_pattern = 'class="post-thumbnail">\s*<a[^>]+href="(?P<url>[^"]+)[^>]*[^>]+title="(?P<post_title>[^"]+).*?datetime="(?P<date>[^"]+)'
date_format = '%Y-%m-%d %H:%M:%S'
return self._blog_proc_results(html, post_pattern, date_format, video_type, title, year)
|
jbalogh/zamboni | apps/paypal/__init__.py | Python | bsd-3-clause | 10,184 | 0.000295 | import contextlib
import socket
import urllib
import urllib2
import urlparse
import re
from django.conf import settings
from django.utils.http import urlencode
import commonware.log
from statsd import statsd
from amo.helpers import absolutify
from amo.urlresolvers import reverse
class PaypalError(Exception):
id = None
class AuthError(PaypalError):
pass
errors = {'520003': AuthError}
paypal_log = commonware.log.getLogger('z.paypal')
def should_ignore_paypal():
"""
Returns whether to skip PayPal communications for development
purposes or not.
"""
return settings.DEBUG and 'sandbox' not in settings.PAYPAL_PERMISSIONS_URL
def get_paykey(data):
"""
Gets a paykey from Paypal. Need to pass in the following in data:
pattern: the reverse pattern to resolve
email: who the money is going to (required)
amount: the amount of money (required)
ip: ip address of end user (required)
uuid: contribution_uuid (required)
memo: any nice message
qs: anything you want to append to the complete or cancel(optional)
"""
complete = reverse(data['pattern'], args=[data['slug'], 'complete'])
cancel = reverse(data['pattern'], args=[data['slug'], 'cancel'])
qs = {'uuid': data['uuid']}
if 'qs' in data:
qs.update(data['qs'])
uuid_qs = urllib.urlencode(qs)
paypal_data = {
'actionType': 'PAY',
'requestEnvelope.errorLanguage': 'US',
'currencyCode': 'USD',
'cancelUrl': absolutify('%s?%s' % (cancel, uuid_qs)),
'returnUrl': absolutify('%s?%s' % (complete, uuid_qs)),
'receiverList.receiver(0).email': data['email'],
'receiverList.receiver(0).amount': data['amount'],
'receiverList.receiver(0).invoiceID': 'mozilla-%s' % data['uuid'],
'receiverList.receiver(0).primary': 'TRUE',
'receiverList.receiver(0).paymentType': 'DIGITALGOODS',
'trackingId': data['uuid']}
if data.get('ipn', True):
paypal_data['ipnNotificationUrl'] = absolutify(reverse('amo.paypal'))
if data.get('memo'):
paypal_data['memo'] = data['memo']
with statsd.timer('paypal.paykey.retrieval'):
try:
response = _call(settings.PAYPAL_PAY_URL + 'Pay', paypal_data,
ip=data['ip'])
except AuthError, error:
paypal_log.error('Authentication error: %s' % error)
raise
return response['payKey']
def check_purchase(paykey):
"""
When a purchase is complete checks paypal that the purchase has gone
through.
"""
with statsd.timer('paypal.payment.details'):
try:
response = _call(settings.PAYPAL_PAY_URL + 'PaymentDetails',
{'payKey': paykey})
except PaypalError:
paypal_log.error('Payment details error', exc_info=True)
return False
return response['status']
def refund(txnid):
"""
Refund a payment.
Arguments: transaction id of payment to refund
Returns: A list of dicts containing the refund info for each
receiver of the original payment.
"""
OK_STATUSES = ['REFUNDED', 'REFUNDED_PENDING']
with statsd.timer('paypal.payment.refund'):
try:
response = _call(settings.PAYPAL_PAY_URL + 'Refund',
{'transactionID': txnid})
except PaypalError:
paypal_log.error('Refund error', exc_info=True)
raise
responses = []
for k in response:
g = re.match('refundInfoList.refundInfo\((\d+)\).(.*)', k)
if g:
i = int(g.group(1))
subkey = g.group(2)
while i >= len(responses):
responses.append({})
responses[i][subkey] = response[k]
for d in responses:
status = '%s: %s' % (d['receiver.email'], d['refundStatus'])
if d['refundStatus'] not in OK_STATUSES:
raise PaypalError('Bad refund status for %s' % status)
paypal_log.debug('Refund done for transaction %s, status: %s'
% (txnid, status))
return responses
def check_refund_permission(token):
"""
Asks PayPal whether the PayPal ID for this account has granted
refund permission to us.
"""
# This is set in settings_test so we don't start calling PayPal
# by accident. Explicitly set this in your tests.
if not settings.PAYPAL_PERMISSIONS_URL:
return False
paypal_log.debug('Checking refund permission for token: %s..'
% token[:10])
try:
with statsd.timer('paypal.permissions.refund'):
r = _call(settings.PAYPAL_PERMISSIONS_URL + 'GetPermissions',
{'token': token})
except PaypalError, error:
paypal_log.debug('Paypal returned error for token: %s.. error: %s'
% (token[:10], error))
return False
# in the future we may ask for other permissions so let's just
# make sure REFUND is one of them.
paypal_log.debug('Paypal returned permissions for token: %s.. perms: %s'
% (token[:10], r))
return 'REFUND' in [v for (k, v) in r.iteritems()
if k.startswith('scope')]
def refund_permission_url(addon):
"""
Send permissions request to PayPal for refund privileges on
this addon's paypal account. Returns URL on PayPal site to visit.
"""
# This is set in settings_test so we don't start calling PayPal
# by accident. Explicitly set this in your tests.
if not settings.PAYPAL_PERMISSIONS_URL:
return ''
paypal_log.debug('Getting refund permission URL for addon: %s' % addon.pk)
with statsd.timer('paypal.permissions.url'):
url = reverse('devhub.addons.acquire_refund_permission',
args=[addon.slug])
try:
r = _call(settings.PAYPAL_PERMISSIONS_URL + 'RequestPermissions',
{'scope': 'REFUND', 'callback': absolutify(url)})
except PaypalError, e:
paypal_log.debug('Error on refund permission URL addon: %s, %s' %
(addon.pk, e))
if 'malformed' in str(e):
# PayPal is very picky about where they redirect users to.
# If you try and create a PayPal permissions URL on a
# zamboni that has a non-standard port number or a
# non-standard TLD, it will blow up with an error. We need
# to be able to at least visit these pages and alter them
# in dev, so this will give you a broken token that doesn't
# work, but at least the page will function.
r = {'token': 'wont-work-paypal-doesnt-like-your-domain'}
else:
raise
return (settings.PAYPAL_CGI_URL +
'?cmd=_grant-permission&request_token=%s' % r['token'])
def get_permissions_token(request_token, verification_code):
"""
Send request for permissions token, after user has granted the
requested permissions via the PayPal page we redirected them to.
"""
with statsd.timer('paypal.permissions.token'):
r = _call(settings.PAYPAL_PERMISSIONS_URL + 'GetAccessToken',
{'token' | : request_token, 'verifier': verification_code})
return r['token']
def _call(url, paypal_data, ip=None):
request = urllib2.Request(url)
if 'requestEnvelope.errorLanguage' not in paypal_data:
paypal_data['requestEnvelope.errorLanguage'] = 'en_US'
for key, value in [
('security-userid | ', settings.PAYPAL_EMBEDDED_AUTH['USER']),
('security-password', settings.PAYPAL_EMBEDDED_AUTH['PASSWORD']),
('security-signature', settings.PAYPAL_EMBEDDED_AUTH['SIGNATURE']),
('application-id', settings.PAYPAL_APP_ID),
('request-data-format', 'NV'),
('response-data-format', 'NV')]:
request.add_header('X-PAYPAL-%s' % key.upper(), value)
if ip:
request.add_header('X-PAYPAL-DEVICE-IPADDRESS', ip)
opener = urllib2.b |
cnbeining/onedrivecmd | onedrivecmd/utils/session.py | Python | agpl-3.0 | 8,409 | 0.010703 | #!/usr/bin/env python
#coding:utf-8
# Author: Beining --<i@cnbeining.com>
# Purpose: Session helper for onedrivecmd
# Created: 09/24/2016
import onedrivesdk
import logging
import json
from time import time
try:
from static import *
from helper_file import *
except ImportError:
from .static import *
from .helper_file import *
### Session
def get_access_token(client):
"""OneDriveClient->str
Get the access token that shall be used with all the request that
would require authorization.
This is just a helper function to assist with self-defined
downloading and uploading.
"""
return str(client.auth_provider.access_token)
def refresh_token(client):
"""OneDriveClient->OneDriveClient
Refresh token of the client.
The default expire time of one token is 3600 secs.
"""
client.auth_provider.refresh_token()
return
def token_time_to_live(client):
"""OneDriveClient->int
Get the expiration time of token in sec.
We have to make sure the token is available.
"""
return int(client.auth_provider._session._expires_at - time())
## Make our own even worse Session
def save_session(client, path = ''):
"""Client, str->None
Save the current status to a JSON file
so can be loaded later on to resume the
current status.
Compared to pickle,
save whether the client is Business or personal account,
and if Business, save its API endpoint
so we can save 1 API call to retrive the endpoint.
The session JSON file is as important as the user's password.
"""
if client.base_url == 'https://api.onedrive.com/v1.0/':
# Normal
status_dict = {
'is_business': False,
'client_id': client.auth_provider._client_id,
'client.base_url': client.base_url, #'https://api.onedrive.com/v1.0/'
'client.auth_provider.auth_token_url': client.auth_provider.auth_token_url, #'https://login.live.com/oauth20_token.srf'
'client.auth_provider.auth_server_url': client.auth_provider.auth_server_url, #'https://login.live.com/oauth20_authorize.srf'
'client.auth_provider.scopes': client.auth_provider.scopes,
}
status_dict['client.auth_provider._session'] = dict_merge(client.auth_provider._session.__dict__,
{'_expires_at': int(client.auth_provider._session._expires_at),
'scope_string': ' '.join([str(i) for i in client.auth_provider._session.scope]),
})
else:
# Business/office 365
status_dict = {
'is_business': True,
'client_id': client.auth_provider._client_id,
'client.base_url': client.base_url, #'https://{.....}.sharepoint.com/_api/v2.0/'
'client.auth_provider.auth_token_url': client.auth_provider.auth_token_url, #'https://login.microsoftonline.com/common/oauth2/token'
'client.auth_provider.auth_server_url': client.auth_provider.auth_server_url[0], #'https://login.microsoftonline.com/common/oauth2/authorize'
'client.auth_provider.scopes': client.auth_provider.scopes, # empty for business
}
status_dict['client.auth_provider._session'] = dict_merge(client.auth_provider._session.__dict__,
{'_expires_at': int(client.auth_provider._session._expires_at),
'scope_string': ' '.join([str(i) for i in client.auth_provider._session.scope]),
})
status = json.dumps(status_dict)
with open(path, "w+") as session_file:
session_file.write(status)
return
def load_session(client, path = ''):
    """Rebuild an authenticated OneDrive client from a saved JSON session file.

    NOTE(review): the ``client`` argument is unused (kept so existing call
    sites keep working); the returned client is built entirely from the file.

    :param client: ignored.
    :param path: path of the JSON file written by ``save_session``.
    :returns: a ready-to-use ``onedrivesdk.OneDriveClient`` pointing at the
        API endpoint stored in the session file.
    """
    def make_session_from_dict(status_dict):
        # Reconstruct an SDK Session from the saved dict.  The Session
        # constructor wants a relative "expires in" value, so convert the
        # stored absolute _expires_at back using the current time.
        sess = status_dict['client.auth_provider._session']
        return onedrivesdk.auth_provider.Session(
            sess['token_type'],
            sess['_expires_at'] - time(),
            sess['scope_string'],
            sess['access_token'],
            sess['client_id'],
            sess['auth_server_url'],
            sess['redirect_uri'],
            refresh_token=sess['refresh_token'],
            client_secret=sess['client_secret'])
    ## Read the session file; it is as sensitive as the user's password.
    try:
        with open(path, 'r') as session_file:
            status_dict = json.loads(session_file.read())
    except IOError as e:
        # File missing or unreadable: nothing we can do without credentials.
        logging.fatal(e.strerror)
        logging.fatal('Cannot read the session file!')
        raise SystemExit  # more robust than bare exit(), which needs the site module
    ## Business (Office 365) and personal accounts use different auth URLs.
    if status_dict['is_business']:
        http_provider = onedrivesdk.HttpProvider()
        auth_provider = onedrivesdk.AuthProvider(http_provider,
                            client_id_business,  # module-level constant -- TODO confirm
                            auth_server_url=status_dict['client.auth_provider.auth_server_url'],
                            auth_token_url=status_dict['client.auth_provider.auth_token_url'])
    else:
        # Personal account.
        http_provider = onedrivesdk.HttpProvider()
        auth_provider = onedrivesdk.AuthProvider(
            http_provider=http_provider,
            client_id=status_dict['client_id'],
            scopes=scopes)  # module-level default scopes -- TODO confirm
    ## Inject the saved session and refresh it so it is immediately usable.
    auth_provider._session = make_session_from_dict(status_dict)
    auth_provider.refresh_token()
    ## The saved base_url preserves the account's API endpoint, saving a call.
    return onedrivesdk.OneDriveClient(status_dict['client.base_url'], auth_provider, http_provider)
if __name__=='__main__':
pass
'''
The old way that save the whole session in a pickle file.
Replaced by saving more information in JSON
in order to know whether is Business account and its API endpoint.
# def save_session(client, path = ''):
# """OneDriveClient, str->None
# Save the session info in a pickle file.
# Not safe, but whatever.
# """
# client.auth_provider.save_session(path = path)
# return
# def load_session(client, path = ''):
# """str->OneDriveClient
# Determine whether the session is a normal or Business one,
# load a session from the storaged pickle,
# then refresh so the session is available to use immediately.
# """
# if not os.path.isfile(path):
# logging.error('Session dump path does not exist')
# raise Exception
# # look inside the pickle to determine whether is normal or Business
# session_standalone =onedrivesdk.auth_provider.Session.load_session(path = path)
# if session_standalone.auth_server_url == 'https://login.microsoftonline.com/common/oauth2/token':
# # Business
# http = onedrivesdk.HttpProvider()
# auth = onedrivesdk.AuthProvider(http,
# client_id_business ,
# auth_server_url=auth_server_url,
| |
from machine import Pin
from machine import RTC
import time
import os
"""
Execute it like this:
python3 run-tests --target wipy --device 192.168.1.1 ../cc3200/tools/smoke.py
"""
# GPIO numbers that are safe to reconfigure on the CC3200 test board.
pin_map = [23, 24, 11, 12, 13, 14, 15, 16, 17, 22, 28, 10, 9, 8, 7, 6, 30, 31, 3, 0, 4, 5]
# Random payload for the filesystem round-trip check.
test_bytes = os.urandom(1024)
def test_pin_read(pull):
    """Enable *pull* on every pin in pin_map and print the value read back."""
    for p in pin_map:
        pin = Pin("GP" + str(p), mode=Pin.IN, pull=pull)
        # With the pull enabled the pin should read the pulled level.
        print(pin())
def test_pin_shorts(pull):
    """Detect shorts between pins by flipping one pull at a time."""
    if pull == Pin.PULL_UP:
        pull_inverted = Pin.PULL_DOWN
    else:
        pull_inverted = Pin.PULL_UP
    # Start with every pin pulled the opposite way.
    for p in pin_map:
        pin = Pin("GP" + str(p), mode=Pin.IN, pull=pull_inverted)
    # Flip pins to *pull* one at a time; a short to a neighbour would drag
    # the reading towards the inverted level.
    i = 0
    while i < len(pin_map):
        pin = Pin("GP" + str(pin_map[i]), mode=Pin.IN, pull=pull)
        # Restore the previously flipped pin (index -1 on the first pass
        # re-applies the inverted pull to the last pin, which is harmless).
        Pin("GP" + str(pin_map[i - 1]), mode=Pin.IN, pull=pull_inverted)
        i += 1
        # read the pin value
        print(pin())
# GPIO checks.
test_pin_read(Pin.PULL_UP)
test_pin_read(Pin.PULL_DOWN)
test_pin_shorts(Pin.PULL_UP)
test_pin_shorts(Pin.PULL_DOWN)
# Filesystem checks: create a directory, round-trip a file, clean up.
os.mkdir("/flash/test")
os.chdir("/flash/test")
print(os.getcwd())
# create a new file and verify the write length
f = open("test.txt", "w")
n_w = f.write(test_bytes)
print(n_w == len(test_bytes))
f.close()
f = open("test.txt", "r")
r = bytes(f.read(), "ascii")
# check that we can write and read it correctly
print(r == test_bytes)
f.close()
os.remove("test.txt")
os.chdir("..")
os.rmdir("test")
ls = os.listdir()
print("test" not in ls)
print(ls)
# RTC check: wait for a quiet sub-second point, then verify that exactly
# one second elapses across a 1000 ms sleep.
rtc = RTC()
while rtc.now()[6] > 800:
    pass
time1 = rtc.now()
time.sleep_ms(1000)
time2 = rtc.now()
print(time2[5] - time1[5] == 1)
print(time2[6] - time1[6] < 5000)  # microseconds
|
noam09/deluge-telegramer | telegramer/include/telegram/userprofilephotos.py | Python | gpl-3.0 | 2,091 | 0.002391 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2018
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram UserProfilePhotos."""
from telegram import PhotoSize, TelegramObject
class UserProfilePhotos(TelegramObject):
    """This object represent a user's profile pictures.
    Attributes:
        total_count (:obj:`int`): Total number of profile pictures.
        photos (List[List[:class:`telegram.PhotoSize`]]): Requested profile pictures.
    Args:
        total_count (:obj:`int`): Total number of profile pictures the target user has.
        photos (List[List[:class:`telegram.PhotoSize`]]): Requested profile pictures (in up to 4
            sizes each).
    """
    def __init__(self, total_count, photos, **kwargs):
        # Required
        self.total_count = int(total_count)
        self.photos = photos
    @classmethod
    def de_json(cls, data, bot):
        """Deserialize a dict (parsed Bot API JSON) into a UserProfilePhotos."""
        if not data:
            return None
        data = super(UserProfilePhotos, cls).de_json(data, bot)
        # Each entry is itself a list of PhotoSize objects (one per size).
        data['photos'] = [PhotoSize.de_list(photo, bot) for photo in data['photos']]
        return cls(**data)
    def to_dict(self):
        """Serialize back to a JSON-compatible dict."""
        data = super(UserProfilePhotos, self).to_dict()
        data['photos'] = []
        for photo in self.photos:
            data['photos'].append([x.to_dict() for x in photo])
        return data
|
citrix-openstack-build/trove | trove/extensions/security_group/views.py | Python | apache-2.0 | 3,880 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from trove.openstack.common import log as logging
import os
LOG = logging.getLogger(__name__)
def _base_url(req):
    # Root URL of this API deployment; used as the prefix for resource links.
    return req.application_url
class SecurityGroupView(object):
    """Serializes one security group (plus its rules) for API responses."""
    def __init__(self, secgroup, rules, req, tenant_id):
        self.secgroup = secgroup
        self.rules = rules
        self.request = req
        self.tenant_id = tenant_id
    def _build_links(self):
        """Build the links for the secgroup"""
        base_url = _base_url(self.request)
        href = os.path.join(base_url, self.tenant_id,
                            "security-groups", str(self.secgroup['id']))
        links = [
            {
                'rel': 'self',
                'href': href
            }
        ]
        return links
    def _build_rules(self):
        # Project each rule to its JSON form; rule ids are stringified.
        rules = []
        if self.rules is None:
            return rules
        for rule in self.rules:
            rules.append({'id': str(rule['id']),
                          'protocol': rule['protocol'],
                          'from_port': rule['from_port'],
                          'to_port': rule['to_port'],
                          'cidr': rule['cidr'],
                          })
        return rules
    def data(self):
        """Return the full JSON-ready representation of the group."""
        return {"id": self.secgroup['id'],
                "name": self.secgroup['name'],
                "description": self.secgroup['description'],
                "instance_id": self.secgroup['instance_id'],
                "rules": self._build_rules(),
                "links": self._build_links(),
                "created": self.secgroup['created'],
                "updated": self.secgroup['updated']
                }
    def show(self):
        return {"security_group": self.data()}
    def create(self):
        return self.show()
class SecurityGroupsView(object):
    """Serializes a collection of security groups for API responses."""
    def __init__(self, secgroups, rules_dict, req, tenant_id):
        self.secgroups = secgroups
        self.rules = rules_dict
        self.request = req
        self.tenant_id = tenant_id
    def _rules_for(self, secgroup):
        # Rules for one group, or None when no rule map was supplied.
        if self.rules is None:
            return None
        return self.rules[secgroup['id']]
    def list(self):
        """Return the JSON-ready list of every group's representation."""
        groups_data = [
            SecurityGroupView(secgroup, self._rules_for(secgroup),
                              self.request, self.tenant_id).data()
            for secgroup in self.secgroups
        ]
        return {"security_groups": groups_data}
class SecurityGroupRulesView(object):
    """Serializes a newly created security group rule for API responses."""
    def __init__(self, rule, req, tenant_id):
        self.rule = rule
        self.request = req
        self.tenant_id = tenant_id
    def _build_create(self):
        # Wrap the rule fields in the documented response envelope.
        rule = self.rule
        body = {
            "id": str(rule['id']),
            "security_group_id": rule['group_id'],
            "protocol": rule['protocol'],
            "from_port": rule['from_port'],
            "to_port": rule['to_port'],
            "cidr": rule['cidr'],
            "created": rule['created'],
        }
        return {"security_group_rule": body}
    def create(self):
        return self._build_create()
|
michaelmurdock/mmlib | fname_info.py | Python | bsd-2-clause | 11,171 | 0.017545 | # fname_info.py
'''
fname.py module wraps the cfname class, which is used to simplify the task
of dealing with filenames.
'''
from __future__ import print_function
import os.path
class cfname_info(object):
    '''
    Represents a single filename decomposed into its parts.

    Terminology:
        full   : fully path-qualified
        suffix : the part of the filename before the extension
        fullname = dirname + filename
        filename = basename + suffix + ext
        parentname : last component of dirname

    Construct with either a (path-qualified) fullname, or dirname plus
    filename, or dirname plus basename/suffix/ext.  A supplied fullname
    supersedes dirname and filename.
    '''
    def __init__(self, fullname='', dirname='', filename='', basename='', suffix='', ext=''):
        '''
        Validate the argument combination, store the supplied parts and
        derive all missing names.  Raises ValueError on an inconsistent
        combination of arguments.
        '''
        validation_rule = 'filename supplied without dirname is invalid.'
        if filename and not dirname:
            raise ValueError(validation_rule)
        validation_rule = 'dirname supplied without a filename is invalid.'
        if dirname and not (filename or (basename and suffix and ext)):
            raise ValueError(validation_rule)
        validation_rule = 'If fullname is not supplied, dirname and either filename or base, suffix and ext must be supplied'
        if not fullname and (not dirname and not (filename or (basename and suffix and ext))):
            raise ValueError(validation_rule)
        validation_rule = 'If basename, suffix or ext is supplied, then all must be supplied'
        if basename and not (suffix or ext):
            raise ValueError(validation_rule)
        if ext and not (basename or suffix):
            raise ValueError(validation_rule)
        self._fullname = fullname
        self._dirname = dirname
        self._filename = filename
        self._suffix = suffix
        # BUGFIX: basename and ext were previously dropped on the floor,
        # which made _process_names crash with AttributeError.
        self._basename = basename
        self._ext = ext
        # BUGFIX: _tag was never initialized, so the tag property raised.
        self._tag = ''
        (result, errmsg) = self._process_names()
        # Consistency checks on the derived names.
        validation_rule = 'basename + suffix + ext must equal filename'
        if self._basename + self._suffix + self._ext != self._filename:
            raise ValueError(validation_rule)
        validation_rule = 'dirname + filename must equal fullname'
        if os.path.join(self._dirname, self._filename) != self._fullname:
            raise ValueError(validation_rule)
    def _update_on_this_change(self, item_that_was_changed):
        '''
        Placeholder for change propagation.  item_that_was_changed can be
        one of: 'fullname', 'filename', ...
        '''
    def _process_names(self):
        '''
        Derive all missing names from the ones currently defined.
        Returns (True, '') on success; raises ValueError on bad input.
        (The stray work-in-progress `raise` that made this method a syntax
        error has been removed.)
        '''
        if self._fullname:
            # fullname supersedes dirname and filename.
            (self._dirname, self._filename) = os.path.split(self._fullname)
        else:
            if self._filename:
                if not self._dirname:
                    raise ValueError('If fullname is not defined, but filename is defined, then dirname also needs to be defined!')
            else:
                # Build the filename from its parts.
                # Note: suffix is NOT required; basename and ext are.
                if not (self._basename and self._ext):
                    raise ValueError('If fullname and filename are not defined, then basename, suffix and ext all need to be defined')
                self._filename = self._basename + self._suffix + self._ext
            # Now that we have the filename, derive the fullname.
            self._fullname = os.path.join(self._dirname, self._filename)
        # parentname is the last component of the directory.
        self._parentname = os.path.split(self._dirname)[1]
        # Derive basename/ext from the filename when they were not supplied.
        if not self._basename:
            (base_and_suffix, self._ext) = os.path.splitext(self._filename)
            if self._suffix:
                self._basename = base_and_suffix.split(self._suffix)[0]
            else:
                self._basename = base_and_suffix
        return (True, '')
    @property
    def filename(self):
        return self._filename
    @property
    def fullname(self):
        return self._fullname
    @property
    def basename(self):
        '''
        basename is the part of the filename preceeding the suffix
        '''
        return self._basename
    @property
    def dirname(self):
        return self._dirname
    @property
    def parentname(self):
        return self._parentname
    @property
    def suffix(self):
        '''
        Suffix is the part of the filename after the basename, but before the ext.
        '''
        return self._suffix
    @property
    def ext(self):
        return self._ext
    @property
    def tag(self):
        return self._tag
def test_instance_properties(inst):
try:
a=inst.basename
b=inst.filename
c=inst.dirname
d=inst.fullname
e=inst.parentname
f=inst.suffix
g=inst.ext
except Exception as e:
return (False, 'Unexpected exception in test_instance_properties. Details: %s' % (str(e)))
return (True, '')
def unit_test_pos(id, d, f, fn, s):
    '''
    Positive unit test harness: expects construction to succeed.
    Returns (True, '') on success, (False, errmsg) on unexpected ValueError.
    '''
    try:
        # BUGFIX: the class is named cfname_info, not cfname (NameError).
        inst = cfname_info(dirname=d, filename=f, fullname=fn, suffix=s)
    except ValueError as e:
        return (False, 'Unexpected exception with test %d: %s' % (id, str(e)))
    return (True, '')
def unit_test_neg(id, d, f, fn, s):
    '''
    Negative unit test harness: expects construction to fail with ValueError.
    Returns (True, msg) when the expected exception occurs, (False, msg) otherwise.
    '''
    try:
        # BUGFIX: the class is named cfname_info, not cfname (NameError).
        inst = cfname_info(dirname=d, filename=f, fullname=fn, suffix=s)
    except ValueError as e:
        return (True, 'Expected exception with test %d: %s' % (id, str(e)))
    return (False, 'Error: unit_test_neg expected an exception on %d' % (id))
def run_unit_tests():
'''
'''
# Positive unit tests - I expect positive results
id = 1
(result, errmsg) = unit_test_pos(id, r'c:\process_root\crop_tests', '31129_193939-00550.jpg', '', '')
if not result:
return (False, id, errmsg)
id = 2
(result, errmsg) = unit_test_pos(id, r'c:\process_root\crop_tests', '31129_193939-00550_processed.jpg', '', '_processed')
if not result:
return (False, id, errmsg)
id = |
import os
from osgeo import ogr
def layers_to_feature_dataset(ds_name, gdb_fn, dataset_name):
    """Copy layers to a feature dataset in a file geodatabase.

    :param ds_name: path/name of the input OGR datasource.
    :param gdb_fn: filename of the target file geodatabase (created if absent).
    :param dataset_name: name of the feature dataset to copy the layers into.
    :raises RuntimeError: if the input or the geodatabase cannot be opened.
    """
    # Open the input datasource.
    in_ds = ogr.Open(ds_name)
    if in_ds is None:
        raise RuntimeError('Could not open datasource')
    # Open the geodatabase or create it if it doesn't exist.
    gdb_driver = ogr.GetDriverByName('FileGDB')
    if os.path.exists(gdb_fn):
        gdb_ds = gdb_driver.Open(gdb_fn, 1)  # 1 = open for update
    else:
        gdb_ds = gdb_driver.CreateDataSource(gdb_fn)
    if gdb_ds is None:
        raise RuntimeError('Could not open file geodatabase')
    # The FEATURE_DATASET creation option routes the copied feature
    # classes into the requested feature dataset.
    options = ['FEATURE_DATASET=' + dataset_name]
    # Loop through the layers in the input datasource and copy
    # each one into the geodatabase.
    for i in range(in_ds.GetLayerCount()):
        lyr = in_ds.GetLayer(i)
        lyr_name = lyr.GetName()
        print('Copying ' + lyr_name + '...')
        gdb_ds.CopyLayer(lyr, lyr_name, options)
|
# Demonstrate accessing the real and imaginary parts of a complex number.
real = complex(1, 1).real
imag = complex(1, 1).imag
print(real, imag)
|
ecino/compassion-switzerland | muskathlon/forms/partner_coordinates_form.py | Python | agpl-3.0 | 3,075 | 0 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import re
from odoo import models, tools, _
testing = tools.config.get('test_enable')
if not testing:
    # prevent these forms to be registered when running tests
    class PartnerCoordinatesForm(models.AbstractModel):
        """CMS modal form letting a partner edit his/her contact coordinates.

        Validators return ``(field_name, message)`` on error and ``(0, 0)``
        when the value is acceptable (cms_form convention).
        """
        _name = 'cms.form.partner.coordinates'
        _inherit = 'cms.form'
        form_buttons_template = 'cms_form_compassion.modal_form_buttons'
        form_id = 'modal_coordinates'
        _form_model = 'res.partner'
        _form_model_fields = [
            'name', 'email', 'phone', 'street', 'zip',
            'city', 'country_id'
        ]
        _form_required_fields = [
            'name', 'email', 'phone', 'street', 'zip',
            'city', 'country_id',
        ]
        _form_fields_order = [
            'name', 'email', 'phone', 'street', 'zip',
            'city', 'country_id'
        ]
        @property
        def form_title(self):
            return _("Coordinates")
        @property
        def submit_text(self):
            return _("Save")
        @property
        def form_msg_success_updated(self):
            return _('Coordinates updated.')
        def _form_validate_phone(self, value, **req_values):
            # Leading + or digit, then at least 7 more digits/spaces.
            if not re.match(r'^[+\d][\d\s]{7,}$', value, re.UNICODE):
                return 'phone', _('Please enter a valid phone number')
            # No error
            return 0, 0
        def _form_validate_zip(self, value, **req_values):
            # 3 to 6 digits covers the supported countries' postal codes.
            if not re.match(r'^\d{3,6}$', value):
                return 'zip', _('Please enter a valid zip code')
            # No error
            return 0, 0
        def _form_validate_email(self, value, **req_values):
            # Loose check: something@something.something
            if not re.match(r'[^@]+@[^@]+\.[^@]+', value):
                return 'email', _('Verify your e-mail address')
            # No error
            return 0, 0
        def _form_validate_name(self, value, **req_values):
            return self._form_validate_alpha_field('name', value)
        def _form_validate_street(self, value, **req_values):
            return self._form_validate_alpha_field('street', value)
        def _form_validate_city(self, value, **req_values):
            return self._form_validate_alpha_field('city', value)
        def _form_validate_alpha_field(self, field, value):
            # Word characters, whitespace, apostrophe and dash only.
            if not re.match(r"^[\w\s'-]+$", value, re.UNICODE):
                return field, _('Please avoid any special characters')
            # No error
            return 0, 0
        def form_before_create_or_update(self, values, extra_values):
            """ Dismiss any pending status message, to avoid multiple
            messages when multiple forms are present on same page.
            """
            self.o_request.website.get_status_message()
|
# Python 2/3 compatibility aliases for type checks.
PY2 = (str is bytes)
PY3 = (str is not bytes)
if PY3:
    number_types = (int, float)
    integer_types = int
    string_types = str
else:
    # On Python 2, also accept the long/basestring variants.
    number_types = (int, long, float)
    integer_types = (int, long)
    string_types = basestring
|
# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r
# Template body: the placeholders are filled in by setuptools via
# printf-style formatting when a console script is generated.
__requires__ = """%(spec)r"""
import pkg_resources
pkg_resources.run_script("""%(spec)r""", """%(script_name)r""")
|
# Result object copied from scipy 0.11
__all__=['Result']
class Result(dict):
    """ Represents the optimization result.
    Attributes
    ----------
    coords : ndarray
        The solution of the optimization.
    success : bool
        Whether or not the optimizer exited successfully.
    status : int
        Termination status of the optimizer. Its value depends on the
        underlying solver. Refer to `message` for details.
    message : str
        Description of the cause of the termination.
    energy : ndarray
        energy at the solution
    grad : ndarray
        gradient at the solution
    nfev : int
        Number of evaluations of the function or gradient
    nit : int
        Number of iterations performed by the optimizer.
    Notes
    -----
    There may be additional attributes not listed above depending of the
    specific solver. Since this class is essentially a subclass of dict
    with attribute accessors, one can see which attributes are available
    using the `keys()` method.
    """
    def __getattr__(self, name):
        # Attribute access falls through to the dict entries.
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    def __repr__(self):
        if self.keys():
            m = max(map(len, self.keys())) + 1
            # BUGFIX: items() works on both Python 2 and 3; iteritems()
            # is Python-2-only and broke repr() under Python 3.
            return '\n'.join([k.rjust(m) + ': ' + repr(v)
                              for k, v in self.items()])
        else:
            return self.__class__.__name__ + "()"
    def __getitem__(self, i):
        """
        April 26, 2013
        this overloaded function exists only for compatibility with the old quenchers.
        It should be removed at some point in the future.
        Integer indices 0-3 map to coords, energy, rms, nfev; index 4 is
        the Result itself; slices are resolved against that 5-tuple view.
        """
        if isinstance(i, slice):
            mylength = 5
            vals = tuple([self[j] for j in range(*i.indices(mylength))])
            return vals
        if i in range(4):
            maplist = {0:"coords",
                       1:"energy",
                       2:"rms",
                       3:"nfev",
                       }
            i = maplist[i]
        elif i == 4:
            return self
        return dict.__getitem__(self, i)
    def __iter__(self):
        """
        April 26, 2013
        this overloaded function exists only for compatibility with the old quenchers.
        It should be removed at some point in the future.
        Iterating yields (coords, energy, rms, nfev, self).
        """
        return iter((self.coords, self.energy, self.rms, self.nfev, self))
if __name__ == "__main__":
    # Smoke-test of the Result compatibility accessors (converted from
    # Python-2 print statements; the printed output is unchanged).
    import numpy as np
    res = Result()
    res.coords = np.array([0])
    res.energy = 1.
    res.rms = 1e-4
    res.nfev = 100
    print(dir(res))
    print(res[0])
    print(res[1])
    print(res[4])
    print("slice", res[:2])
    x, e = res[:2]
    print("unpack slice", x, e)
    a, b, c, d, f = res
    print("done")
    print(a, b, c, d)
#!flask/bin/python
from app.db import db
import os.path
import git
import ipdb
import shutil
from parse_meta_data import parse
# Where the examples repository is cloned and which branch to track.
LOCAL_EXAMPLES_REPO_DIR = "./ccda_examples_repo"
BRANCH = 'permalinksHashObject'
# Start from a clean checkout.  BUGFIX: ignore_errors makes the first run
# (when the directory does not exist yet) work instead of crashing.
shutil.rmtree(LOCAL_EXAMPLES_REPO_DIR, ignore_errors=True)
repo = git.Repo.clone_from("https://github.com/schmoney/C-CDA-Examples.git", LOCAL_EXAMPLES_REPO_DIR)
repo.git.pull("origin", BRANCH)
# Parse the repository's metadata into the database.
parse(LOCAL_EXAMPLES_REPO_DIR)
basedir = os.path.abspath(os.path.dirname(__file__))
sections = db.sections.find().count()
examples = db.examples.find().count()
print("loaded {} sections and {} examples".format(sections, examples))
|
unioslo/cerebrum | Cerebrum/modules/hr_import/errors.py | Python | gpl-2.0 | 948 | 0 | # -*- coding: utf-8 -*-
#
# Copyright 2021 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Errors relating to the hr-import
"""
class NonExistentOuError(Exception):
    """Raised when a lookup for an organisational unit (OU) finds no
    matching OU in Cerebrum."""
|
derdon/chef | chef/external/pretty.py | Python | isc | 20,504 | 0.000536 | # -*- coding: utf-8 -*-
"""
pretty
~~
Python advanced pretty printer. This pretty printer is intended to
replace the old `pprint` python module which does not allow developers
to provide their own pretty print callbacks.
This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`.
Example Usage
=============
To directly print the representation of an object use `pprint`::
from pretty import pprint
pprint(complex_object)
To get a string of the output use `pretty`::
from pretty import pretty
string = pretty(complex_object)
Extending
=========
The pretty library allows developers to add pretty printing rules for their
own objects. This process is straightforward. All you have to do is to
add a `__pretty__` method to your object and call the methods on the
pretty printer passed::
class MyObject(object):
def __pretty__(self, p, cycle):
...
Depending on the python version you want to support you have two
possibilities. The following list shows the python 2.5 version and the
compatibility one.
Here the example implementation of a `__pretty__` method for a list
subclass for python 2.5 and higher (python 2.5 requires the with statement
__future__ import)::
class MyList(list):
def __pretty__(self, p, cycle):
if cycle:
p.text('MyList(...)')
else:
with p.group(8, 'MyList([', '])'):
for idx, item in enumerate(self):
if idx:
p.text(',')
p.breakable()
p.pretty(item)
The `cycle` parameter is `True` if pretty detected a cycle. You *have* to
react to that or the result is an infinite loop. `p.text()` just adds
non breaking text to the output, `p.breakable()` either adds a whitespace
or breaks here. If you pass it an argument it's used instead of the
default space. `p.pretty` prettyprints another object using the pretty print
method.
The first parameter to the `group` function specifies the extra indentation
of the next line. In this example the next item will either be not
breaked (if the items are short enough) or aligned with the right edge of
the opening bracked of `MyList`.
    If you want to support python 2.4 and lower you can use this code::
class MyList(list):
def __pretty__(self, p, cycle):
if cycle:
p.text('MyList(...)')
else:
p.begin_group(8, 'MyList([')
for idx, item in enumerate(self):
if idx:
p.text(',')
                        p.breakable()
p.pretty(item)
p.end_group(8, '])')
If you just want to indent something you can use the group function
without open / close parameters. Under python 2.5 you can also use this
code::
with p.indent(2):
...
Or under python2.4 you might want to modify ``p.indentation`` by hand but
this is rather ugly.
:copyright: 2007 by Armin Ronacher.
:license: BSD License.
"""
import __future__
import sys
import types
import re
import datetime
from StringIO import StringIO
from collections import deque
__all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter', 'for_type']
_re_pattern_type = type(re.compile(''))
def pretty(obj, verbose=False, max_width=79, newline='\n'):
    """
    Return the pretty-printed representation of *obj* as a string.
    """
    out = StringIO()
    printer = RepresentationPrinter(out, verbose, max_width, newline)
    printer.pretty(obj)
    printer.flush()
    return out.getvalue()
def pprint(obj, verbose=False, max_width=79, newline='\n'):
    """
    Like `pretty` but write the result to stdout.
    """
    target = sys.stdout
    printer = RepresentationPrinter(target, verbose, max_width, newline)
    printer.pretty(obj)
    printer.flush()
    target.write(newline)
    target.flush()
# add python2.5 context managers if we have the with statement feature
# NOTE(review): `exec '''...'''` is Python-2 *statement* syntax, so this
# module as a whole only parses under Python 2.  The string is exec'd at
# import time so the `with`-based methods are only compiled when the
# interpreter actually supports the with statement.
if hasattr(__future__, 'with_statement'): exec '''
from __future__ import with_statement
from contextlib import contextmanager
class _PrettyPrinterBase(object):
    @contextmanager
    def indent(self, indent):
        """with statement support for indenting/dedenting."""
        self.indentation += indent
        try:
            yield
        finally:
            self.indentation -= indent
    @contextmanager
    def group(self, indent=0, open='', close=''):
        """like begin_group / end_group but for the with statement."""
        self.begin_group(indent, open)
        try:
            with self.indent(indent):
                yield
        finally:
            self.end_group(indent, close)
'''
else:
    # Pre-2.5 fallback: same class name, but group()/indent() just raise.
    class _PrettyPrinterBase(object):
        def _unsupported(self, *a, **kw):
            """unsupported operation"""
            raise RuntimeError('not available in this python version')
        group = indent = _unsupported
        del _unsupported
class PrettyPrinter(_PrettyPrinterBase):
"""
Baseclass for the `RepresentationPrinter` prettyprinter that is used to
generate pretty reprs of objects. Contrary to the `RepresentationPrinter`
this printer knows nothing about the default pprinters or the `__pretty__`
callback method.
"""
def __init__(self, output, max_width=79, newline='\n'):
self.output = output
self.max_width = max_width
self.newline = newline
self.output_width = 0
self.buffer_width = 0
self.buffer = deque()
root_group = Group(0)
self.group_stack = [root_group]
self.group_queue = GroupQueue(root_group)
self.indentation = 0
def _break_outer_groups(self):
while self.max_width < self.output_width + self.buffer_width:
group = self.group_queue.deq()
if not group:
return
while group.breakables:
x = self.buffer.popleft()
self.output_width = x.output(self.output, self.output_width)
self.buffer_width -= x.width
while self.buffer and isinstance(self.buffer[0], Text):
x = self.buffer.popleft()
self.output_width = x.output(self.output, self.output_width)
self.buffer_width -= x.width
def text(self, obj):
"""Add literal text to the output."""
width = len(obj)
if self.buffer:
text = self.buffer[-1]
if not isinstance(text, Text):
text = Text()
self.buffer.append(text)
text.add(obj, width)
self.buffer_width += width
self._break_outer_groups()
else:
self.output.write(obj)
self.output_width += width
def breakable(self, sep=' '):
"""
Add a breakable separator to the output. This does not mean that it
will automatically break here. If no breaking on this position takes
place the `sep` is inserted which default to one space.
"""
width = len(sep)
group = self.group_stack[-1]
if group.want_break:
self.flush()
self.output.write(self.newline)
self.output.write(' ' * self.indentation)
self.output_width = self.indentation
self.buffer_width = 0
else:
self.buffer.append(Breakable(sep, width, self))
self.buffer_width += width
self._break_outer_groups()
def begin_group(self, indent=0, open=''):
"""
Begin a group. If you want support for python < 2.5 which doesn't has
the with statement this is the preferred way:
p.begin_group(1, '{')
...
p.end_group(1, '}')
The python 2.5 expression would be this:
with p.group(1, '{', '}'):
|
from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect
from .models import SupportProject
# Create your views here.
def index(request):
    """Support landing page.

    When exactly one SupportProject exists, redirect straight to its
    project page; otherwise render the list of all support projects.
    """
    sp = SupportProject.objects.all()
    if sp.count() == 1:
        return HttpResponseRedirect(sp.first().project.get_absolute_url())
    else:
        context_dict = {'sps': sp}
        return render(request, 'support/index.html', context_dict)
|
#!/usr/bin/env python
# Standard Django management entry point for the `djb` project.
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings before importing it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djb.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
AjayKrP/Computer-Network | LEAKY-BUCKET/client.py | Python | gpl-3.0 | 2,214 | 0.00813 | import socket
from packet import send_data, send_close
import random, time, pickle
import threading
from packet import *
listening = True
total_acked = 0
def listen_ack(sock):
    """Consume ACK packets from `sock`, counting them in `total_acked`.

    Runs in a background thread started by main(); loops until the
    module-level flag `listening` is cleared.
    """
    global total_acked
    while listening:
        raw_data, _ = sock.recvfrom(2048)
        # NOTE(review): pickle.loads on bytes received from the network is
        # unsafe with untrusted peers -- acceptable only in this local demo.
        ack_packet = pickle.loads(raw_data)
        if ack_packet.ptype == 'A':
            total_acked += 1
        #print('Ack Received for ', ack_packet.seq_no)
def main(raddr, data):
    """Send `data` (a list of packet payloads) to `raddr` over UDP,
    throttled to a random 10-30 packets-per-tick rate, while a background
    thread counts ACKs.

    `prepare_sending` and `get_time` are presumably provided by the
    `packet` module's wildcard import -- TODO confirm.
    """
    global listening
    # Bind to a random ephemeral-ish local port for the demo.
    port = random.randint(1000,2**16 -1)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    laddr = ('127.0.0.1', port,)
    sock.bind(laddr)
    print('Sending the data in the range of 10 - 30 Kbps')
    print('Trying to send {} packets to {}'.format(len(data), raddr))
    ack_t = threading.Thread(target=listen_ack,args=(sock,))
    ack_t.start()
    # Send the first packet unthrottled, then pace the rest.
    i = 0
    send_data(i, sock, raddr, data[i])
    i += 1
    while i < len(data):
        prepare_sending()
        try :
            for k in range(get_time()):
                sending_rate = random.randint(10, 30)
                print('Sending At ', sending_rate )
                send_data(i, sock, raddr, data[i])
                i += 1
        # Running past the end of `data` ends the send loop.
        except IndexError:
            break
    # NOTE(review): the ACK thread blocks in recvfrom(), so join() may
    # hang until one more datagram arrives after listening is cleared --
    # verify shutdown behaviour.
    listening = False
    ack_t.join()
    print('Data sent')
    #if total_acked != len(data):
    #    print('Receiver lost {} packets because bucked was full'.format(len(data) - total_acked))
    send_close(i+1, sock, raddr)
if __name__ == '__main__':
data = "There really is a Linux, and these people are using it, but it is just a part of the system they use." \
" Linux is the kernel: the program in the system that allocates the machine's resources to the other programs " \
"that you run. The kernel is an essential part of an operating system, but useless by itself | ; it can only function" \
" in the context of a complete operating system. Linux is normally used in combination with the GNU operating | system:" \
" the whole system is basically GNU with Linux added, or GNU/Linux. All the so-called Linux distributions are really " \
"distributions of GNU/Linux!"
main(('127.0.0.1',8081,), data.split(' '))
|
invliD/lana-dashboard | lana_dashboard/lana_data/migrations/0008_wireguard.py | Python | agpl-3.0 | 1,317 | 0.003042 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-04-07 17:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration adding WireGuard tunnel models.

    Creates WireGuardTunnel / WireGuardTunnelEndpoint as multi-table
    children of Tunnel / TunnelEndpoint.  Generated by Django
    makemigrations; avoid hand-editing the operations.
    """

    dependencies = [
        ('lana_data', '0007_peering'),
    ]

    operations = [
        migrations.CreateModel(
            name='WireGuardTunnel',
            fields=[
                ('tunnel_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='lana_data.Tunnel')),
            ],
            bases=('lana_data.tunnel',),
        ),
        migrations.CreateModel(
            name='WireGuardTunnelEndpoint',
            fields=[
                ('tunnelendpoint_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='lana_data.TunnelEndpoint')),
                ('port', models.IntegerField(blank=True, help_text='Defaults to remote AS number if ≤ 65535.', null=True, verbose_name='Port')),
                ('public_key', models.CharField(blank=True, max_length=255, null=True, verbose_name='Public key')),
            ],
            bases=('lana_data.tunnelendpoint',),
        ),
    ]
|
pexip/pygobject | examples/demo/demos/infobars.py | Python | lgpl-2.1 | 3,918 | 0.001531 | #!/usr/bin/env python
# -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
#
# Copyright (C) 2010 Red Hat, Inc., John (J5) Palmieri <johnp@redhat.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
title = "Info Bars"
description = """
Info bar widgets are used to report important messages to the user.
"""
from gi.repository import Gtk
class InfobarApp:
    """Demo window showing one Gtk.InfoBar per Gtk.MessageType.

    Only the QUESTION bar gets an OK button wired to on_bar_response().
    """

    def __init__(self):
        self.window = Gtk.Window()
        self.window.set_title('Info Bars')
        self.window.set_border_width(8)
        self.window.connect('destroy', Gtk.main_quit)

        vbox = Gtk.VBox(spacing=0)
        self.window.add(vbox)

        bar = Gtk.InfoBar()
        vbox.pack_start(bar, False, False, 0)
        bar.set_message_type(Gtk.MessageType.INFO)
        label = Gtk.Label(label='This is an info bar with message type Gtk.MessageType.INFO')
        bar.get_content_area().pack_start(label, False, False, 0)

        bar = Gtk.InfoBar()
        vbox.pack_start(bar, False, False, 0)
        bar.set_message_type(Gtk.MessageType.WARNING)
        label = Gtk.Label(label='This is an info bar with message type Gtk.MessageType.WARNING')
        bar.get_content_area().pack_start(label, False, False, 0)

        # The interactive bar: emits 'response' when its OK button is hit.
        bar = Gtk.InfoBar()
        bar.add_button(Gtk.STOCK_OK, Gtk.ResponseType.OK)
        bar.connect('response', self.on_bar_response)
        vbox.pack_start(bar, False, False, 0)
        bar.set_message_type(Gtk.MessageType.QUESTION)
        label = Gtk.Label(label='This is an info bar with message type Gtk.MessageType.QUESTION')
        bar.get_content_area().pack_start(label, False, False, 0)

        bar = Gtk.InfoBar()
        vbox.pack_start(bar, False, False, 0)
        bar.set_message_type(Gtk.MessageType.ERROR)
        label = Gtk.Label(label='This is an info bar with message type Gtk.MessageType.ERROR')
        bar.get_content_area().pack_start(label, False, False, 0)

        bar = Gtk.InfoBar()
        vbox.pack_start(bar, False, False, 0)
        bar.set_message_type(Gtk.MessageType.OTHER)
        label = Gtk.Label(label='This is an info bar with message type Gtk.MessageType.OTHER')
        bar.get_content_area().pack_start(label, False, False, 0)

        frame = Gtk.Frame(label="Info bars")
        vbox.pack_start(frame, False, False, 8)

        vbox2 = Gtk.VBox(spacing=8)
        vbox2.set_border_width(8)
        frame.add(vbox2)

        # Standard message dialog
        label = Gtk.Label(label='An example of different info bars')
        vbox2.pack_start(label, False, False, 0)

        self.window.show_all()

    def on_bar_response(self, info_bar, response_id):
        """Pop up a modal dialog echoing the info bar's response id."""
        dialog = Gtk.MessageDialog(transient_for=self.window,
                                   modal=True,
                                   destroy_with_parent=True,
                                   message_type=Gtk.MessageType.INFO,
                                   buttons=Gtk.ButtonsType.OK,
                                   text='You clicked on an info bar')
        dialog.format_secondary_text('Your response has id %d' % response_id)
        dialog.run()
        dialog.destroy()
def main(demoapp=None):
    """Demo entry point; `demoapp` is accepted for the demo-framework API."""
    InfobarApp()
    Gtk.main()

if __name__ == '__main__':
    main()
|
MERegistro/meregistro | meregistro/apps/registro/forms/ExtensionAulicaConexionInternetForm.py | Python | bsd-3-clause | 1,222 | 0.013912 | # -*- coding: utf-8 -*-
from apps.registro.models.ExtensionAulicaConexionInternet import ExtensionAulicaConexionInternet
from apps.registro.models.TipoConexion import TipoConexion
from django.core.exceptions import ValidationError
from django import forms
class ExtensionAulicaConexionInternetForm(forms.ModelForm):
    """ModelForm for an aulic extension's internet-connection record.

    The connection-detail fields (type, provider, cost, quantity) are
    mandatory only when ``tiene_conexion`` is checked.
    """
    tipo_conexion = forms.ModelChoiceField(queryset = TipoConexion.objects.all().order_by('nombre'), required = False)
    verificado = forms.BooleanField(required=False)

    class Meta:
        model = ExtensionAulicaConexionInternet
        exclude = ['extension_aulica']

    def __chequear_si_tiene_conexion(self, field):
        """Return the cleaned value of `field`; require it when the
        institution reports having a connection, else return None.

        Raises ValidationError when the connection exists but `field`
        is empty.
        """
        # BUG FIX: use .get() instead of [] -- cleaned_data omits fields
        # that failed their own validation, so direct indexing could
        # raise an unhandled KeyError during form cleaning.
        if self.cleaned_data.get('tiene_conexion'):
            value = self.cleaned_data.get(field)
            if value is None or value == '':
                raise ValidationError('Este campo es obligatorio.')
            return value
        return None

    def clean_tipo_conexion(self):
        return self.__chequear_si_tiene_conexion('tipo_conexion')

    def clean_proveedor(self):
        return self.__chequear_si_tiene_conexion('proveedor')

    def clean_costo(self):
        return self.__chequear_si_tiene_conexion('costo')

    def clean_cantidad(self):
        return self.__chequear_si_tiene_conexion('cantidad')
|
hydroshare/hydroshare_temp | urls.py | Python | bsd-3-clause | 5,696 | 0.005794 | from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from django.conf.urls.i18n import i18n_patterns
from django.contrib import admin
from mezzanine.core.views import direct_to_template
from mezzanine.conf import settings
from hs_core.api import v1_api
from theme import views as theme
import autocomplete_light
autocomplete_light.autodiscover()
admin.autodiscover()
# Add the urlpatterns for any custom Django applications here.
# You can also change the ``home`` view to add your own functionality
# to the project's homepage.
urlpatterns = i18n_patterns("",
# Change the admin prefix here to use an alternate URL for the
# admin interface, which would be marginally more secure.
url("^admin/", include(admin.site.urls)),
url('^ga_resources/', include('ga_resources.urls')),
url('^ga_interactive/', include('ga_interactive.urls')),
url('^r/(?P<shortkey>[A-z0-9\-_]+)', 'hs_core.views.short_url'),
# url('^party/', include('hs_scholar_profile.urls'))
url(r'^user/$', theme.UserProfileView.as_view()),
url(r'^user/(?P<user>.*)/', theme.UserProfileView.as_view()),
url(r'^verify/(?P<pk>[0-9]*)/', 'hs_core.views.verify'),
url(r'^django_irods/', include('django_irods.urls')),
url(r'^autocomplete/', include('autocomplete_light.urls')),
url(r'^hs_metrics/', include('hs_metrics.urls')),
)
# Filebrowser admin media library.
if getattr(settings, "PACKAGE_NAME_FILEBROWSER") in settings.INSTALLED_APPS:
urlpatterns += i18n_patterns("",
("^admin/media-library/", include("%s.urls" %
| settings.PACKAGE_NAME_FILEBROWSER)),
)
# Put API URLs before Mezzanine so that Mezzanine doesn't consume them
urlpatterns += patterns(
    '',
    (r'^api/', include(v1_api.urls)),
    url("^api/%s/doc/" % (v1_api.api_name,),
        include('tastypie_swagger.urls',
                namespace='tastypie_swagger'),
        kwargs={'tastypie_api_module': 'hs_core.api.v1_api',
                'namespace': 'tastypie_swagger'}),
    # BUG FIX: the two entries below lacked separating commas, which is
    # a SyntaxError at import time.
    url('^hsapi/', include('hs_core.urls')),
    url('^party/', include('hs_party.urls')),
)
urlpatterns += patterns('',
# We don't want to presume how your homepage works, so here are a
# few patterns you can use to set it up.
# HOMEPAGE AS STATIC TEMPLATE
# ---------------------------
# This pattern simply loads the index.html template. It isn't
# commented out like the others, so it's the default. You only need
# one homepage pattern, so if you use a different one, comment this
# one out.
# url("^$", direct_to_template, {"template": "index.html"}, name="home"),
# HOMEPAGE AS AN EDITABLE PAGE IN THE PAGE TREE
# ---------------------------------------------
# This pattern gives us a normal ``Page`` object, so that your
# homepage can be managed via the page tree in the admin. If you
# use this pattern, you'll need to create a page in the page tree,
# and specify its URL (in the Meta Data section) as "/", which
# is the value used below in the ``{"slug": "/"}`` part.
# Also note that the normal rule of adding a custom
# template per page with the template name using the page's slug
# doesn't apply here, since we can't have a template called
# "/.html" - so for this case, the template "pages/index.html"
# should be used if you want to customize the homepage's template.
url("^$", "mezzanine.pages.views.page", {"slug": "/"}, name="home"),
# HOMEPAGE FOR A BLOG-ONLY SITE
# -----------------------------
# This pattern points the homepage to the blog post listing page,
# and is useful for sites that are primarily blogs. If you use this
# pattern, you'll also need to set BLOG_SLUG = "" in your
# ``settings.py`` module, and delete the blog page object from the
# page tree in the admin if it was installed.
# url("^$", "mezzanine.blog.views.blog_post_list", name="home"),
# MEZZANINE'S URLS
# ----------------
# ADD YOUR OWN URLPATTERNS *ABOVE* THE LINE BELOW.
# ``mezzanine.urls`` INCLUDES A *CATCH ALL* PATTERN
# FOR PAGES, SO URLPATTERNS ADDED BELOW ``mezzanine.urls``
# WILL NEVER BE MATCHED!
# If you'd like more granular control over the patterns in
# ``mezzanine.urls``, go right ahead and take the parts you want
# from it, and use them directly below instead of using
# ``mezzanine.urls``.
("^", include("mezzanine.urls")),
# MOUNTING MEZZANINE UNDER A PREFIX
# ---------------------------------
# You can also mount all of Mezzanine's urlpatterns under a
# URL prefix if desired. When doing this, you need to define the
# ``SITE_PREFIX`` setting, which will contain the prefix. Eg:
# SITE_PREFIX = "my/site/prefix"
# For convenience, and to avoid repeating the prefix, use the
# commented out pattern below (commenting out the one above of course)
# which will make use of the ``SITE_PREFIX`` setting. Make sure to
# add the import ``from django.conf import settings`` to the top
# of this file as well.
# Note that for any of the various homepage patterns above, you'll
# need to use the ``SITE_PREFIX`` setting as well.
# ("^%s/" % settings.SITE_PREFIX, include("mezzanine.urls"))
)
# Adds ``STATIC_URL`` to the context of error pages, so that error
# pages can use JS, CSS and images.
handler404 = "mezzanine.core.views.page_not_found"
handler500 = "mezzanine.core.views.server_error"
|
wschoenell/chimera_imported_googlecode | src/chimera/util/image.py | Python | gpl-2.0 | 13,397 | 0.006121 |
import chimera.core.log
from chimera.core.remoteobject import RemoteObject
from chimera.core.exceptions import ChimeraException
from chimera.core.version import _chimera_name_, _chimera_long_description_
from chimera.util.coord import Coord
from chimera.util.position import Position
from chimera.util.filenamesequence import FilenameSequence
from chimera.util.sextractor import SExtractor
import pyfits
import numpy as N
try:
have_pywcs = True
import pywcs
except ImportError:
have_pywcs = False
import os
import string
import datetime as dt
import random
import bz2
import gzip
import zipfile
import sys
import shutil
from UserDict import DictMixin
import logging
log = logging.getLogger(__name__)
class WCSNotFoundException (ChimeraException):
pass
class ImageUtil (object):
@staticmethod
def formatDate (datetime):
if type(datetime) == float:
datetime = dt.datetime.fromtimestamp(datetime)
return datetime.strftime("%Y-%m-%dT%H:%M:%S")
@staticmethod
def makeFilename (path='$DATE-$TIME', subs={}, dateFormat="%Y%m%d", timeFormat="%H%M%S"):
"""Helper method to create filenames with increasing sequence number appended.
It can do variable substitution in the given path. Standard
variables are $DATE and $TIME (you can define the format of
these field passint the appropriate format string in
dateFormat and timeFormat, respectively).
Any other variable can be defined passing an subs dictionary
with key as variable name.
@param path: Filename path, with directories, environmnt variables.
@type path: str
@param subs: Dictionary of {VAR=NAME,...} to create aditional
variable substitutions.
@type subs: dict
@param dateFormat: Date format, as used in time.strftime, to
be used by $DATE variable.
@type dateFormat: str
@param timeFormat: Time format, as used in time.strftime, to
be used by $TIME variable.
@type timeFormat: str
@return: Filename.
@rtype: str
"""
localtime = dt.datetime.now()
utctime = dt.datetime.utcnow()
if localtime.hour < 12:
jd_day = localtime - dt.timedelta(days=1)
else:
jd_day = localtime
subs_dict = {"LAST_NOON_DATE": jd_day.strftime(dateFormat),
"DATE" : utctime.strftime(dateFormat),
"TIME" : utctime.strftime(timeFormat)}
# add any user-specific keywords
subs_dict.update(subs)
dirname, filename = os.path.split(path)
dirname = os.path.expanduser(dirname)
dirname = os.path.expandvars(dirname)
dirname = os.path.realpath(dirname)
basename, ext = os.path.splitext(filename)
if not ext:
ext = "fits"
else:
# remove first dot
ext = ext[1:]
# make substitutions
dirname = string.Template(dirname).safe_substitute(subs_dict)
basename = string.Template(basename).safe_substitute(subs_dict)
ext = string.Template(ext).safe_substitute(subs_dict)
fullpath = os.path.join(dirname, basename)
seq_num = FilenameSequence(fullpath, extension=ext).next()
finalname = os.path.join(dirname, "%s-%04d%s%s" % (basename, seq_num, os.path.extsep, ext))
if not os.path.exist | s(dirname):
os.makedirs(dirname)
if not os.path.isdir(dirname):
raise OSError("A file with the same name as the desired directory already exists. ('%s')" % dirname)
if os.path.exists(finalname):
finalname = os.path.join(dirname, "%s-% | 04d%s%s" % (filename, int (random.random()*1000), os.path.extsep, ext))
return finalname
class Image (DictMixin, RemoteObject):
"""
Class to manipulate FITS images with a Pythonic taste.
The underlying framework comes from the very good PyFITS library
with some PyWCS stuff to get WCS info (which as matter of fact use
WCSlib from Mark Calabretta). In addition, we use a wrapper to
E. Bertin SExctractor's written by Laurent Le Guillou. Thank you all guys.
Besides image functions, this class acts like a dictionary of FITS
headers. Use it just like any Python dict.
This class currently support only single extension IMAGE FITS
files.
"""
@staticmethod
def fromFile(filename, fix=True):
fd = pyfits.open(filename, mode="update")
img = Image(filename, fd)
if fix:
img.fix()
return img
@staticmethod
def create (data, imageRequest=None, filename=None):
if imageRequest:
try:
filename = imageRequest["filename"]
except KeyError:
if not filename:
raise TypeError("Invalid filename, you must pass filename=something"
"or a valid ImageRequest object")
filename = ImageUtil.makeFilename(filename)
hdu = pyfits.PrimaryHDU(data)
headers = [("DATE", ImageUtil.formatDate(dt.datetime.utcnow()), "date of file creation"),
("CREATOR", _chimera_name_, _chimera_long_description_)]
#TODO: Implement BITPIX support
hdu.scale('int16', '', bzero=32768, bscale=1)
if imageRequest:
headers += imageRequest.headers
for header in headers:
try:
hdu.header.update(*header)
except Exception, e:
log.warning("Couldn't add %s: %s" % (str(header), str(e)))
hduList = pyfits.HDUList([hdu])
hduList.writeto(filename)
hduList.close()
del hduList
del hdu
return Image.fromFile(filename)
#
# standard constructor
#
def __init__ (self, filename, fd):
RemoteObject.__init__(self)
self._fd = fd
self._filename = filename
self._http = None
self._wcs = None
filename = lambda self: self._filename
def compressedFilename(self):
if os.path.exists(self._filename+".bz2"):
return self._filename+".bz2"
elif os.path.exists(self._filename+".gzip"):
return self._filename+".gzip"
elif os.path.exists(self._filename+".zip"):
return self._filename+".zip"
else:
return self._filename
def http (self, http=None):
if http:
self._http = http
return self._http
def __str__ (self):
return "<Image %s>" % self.filename()
#
# serialization support
# we close before pickle and reopen after it
#
def __getstate__ (self):
self._fd.close()
return self.__dict__
def __setstate__ (self, args):
self.__dict__ = args
self._fd = pyfits.open(self._filename, mode="update")
#
# geometry
#
width = lambda self: self["NAXIS1"]
height = lambda self: self["NAXIS2"]
size = lambda self: (self.width(), self.height())
center = lambda self: (self.width()/2.0, self.height()/2.0)
#
# WCS
#
def pixelAt (self, *world):
if not self._findWCS():
return (0,0)
pixel = self._valueAt(self._wcs.wcs_sky2pix_fits, *world)
# round pixel to avoid large decimal numbers and get out strange -0
pixel = list(round(p, 6) for p in pixel)
if pixel[0] == (-0.0):
pixel[0] = 0.0
if pixel[1] == (-0.0):
pixel[1] = 0.0
return tuple(pixel)
def worldAt (self, *pixel):
if not self._findWCS():
return Position.fromRaDec(0,0)
world = self._valueAt(self._wcs.wcs_pix2sky_fits, *pixel)
return Position.fromRaDec(Coord.fromD(world[0]), Coord.fromD(world[1]))
def _findWCS (self):
if not have_pywcs: return False
if not |
vaibhavg2896/PythonSimpleGUI | Rock-paper-scissor-lizard-Spock.py | Python | gpl-3.0 | 2,546 | 0.022388 | ##########################################################
#07:10 PM
#Thursday, June 11, 2016 (GMT+5:30)
#@ author : VAIBHAV GUPTA(15454)
#########################################################
# Rock-paper-scissors-lizard-Spock template
import simplegui
# helper functions
def name_to_number(name):
    """Map an RPSLS choice name to its position on the 0-4 wheel.

    Returns None for any name that is not one of the five choices.
    """
    wheel = ("rock", "Spock", "paper", "lizard", "scissors")
    for position, label in enumerate(wheel):
        if name == label:
            return position
    return None
def number_to_name(number):
    """Map a wheel position (0-4) back to its RPSLS choice name.

    Returns None for any number outside the wheel.
    """
    wheel = ("rock", "Spock", "paper", "lizard", "scissors")
    for position, label in enumerate(wheel):
        if number == position:
            return label
    return None
def rpsls(player_choice):
import random
# delete the following pass statement and fill in your code below
# print a blank line to separate consecutive games
print ""
# print out the message for the player's choice
print "Player chooses",player_choice
# convert the player's choice to player_number using the function name_to_number()
player_number=name_to_number(player_choice)
# compute random guess for comp_number using random.randrange()
comp_number=random.randrange(0,5)
# convert comp_number to comp_choice using the function number_to_name()
comp_choice=number_to_name(comp_number)
# print out the message for computer's choice
print "Computer chooses",comp_choice
# compute difference of comp_number and player_number modulo five
diff=(comp_number-player_number)%5
# use if/elif/else to determine winner, print winner message
if diff<=2 and diff>0:
print "Computer wins!"
elif diff<=4 and diff >2:
print "Player wins!"
else:print "Error"
# test your code - THESE CALLS MUST BE PRESENT IN YOUR SUBMITTED CODE
# always remember to check your completed program against the grading rubric
frame=simplegui.create_frame('Game',300,300)
frame.start()
frame.add_label('"rock", "Spock", "paper", "lizard", "scissors"')
frame.add_input('Enter your choice',rpsls,100)
|
rymcimcim/django-foosball | game/migrations/0001_initial.py | Python | mit | 2,887 | 0.003117 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-01 18:03
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Match',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serializ | e=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('winner_score', models.PositiveSmallIntegerField()),
('looser_score', models.PositiveSmallIntegerField()),
('ball', models.CharField(choices=[('wolna', 'wolna'), ('szybka', 'szybka')], default='wolna', max_length=10)),
| ('added_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='MatchSet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('winner_points', models.PositiveSmallIntegerField()),
('looser_points', models.PositiveSmallIntegerField()),
('match', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='game.Match')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Team',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('players', models.ManyToManyField(to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='match',
name='looser_team',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lost_match', to='game.Team'),
),
migrations.AddField(
model_name='match',
name='winner_team',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='won_match', to='game.Team'),
),
]
|
google/mediapipe | mediapipe/python/image_test.py | Python | apache-2.0 | 7,872 | 0.003684 | # Copyright 2021 The MediaPipe Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for mediapipe.python._framework_bindings.image."""
import gc
import random
import sys
from absl.testing import absltest
import cv2
import mediapipe as mp
import numpy as np
import PIL.Image
# TODO: Add unit tests specifically for memory management.
class ImageTest(absltest.TestCase):
def test_create_image_from_gray_cv_mat(self):
w, h = random.randrange(3, 100), random.randrange(3, 100)
mat = cv2.cvtColor(
np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8),
cv2.COLOR_RGB2GRAY)
mat[2, 2] = 42
image = mp.Image(image_format=mp.ImageFormat. | GRAY8, data=mat)
self.assertTrue(np.array_equal(mat, image.numpy_view()))
with self.assertRaisesRegex(IndexError, 'index dimen | sion mismatch'):
print(image[w, h, 1])
with self.assertRaisesRegex(IndexError, 'out of bounds'):
print(image[w, h])
self.assertEqual(42, image[2, 2])
def test_create_image_from_rgb_cv_mat(self):
w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3
mat = cv2.cvtColor(
np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8),
cv2.COLOR_RGB2BGR)
mat[2, 2, 1] = 42
image = mp.Image(image_format=mp.ImageFormat.SRGB, data=mat)
self.assertTrue(np.array_equal(mat, image.numpy_view()))
with self.assertRaisesRegex(IndexError, 'out of bounds'):
print(image[w, h, channels])
self.assertEqual(42, image[2, 2, 1])
def test_create_image_from_rgb48_cv_mat(self):
w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3
mat = cv2.cvtColor(
np.random.randint(2**16 - 1, size=(h, w, channels), dtype=np.uint16),
cv2.COLOR_RGB2BGR)
mat[2, 2, 1] = 42
image = mp.Image(image_format=mp.ImageFormat.SRGB48, data=mat)
self.assertTrue(np.array_equal(mat, image.numpy_view()))
with self.assertRaisesRegex(IndexError, 'out of bounds'):
print(image[w, h, channels])
self.assertEqual(42, image[2, 2, 1])
def test_create_image_from_gray_pil_image(self):
w, h = random.randrange(3, 100), random.randrange(3, 100)
img = PIL.Image.fromarray(
np.random.randint(2**8 - 1, size=(h, w), dtype=np.uint8), 'L')
image = mp.Image(image_format=mp.ImageFormat.GRAY8, data=np.asarray(img))
self.assertTrue(np.array_equal(np.asarray(img), image.numpy_view()))
with self.assertRaisesRegex(IndexError, 'index dimension mismatch'):
print(image[w, h, 1])
with self.assertRaisesRegex(IndexError, 'out of bounds'):
print(image[w, h])
def test_create_image_from_rgb_pil_image(self):
w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3
img = PIL.Image.fromarray(
np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8),
'RGB')
image = mp.Image(image_format=mp.ImageFormat.SRGB, data=np.asarray(img))
self.assertTrue(np.array_equal(np.asarray(img), image.numpy_view()))
with self.assertRaisesRegex(IndexError, 'out of bounds'):
print(image[w, h, channels])
def test_create_image_from_rgba64_pil_image(self):
w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 4
img = PIL.Image.fromarray(
np.random.randint(2**16 - 1, size=(h, w, channels), dtype=np.uint16),
'RGBA')
image = mp.Image(
image_format=mp.ImageFormat.SRGBA64,
data=np.asarray(img).astype(np.uint16))
self.assertTrue(np.array_equal(np.asarray(img), image.numpy_view()))
with self.assertRaisesRegex(IndexError, 'out of bounds'):
print(image[1000, 1000, 1000])
def test_image_numby_view(self):
w, h, channels = random.randrange(3, 100), random.randrange(3, 100), 3
mat = cv2.cvtColor(
np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8),
cv2.COLOR_RGB2BGR)
image = mp.Image(image_format=mp.ImageFormat.SRGB, data=mat)
output_ndarray = image.numpy_view()
self.assertTrue(np.array_equal(mat, image.numpy_view()))
# The output of numpy_view() is a reference to the internal data and it's
# unwritable after creation.
with self.assertRaisesRegex(ValueError,
'assignment destination is read-only'):
output_ndarray[0, 0, 0] = 0
copied_ndarray = np.copy(output_ndarray)
copied_ndarray[0, 0, 0] = 0
def test_cropped_gray8_image(self):
w, h = random.randrange(20, 100), random.randrange(20, 100)
channels, offset = 3, 10
mat = cv2.cvtColor(
np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8),
cv2.COLOR_RGB2GRAY)
image = mp.Image(
image_format=mp.ImageFormat.GRAY8,
data=np.ascontiguousarray(mat[offset:-offset, offset:-offset]))
self.assertTrue(
np.array_equal(mat[offset:-offset, offset:-offset], image.numpy_view()))
def test_cropped_rgb_image(self):
w, h = random.randrange(20, 100), random.randrange(20, 100)
channels, offset = 3, 10
mat = cv2.cvtColor(
np.random.randint(2**8 - 1, size=(h, w, channels), dtype=np.uint8),
cv2.COLOR_RGB2BGR)
image = mp.Image(
image_format=mp.ImageFormat.SRGB,
data=np.ascontiguousarray(mat[offset:-offset, offset:-offset, :]))
self.assertTrue(
np.array_equal(mat[offset:-offset, offset:-offset, :],
image.numpy_view()))
# For image frames that store contiguous data, the output of numpy_view()
# points to the pixel data of the original image frame object. The life cycle
# of the data array should tie to the image frame object.
def test_image_numpy_view_with_contiguous_data(self):
w, h = 640, 480
mat = np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8)
image = mp.Image(image_format=mp.ImageFormat.SRGB, data=mat)
self.assertTrue(image.is_contiguous())
initial_ref_count = sys.getrefcount(image)
self.assertTrue(np.array_equal(mat, image.numpy_view()))
# Get 2 data array objects and verify that the image frame's ref count is
# increased by 2.
np_view = image.numpy_view()
self.assertEqual(sys.getrefcount(image), initial_ref_count + 1)
np_view2 = image.numpy_view()
self.assertEqual(sys.getrefcount(image), initial_ref_count + 2)
del np_view
del np_view2
gc.collect()
# After the two data array objects getting destroyed, the current ref count
# should euqal to the initial ref count.
self.assertEqual(sys.getrefcount(image), initial_ref_count)
# For image frames that store non contiguous data, the output of numpy_view()
# stores a copy of the pixel data of the image frame object. The life cycle of
# the data array doesn't tie to the image frame object.
def test_image_numpy_view_with_non_contiguous_data(self):
w, h = 641, 481
mat = np.random.randint(2**8 - 1, size=(h, w, 3), dtype=np.uint8)
image = mp.Image(image_format=mp.ImageFormat.SRGB, data=mat)
self.assertFalse(image.is_contiguous())
initial_ref_count = sys.getrefcount(image)
self.assertTrue(np.array_equal(mat, image.numpy_view()))
np_view = image.numpy_view()
self.assertEqual(sys.getrefcount(image), initial_ref_count)
del np_view
gc.collect()
self.assertEqual(sys.getrefcount(image), initial_ref_count)
if __name__ == '__main__':
absltest.main()
|
abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/services/job/tests/test_runner.py | Python | agpl-3.0 | 23,096 | 0.000346 | # Copyright 2009 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for job-running facilities."""
import logging
import re
import sys
from textwrap import dedent
from time import sleep
from lazr.jobrunner.jobrunner import (
LeaseHeld,
SuspendJobException,
)
from testtools.matchers import MatchesRegex
from testtools.testcase import ExpectedException
import transaction
from zope.interface import implements
from lp.services.config import config
from lp.services.features.testing import FeatureFixture
from lp.services.job.interfaces.job import (
IRunnableJob,
JobStatus,
)
from lp.services.job.model.job import Job
from lp.services.job.runner import (
BaseRunnableJob,
celery_enabled,
JobRunner,
TwistedJobRunner,
)
from lp.services.log.logger import BufferLogger
from lp.services.webapp import errorlog
from lp.testing import (
TestCaseWithFactory,
ZopeTestInSubProcess,
)
from lp.testing.fakemethod import FakeMethod
from lp.testing.layers import LaunchpadZopelessLayer
from lp.testing.mail_helpers import pop_notifications
class NullJob(BaseRunnableJob):
    """A job that does nothing but append a string to a list."""

    implements(IRunnableJob)

    # Class-level log of every completed job's message, shared by all
    # instances so tests can assert on completion order.
    JOB_COMPLETIONS = []

    def __init__(self, completion_message, oops_recipients=None,
                 error_recipients=None):
        self.message = completion_message
        self.job = Job()
        # Substitute fresh empty lists so callers may omit recipients.
        self.oops_recipients = (
            [] if oops_recipients is None else oops_recipients)
        self.error_recipients = (
            [] if error_recipients is None else error_recipients)

    def run(self):
        # Record completion on the class, not the instance.
        NullJob.JOB_COMPLETIONS.append(self.message)

    def getOopsRecipients(self):
        return self.oops_recipients

    def getOopsVars(self):
        # Fixed variables so tests can check they reach the OOPS report.
        return [('foo', 'bar')]

    def getErrorRecipients(self):
        return self.error_recipients

    def getOperationDescription(self):
        return 'appending a string to a list'
class RaisingJobException(Exception):
    """Error deliberately raised by the Raising* test jobs when run."""
class RaisingJob(NullJob):
    """A job that raises when it runs."""

    def run(self):
        # Fail immediately, carrying the completion message as the error.
        raise RaisingJobException(self.message)
class RaisingJobUserError(NullJob):
    """A job that raises a user error when it runs."""

    # Declaring the exception here makes the runner treat it as a user
    # error rather than an OOPS.
    user_error_types = (RaisingJobException,)

    def run(self):
        raise RaisingJobException(self.message)
class RaisingJobRaisingNotifyOops(NullJob):
    """A job that raises when it runs, and when calling notifyOops."""

    def run(self):
        raise RaisingJobException(self.message)

    def notifyOops(self, oops):
        # Failing inside the failure-notification path exercises the
        # runner's second-level error handling.
        raise RaisingJobException('oops notifying oops')
class RaisingJobRaisingNotifyUserError(NullJob):
    """A job that raises when it runs, and when notifying user errors."""

    user_error_types = (RaisingJobException,)

    def run(self):
        raise RaisingJobException(self.message)

    def notifyUserError(self, error):
        # Failing inside the user-error notification path exercises the
        # runner's second-level error handling.
        raise RaisingJobException('oops notifying users')
class RetryError(Exception):
    """Marker exception used to trigger the runner's retry logic."""
class RaisingRetryJob(NullJob):
    """A job whose run() always fails with a retryable error."""

    # RetryError is retryable, and at most one retry is attempted.
    retry_error_types = (RetryError,)
    max_retries = 1

    def run(self):
        raise RetryError()
class TestJobRunner(TestCaseWithFactory):
"""Ensure JobRunner behaves as expected."""
layer = LaunchpadZopelessLayer
def makeTwoJobs(self):
"""Test fixture.  Create two jobs."""
return NullJob("job 1"), NullJob("job 2")
def test_runJob(self):
"""Ensure status is set to completed when a job runs to completion."""
job_1, job_2 = self.makeTwoJobs()
runner = JobRunner(job_1)
runner.runJob(job_1, None)
self.assertEqual(JobStatus.COMPLETED, job_1.job.status)
self.assertEqual([job_1], runner.completed_jobs)
def test_runAll(self):
"""Ensure runAll works in the normal case."""
job_1, job_2 = self.makeTwoJobs()
runner = JobRunner([job_1, job_2])
runner.runAll()
self.assertEqual(JobStatus.COMPLETED, job_1.job.status)
self.assertEqual(JobStatus.COMPLETED, job_2.job.status)
# JOB_COMPLETIONS is a stack, so popping yields most-recent first:
# job 2 completed last, job 1 first.
msg1 = NullJob.JOB_COMPLETIONS.pop()
msg2 = NullJob.JOB_COMPLETIONS.pop()
self.assertEqual(msg1, "job 2")
self.assertEqual(msg2, "job 1")
self.assertEqual([job_1, job_2], runner.completed_jobs)
def test_runAll_skips_lease_failures(self):
"""Ensure runAll skips jobs whose leases can't be acquired."""
job_1, job_2 = self.makeTwoJobs()
# Holding job_2's lease up front makes the runner's own acquisition
# attempt fail, so job_2 must be left WAITING, not failed.
job_2.job.acquireLease()
runner = JobRunner([job_1, job_2])
runner.runAll()
self.assertEqual(JobStatus.COMPLETED, job_1.job.status)
self.assertEqual(JobStatus.WAITING, job_2.job.status)
self.assertEqual([job_1], runner.completed_jobs)
self.assertEqual([job_2], runner.incomplete_jobs)
# A held lease is not an error condition: no OOPS is recorded.
self.assertEqual([], self.oopses)
def test_runAll_reports_oopses(self):
"""When an error is encountered, report an oops and continue."""
job_1, job_2 = self.makeTwoJobs()
def raiseError():
# Ensure that jobs which call transaction.abort work, too.
transaction.abort()
raise Exception('Fake exception.  Foobar, I say!')
job_1.run = raiseError
runner = JobRunner([job_1, job_2])
runner.runAll()
# No oops recipients were configured, so no mail goes out.
self.assertEqual([], pop_notifications())
self.assertEqual([job_2], runner.completed_jobs)
self.assertEqual([job_1], runner.incomplete_jobs)
self.assertEqual(JobStatus.FAILED, job_1.job.status)
self.assertEqual(JobStatus.COMPLETED, job_2.job.status)
# The OOPS report carries the traceback and the job's getOopsVars().
oops = self.oopses[-1]
self.assertIn('Fake exception.  Foobar, I say!', oops['tb_text'])
self.assertEqual(["{'foo': 'bar'}"], oops['req_vars'].values())
def test_oops_messages_used_when_handling(self):
"""Oops messages should appear even when exceptions are handled."""
job_1, job_2 = self.makeTwoJobs()
def handleError():
reporter = errorlog.globalErrorUtility
try:
raise ValueError('Fake exception.  Foobar, I say!')
except ValueError:
reporter.raising(sys.exc_info())
job_1.run = handleError
runner = JobRunner([job_1, job_2])
runner.runAll()
# Even though the job handled its own exception, the manually filed
# OOPS still includes the job's oops variables.
oops = self.oopses[-1]
self.assertEqual(["{'foo': 'bar'}"], oops['req_vars'].values())
def test_runAll_aborts_transaction_on_error(self):
"""runAll should abort the transaction on oops."""
class DBAlterJob(NullJob):
def __init__(self):
super(DBAlterJob, self).__init__('')
def run(self):
# Mutate DB state, then fail, to prove the runner rolls back.
self.job.log = 'hello'
raise ValueError
job = DBAlterJob()
runner = JobRunner([job])
runner.runAll()
# If the transaction was committed, job.log == 'hello'. If it was
# aborted, it is None.
self.assertIs(None, job.job.log)
def test_runAll_mails_oopses(self):
"""Email interested parties about OOPses."""
job_1, job_2 = self.makeTwoJobs()
def raiseError():
# Ensure that jobs which call transaction.abort work, too.
transaction.abort()
raise Exception('Fake exception. Foobar, I say!')
job_1.run = raiseError
job_1.oops_recipients = ['jrandom@example.org']
runner = JobRunner([job_1, job_2])
runner.runAll()
(notification,) = pop_notifications()
oops = self.oopses[-1]
self.assertIn(
'Launchpad encountered an internal error during the following'
' operation: appending a string to a list. It was logged with id'
' %s. Sorry for the inconvenience.' % oops['id'],
notification.get_payload(decode=True))
self.assertNotIn('Fake exception. Foobar, I say!',
notification.get_payload(decode=True))
self.assertEqual('Launchpad internal error', notification['subj |
QinerTech/QinerApps | openerp/addons/account/models/account_move.py | Python | gpl-3.0 | 68,633 | 0.005129 | # -*- coding: utf-8 -*-
import time
from openerp import api, fields, models, _
fr | om openerp.osv import expression
from openerp.exceptions import RedirectWarning, UserError
from openerp.tools.misc import formatLang
from openerp.tools import float_is_zero
from openerp.tools.safe_eval import safe_eval
#----------------------------------------------------------
# Entries
#----------------------------------------------------------
class AccountMove(models.Model):
_name = "account.move"
_description = "Account Entry"
_order = 'date desc, id desc'
@api.multi
@api.depends('name', 'state')
def name_get(self):
"""Display '* <id>' for draft moves (no sequence number assigned yet),
otherwise the move's name."""
result = []
for move in self:
if move.state == 'draft':
name = '* ' + str(move.id)
else:
name = move.name
result.append((move.id, name))
return result
@api.multi
@api.depends('line_ids.debit', 'line_ids.credit')
def _amount_compute(self):
# Total of the debit side only; on a balanced move this equals the
# credit total, so it serves as the move's overall amount.
for move in self:
total = 0.0
for line in move.line_ids:
total += line.debit
move.amount = total
@api.depends('line_ids.debit', 'line_ids.credit', 'line_ids.matched_debit_ids.amount', 'line_ids.matched_credit_ids.amount', 'line_ids.account_id.user_type_id.type')
def _compute_matched_percentage(self):
"""Compute the percentage to apply for cash basis method. This value is relevant only for moves that
involve journal items on receivable or payable accounts.
"""
for move in self:
total_amount = 0.0
total_reconciled = 0.0
# Only receivable/payable lines participate in the ratio.
for line in move.line_ids:
if line.account_id.user_type_id.type in ('receivable', 'payable'):
amount = abs(line.debit - line.credit)
total_amount += amount
for partial_line in (line.matched_debit_ids + line.matched_credit_ids):
total_reconciled += partial_line.amount
# A move with no receivable/payable lines counts as fully matched.
if total_amount == 0.0:
move.matched_percentage = 1.0
else:
move.matched_percentage = total_reconciled / total_amount
@api.one
@api.depends('company_id')
def _compute_currency(self):
# Fall back to the current user's company currency when the move has
# no company set.
self.currency_id = self.company_id.currency_id or self.env.user.company_id.currency_id
@api.multi
def _get_default_journal(self):
# Default journal comes from the 'default_journal_type' context key,
# if the caller provided one; otherwise no default.
if self.env.context.get('default_journal_type'):
return self.env['account.journal'].search([('type', '=', self.env.context['default_journal_type'])], limit=1).id
name = fields.Char(string='Number', required=True, copy=False, default='/')
ref = fields.Char(string='Reference', copy=False)
date = fields.Date(required=True, states={'posted': [('readonly', True)]}, index=True, default=fields.Date.context_today)
journal_id = fields.Many2one('account.journal', string='Journal', required=True, states={'posted': [('readonly', True)]}, default=_get_default_journal)
currency_id = fields.Many2one('res.currency', compute='_compute_currency', store=True, string="Currency")
rate_diff_partial_rec_id = fields.Many2one('account.partial.reconcile', string='Exchange Rate Entry of', help="Technical field used to keep track of the origin of journal entries created in case of fluctuation of the currency exchange rate. This is needed when cancelling the source: it will post the inverse journal entry to cancel that part too.")
state = fields.Selection([('draft', 'Unposted'), ('posted', 'Posted')], string='Status',
required=True, readonly=True, copy=False, default='draft',
help='All manually created new journal entries are usually in the status \'Unposted\', '
'but you can set the option to skip that status on the related journal. '
'In that case, they will behave as journal entries automatically created by the '
'system on document validation (invoices, bank statements...) and will be created '
'in \'Posted\' status.')
line_ids = fields.One2many('account.move.line', 'move_id', string='Journal Items',
states={'posted': [('readonly', True)]}, copy=True)
partner_id = fields.Many2one('res.partner', related='line_ids.partner_id', string="Partner", store=True, states={'posted': [('readonly', True)]})
amount = fields.Monetary(compute='_amount_compute', store=True)
narration = fields.Text(string='Internal Note')
company_id = fields.Many2one('res.company', related='journal_id.company_id', string='Company', store=True, readonly=True,
default=lambda self: self.env.user.company_id)
matched_percentage = fields.Float('Percentage Matched', compute='_compute_matched_percentage', digits=0, store=True, readonly=True, help="Technical field used in cash basis method")
statement_line_id = fields.Many2one('account.bank.statement.line', string='Bank statement line reconciled with this entry', copy=False, readonly=True)
# Dummy Account field to search on account.move by account_id
dummy_account_id = fields.Many2one('account.account', related='line_ids.account_id', string='Account', store=False)
@api.model
def create(self, vals):
# Skip per-line validity checks during creation, then verify the
# finished move is balanced as a whole.
move = super(AccountMove, self.with_context(check_move_validity=False)).create(vals)
move.assert_balanced()
return move
@api.multi
def write(self, vals):
# Only re-check balance when the journal items themselves change.
if 'line_ids' in vals:
res = super(AccountMove, self.with_context(check_move_validity=False)).write(vals)
self.assert_balanced()
else:
res = super(AccountMove, self).write(vals)
return res
@api.multi
def post(self):
"""Validate the moves, create analytic lines, assign sequence numbers
to unnamed moves ('/') and set them to the 'posted' state."""
invoice = self._context.get('invoice', False)
self._post_validate()
for move in self:
move.line_ids.create_analytic_lines()
if move.name == '/':
new_name = False
journal = move.journal_id
# Reuse the invoice's reserved number when it already has one.
if invoice and invoice.move_name and invoice.move_name != '/':
new_name = invoice.move_name
else:
if journal.sequence_id:
# If invoice is actually refund and journal has a refund_sequence then use that one or use the regular one
sequence = journal.sequence_id
if invoice and invoice.type in ['out_refund', 'in_refund'] and journal.refund_sequence:
sequence = journal.refund_sequence_id
new_name = sequence.with_context(ir_sequence_date=move.date).next_by_id()
else:
raise UserError(_('Please define a sequence on the journal.'))
if new_name:
move.name = new_name
return self.write({'state': 'posted'})
@api.multi
def button_cancel(self):
"""Set posted moves back to draft, if their journal allows it."""
for move in self:
if not move.journal_id.update_posted:
raise UserError(_('You cannot modify a posted entry of this journal.\nFirst you should set the journal to allow cancelling entries.'))
if self.ids:
# Direct SQL update for speed; the ORM cache must be invalidated
# afterwards so it does not serve stale 'posted' states.
self._cr.execute('UPDATE account_move '\
'SET state=%s '\
'WHERE id IN %s', ('draft', tuple(self.ids),))
self.invalidate_cache()
return True
@api.multi
def unlink(self):
for move in self:
#check the lock date + check if some entries are reconciled
move.line_ids._update_check()
move.line_ids.unlink()
return super(AccountMove, self).unlink()
@api.multi
def _post_validate(self):
# All lines must belong to the move's company and the move must
# balance before it may be posted.
for move in self:
if move.line_ids:
if not all([x.company_id.id == move.company_id.id for x in move.line_ids]):
raise UserError(_("Cannot create moves for different companies."))
self.assert_balanced()
return self._check_lock_date()
@api.multi
def _check_lock_date(self):
for move in self:
lock_date = max(move.company_id.period_lock_date, move.company_id.fiscalyear_lock_date)
if self.user_has_groups('account.gro |
Forage/Gramps | gramps/gen/filters/rules/event/_hasreferencecountof.py | Python | gpl-2.0 | 1,730 | 0.00578 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007 Stephane Charette
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#------------------------------ | -------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().gettext
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#----------- | --------------------------------------------------------------
from .._hasreferencecountbase import HasReferenceCountBase
#-------------------------------------------------------------------------
# "Events with a certain reference count"
#-------------------------------------------------------------------------
class HasReferenceCountOf(HasReferenceCountBase):
"""Events with a reference count of <count>"""
# Filter-rule metadata shown in the Gramps filter editor; both strings
# are wrapped in _() so they can be translated. The matching logic
# itself lives in HasReferenceCountBase.
name = _('Events with a reference count of <count>')
description = _("Matches events with a certain reference count")
tri2sing/LinearAlgebraPython | test_hw7.py | Python | apache-2.0 | 743 | 0.048452 | from hw7 import QR_solve
from mat import coldict2mat
from mat import Mat
from orthonormalization import aug_orthonormalize
from QR import factor
from vec import Vec
from vecutil import list2vec
print('Augmented | Orthonormalize')
L = [list2vec(v) for v in [[4,3,1,2],[8,9,-5,-5],[10,1,-1,5]]]
print(coldict2mat(L))
Qlist, Rlist = aug_orthonormalize(L)
print(coldict2mat(Qlist))
print(coldict2mat(Rlist | ))
print((coldict2mat(Qlist)*coldict2mat(Rlist)))
print('QR Solve')
A=Mat(({'a','b','c'},{'A','B'}), {('a','A'):-1, ('a','B'):2, ('b','A'):5, ('b','B'):3,('c','A'):1, ('c','B'):-2})
print(A)
Q, R = factor(A)
print(Q)
print(R)
b = Vec({'a','b','c'}, {'a':1,'b':-1})
x = QR_solve(A,b)
print(x)
residual = A.transpose()*(b-A*x)
print(residual)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.