repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
j00bar/ansible | lib/ansible/modules/cloud/amazon/ec2_ami_copy.py | Python | gpl-3.0 | 6,886 | 0.00305 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ec2_ami_copy
short_description: copies AMI between AWS regions, return new image id
description:
- Copies AMI from a source region to a destination region. B(Since version 2.3 this module depends on boto3.)
version_added: "2.0"
options:
source_region:
description:
- The source region the AMI should be copied from.
required: true
source_image_id:
description:
- The ID of the AMI in source region that should be copied.
required: true
name:
description:
- The name of the new AMI to copy. (As of 2.3 the default is 'default', in prior versions it was 'null'.)
required: false
default: "default"
description:
description:
- An optional human-readable string describing the contents and purpose of the new AMI.
required: false
default: null
encrypted:
description:
- Whether or not the destination snapshots of the copied AMI should be encrypted.
required: false
default: null
version_added: "2.2"
kms_key_id:
description:
- KMS key id used to encrypt image. If not specified, uses default EBS Customer Master Key (CMK) for your account.
required: false
default: null
version_added: "2.2"
wait:
description:
- Wait for the copied AMI to be in state 'available' before returning.
required: false
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- How long before wait gives up, in seconds. (As of 2.3 this option is deprecated. See boto3 Waiters)
required: false
default: 1200
tags:
description:
- A hash/dictionary of tags to add to the new copied AMI; '{"key":"value"}' and '{"key":"value","key":"value"}'
required: false
default: null
author: "Amir Moulavi <amir.moulavi@gmail.com>, Tim C <defunct@defunct.io>"
extends_documentation_fragment:
- aws
- ec2
requirements:
- boto3
'''
EXAMPLES = '''
# Basic AMI Copy
- ec2_ami_copy:
source_region: us-east-1
region: eu-west-1
source_image_id: ami-xxxxxxx
# AMI copy wait until available
- ec2_ami_copy:
source_region: us-east-1
region: eu-west-1
source_image_id: ami-xxxxxxx
wait: yes
register: image_id
# Named AMI copy
- ec2_ami_copy:
source_region: us-east-1
region: eu-west-1
source_image_id: ami-xxxxxxx
name: My-Awesome-AMI
description: latest patch
# Tagged AMI copy
- ec2_ami_copy:
source_region: us-east-1
region: eu-west-1
source_image_id: ami-xxxxxxx
tags:
Name: My-Super-AMI
Patch: 1.2.3
# Encrypted AMI copy
- ec2_ami_copy:
source_region: us-east-1
region: eu-west-1
source_image_id: ami-xxxxxxx
encrypted: yes
# Encrypted AMI copy with specified key
- ec2_ami_copy:
source_region: us-east-1
region: eu-west-1
source_image_id: ami-xxxxxxx
encrypted: yes
kms_key_id: arn:aws:kms:us-east-1:XXXXXXXXXXXX:key/746de6ea-50a4-4bcb-8fbc-e3b29f2d367b
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (boto3_conn, ec2_argument_spec, get_aws_connection_info)
try:
import boto
import boto.ec2
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
try:
import boto3
from botocore.exceptions import ClientError, NoCredentialsError, NoRegionError, WaiterError
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def copy_image(module, ec2):
"""
Copies an AMI
module : AnsibleModule object
ec2: ec2 connection object
"""
tags = module.params.get('tags')
params = {'SourceRegion': module.params.get('source_region'),
'SourceImageId': module.params.get('source_image_id'),
'Name': module.params.get('name'),
'Description': module.params.get('description'),
'Encrypted': module.params.get('encrypted'),
}
if module.params.get('kms_key_id'):
params['KmsKeyId'] = module.params.get('kms_key_id')
try:
image_id = ec2.copy_image(**params)['ImageId']
if module.params.get('wait'):
ec2.get_waiter('image_available').wait(ImageIds=[image_id])
if module.params.get('tags'):
ec2.create_tags(
Resources=[image_id],
Tags=[{'Key' : k, 'Value': v} for k,v in module.para | ms.get('tags').items()]
)
module.exit_json(changed=True, image_id=image_id)
except WaiterError as we:
module.fail_json(msg='An error occured waiting for the image to become available. (%s)' % we.reason)
except ClientError as ce:
module.fail_json(msg=ce.message)
except NoCredentialsError:
module.fail_json(msg='Unable to authenticate, AWS credentials are invalid.')
except | Exception as e:
module.fail_json(msg='Unhandled exception. (%s)' % str(e))
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
source_region=dict(required=True),
source_image_id=dict(required=True),
name=dict(default='default'),
description=dict(default=''),
encrypted=dict(type='bool', default=False, required=False),
kms_key_id=dict(type='str', required=False),
wait=dict(type='bool', default=False),
wait_timeout=dict(default=1200),
tags=dict(type='dict')))
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
# TODO: Check botocore version
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
if HAS_BOTO3:
try:
ec2 = boto3_conn(module, conn_type='client', resource='ec2', region=region, endpoint=ec2_url,
**aws_connect_params)
except NoRegionError:
module.fail_json(msg='AWS Region is required')
else:
module.fail_json(msg='boto3 required for this module')
copy_image(module, ec2)
if __name__ == '__main__':
main()
|
lucalianas/ProMort | promort/odin/permissions.py | Python | mit | 1,894 | 0.004224 | # Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from rest_framework import permissions
from promort.settings import DEFAULT_GROUPS
class CanEnterGodMode(permissions.BasePermission):
"""
Only specific users that belong to ODIN_MEMBERS group will be allowed
to perf | orm queries using Odin toolkit
"""
RESTRICTE | D_METHODS = ['GET']
def has_permission(self, request, view):
if not (request.user and request.user.is_authenticated()):
return False
else:
if request.method in self.RESTRICTED_METHODS:
if request.user.groups.filter(
name__in=[DEFAULT_GROUPS['odin_members']['name']]
).exists():
return True
else:
return False
else:
return False
|
biothings/biothings_explorer | biothings_explorer/smartapi_kg/filter.py | Python | apache-2.0 | 917 | 0 | from collections import defaultdict
from .config import FILTER_FIELDS
def filterOps(ops, criteria):
"""filter an array of objects based on the filter criteria"""
all_values = defaultdict(set)
filters = {}
for op in ops:
for field in FILTER_FIELDS:
all_values[field].add(op["association"].get(field))
for field in FILTER_FIELDS:
if field not in criteria or criteria.get(field) is None:
filters[field] = all_values[field]
else:
if not isinstance(criteria[field], list):
criteria[field] = [criteria[field]]
filters[field] = se | t(c | riteria[field])
res = []
for op in ops:
match = True
for field in FILTER_FIELDS:
if op["association"][field] not in filters[field]:
match = False
break
if match:
res.append(op)
return res
|
rolando-contrib/scrapy | tests/test_utils_url.py | Python | bsd-3-clause | 19,716 | 0.005985 | # -*- coding: utf-8 -*-
import unittest
import six
from six.moves.urllib.parse import urlparse
from scrapy.spiders import Spider
from scrapy.utils.url import (url_is_from_any_domain, url_is_from_spider,
add_http_if_no_scheme, guess_scheme,
parse_url, strip_url)
__doctests__ = ['scrapy.utils.url']
class UrlUtilsTest(unittest.TestCase):
def test_url_is_from_any_domain(self):
url = 'http://www.wheele-bin-art.co.uk/get/product/123'
self.assertTrue(url_is_from_any_domain(url, ['wheele-bin-art.co.uk']))
self.assertFalse(url_is_from_any_domain(url, ['art.co.uk']))
url = 'http://wheele-bin-art.co.uk/get/product/123'
self.assertTrue(url_is_from_any_domain(url, ['wheele-bin-art.co.uk']))
self.assertFalse(url_is_from_any_domain(url, ['art.co.uk']))
url = 'http://www.Wheele-Bin-Art.co.uk/get/product/123'
self.assertTrue(url_is_from_any_domain(url, ['wheele-bin-art.CO.UK']))
self.assertTrue(url_is_from_any_domain(url, ['WHEELE-BIN-ART.CO.UK']))
url = 'http://192.169.0.15:8080/mypage.html'
self.assertTrue(url_is_from_any_domain(url, ['192.169.0.15:8080']))
self.assertFalse(url_is_from_any_domain(url, ['192.169.0.15']))
url = 'javascript:%20document.orderform_2581_1190810811.mode.value=%27add%27;%20javascript:%20document.orderform_2581_1190810811.submit%28%29'
self.assertFalse(url_is_from_any_domain(url, ['testdomain.com']))
self.assertFalse(url_is_from_any_domain(url+'.testdomain.com', ['testdomain.com']))
def test_url_is_from_spider(self):
spider = Spider(name='example.com')
self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', spider))
self.assertTrue(url_is_from_spider('http://sub.example.com/some/page.html', spider))
self.assertFalse(url_is_from_spider('http://www.example.org/some/page.html', spider))
self.assertFalse(url_is_from_spider('http://www.example.net/some/page.html', spider))
def test_url_is_from_spider_class_attributes(self):
class MySpider(Spider):
name = 'example.com'
self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', MySpider))
self.assertTrue(url_is_from_spider('http://sub.example.com/some/page.html', MySpider))
self.assertFalse(url_is_from_spider('http://www.example.org/some/page.html', MySpider))
self.assertFalse(url_is_from_spider('http://www.example.net/some/page.html', MySpider))
def test_url_is_from_spider_with_allowed_domains(self):
spider = Spider(name='example.com', allowed_domains=['example.org', 'example.net'])
self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', spider))
self.assertTrue(url_is_from_spider('http://sub.example.com/some/page.html', spider))
self.assertTrue(url_is_from_spider('http://example.com/some/page.html', spider))
self.assertTrue(url_is_from_spider('http://www.example.org/some/page.html', spider))
self.assertTrue(url_is_from_spider('http://www.example.net/some/page.html', spider))
self.assertFalse(url_is_from_spider('http://www.example.us/some/page.html', spider))
spider = Spider(name='example.com', allowed_domains=set(('example.com', 'example.net')))
self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', spider))
spider = Spider(name='example.com', allowed_domains=('example.com', 'example.net'))
self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', spider))
def test_url_is_from_spider_with_allowed_domains_class_attributes(self):
class MySpider(Spider):
name = 'example.com'
allowed_domains = ('example.org', 'example.net')
self.assertTrue(url_is_from_spider('http://www.example.com/some/page.html', MySpider))
self.assertTrue(url_is_from_spider('http://sub.example.com/some/page.html', MySpider))
self.assertTrue(url_is_from_spider('http://example.com/some/page.html', MySpider))
self.assertTrue(url_is_from_spider('http://www.example.org/some/page.html', MySpider))
self.assertTrue(url_is_from_spider('http://www.example.net/some/page.html', MySpider))
self.assertFalse(url_is_from_spider('http://www.example.us/some/page.html', MySpider))
class AddHttpIfNoScheme(unittest.TestCase):
def test_add_scheme(self):
self.assertEqual(add_http_if_no_scheme('www.example.com'),
'http://www.example.com')
def test_without_subdomain(self):
self.assertEqual(add_http_if_no_scheme('example.com'),
'http://example.com')
def test_path(self):
self.assertEqual(add_http_if_no_scheme('www.example.com/some/page.html'),
'http://www.example.com/some/page.html')
def test_port(self):
self.assertEqual(add_http_if_no_scheme('www.example.com:80'),
'http://www.example.com:80')
def test_fragment(self):
self.assertEqual(add_http_if_no_scheme('www.example.com/some/page#frag'),
'http://www.example.com/some/page#frag')
def test_query(self):
self.assertEqual(add_http_if_no_scheme('www.example.com/do?a=1&b=2&c=3'),
'http://www.example.com/do?a=1&b=2&c=3')
def test_username_password(self):
self.assertEqual(add_http_if_no_scheme('username:password@www.example.com'),
'http://username:password@www.example.com')
def test_complete_url(self):
self.assertEqual(add_http_i | f_no_scheme('username:password@www.example.com:80/some/page/do?a=1&b=2&c=3#frag'),
'http://username:password@www.example.com:80/some/page/do?a=1&b=2&c=3#frag')
def test_preserve_http(self):
self.assertEqual(add_http_if_no_scheme('http://www | .example.com'),
'http://www.example.com')
def test_preserve_http_without_subdomain(self):
self.assertEqual(add_http_if_no_scheme('http://example.com'),
'http://example.com')
def test_preserve_http_path(self):
self.assertEqual(add_http_if_no_scheme('http://www.example.com/some/page.html'),
'http://www.example.com/some/page.html')
def test_preserve_http_port(self):
self.assertEqual(add_http_if_no_scheme('http://www.example.com:80'),
'http://www.example.com:80')
def test_preserve_http_fragment(self):
self.assertEqual(add_http_if_no_scheme('http://www.example.com/some/page#frag'),
'http://www.example.com/some/page#frag')
def test_preserve_http_query(self):
self.assertEqual(add_http_if_no_scheme('http://www.example.com/do?a=1&b=2&c=3'),
'http://www.example.com/do?a=1&b=2&c=3')
def test_preserve_http_username_password(self):
self.assertEqual(add_http_if_no_scheme('http://username:password@www.example.com'),
'http://username:password@www.example.com')
def test_preserve_http_complete_url(self):
self.assertEqual(add_http_if_no_scheme('http://username:password@www.example.com:80/some/page/do?a=1&b=2&c=3#frag'),
'http://username:password@www.example.com:80/some/page/do?a=1&b=2&c=3#frag')
def test_protocol_relative(self):
self.assertEqual(add_http_if_no_scheme('//www.example.com'),
'http://www.example.com')
def test_protocol_relative_without_subdomain(self):
self.assertEqual(add_http_if_no_scheme('//example.com'),
'ht |
OCA/account-analytic | pos_analytic_by_config/models/pos_config.py | Python | agpl-3.0 | 318 | 0 | # | Copyright 2015 ACSONE SA/NV
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class PosConfig(models.Model):
_inherit = "pos.config"
account_analytic_id = fields.Many2one(
comodel_name="account.analytic.account", stri | ng="Analytic Account"
)
|
gurneyalex/connector-magento | __unported__/magentoerpconnect_pricing/__openerp__.py | Python | agpl-3.0 | 1,568 | 0 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Magento Connector - Pricing',
'version': '2.0.0',
'category': 'Connector',
'depends': ['magentoerpconnect',
| ],
'author': "MagentoERPconnect Core Editors,Odoo Community Association (OCA)",
'license': 'AGPL-3',
'website': 'http://www.odoo-magento-connector.com',
'description': """
Magento Connector | - Pricing
===========================
Extension for **Magento Connector**.
The prices of the products are managed in OpenERP using pricelists and
are pushed to Magento.
""",
'images': [],
'demo': [],
'data': ['magento_model_view.xml',
],
'installable': False,
'application': False,
}
|
napjon/moocs_solution | robotics-udacity/4.13 Value Program.py | Python | mit | 2,531 | 0.02015 | # ----------
# User Instructions:
#
# Create a function compute_value() which returns
# a grid of values. Value is defined as the minimum
# number of moves required to get from a cell to the
# goal.
#
# If it is impossible to reach the goal from a cell
# you should assign that cell a value of 99.
# ----------
grid2 = [[0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
|
[0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0]]
grid = [[0, 0, 0, 0],
[0, 1, 0, 0],
[0, 1, 0, 0]]
init = [0, 0]
goal = [len(grid)-1, len(grid[0])-1]
delta = [[-1, 0 ], # go up
[ 0, -1], # go left
[ 1, 0 ], # go down
[ 0, 1 ]] # go right
delta_name = ['^', '<', 'v', '>']
cost_step = 1 # the cost associated with moving from a cell to an adjacent one.
# ----------------------------------------
# insert code below
# | ----------------------------------------
def compute_value():
value = [[99 for row in range(len(grid[0]))] for col in range(len(grid))]
change = True
#This function will keep running if there's "change" inside the code
while change:
change = False#default to false
for x in range(len(grid)):#for every i and j, not very efficient,
for y in range(len(grid[0])):#But certainly get the job done
if goal[0] == x and goal[1] == y:#Check if the goal is same as x,y coordinate
if value[x][y] > 0:#check if it's greater than zero
value[x][y] = 0#If it does, set it to zero
change = True
elif grid[x][y] == 0:#else not goal and the grid coordinate is zero
for a in range(len(delta)):
x2 = x + delta[a][0]
y2 = y + delta[a][1]
if x2 >= 0 and x2 < len(grid) and y2 >=0 and y2< len(grid[0]):#if it's legitimate state, that is inside the grid
v2 = value[x2][y2] + cost_step #make a value from neighbour + cost_step
if v2 < value[x][y]:#if neighbours value is smaller than its value
change = True#change it
value[x][y] = v2#change the value to its neighbour value
for i in range(len(value)):
print value[i]
return value #make sure your function returns a grid of values as demonstrated in the previous video.
compute_value()
|
jakeogh/anormbookmarker | anormbookmarker/test/tests/Word/end_letters_swapped.py | Python | mit | 939 | 0 | #!/usr/bin/env python3
from anormbookmarker.test.test_enviroment import *
with self_contained_session(CONFIG.database_timestamp) as session:
BASE.metadata.create_all(session.bind)
buffalo = Word.construct(session=session, word='Buffalo')
session.commit()
buffalo_swap = Word.construct(session=session, word='ouffalB')
session.commit()
db_result = [('select COUNT(*) from alias;', 0),
| ('select COUNT(*) from aliasword;', 0),
('select COUNT(*) from bookmark;', 0),
('select COUNT(*) from filename;', 0),
('select COUNT(*) f | rom tag;', 0),
('select COUNT(*) from tag_relationship;', 0),
('select COUNT(*) from tagbookmarks;', 0),
('select COUNT(*) from tagword;', 0),
('select COUNT(*) from word;', 2),
('select COUNT(*) from wordmisspelling;', 0)]
check_db_result(config=CONFIG, db_result=db_result)
|
troika-framework/troika-http | setup.py | Python | bsd-3-clause | 2,147 | 0.000466 | #!/usr/bin/env python
import codecs
import setuptools
from troika.http import version
def read_requirements_file(name):
reqs = []
try:
with open(name, 'r') as req_file:
for line in req_file:
if line.startswith('-r'):
continue
elif '#' in line:
line = line[0:line.index('#')]
line = line.strip()
if line:
reqs.append(line)
except IOError:
pass
return reqs
with codecs.open('README.rst', 'rb', encoding='utf-8') as file_obj:
long_description = '\n' + file_obj.read()
setuptools.setup(
name='troika-http',
version=version.__version__,
description=('A Python 3 AsyncIO HTTP Application Framework inspired by '
'Tornado'),
long_description=long_description,
author='Gavin M. Roy',
author_email='gavinmroy@gmail.com',
url='http://github.com/gmr/troika',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Communications', 'Topic :: Internet',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries',
'Topic :: Software Develop | ment :: Libraries :: Application Frameworks'
],
include_package_data=True,
install_requires=read_requirements_file('requires/installation.txt'),
extras_require={'msgpack': ['u-msgpack-python'], 'yaml': ['pyyaml']},
license='BSD',
namespace_packages=['troika'],
py_modules=['troika.http'],
package_data={'': ['LICENSE', 'README.rst']},
tests_require=read | _requirements_file('requires/testing.txt'),
test_suite='nose.collector',
zip_safe=True)
|
phenoxim/cinder | cinder/volume/drivers/coprhd/helpers/urihelper.py | Python | apache-2.0 | 2,759 | 0.000362 | # Copyright (c) 2016 EMC Corporati | on
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, softwa | re
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class URIHelper(object):
"""This map will be a map of maps.
e.g for project component type, it will hold a map
of its operations vs their uris
"""
COMPONENT_TYPE_VS_URIS_MAP = dict()
"""Volume URIs."""
VOLUME_URIS_MAP = dict()
URI_VOLUMES = '/block/volumes'
URI_VOLUME = URI_VOLUMES + '/{0}'
URI_VOLUME_TASK_LIST = URI_VOLUME + '/tasks'
URI_VOLUME_TASK = URI_VOLUME_TASK_LIST + '/{1}'
"""Consistencygroup URIs."""
CG_URIS_MAP = dict()
URI_CGS = '/block/consistency-groups'
URI_CG = URI_CGS + '/{0}'
URI_CG_TASK_LIST = URI_CG + '/tasks'
URI_CG_TASK = URI_CG_TASK_LIST + '/{1}'
"""Export Group URIs."""
# Map to hold all export group uris
EXPORT_GROUP_URIS_MAP = dict()
URI_EXPORT_GROUP_TASKS_LIST = '/block/exports/{0}/tasks'
URI_EXPORT_GROUP_TASK = URI_EXPORT_GROUP_TASKS_LIST + '/{1}'
def __init__(self):
"""During initialization of the class, lets fill all the maps."""
self.__fillExportGroupMap()
self.__fillVolumeMap()
self.__fillConsistencyGroupMap()
self.__initializeComponentVsUriMap()
def __call__(self):
return self
def __initializeComponentVsUriMap(self):
self.COMPONENT_TYPE_VS_URIS_MAP["export"] = self.EXPORT_GROUP_URIS_MAP
self.COMPONENT_TYPE_VS_URIS_MAP[
"volume"] = self.VOLUME_URIS_MAP
self.COMPONENT_TYPE_VS_URIS_MAP[
"consistencygroup"] = self.CG_URIS_MAP
def __fillExportGroupMap(self):
self.EXPORT_GROUP_URIS_MAP["task"] = self.URI_EXPORT_GROUP_TASK
def __fillVolumeMap(self):
self.VOLUME_URIS_MAP["task"] = self.URI_VOLUME_TASK
def __fillConsistencyGroupMap(self):
self.CG_URIS_MAP["task"] = self.URI_CG_TASK
def getUri(self, componentType, operationType):
return (
self.COMPONENT_TYPE_VS_URIS_MAP.get(
componentType).get(
operationType)
)
"""Defining the singleton instance.
Use this instance any time the access is required for this module/class
"""
singletonURIHelperInstance = URIHelper()
|
BatOnBots-ML/batonbots-ml | BatOnBots-ML.py | Python | gpl-3.0 | 22,017 | 0.00801 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (C) 2013 BatOnBots-ML.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Pre Alpha 0.1
"""
import pygtk
pygtk.require('2.0')
import re
from time import sleep
# Estes estão aqui misturados porque dá erro se o gui for importado depois do pygame!
# ISTO ACONTECE QUANDO INSTALEI O PYGTK ALL IN ONE NO WINDOWS
from ui.gui import *
import pygame
from auth.cauth_mod import CAuth
from interface.ServerInterface import ServerInterface
from log.logging_mod import Logging
log = Logging()
printd = log.debug
printi = log.info
printe = log.error
printw = log.warning
#
gtk.gdk.threads_init()
# Indica se o jogador esta ou não autenticado
autenticated = False
# Nome de utilizador com que o jogador se autenticou
playerName = ""
#
playersList = []
# Intervalo em que é feito o keepalive. Em segundos.
TIME | R = 5
# ID do timer do keepalive
timerID = -1
# Instância da classe Interface
interface | = ServerInterface()
#
EOL = "\r\n"
def auth_callback(value):
"""
Callback utilizada para a autenticação dos jogadores.
Quando um jogador termina a autenticação com ou sem sucesso, esta função é invocada.
"""
global autenticated
global playerName
global interface
global TIMER
global timerID
if (value != False):
autenticated = True
playerName = value
# Actualiza o estado a barra de estado
gobject.idle_add(gui.set_status_bar, 'Ligado! Sessão Iniciada Como ' + str(playerName))
# Depois de estar autenticado, o jogador já pode pedir a lista das batalhas.
gobject.idle_add(gui.buttonRefresh.set_sensitive, True)
gobject.idle_add(gui.buttonCreate.set_sensitive, True)
gobject.idle_add(gui.menuBarLogin.set_sensitive, True)
# Por razões de compatibilidade com o python 2.6 é utilizado o metodo 'get_child()'
gobject.idle_add(gui.menuBarLogin.get_child().set_label, 'Terminar Sessão')
# Inicia o keepalive
timerID = gobject.timeout_add_seconds( TIMER, interface.keepalive )
return
interface.shutdown()
gobject.idle_add(gui.menuBarLogin.set_sensitive, True)
gobject.idle_add(gui.set_status_bar, 'Desconectado...')
def exit_callback():
global interface
global exit
# Indica que é mesmo para terminar o programa
exit = 1
interface.shutdown()
gtk.main_quit()
def login_callback(widget=None):
"""
Callback para o botão de login.
"""
global interface
global gui
result = interface.create_sockets()
# Caso não tenha criado os sockets
if (result != 0):
gobject.idle_add(gui.set_status_bar, 'Desconectado...')
gobject.idle_add(gui.menuBarLogin.set_sensitive, True)
return
# Actualiza o estado a barra de estado
gobject.idle_add(gui.set_status_bar, 'A Conectar ao Servidor...')
# Conecta ao servidor
result = interface.conn_to_srv()
if (result != 0):
interface.shutdown()
# Actualiza o estado a barra de estado
gobject.idle_add(gui.set_status_bar, 'Desconectado...')
gobject.idle_add(gui.menuBarLogin.set_sensitive, True)
else:
# Actualiza o estado da barra de estado
gobject.idle_add(gui.set_status_bar, 'Conectado ao Servidor! Inserir Credenciais...')
# Client Authetication Module
cam = CAuth(interface.interfaceSock, auth_callback)
gobject.idle_add(cam.start)
def logout_callback(widget=None):
global interface
global gui
global playerName
playerName = ""
# Actualiza o estado a barra de estado
gobject.idle_add(gui.set_status_bar, 'Desconectado...')
gobject.idle_add(gui.menuBarLogin.set_sensitive, True)
gobject.idle_add(gui.buttonRefresh.set_sensitive , False)
gobject.idle_add(gui.buttonCreate.set_sensitive, False)
# Por razões de compatibilidade com o python 2.6 é utilizado o metodo 'get_child()'
gobject.idle_add(gui.menuBarLogin.get_child().set_label, 'Iniciar Sessão')
gobject.idle_add(gui.lsBattlesList.clear)
interface.shutdown()
def parse_battles_list(data):
"""
Recebe uma string com a lista das batalhas, separa essa lista do comando que a precede, e,
converte numa 'list'
"""
r = re.compile(r"(\[\])$|(_battles_list\()+(.{1,1024})\)\r\n$")
groups = r.match(data)
if (groups != None):
# Caso o comando enviado seja um comando sem argumentos tipo o 'server_state', o grupo será o numero 1.
# Caso seja um comando com argumentos tipo o 'robot_turn_left(90)', os grupos serao o segundo e o terceiro.
# Por isso a necessidade deste cilo IF.
if (groups.group(1) == None):
return eval(groups.group(3))
else:
return []
else:
return []
def _con_fail():
global gui
gtk.gdk.threads_enter()
md = GenericMessageDialog(gui, 'A ligação ao servidor foi terminada inesperadamente.', gtk.MESSAGE_ERROR)
gtk.gdk.threads_leave()
gobject.idle_add(md.show_it)
logout_callback()
def get_battles_list_callback():
"""
- Faz o pedido da lista das batalhas ao servidor.
- Recebe a lista.
- Actualiza a lista na GUI.
"""
global interface
global gui
global EOL
# Actualiza o estado a barra de estado
gobject.idle_add(gui.set_status_bar, 'A Pedir Lista de Batalhas ao Servidor...')
# Pede ao servidor a lista das salas disponíveis
retVal = interface.send_to_server("get_battles_list")
if ( retVal == -1 ):
_con_fail()
return False
# Provoca um atraso de 2 segundos para que o jogador não comece a clicar muitas vezes seguidas
# E a segunda função deste sleep é esperar um pouco pela resposta do servidor (ANTES do sock.rec())
# Para o caso de já ter qualquer coisa no buffer, não retornar logo o que está no buffer e nem esperar pela
# resposta com a lista da batalhas
sleep(0.5)
data = interface.recv_from_server()
# Quando há erro no socket
if (data != -1):
# Retira o último "" para não ficar um campo em branco na lista do 'split'
data = data.strip( EOL )
splitedData = data.split( EOL )
for data in splitedData:
data += EOL
result = command_parser(data)
if (result != -1):
command, args = result
# Verifica se é mesmo uma lista de batalhas, porque podem ficar comandos acumulados no buffer
if (command == '_battles_list'):
# Separa a lista das batalhas do comando
#_battles_list([['Sample Battle 1', 1, 1, 2, 10, 'NuGuN', False]])
data = parse_battles_list(data)
break
data = []
# Como quando há erros no socket, é retornado '-1' e o 'parse_battles_list' só aceita strings ou buffers
else:
data = []
# Actualiza a GUI com a lista das batalhas
gobject.idle_add( gui.create_list, data )
# Actualiza o estado da barra de estado
gobject.idle_add( gui.set_status_bar, 'Ligado!' )
gobject.idle_add( gui.buttonRefresh.set_sensitive, True )
####################################################################################
|
Jeff-Wang93/vent | tests/menu/test_menu.py | Python | apache-2.0 | 12,615 | 0.000396 | # -*- coding: utf-8 -*-
import curses
import npyscreen
from vent.helpers.paths import PathDirs
from vent.menu import VentApp
from vent.menus.main import MainForm
npyscreen.TEST_SETTINGS['CONTINUE_AFTER_TEST_INPUT'] = False
def run_menu(test_input):
    """Run the vent menu application, feeding it a scripted key sequence.

    Args:
        test_input: list of key codes/strings consumed by npyscreen's
            TEST_INPUT machinery in place of real keyboard input.
    """
    # initialize tutorial; ensure_file reports success in element 0
    paths = PathDirs()
    first_time = paths.ensure_file(paths.init_file)
    assert first_time[0]  # idiomatic truthiness check instead of '== True'
    npyscreen.TEST_SETTINGS['TEST_INPUT'] = test_input
    app = VentApp()
    try:
        app.run(fork=False)
    except npyscreen.ExhaustedTestInput:
        # Raised when the scripted input runs out -- the expected way a
        # test-driven run of the menu terminates.
        pass
def test_tools_status():
    """Exercise the MainForm.t_status staticmethod and check its return types."""
    status_text, status_details = MainForm.t_status(True)
    assert isinstance(status_text, str)
    assert isinstance(status_details, tuple)
def test_menu():
    """Drive the vent menu end-to-end with scripted keystrokes.

    Walks the help screens, every core-tools action (install/build/start/
    configure/clean/inventory), the running-services views, and the plugin
    add/build screens, exercising the ok/cancel/quit/toggle paths of each.
    """
    # Symbolic names for the npyscreen/curses key codes used below
    CTRL_Q = '^Q'
    CTRL_T = '^T'
    CTRL_X = '^X'
    CTRL_V = '^V'
    ENTER = curses.ascii.CR
    TAB = curses.ascii.TAB
    LEFT = curses.KEY_LEFT
    RIGHT = curses.KEY_RIGHT
    DOWN = curses.KEY_DOWN
    SPACE = curses.ascii.SP
    BACKSPACE = curses.ascii.BS
    # go through help menus
    run_menu([ENTER, CTRL_T, CTRL_X, 'b', 'm', ENTER, ENTER, CTRL_X, 'b', 'p',
              ENTER, ENTER, CTRL_X, 'b', 't', ENTER, ENTER, CTRL_X, 'b', 'f',
              ENTER, ENTER, CTRL_X, 'b', 'c', ENTER, ENTER, CTRL_X, 'b', 's',
              ENTER, ENTER, CTRL_X, 'p', 'a', ENTER, ENTER, CTRL_X, 'p', 'b',
              ENTER, ENTER, ENTER])
    # go to help menu and leave again
    run_menu([ENTER, CTRL_T, RIGHT, ENTER])
    # go through the core tools menus
    # install
    run_menu([ENTER, CTRL_X, 'c', 'i', ENTER])
    # build - ok
    run_menu([ENTER, CTRL_X, 'c', 'b', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              RIGHT, ENTER, ENTER, ENTER])
    # build - cancel
    run_menu([ENTER, CTRL_X, 'c', 'b', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              ENTER])
    # build - quit back to main
    run_menu([ENTER, CTRL_X, 'c', 'b', CTRL_Q])
    # build - toggle to main
    run_menu([ENTER, CTRL_X, 'c', 'b', CTRL_T])
    # start - ok
    run_menu([ENTER, CTRL_X, 'c', 's', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              TAB, TAB, ENTER, ENTER, ENTER, ENTER, ENTER])
    # start - cancel
    run_menu([ENTER, CTRL_X, 'c', 's', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              TAB, ENTER])
    # start - quit back to main
    run_menu([ENTER, CTRL_X, 'c', 's', CTRL_Q])
    # start - toggle to main
    run_menu([ENTER, CTRL_X, 'c', 's', CTRL_T])
    # configure - cancel
    run_menu([ENTER, CTRL_X, 'c', 't', TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB,
              SPACE, TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB, TAB,
              LEFT, ENTER])
    # configure - quit back to main
    run_menu([ENTER, CTRL_X, 'c', 't', CTRL_Q])
    # configure - toggle back to main
    run_menu([ENTER, CTRL_X, 'c', 't', CTRL_T])
    # configure - ok
    run_menu([ENTER, CTRL_X, 'c', 't', TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB,
              SPACE, TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB, TAB,
              TAB, ENTER, TAB, TAB, ENTER, ENTER, ENTER])
    # configure - quit in the middle of add
    # run_menu([ENTER, CTRL_X, 'c', 't', SPACE, TAB, SPACE, TAB, SPACE, TAB,
    #           SPACE, TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB, TAB, SPACE, TAB,
    #           SPACE, TAB, TAB, ENTER, DOWN, DOWN, DOWN, DOWN, DOWN, DOWN, DOWN,
    #           DOWN, DOWN, DOWN, DOWN, DOWN, LEFT, BACKSPACE, '3', TAB, TAB,
    #           ENTER, ENTER, TAB, ENTER, ENTER, TAB, ENTER, CTRL_Q])
    # configure - instances add (add an instance of rq_worker)
    # run_menu([ENTER, CTRL_X, 'c', 't', SPACE, TAB, SPACE, TAB, SPACE, TAB,
    #           SPACE, TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB, TAB, SPACE, TAB,
    #           SPACE, TAB, TAB, ENTER, DOWN, DOWN, DOWN, DOWN, DOWN, DOWN, DOWN,
    #           DOWN, DOWN, DOWN, DOWN, DOWN, LEFT, BACKSPACE, '3', TAB, TAB,
    #           ENTER, ENTER, TAB, ENTER, ENTER, TAB, ENTER, TAB, TAB, ENTER])
    # configure - quit in the middle of delete
    # run_menu([ENTER, CTRL_X, 'c', 't', SPACE, TAB, SPACE, TAB, SPACE, TAB,
    #           SPACE, TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB, TAB, SPACE, TAB,
    #           SPACE, TAB, SPACE, TAB, TAB, ENTER, DOWN, DOWN, DOWN, DOWN, DOWN,
    #           DOWN, DOWN, DOWN, DOWN, DOWN, DOWN, DOWN, LEFT, BACKSPACE, '2',
    #           TAB, TAB, ENTER, ENTER, TAB, ENTER, CTRL_Q])
    # configure - instances delete (delete an instance of file_drop)
    # run_menu([ENTER, CTRL_X, 'c', 't', SPACE, TAB, SPACE, TAB, SPACE, TAB,
    #           SPACE, TAB, SPACE, TAB, SPACE, TAB, SPACE, TAB, TAB, SPACE, TAB,
    #           SPACE, TAB, SPACE, TAB, TAB, ENTER, DOWN, DOWN, DOWN, DOWN, DOWN,
    #           DOWN, DOWN, DOWN, DOWN, DOWN, DOWN, DOWN, LEFT, BACKSPACE, '2',
    #           TAB, TAB, ENTER, ENTER, TAB, ENTER, SPACE, TAB, TAB, ENTER])
    # clean - ok
    run_menu([ENTER, CTRL_X, 'c', 'c', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              RIGHT, ENTER, ENTER, ENTER])
    # clean - cancel
    run_menu([ENTER, CTRL_X, 'c', 'c', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              ENTER])
    # clean - quit back to main
    run_menu([ENTER, CTRL_X, 'c', 'c', CTRL_Q])
    # clean - toggle to main
    run_menu([ENTER, CTRL_X, 'c', 'c', CTRL_T])
    # inventory - quit back to main
    run_menu([ENTER, CTRL_X, 'c', 'v', CTRL_Q])
    # inventory - toggle to main
    run_menu([ENTER, CTRL_X, 'c', 'v', CTRL_T])
    # inventory - toggle group view
    run_menu([ENTER, CTRL_X, 'c', 'v', CTRL_V, CTRL_V, CTRL_V, CTRL_V, CTRL_V,
              CTRL_V, CTRL_V, CTRL_V, CTRL_T])
    # start variants again, this time confirming with RIGHT
    run_menu([ENTER, CTRL_X, 'c', 's', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              RIGHT, ENTER, ENTER, ENTER, ENTER, ENTER])
    run_menu([ENTER, CTRL_X, 'c', 's', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              ENTER])
    run_menu([ENTER, CTRL_X, 'c', 's', CTRL_Q])
    run_menu([ENTER, CTRL_X, 'c', 's', CTRL_T])
    # services running - core services
    run_menu([ENTER, CTRL_X, 's', 'c', CTRL_T])
    # services running - external services
    run_menu([ENTER, CTRL_X, 's', 'e', CTRL_T])
    # core tools 'p' action: ok/cancel/quit/toggle -- presumably stop;
    # TODO confirm which menu entry 'p' maps to
    run_menu([ENTER, CTRL_X, 'c', 'p', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              RIGHT, ENTER, ENTER, ENTER])
    run_menu([ENTER, CTRL_X, 'c', 'p', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              ENTER])
    run_menu([ENTER, CTRL_X, 'c', 'p', CTRL_Q])
    run_menu([ENTER, CTRL_X, 'c', 'p', CTRL_T])
    # core tools 'u' action: ok/cancel/quit/toggle -- presumably update;
    # TODO confirm which menu entry 'u' maps to
    run_menu([ENTER, CTRL_X, 'c', 'u', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              RIGHT, ENTER, ENTER, ENTER])
    run_menu([ENTER, CTRL_X, 'c', 'u', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              ENTER])
    run_menu([ENTER, CTRL_X, 'c', 'u', CTRL_Q])
    run_menu([ENTER, CTRL_X, 'c', 'u', CTRL_T])
    # core tools 'r' action: ok/cancel/quit/toggle -- presumably remove or
    # restart; TODO confirm which menu entry 'r' maps to
    run_menu([ENTER, CTRL_X, 'c', 'r', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              RIGHT, ENTER, ENTER, ENTER])
    run_menu([ENTER, CTRL_X, 'c', 'r', ENTER])
    run_menu([ENTER, CTRL_X, 'c', 'r', CTRL_Q])
    run_menu([ENTER, CTRL_X, 'c', 'r', CTRL_T])
    # configure again with a single TAB before confirming
    run_menu([ENTER, CTRL_X, 'c', 't', TAB, ENTER, ENTER, ENTER])
    # go through the plugins menus
    run_menu([ENTER, CTRL_X, 'p', 'a', TAB, TAB, TAB, TAB, TAB, TAB, TAB, TAB,
              RIGHT, ENTER, SPACE, TAB, TAB, TAB, TAB, TAB, TAB, TAB, ENTER,
              SPACE, TAB, SPACE, TAB, SPACE, TAB, TAB, SPACE, TAB, SPACE, TAB,
              TAB, ENTER, ENTER, ENTER])
    # plugin add: type 'alpine', confirm, then erase the input again and quit
    cmds = [ENTER, CTRL_X, 'p', 'a', TAB, TAB, TAB, 'alpine', TAB, TAB, TAB,
            TAB, TAB, TAB, ENTER, ENTER, ENTER]
    cmds += (43 * [BACKSPACE])
    cmds += [TAB, TAB, TAB, BACKSPACE, BACKSPACE, BACKSPACE, BACKSPACE,
             BACKSPACE, BACKSPACE, TAB, TAB, TAB, TAB, TAB, TAB, ENTER, ENTER,
             ENTER, CTRL_Q]
    run_menu(cmds)
    # plugin add with both fields filled in
    run_menu([ENTER, CTRL_X, 'p', 'a', TAB, TAB, TAB, 'alpine', TAB, 'alpine',
              TAB, TAB, TAB, TAB, TAB, ENTER, ENTER, ENTER, TAB, TAB, ENTER,
              ENTER, ENTER])
    # plugin add: toggle to main and back before cancelling
    run_menu([ENTER, CTRL_X, 'p', 'a', CTRL_T, CTRL_T, TAB, TAB, TAB, TAB, TAB,
              TAB, TAB, TAB, ENTER])
    # plugin build
    run_menu([ENTER, CTRL_X, 'p', 'b', TAB, TAB, RIGHT, ENTER, ENTER, ENTER])
|
bnrubin/ubuntu-bots | Lart/plugin.py | Python | gpl-2.0 | 6,326 | 0.001423 | # -*- Encoding: utf-8 -*-
###
# Copyright (c) 2005, Daniel DiPaolo
# (c) 2006, Dennis Kaarsemaker
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import re
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
import supybot.ircdb as ircdb
import supybot.conf as conf
import random
def checkIgnored(hostmask, recipient='', users=ircdb.users, channels=ircdb.channels):
    """Return True if `hostmask` should be ignored when addressing `recipient`.

    Checks, in order: the global ignore list; per-user ignore state for
    registered users (owners are never ignored); and, when `recipient` is a
    channel, that channel's ignore list.  Unregistered callers fall back to
    the channel's ignore list only.

    NOTE: this block was corrupted by stray ' | ' artifacts in the source
    (lines split mid-token); the code below is the faithful reconstruction.
    """
    if ircdb.ignores.checkIgnored(hostmask):
        return True
    try:
        id = ircdb.users.getUserId(hostmask)
        user = users.getUser(id)
    except KeyError:
        # If there's no user, only a channel ignore can apply.
        if ircutils.isChannel(recipient):
            channel = channels.getChannel(recipient)
            if channel.checkIgnored(hostmask):
                return True
            return False
        return False
    if user._checkCapability('owner'):
        # Owners shouldn't ever be ignored.
        return False
    elif user.ignore:
        return True
    elif recipient:
        if ircutils.isChannel(recipient):
            channel = ircdb.channels.getChannel(recipient)
            if channel.checkIgnored(hostmask):
                return True
            return False
        return False
    else:
        return False
class Lart(plugins.ChannelIdDatabasePlugin):
    """Channel database plugin that stores and replays 'lart' insults."""
    # Patterns matching first-person references, rewritten to the caller's nick
    _meRe = re.compile(r'\bme\b', re.I)
    _myRe = re.compile(r'\bmy\b', re.I)
    def _replaceFirstPerson(self, s, nick):
        """Rewrite 'me'/'my' in *s* to refer to *nick* instead."""
        s = self._meRe.sub(nick, s)
        s = self._myRe.sub('%s\'s' % nick, s)
        return s
    def addValidator(self, irc, text):
        """Reject new larts that lack the mandatory $who placeholder."""
        if '$who' not in text:
            irc.error('Larts must contain $who.', Raise=True)
    def lart(self, irc, msg, args, channel, id, text):
        """[<channel>] [<id>] <who|what> [for <reason>]
        Uses the Luser Attitude Readjustment Tool on <who|what> (for <reason>,
        if given). If <id> is given, uses that specific lart. <channel> is
        only necessary if the message isn't sent in the channel itself.
        """
        # Larts can be disabled per channel via the registry.
        if not self.registryValue('enabled', msg.args[0]):
            return
        # Split "target for reason" if a reason was supplied.
        if ' for ' in text:
            (target, reason) = map(str.strip, text.split(' for ', 1))
        else:
            (target, reason) = (text, '')
        # Either fetch the requested lart by id or pick a random one.
        if id is not None:
            try:
                lart = self.db.get(channel, id)
            except KeyError:
                irc.error(format('There is no lart with id #%i.', id))
                return
        else:
            lart = self.db.random(channel)
            if not lart:
                irc.error(format('There are no larts in my database '
                                 'for %s.', channel))
                return
        text = self._replaceFirstPerson(lart.text, msg.nick)
        # Compare against a lowercased, formatting-stripped target.
        formatText = ircutils.stripFormatting(target).lower()
        # 25% of attempts to lart the bot (or Evilrockbot) bounce back
        # at the caller.
        if (ircutils.strEqual(target, irc.nick) or 'Evilrockbot' in formatText) and random.uniform(0,100) < 25:
            target = msg.nick
            reason = ''
        # Larts aimed at these nicks are always redirected back at the caller.
        elif 'stdin' in formatText or 'tsimpson' in formatText:
            target = msg.nick
            reason = ''
        else:
            target = self._replaceFirstPerson(target, msg.nick)
            reason = self._replaceFirstPerson(reason, msg.nick)
        # Drop a trailing period so the substituted sentence reads cleanly.
        if target.endswith('.'):
            target = target.rstrip('.')
        # Fill in the placeholders stored in the lart text.
        text = text.replace('$who', target)
        text = text.replace('$chan', msg.args[0])
        if reason:
            text += ' for ' + reason
        if self.registryValue('showIds', channel):
            text += format(' (#%i)', lart.id)
        # Reply as a CTCP ACTION (/me).
        irc.reply(text, action=True)
    lart = wrap(lart, ['channeldb', optional('id'), 'text'])
    # 'pity' is an alias for the same command.
    pity = lart
    def callPrecedence(self, irc):
        """Order this plugin after IRCLogin in the callback chain.

        Returns (before, after): callbacks that must run before/after us.
        """
        before = []
        for cb in irc.callbacks:
            if cb.name() == 'IRCLogin':
                before.append(cb)
        return (before, [])
    def inFilter(self, irc, msg):
        """Let unregistered users reach this plugin's commands even when
        supybot.defaultIgnore is enabled, by proxying the command directly.
        """
        if not msg.command == 'PRIVMSG':
            return msg
        if not conf.supybot.defaultIgnore():
            return msg
        s = callbacks.addressed(irc.nick, msg)
        if not s:
            return msg
        # Explicit ignores still apply.
        if checkIgnored(msg.prefix):
            return msg
        try:
            # Registered users are handled by the normal pipeline.
            if ircdb.users.getUser(msg.prefix):
                return msg
        except:
            # NOTE(review): bare except; deliberately treats any lookup
            # failure as "no such user".
            pass
        cmd, args = (s.split(None, 1) + [None])[:2]
        # Strip a leading address char (e.g. '!') if configured for the channel.
        if cmd and cmd[0] in str(conf.supybot.reply.whenAddressedBy.chars.get(msg.args[0])):
            cmd = cmd[1:]
        if cmd in self.listCommands():
            tokens = callbacks.tokenize(s, channel=msg.args[0])
            self.Proxy(irc, msg, tokens)
        return msg
Class = Lart
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
|
openstack/octavia | specs-tests/test_titles.py | Python | apache-2.0 | 4,052 | 0 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import docutils.core
from docutils.parsers import rst
from docutils.parsers.rst import directives
from docutils.parsers.rst import roles
import testtools
class FakeDirective(rst.Directive):
    """Stand-in reST directive that accepts any content and renders nothing."""
    has_content = True
    def run(self):
        # Produce no doctree nodes; the directive's content is discarded.
        return []
def fake_role(name, rawtext, text, lineno, inliner,
              options=None, content=None):
    """No-op docutils role handler: produces no nodes and no system messages."""
    nodes, messages = [], []
    return nodes, messages
# Register no-op handlers so specs that use optional Sphinx extensions
# (seqdiag/blockdiag/nwdiag/actdiag/graphviz) and the :doc: role still
# parse cleanly under plain docutils.
directives.register_directive('seqdiag', FakeDirective)
directives.register_directive('blockdiag', FakeDirective)
directives.register_directive('nwdiag', FakeDirective)
directives.register_directive('actdiag', FakeDirective)
directives.register_directive('graphviz', FakeDirective)
roles.register_local_role('doc', fake_role)
class TestTitles(testtools.TestCase):
    """Validate that every spec file follows the template's section layout.

    NOTE: the original block was corrupted by stray ' | ' artifacts that
    split 'Developer impact' and the 'impl' identifier mid-token; both are
    reconstructed below.
    """

    def _get_title(self, section_tree):
        """Return {'name': ..., 'subtitles': [...]} for one <section> node."""
        section = {
            'subtitles': [],
        }
        for node in section_tree:
            if node.tagname == 'title':
                section['name'] = node.rawsource
            elif node.tagname == 'section':
                # Only the names of direct child sections are collected.
                subsection = self._get_title(node)
                section['subtitles'].append(subsection['name'])
        return section

    def _get_titles(self, spec):
        """Map each top-level section name to its list of subsection names."""
        titles = {}
        for node in spec:
            if node.tagname == 'section':
                section = self._get_title(node)
                titles[section['name']] = section['subtitles']
        return titles

    def _check_titles(self, titles):
        """Assert the parsed section layout matches the spec template."""
        self.assertEqual(7, len(titles))
        problem = 'Problem description'
        self.assertIn(problem, titles)
        self.assertEqual(0, len(titles[problem]))
        proposed = 'Proposed change'
        self.assertIn(proposed, titles)
        self.assertIn('Alternatives', titles[proposed])
        self.assertIn('Data model impact', titles[proposed])
        self.assertIn('REST API impact', titles[proposed])
        self.assertIn('Security impact', titles[proposed])
        self.assertIn('Notifications impact', titles[proposed])
        self.assertIn('Other end user impact', titles[proposed])
        self.assertIn('Performance Impact', titles[proposed])
        self.assertIn('Other deployer impact', titles[proposed])
        self.assertIn('Developer impact', titles[proposed])
        impl = 'Implementation'
        self.assertIn(impl, titles)
        self.assertEqual(2, len(titles[impl]))
        self.assertIn('Assignee(s)', titles[impl])
        self.assertIn('Work Items', titles[impl])
        deps = 'Dependencies'
        self.assertIn(deps, titles)
        self.assertEqual(0, len(titles[deps]))
        testing = 'Testing'
        self.assertIn(testing, titles)
        self.assertEqual(0, len(titles[testing]))
        docs = 'Documentation Impact'
        self.assertIn(docs, titles)
        self.assertEqual(0, len(titles[docs]))
        refs = 'References'
        self.assertIn(refs, titles)
        self.assertEqual(0, len(titles[refs]))

    def test_template(self):
        """Every file under specs/ must be .rst and follow the template."""
        files = set(glob.glob('specs/*.rst') + glob.glob('specs/*/*'))
        # Diagram sources may live alongside specs and are not checked.
        files = files - set(glob.glob('specs/*/*.dot'))
        files = files - set(glob.glob('specs/*/*.diag'))
        for filename in files:
            self.assertTrue(filename.endswith(".rst"),
                            "spec's file must use 'rst' extension.")
            with open(filename) as f:
                data = f.read()
            spec = docutils.core.publish_doctree(data)
            titles = self._get_titles(spec)
            self._check_titles(titles)
|
rcgee/oq-hazardlib | openquake/hazardlib/tests/source/point_test.py | Python | agpl-3.0 | 31,027 | 0.000322 | from __future__ import division
# The Hazard Library
# Copyright (C) 2012-2016 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from decimal import Decimal
import numpy
from openquake.hazardlib.const import TRT
from openquake.hazardlib.source.point import PointSource
from openquake.hazardlib.source.rupture import ParametricProbabilisticRupture
from openquake.hazardlib.mfd import TruncatedGRMFD, EvenlyDiscretizedMFD
from openquake.hazardlib.scalerel.peer import PeerMSR
from openquake.hazardlib.scalerel.wc1994 import WC1994
from openquake.hazardlib.geo import Point, PlanarSurface, NodalPlane, Polygon
from openquake.hazardlib.pmf import PMF
from openquake | .hazardlib.tom import PoissonTOM
from | openquake.hazardlib.calc import filters
from openquake.hazardlib.site import \
Site, SiteCollection, FilteredSiteCollection
from openquake.hazardlib.tests.geo.surface import \
_planar_test_data as planar_surface_test_data
from openquake.hazardlib.tests import assert_pickleable
def make_point_source(lon=1.2, lat=3.4, **kwargs):
    """Build a PointSource with sensible defaults; any keyword overrides them.

    The constructed source is round-tripped through pickle (assert_pickleable)
    before being returned.
    """
    params = {
        'source_id': 'source_id',
        'name': 'source name',
        'tectonic_region_type': TRT.SUBDUCTION_INTRASLAB,
        'mfd': TruncatedGRMFD(a_val=1, b_val=2, min_mag=3,
                              max_mag=5, bin_width=1),
        'location': Point(lon, lat, 5.6),
        'nodal_plane_distribution': PMF([(1, NodalPlane(1, 2, 3))]),
        'hypocenter_distribution': PMF([(1, 4)]),
        'upper_seismogenic_depth': 1.3,
        'lower_seismogenic_depth': 4.9,
        'magnitude_scaling_relationship': PeerMSR(),
        'rupture_aspect_ratio': 1.333,
        'rupture_mesh_spacing': 1.234,
        'temporal_occurrence_model': PoissonTOM(50.),
    }
    params.update(kwargs)
    source = PointSource(**params)
    assert_pickleable(source)
    return source
class PointSourceCreationTestCase(unittest.TestCase):
    """Constructor-argument validation checks for PointSource."""
    def make_point_source(self, **kwargs):
        # Build a source via the module-level factory and verify every
        # overridden attribute was stored unchanged (identity, not equality).
        source = make_point_source(**kwargs)
        for key in kwargs:
            self.assertIs(getattr(source, key), kwargs[key])
    def assert_failed_creation(self, exc, msg, **kwargs):
        # Expect construction to fail with exactly this exception and message.
        with self.assertRaises(exc) as ae:
            self.make_point_source(**kwargs)
        self.assertEqual(str(ae.exception), msg)
    def test_negative_upper_seismogenic_depth(self):
        self.assert_failed_creation(
            ValueError,
            'upper seismogenic depth must be non-negative',
            upper_seismogenic_depth=-0.1
        )
    def test_non_positive_rupture_mesh_spacing(self):
        # Both a negative and a zero spacing must be rejected.
        msg = 'rupture mesh spacing must be positive'
        self.assert_failed_creation(ValueError, msg, rupture_mesh_spacing=-0.1)
        self.assert_failed_creation(ValueError, msg, rupture_mesh_spacing=0)
    def test_lower_depth_above_upper_depth(self):
        self.assert_failed_creation(
            ValueError,
            'lower seismogenic depth must be below upper seismogenic depth',
            upper_seismogenic_depth=10, lower_seismogenic_depth=8
        )
    def test_lower_depth_equal_to_upper_depth(self):
        # Equal depths are rejected too, not just inverted ones.
        self.assert_failed_creation(
            ValueError,
            'lower seismogenic depth must be below upper seismogenic depth',
            upper_seismogenic_depth=10, lower_seismogenic_depth=10
        )
    def test_hypocenter_depth_out_of_seismogenic_layer(self):
        # One hypocenter (8.001) lies just below the seismogenic layer [3, 8].
        self.assert_failed_creation(
            ValueError,
            'depths of all hypocenters must be in between '
            'lower and upper seismogenic depths',
            upper_seismogenic_depth=3, lower_seismogenic_depth=8,
            hypocenter_distribution=PMF([(Decimal('0.3'), 4),
                                         (Decimal('0.7'), 8.001)])
        )
    def test_negative_aspect_ratio(self):
        self.assert_failed_creation(
            ValueError,
            'rupture aspect ratio must be positive',
            rupture_aspect_ratio=-1
        )
    def test_zero_aspect_ratio(self):
        self.assert_failed_creation(
            ValueError,
            'rupture aspect ratio must be positive',
            rupture_aspect_ratio=0
        )
    def test_successfull_creation(self):
        # All defaults from make_point_source() must construct cleanly.
        self.make_point_source()
class PointSourceIterRupturesTestCase(unittest.TestCase):
    def _get_rupture(self, min_mag, max_mag, hypocenter_depth,
                     aspect_ratio, dip, rupture_mesh_spacing,
                     upper_seismogenic_depth=2,
                     lower_seismogenic_depth=16):
        """Build a one-bin PointSource and return its single rupture.

        The MFD spans exactly one magnitude bin, so iter_ruptures() is
        expected to yield exactly one rupture; basic invariants (TOM, TRT,
        rake, planar surface, mesh spacing) are asserted before returning.
        """
        source_id = name = 'test-source'
        trt = TRT.ACTIVE_SHALLOW_CRUST
        mfd = TruncatedGRMFD(a_val=2, b_val=1, min_mag=min_mag,
                             max_mag=max_mag, bin_width=1)
        location = Point(0, 0)
        nodal_plane = NodalPlane(strike=45, dip=dip, rake=-123.23)
        # Single nodal plane / single hypocenter depth, each with probability 1.
        nodal_plane_distribution = PMF([(1, nodal_plane)])
        hypocenter_distribution = PMF([(1, hypocenter_depth)])
        magnitude_scaling_relationship = PeerMSR()
        rupture_aspect_ratio = aspect_ratio
        tom = PoissonTOM(time_span=50)
        point_source = PointSource(
            source_id, name, trt, mfd, rupture_mesh_spacing,
            magnitude_scaling_relationship, rupture_aspect_ratio, tom,
            upper_seismogenic_depth, lower_seismogenic_depth,
            location, nodal_plane_distribution, hypocenter_distribution
        )
        ruptures = list(point_source.iter_ruptures())
        self.assertEqual(len(ruptures), 1)
        [rupture] = ruptures
        self.assertIs(rupture.temporal_occurrence_model, tom)
        self.assertIs(rupture.tectonic_region_type, trt)
        self.assertEqual(rupture.rake, nodal_plane.rake)
        self.assertIsInstance(rupture.surface, PlanarSurface)
        self.assertEqual(rupture.surface.mesh_spacing, rupture_mesh_spacing)
        return rupture
    def _check_dimensions(self, surface, length, width, delta=1e-3):
        """Assert the planar surface forms a rectangle of the given size.

        Checks that opposite edges have (almost) equal lengths, that they
        match the expected length/width, and that the surface's own
        width/length attributes agree, all within *delta* km.
        """
        length_top = surface.top_left.distance(surface.top_right)
        length_bottom = surface.bottom_left.distance(surface.bottom_right)
        self.assertAlmostEqual(length_top, length_bottom, delta=delta)
        self.assertAlmostEqual(length_top, length, delta=delta)
        width_left = surface.top_left.distance(surface.bottom_left)
        width_right = surface.top_right.distance(surface.bottom_right)
        self.assertAlmostEqual(width_left, width_right, delta=delta)
        self.assertAlmostEqual(width_right, width, delta=delta)
        self.assertAlmostEqual(width, surface.width, delta=delta)
        self.assertAlmostEqual(length, surface.length, delta=delta)
def test_1_rupture_is_inside(self):
rupture = self._get_rupture(min_mag=5, max_mag=6, hypocenter_depth=8,
aspect_ratio=1, dip=30,
rupture_mesh_spacing=1)
self.assertEqual(rupture.mag, 5.5)
self.assertEqual(rupture.hypocenter, Point(0, 0, 8))
self.assertAlmostEqual(rupture.occurrence_rate, 0.0009)
surface = rupture.surface
self._check_dimensions(surface, 5.623413252, 5.623413252, delta=0.01)
self.assertAlmostEqual(0, surface.top_left.distance(Point(
-0.0333647435005, -0.00239548066924, 6.59414668702
)), places=5)
self.assertAlmostEqual(0, surface.top_right.distance(Point(
0.00239548107539, 0.0333647434713, 6.59414668702
)), places=5)
self.assertAlmostEqual(0, surface.bottom_left.distance(Point(
-0.00239548107539, -0.03336474347 |
skytrack/tps5 | src/lib/libharu/if/python/hpdf.py | Python | gpl-2.0 | 77,143 | 0.018524 | ##
## * << Haru Free PDF Library 2.0.8 >> -- hpdf.h
## *
## * URL http://libharu.org/
## *
## * Copyright (c) 1999-2006 Takeshi Kanno
## *
## * Permission to use, copy, modify, distribute and sell this software
## * and its documentation for any purpose is hereby granted without fee,
## * provided that the above copyright notice appear in all copies and
## * that both that copyright notice and this permission notice appear
## * in supporting documentation.
## * It is provided "as is" without express or implied warranty.
## *
##
## port to python by Li Jun
## http://groups.google.com/group/pythoncia
import os
import sys
import types
def setpath():
    """Prepend the bundled DLL directory to the PATH environment variable.

    Ensures the libharu shared library shipped next to this module can be
    found by the dynamic loader before it is loaded via ctypes.
    """
    dllpath = '%s/dll' % (os.path.dirname(os.path.realpath(__file__)))
    if 'PATH' in os.environ:
        if dllpath not in os.environ['PATH']:
            # Use os.pathsep so the separator is correct on POSIX (':') as
            # well as on Windows (';'); the original hard-coded ';' even
            # though the module also loads libhpdf.so on non-win32.
            os.environ['PATH'] = '%s%s%s' % (dllpath, os.pathsep,
                                             os.environ['PATH'])
    else:
        os.environ['PATH'] = dllpath
setpath()
from hpdf_consts import *
from hpdf_types import *
if os.sys.platform=='win32':
harudll='libhpdf.dll'
#haru=WinDLL(harudll)
haru=CDLL(harudll)
else:
harudll='libhpdf.so'
haru=CDLL(harudll)
HPDF_HANDLE=c_void_p
HPDF_D | oc=HPDF_HANDLE
HPDF_Page=HPDF_HANDLE
HPDF_Pages=HPDF_HANDLE
HPDF_Stream=HPDF_HANDLE
HPDF_Image=HPDF_HANDLE
HPDF_Font=HPDF_HANDLE
HPDF_Outline=HPDF_HANDLE
HPDF_Encoder=HPDF_HANDLE
HPDF_Destination=HPDF_HANDLE
HPDF_XObject=HPDF_HANDLE
HPDF_Annotation=HPDF_HANDLE
HPDF_ExtGState=HPDF_HANDLE
#const char * HPDF_GetVersion (void)
HPDF_GetVersion=haru.HPDF_GetVersion
HPDF_GetVersion.restype=c_char_p
#HPD | F_Doc HPDF_NewEx (HPDF_Error_Handler user_error_fn, HPDF_Alloc_Func user_alloc_fn, HPDF_Free_Func user_free_fn, HPDF_UINT mem_pool_buf_size, void *user_data)
HPDF_NewEx=haru.HPDF_NewEx
HPDF_NewEx.restype=HPDF_Doc
#HPDF_Doc HPDF_New (HPDF_Error_Handler user_error_fn, void *user_data)
HPDF_New=haru.HPDF_New
HPDF_New.restype=HPDF_Doc
#HPDF_STATUS HPDF_SetErrorHandler (HPDF_Doc pdf, HPDF_Error_Handler user_error_fn)
HPDF_SetErrorHandler=haru.HPDF_SetErrorHandler
HPDF_SetErrorHandler.restype=HPDF_STATUS
#void HPDF_Free (HPDF_Doc pdf)
HPDF_Free=haru.HPDF_Free
HPDF_Free.restype=None
#HPDF_STATUS HPDF_NewDoc (HPDF_Doc pdf)
HPDF_NewDoc=haru.HPDF_NewDoc
HPDF_NewDoc.restype=HPDF_STATUS
#void HPDF_FreeDoc (HPDF_Doc pdf)
HPDF_FreeDoc=haru.HPDF_FreeDoc
HPDF_FreeDoc.restype=None
#HPDF_BOOL HPDF_HasDoc (HPDF_Doc pdf)
HPDF_HasDoc=haru.HPDF_HasDoc
HPDF_HasDoc.restype=HPDF_BOOL
#void HPDF_FreeDocAll (HPDF_Doc pdf)
HPDF_FreeDocAll=haru.HPDF_FreeDocAll
HPDF_FreeDocAll.restype=None
#HPDF_STATUS HPDF_SaveToStream (HPDF_Doc pdf)
HPDF_SaveToStream=haru.HPDF_SaveToStream
HPDF_SaveToStream.restype=HPDF_STATUS
#HPDF_UINT32 HPDF_GetStreamSize (HPDF_Doc pdf)
HPDF_GetStreamSize=haru.HPDF_GetStreamSize
HPDF_GetStreamSize.restype=HPDF_UINT32
#HPDF_STATUS HPDF_ReadFromStream (HPDF_Doc pdf, HPDF_BYTE *buf, HPDF_UINT32 *size)
_HPDF_ReadFromStream=haru.HPDF_ReadFromStream
_HPDF_ReadFromStream.restype=HPDF_STATUS
def HPDF_ReadFromStream(
    pdf, #HPDF_Doc
    buf, #POINTER(HPDF_BYTE)
    size, #POINTER(HPDF_UINT32)
    ):
    """Read the in-memory PDF stream into *buf*.

    If *buf* is a plain Python list/tuple it is converted to a ctypes
    HPDF_BYTE array and *size* is replaced by its length.  *size* is
    always coerced to HPDF_UINT32 before the C call.
    """
    # isinstance() instead of 'type(...) in (types.ListType, types.TupleType)':
    # those aliases were removed in Python 3, and isinstance is the idiom.
    if isinstance(buf, (list, tuple)):
        size = len(buf)
        buf = pointer((HPDF_BYTE * size)(*buf))
    size = HPDF_UINT32(int(size))
    return _HPDF_ReadFromStream(
        pdf, #HPDF_Doc
        buf, #POINTER(HPDF_BYTE)
        size, #POINTER(HPDF_UINT32)
        )
#HPDF_STATUS HPDF_ResetStream (HPDF_Doc pdf)
HPDF_ResetStream=haru.HPDF_ResetStream
HPDF_ResetStream.restype=HPDF_STATUS
#HPDF_STATUS HPDF_SaveToFile (HPDF_Doc pdf, const char *file_name)
HPDF_SaveToFile=haru.HPDF_SaveToFile
HPDF_SaveToFile.restype=HPDF_STATUS
#HPDF_STATUS HPDF_GetError (HPDF_Doc pdf)
HPDF_GetError=haru.HPDF_GetError
HPDF_GetError.restype=HPDF_STATUS
#HPDF_STATUS HPDF_GetErrorDetail (HPDF_Doc pdf)
HPDF_GetErrorDetail=haru.HPDF_GetErrorDetail
HPDF_GetErrorDetail.restype=HPDF_STATUS
#void HPDF_ResetError (HPDF_Doc pdf)
HPDF_ResetError=haru.HPDF_ResetError
HPDF_ResetError.restype=None
#HPDF_STATUS HPDF_SetPagesConfiguration (HPDF_Doc pdf, HPDF_UINT page_per_pages)
_HPDF_SetPagesConfiguration=haru.HPDF_SetPagesConfiguration
_HPDF_SetPagesConfiguration.restype=HPDF_STATUS
def HPDF_SetPagesConfiguration(
    pdf, #HPDF_Doc
    page_per_pages, #HPDF_UINT
    ):
    """Python-friendly wrapper: coerce page_per_pages to a ctypes HPDF_UINT
    before forwarding to the raw C entry point."""
    page_per_pages=HPDF_UINT(int(page_per_pages))
    return _HPDF_SetPagesConfiguration(
        pdf, #HPDF_Doc
        page_per_pages, #HPDF_UINT
        )
#HPDF_Page HPDF_GetPageByIndex (HPDF_Doc pdf, HPDF_UINT index)
HPDF_GetPageByIndex=haru.HPDF_GetPageByIndex
HPDF_GetPageByIndex.restype=HPDF_Page
#---------------------------------------------------------------------------
#---------------------------------------------------------------------------
#HPDF_PageLayout HPDF_GetPageLayout (HPDF_Doc pdf)
HPDF_GetPageLayout=haru.HPDF_GetPageLayout
HPDF_GetPageLayout.restype=HPDF_PageLayout
#HPDF_STATUS HPDF_SetPageLayout (HPDF_Doc pdf, HPDF_PageLayout layout)
HPDF_SetPageLayout=haru.HPDF_SetPageLayout
HPDF_SetPageLayout.restype=HPDF_STATUS
#HPDF_PageMode HPDF_GetPageMode (HPDF_Doc pdf)
HPDF_GetPageMode=haru.HPDF_GetPageMode
HPDF_GetPageMode.restype=HPDF_PageMode
#HPDF_STATUS HPDF_SetPageMode (HPDF_Doc pdf, HPDF_PageMode mode)
HPDF_SetPageMode=haru.HPDF_SetPageMode
HPDF_SetPageMode.restype=HPDF_STATUS
#HPDF_UINT HPDF_GetViewerPreference (HPDF_Doc pdf)
HPDF_GetViewerPreference=haru.HPDF_GetViewerPreference
HPDF_GetViewerPreference.restype=HPDF_UINT
#HPDF_STATUS HPDF_SetViewerPreference (HPDF_Doc pdf, HPDF_UINT value)
HPDF_SetViewerPreference=haru.HPDF_SetViewerPreference
HPDF_SetViewerPreference.restype=HPDF_STATUS
#HPDF_STATUS HPDF_SetOpenAction (HPDF_Doc pdf, HPDF_Destination open_action)
HPDF_SetOpenAction=haru.HPDF_SetOpenAction
HPDF_SetOpenAction.restype=HPDF_STATUS
#---------------------------------------------------------------------------
#----- page handling -------------------------------------------------------
#HPDF_Page HPDF_GetCurrentPage (HPDF_Doc pdf)
HPDF_GetCurrentPage=haru.HPDF_GetCurrentPage
HPDF_GetCurrentPage.restype=HPDF_Page
#HPDF_Page HPDF_AddPage (HPDF_Doc pdf)
HPDF_AddPage=haru.HPDF_AddPage
HPDF_AddPage.restype=HPDF_Page
#HPDF_Page HPDF_InsertPage (HPDF_Doc pdf, HPDF_Page page)
HPDF_InsertPage=haru.HPDF_InsertPage
HPDF_InsertPage.restype=HPDF_Page
#HPDF_STATUS HPDF_Page_SetWidth (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetWidth=haru.HPDF_Page_SetWidth
_HPDF_Page_SetWidth.restype=HPDF_STATUS
def HPDF_Page_SetWidth(
    page, #HPDF_Page
    value, #HPDF_REAL
    ):
    """Python-friendly wrapper: coerce value to a ctypes HPDF_REAL before
    forwarding to the raw C entry point."""
    value=HPDF_REAL(value)
    return _HPDF_Page_SetWidth(
        page, #HPDF_Page
        value, #HPDF_REAL
        )
#HPDF_STATUS HPDF_Page_SetHeight (HPDF_Page page, HPDF_REAL value)
_HPDF_Page_SetHeight=haru.HPDF_Page_SetHeight
_HPDF_Page_SetHeight.restype=HPDF_STATUS
def HPDF_Page_SetHeight(
    page, #HPDF_Page
    value, #HPDF_REAL
    ):
    """Python-friendly wrapper: coerce value to a ctypes HPDF_REAL before
    forwarding to the raw C entry point."""
    value=HPDF_REAL(value)
    return _HPDF_Page_SetHeight(
        page, #HPDF_Page
        value, #HPDF_REAL
        )
#HPDF_STATUS
#HPDF_Page_SetSize (HPDF_Page page,
# HPDF_PageSizes size,
# HPDF_PageDirection direction);
HPDF_Page_SetSize=haru.HPDF_Page_SetSize
HPDF_Page_SetSize.restype=HPDF_STATUS
|
sanchezz93/Giga-Compiler | Entrega 5/Machine.py | Python | mit | 8,699 | 0.026325 | from Memory import *
from Giga import *
from Cube import *
def executeVirtualMachine(functions, quadruples, constants):
print("Virtual machine running...")
countQuadruples = 0
activeMemory = Memory('module', constVarCount , tempVarCount)
globalMemory = Memory('main', globalVarCount , 0)
while quadruples[countQuadruples]['op'] != 'END' :
quadruple = quadruples[countQuadruples]
print(quadruple)
#Change to the real values when handling memory
if quadruple['op'] == '+':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 + valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '-':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 - valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '=':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 = valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '*':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = float(valueVar1) * float(valueVar2)
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '/':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 / valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '<=':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 <= valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '>=':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 >= valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '<':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, consta | nts)
else:
valueVar2 = activeMemory.getVal | ueAtAddress(var2, constants)
resultValue = valueVar1 < valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '>':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 > valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '==':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 == valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '!=':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 != valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '||':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 or valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == '&&':
var1 = quadruple['var1']
var2 = quadruple['var2']
result = quadruple['result']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
if var2 >= 10000 and var2 < 20000:
valueVar2 = globalMemory.getValueAtAddress(var2, constants)
else:
valueVar2 = activeMemory.getValueAtAddress(var2, constants)
resultValue = valueVar1 and valueVar2
activeMemory.storeValue(result, resultValue)
elif quadruple['op'] == 'PRINT':
var1 = quadruple['resultado']
if var1 >= 10000 and var1 < 20000:
valueVar1 = globalMemory.getValueAtAddress(var1, constants)
else:
valueVar1 = activeMemory.getValueAtAddress(var1, constants)
print(valueVar1)
# elif quadruple[countQuadruples]['op'] == 'ARR':
# elif quadruple[countQuadruples]['op'] == 'ENDFUNC':
# elif quad |
jaddison/django-simple-elasticsearch | simple_elasticsearch/management/commands/es_manage.py | Python | bsd-3-clause | 4,680 | 0.003205 | import sys
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from ...utils import get_indices, create_indices, rebuild_indices, delete_indices
try:
raw_input
except NameError:
raw_input = input
class Unbuffered(object):
    """File-like proxy that flushes the wrapped stream after every write.

    Used so progress output appears immediately even when stdout is
    block-buffered (e.g. when piped); every other attribute lookup is
    delegated to the underlying stream unchanged.
    """
    def __init__(self, stream):
        self.stream = stream
    def write(self, data):
        target = self.stream
        target.write(data)
        target.flush()
    def __getattr__(self, attr):
        # Anything not defined here falls through to the wrapped stream.
        return getattr(self.stream, attr)
sys.stdout = Unbuffered(sys.stdout)
class ESCommandError(CommandError):
    """Elasticsearch-specific failure of this management command.

    Subclasses Django's CommandError so the management framework reports
    it cleanly instead of printing a traceback.
    """
    pass
class Command(BaseCommand):
    """Manage simple-elasticsearch indexes: list, initialize, rebuild or clean up."""
    help = ''

    def add_arguments(self, parser):
        # Action flags -- one of these selects the subcommand to run.
        parser.add_argument('--list', action='store_true', dest='list', default=False)
        parser.add_argument('--initialize', action='store_true', dest='initialize', default=False)
        parser.add_argument('--rebuild', action='store_true', dest='rebuild', default=False)
        parser.add_argument('--cleanup', action='store_true', dest='cleanup', default=False)
        # Skip the interactive confirmation prompts.
        parser.add_argument('--no_input', '--noinput', action='store_true', dest='no_input', default=False)
        # Comma-separated index names; empty string means "all indexes".
        parser.add_argument('--indexes', action='store', dest='indexes', default='')

    def handle(self, *args, **options):
        """Dispatch to the subcommand selected by the action flag."""
        no_input = options.get('no_input')
        requested_indexes = options.get('indexes', '') or []
        if requested_indexes:
            requested_indexes = requested_indexes.split(',')

        if options.get('list'):
            self.subcommand_list()
        elif options.get('initialize'):
            self.subcommand_initialize(requested_indexes, no_input)
        elif options.get('rebuild'):
            self.subcommand_rebuild(requested_indexes, no_input)
        elif options.get('cleanup'):
            self.subcommand_cleanup(requested_indexes, no_input)

    def subcommand_list(self):
        """Print every configured index and the document types it contains."""
        print("Available ES indexes:")
        for index_name, type_classes in get_indices().items():
            print(" - index '{0}':".format(index_name))
            for type_class in type_classes:
                print(" - type '{0}'".format(type_class.get_type_name()))

    def subcommand_initialize(self, indexes=None, no_input=False):
        """Create the given indexes (all when empty), prompting unless no_input."""
        user_input = 'y' if no_input else ''
        while user_input != 'y':
            user_input = raw_input('Are you sure you want to initialize {0} index(es)? [y/N]: '.format('the ' + ', '.join(indexes) if indexes else '**ALL**')).lower()
            if user_input == 'n':
                break
        if user_input == 'y':
            sys.stdout.write("Creating ES indexes: ")
            results, aliases = create_indices(indices=indexes)
            sys.stdout.write("complete.\n")
            for alias, index in aliases:
                print("'{0}' aliased to '{1}'".format(alias, index))

    def subcommand_cleanup(self, indexes=None, no_input=False):
        """Delete the given indexes (all when empty), prompting unless no_input."""
        user_input = 'y' if no_input else ''
        while user_input != 'y':
            user_input = raw_input('Are you sure you want to clean up (ie DELETE) {0} index(es)? [y/N]: '.format('the ' + ', '.join(indexes) if indexes else '**ALL**')).lower()
            if user_input == 'n':
                break
        if user_input == 'y':
            sys.stdout.write("Deleting ES indexes: ")
            indices = delete_indices(indices=indexes)
            sys.stdout.write("complete.\n")
            for index in indices:
                print("'{0}' index deleted".format(index))
            # BUG FIX: this summary used to sit in the 'else' branch below,
            # where 'indices' is undefined (NameError when the user declined).
            print("{0} removed.".format(len(indices)))
        else:
            print("You chose not to delete indices.")

    def subcommand_rebuild(self, indexes, no_input=False):
        """Drop, recreate and repopulate the given indexes, then swap aliases."""
        if getattr(settings, 'DEBUG', False):
            import warnings
            warnings.warn('Rebuilding with `settings.DEBUG = True` can result in out of memory crashes. See https://docs.djangoproject.com/en/stable/ref/settings/#debug', stacklevel=2)
            # make sure the user continues explicitly after seeing this warning
            no_input = False
        user_input = 'y' if no_input else ''
        while user_input != 'y':
            user_input = raw_input('Are you sure you want to rebuild {0} index(es)? [y/N]: '.format('the ' + ', '.join(indexes) if indexes else '**ALL**')).lower()
            if user_input in ['n', '']:
                break
        if user_input == 'y':
            sys.stdout.write("Rebuilding ES indexes: ")
            results, aliases = rebuild_indices(indices=indexes)
            sys.stdout.write("complete.\n")
            for alias, index in aliases:
                print("'{0}' rebuilt and aliased to '{1}'".format(alias, index))
        else:
            print("You chose not to rebuild indices.")
RCOS-Grading-Server/HWserver | migration/migrator/migrations/master/20190528164521_numeric_id.py | Python | bsd-3-clause | 4,956 | 0.003228 | """Migration for the Submitty master database."""
def up(config, database):
    """
    Run up migration.

    :param config: Object holding configuration details about Submitty
    :type config: migrator.config.Config
    :param database: Object for interacting with given database for environment
    :type database: migrator.db.Database
    """
    # Add the new (nullable) numeric-id column; IF NOT EXISTS keeps the
    # migration re-runnable.
    database.execute("ALTER TABLE IF EXISTS users ADD COLUMN IF NOT EXISTS user_numeric_id varchar;")
    # Redefine the master->course sync trigger so user_numeric_id is pushed
    # to every course database when a users row is updated.
    # NOTE: the identifiers 'course_row' and 'user_preferred_firstname' below
    # were reconstructed from garbled ' | ' artifacts in the source.
    database.execute("""CREATE OR REPLACE FUNCTION sync_user() RETURNS trigger AS
-- TRIGGER function to sync users data on INSERT or UPDATE of user_record in
-- table users.  NOTE: INSERT should not trigger this function as function
-- sync_courses_users will also sync users -- but only on INSERT.
$$
DECLARE
    course_row RECORD;
    db_conn VARCHAR;
    query_string TEXT;
BEGIN
    FOR course_row IN SELECT semester, course FROM courses_users WHERE user_id=NEW.user_id LOOP
        RAISE NOTICE 'Semester: %, Course: %', course_row.semester, course_row.course;
        db_conn := format('dbname=submitty_%s_%s', course_row.semester, course_row.course);
        query_string := 'UPDATE users SET user_numeric_id=' || quote_nullable(NEW.user_numeric_id) || ', user_firstname=' || quote_literal(NEW.user_firstname) || ', user_preferred_firstname=' || quote_nullable(NEW.user_preferred_firstname) || ', user_lastname=' || quote_literal(NEW.user_lastname) || ', user_preferred_lastname=' || quote_nullable(NEW.user_preferred_lastname) || ', user_email=' || quote_literal(NEW.user_email) || ', user_updated=' || quote_literal(NEW.user_updated) || ', instructor_updated=' || quote_literal(NEW.instructor_updated) || ' WHERE user_id=' || quote_literal(NEW.user_id);
        -- Need to make sure that query_string was set properly as dblink_exec will happily take a null and then do nothing
        IF query_string IS NULL THEN
            RAISE EXCEPTION 'query_string error in trigger function sync_user()';
        END IF;
        PERFORM dblink_exec(db_conn, query_string);
    END LOOP;
    -- All done.
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;""")
    # Redefine the enrollment sync trigger: full user INSERT sync (now
    # including user_numeric_id) plus registration-section UPDATE sync.
    database.execute("""CREATE OR REPLACE FUNCTION sync_courses_user() RETURNS TRIGGER AS
-- TRIGGER function to sync users data on INSERT or UPDATE of user_record in
-- table courses_user.
$$
DECLARE
    user_row record;
    db_conn varchar;
    query_string text;
BEGIN
    db_conn := format('dbname=submitty_%s_%s', NEW.semester, NEW.course);
    IF (TG_OP = 'INSERT') THEN
        -- FULL data sync on INSERT of a new user record.
        SELECT * INTO user_row FROM users WHERE user_id=NEW.user_id;
        query_string := 'INSERT INTO users (user_id, user_numeric_id, user_firstname, user_preferred_firstname, user_lastname, user_preferred_lastname, user_email, user_updated, instructor_updated, user_group, registration_section, manual_registration) ' ||
                        'VALUES (' || quote_literal(user_row.user_id) || ', ' || quote_nullable(user_row.user_numeric_id) || ', ' || quote_literal(user_row.user_firstname) || ', ' || quote_nullable(user_row.user_preferred_firstname) || ', ' || quote_literal(user_row.user_lastname) || ', ' ||
                        '' || quote_nullable(user_row.user_preferred_lastname) || ', ' || quote_literal(user_row.user_email) || ', ' || quote_literal(user_row.user_updated) || ', ' || quote_literal(user_row.instructor_updated) || ', ' ||
                        '' || NEW.user_group || ', ' || quote_nullable(NEW.registration_section) || ', ' || NEW.manual_registration || ')';
        IF query_string IS NULL THEN
            RAISE EXCEPTION 'query_string error in trigger function sync_courses_user() when doing INSERT';
        END IF;
        PERFORM dblink_exec(db_conn, query_string);
    ELSIF (TG_OP = 'UPDATE') THEN
        -- User update on registration_section
        -- CASE clause ensures user's rotating section is set NULL when
        -- registration is updated to NULL. (e.g. student has dropped)
        query_string = 'UPDATE users SET user_group=' || NEW.user_group || ', registration_section=' || quote_nullable(NEW.registration_section) || ', rotating_section=' || CASE WHEN NEW.registration_section IS NULL THEN 'null' ELSE 'rotating_section' END || ', manual_registration=' || NEW.manual_registration || ' WHERE user_id=' || QUOTE_LITERAL(NEW.user_id);
        IF query_string IS NULL THEN
            RAISE EXCEPTION 'query_string error in trigger function sync_courses_user() when doing UPDATE';
        END IF;
        PERFORM dblink_exec(db_conn, query_string);
    END IF;
    -- All done.
    RETURN NULL;
END;
$$ LANGUAGE plpgsql;""")
def down(config, database):
    """
    Run down migration (rollback).

    :param config: Object holding configuration details about Submitty
    :type config: migrator.config.Config
    :param database: Object for interacting with given database for environment
    :type database: migrator.db.Database
    """
    # Intentionally a no-op: the forward migration only adds a nullable
    # column and redefines trigger functions; no automated rollback of
    # those trigger bodies is provided here.
    pass
Gateswong/GatesBlog | blog/__init__.py | Python | mit | 724 | 0.001381 | from flask import Flask
from flask.ext.bootstrap import Bootstrap
from flask.ext.sqlalchemy | import SQLAlchemy
from flask.ext.login import LoginManager
from config import config
bootstrap = Bootstrap()
db = SQLAlchemy()
log | in_manager = LoginManager()
login_manager.login_view = "control_panel.login"
def create_app(config_name):
    """Application factory: build a Flask app configured for *config_name*."""
    application = Flask(__name__)
    application.config.from_object(config[config_name])
    # Bind the module-level (lazily created) extensions to this app instance.
    for extension in (bootstrap, db, login_manager):
        extension.init_app(application)
    # Blueprints are imported here, not at module scope, so the blueprint
    # modules can import this package without a circular import.
    from .blogs import blogs as blogs_blueprint
    application.register_blueprint(blogs_blueprint)
    from .control_panel import control_panel as control_panel_blueprint
    application.register_blueprint(control_panel_blueprint)
    return application
|
mskovacic/Projekti | raspberrypi/isprobavanje/pyqt5/Toolbar.py | Python | mit | 1,043 | 0.01534 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
ZetCode PyQt5 tutorial
This program creates a toolbar.
The toolbar has one action, which
terminates the application, if triggered.
author: Jan Bodnar
website: zetcode.com
las | t edited: January 2015
"""
import sys
from PyQt5.QtWidgets import QMainWindow, QAction, qApp, QApplication
from PyQt5.QtGui import QIcon
class Example(QMainWindow):
    """Main window demonstrating a toolbar with a single Exit action."""
    # NOTE: a stray ' |' artifact line between __init__ and initUI was
    # removed; it made the class body syntactically invalid.

    def __init__(self):
        super().__init__()
        self.initUI()

    def initUI(self):
        """Create the Exit action, attach it to a toolbar and show the window."""
        # Triggering the action (toolbar click or Ctrl+Q) quits the app.
        exitAction = QAction(QIcon('exit24.png'), 'Exit', self)
        exitAction.setShortcut('Ctrl+Q')
        exitAction.triggered.connect(qApp.quit)

        self.toolbar = self.addToolBar('Exit')
        self.toolbar.addAction(exitAction)

        self.setGeometry(300, 300, 300, 200)
        self.setWindowTitle('Toolbar')
        self.show()
self.show()
if __name__ == '__main__':
    # Standard Qt bootstrap: build the QApplication, create the main window
    # (shown by its initUI), then block in the event loop; exec_()'s exit
    # code is forwarded to the shell via sys.exit.
    app = QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())
|
miketwo/pylacuna | pylacuna/caching.py | Python | mit | 434 | 0.002304 | #!/usr/bin/env python
'''
Import this module to have access to a global redis cache named GLOBAL_CACHE.
USAGE:
from caching import GLOBAL_CACHE
GLOB | AL_C | ACHE.store('foo', 'bar')
GLOBAL_CACHE.get('foo')
>> bar
'''
from redis_cache import SimpleCache
try:
    # Probe for an existing instance so re-executions of this module body
    # (e.g. a reload) reuse the same cache object instead of creating a
    # fresh SimpleCache.
    GLOBAL_CACHE
except NameError:
    # First time through: create the shared cache (1000-entry limit,
    # 24-hour expiry, dedicated namespace).
    GLOBAL_CACHE = SimpleCache(limit=1000, expire=60*60*24, namespace="GLOBAL_CACHE")
else:
    # Already defined...
    pass
|
coxmediagroup/googleads-python-lib | examples/dfp/v201411/network_service/get_all_networks.py | Python | apache-2.0 | 1,545 | 0.009709 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all networks that y | ou have access to with the current login
credentials.
A networkCode should be left out for this request."""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):
  """Fetch every DFP network visible to the current credentials and print it."""
  # Initialize appropriate service.
  network_service = client.GetService('NetworkService', version='v201411')

  # Get all networks that you have access to with the current login credentials.
  networks = network_service.getAllNetworks()

  # Display results.
  for network in networks:
    message = ('Network with network code \'%s\' and display name \'%s\' was found.'
               % (network['networkCode'], network['displayName']))
    print(message)

  # Single-argument print() behaves identically under Python 2 and 3.
  print('\nNumber of results found: %s' % len(networks))
if __name__ == '__main__':
  # Initialize client object.
  # LoadFromStorage() reads credentials from the googleads storage file;
  # presumably ~/googleads.yaml -- confirm against the googleads docs.
  dfp_client = dfp.DfpClient.LoadFromStorage()
  main(dfp_client)
|
childe/esproxy | django_cas/middleware.py | Python | mit | 2,415 | 0.002484 | """ Django CAS 2.0 authentication middleware """
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.views import login, logout
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect
from django_cas.exceptions import CasTicketException
from django_cas.views import login as cas_login, logout as cas_logout
from urllib import urlencode
__all__ = ['CASMiddleware']
class CASMiddleware(object):
    """Middleware that allows CAS authentication on admin pages.

    NOTE: two lines below ('setting to insert' in the error string and the
    logout dispatch) were reconstructed from garbled ' | ' artifacts.
    """

    def process_request(self, request):
        """Check that the authentication middleware is installed."""
        error = ("The Django CAS middleware requires authentication "
                 "middleware to be installed. Edit your MIDDLEWARE_CLASSES "
                 "setting to insert 'django.contrib.auth.middleware."
                 "AuthenticationMiddleware'.")
        assert hasattr(request, 'user'), error

    def process_view(self, request, view_func, view_args, view_kwargs):
        """ Forwards unauthenticated requests to the admin page to the CAS
        login URL, as well as calls to django.contrib.auth.views.login and
        logout.
        """
        if view_func == login:
            return cas_login(request, *view_args, **view_kwargs)
        if view_func == logout:
            return cas_logout(request, *view_args, **view_kwargs)

        # The rest of this method amends the Django admin authorization which
        # will post a username/password dialog to authenticate to django admin.
        if not view_func.__module__.startswith('django.contrib.admin.'):
            return None

        if request.user.is_authenticated():
            if request.user.is_staff:
                return None
            else:
                raise PermissionDenied("No staff priviliges")
        # Not authenticated: bounce to the CAS login URL with a redirect back.
        params = urlencode({auth.REDIRECT_FIELD_NAME: request.get_full_path()})
        return HttpResponseRedirect(settings.LOGIN_URL + '?' + params)

    def process_exception(self, request, exception):
        """ When we get a CasTicketException it is probably caused by the ticket timing out.
        So logout and get the same page again."""
        if isinstance(exception, CasTicketException):
            auth.logout(request)
            return HttpResponseRedirect(request.path)
        else:
            return None
|
ArcherSys/ArcherSys | Lib/distutils/dep_util.py | Python | mit | 10,613 | 0.003392 | <<<<<<< HEAD
<<<<<<< HEAD
"""distutils.dep_util
Utility functions for simple, timestamp-based dependency of files
and groups of files; also, function based entirely on such
timestamp dependency analysis."""
import os
from distutils.errors import DistutilsFileError
def newer(source, target):
    """Return true if 'source' exists and is more recently modified than
    'target', or if 'source' exists and 'target' doesn't.  Return false
    if both exist and 'target' is at least as new as 'source'.
    Raise DistutilsFileError if 'source' does not exist.
    """
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    # A missing target is always out of date.
    if not os.path.exists(target):
        return 1
    from stat import ST_MTIME
    # Whole-second mtimes, matching historical distutils behaviour.
    return os.stat(source)[ST_MTIME] > os.stat(target)[ST_MTIME]
# newer ()
def newer_pairwise(sources, targets):
    """Walk two filename lists in parallel, testing if each source is newer
    than its corresponding target.  Return a pair of lists (sources,
    targets) where source is newer than target, according to the semantics
    of 'newer()'.
    """
    if len(sources) != len(targets):
        raise ValueError("'sources' and 'targets' must be same length")
    # Keep only the pairs whose source is out of date w.r.t. its target.
    stale_pairs = [pair for pair in zip(sources, targets)
                   if newer(pair[0], pair[1])]
    n_sources = [source for source, _ in stale_pairs]
    n_targets = [target for _, target in stale_pairs]
    return (n_sources, n_targets)
# newer_pairwise ()
def newer_group(sources, target, missing='error'):
    """Return true if 'target' is out-of-date with respect to any file
    listed in 'sources'; false if 'target' exists and is newer than every
    source.

    'missing' controls handling of absent source files:
      * 'error'  -- blow up with an OSError out of os.stat() (default)
      * 'ignore' -- silently drop missing sources from the comparison
      * 'newer'  -- treat the target as out-of-date (handy in "dry-run"
                    mode, where inputs may not exist yet)
    """
    # A target that does not exist yet is always out of date.
    if not os.path.exists(target):
        return 1
    from stat import ST_MTIME
    target_mtime = os.stat(target)[ST_MTIME]
    for source in sources:
        if not os.path.exists(source):
            if missing == 'ignore':
                continue            # drop from the dependency list
            if missing == 'newer':
                return 1            # missing source => out of date
            # missing == 'error': fall through and let os.stat() raise.
        if os.stat(source)[ST_MTIME] > target_mtime:
            return 1
    return 0
# newer_group ()
=======
"""distutils.dep_util
Utility functions for simple, timestamp-based dependency of files
and groups of files; also, function based entirely on such
timestamp dependency analysis."""
import os
from distutils.errors import DistutilsFileError
# NOTE(review): duplicate of newer() above -- this whole section is a second
# copy left behind by an unresolved merge conflict (see the conflict markers).
def newer (source, target):
    """Return true if 'source' exists and is more recently modified than
    'target', or if 'source' exists and 'target' doesn't. Return false if
    both exist and 'target' is the same age or younger than 'source'.
    Raise DistutilsFileError if 'source' does not exist.
    """
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    if not os.path.exists(target):
        return 1
    from stat import ST_MTIME
    mtime1 = os.stat(source)[ST_MTIME]
    mtime2 = os.stat(target)[ST_MTIME]
    return mtime1 > mtime2
# newer ()
# NOTE(review): duplicate of newer_pairwise() above (unresolved merge conflict).
def newer_pairwise (sources, targets):
    """Walk two filename lists in parallel, testing if each source is newer
    than its corresponding target.  Return a pair of lists (sources,
    targets) where source is newer than target, according to the semantics
    of 'newer()'.
    """
    if len(sources) != len(targets):
        raise ValueError("'sources' and 'targets' must be same length")
    # build a pair of lists (sources, targets) where source is newer
    n_sources = []
    n_targets = []
    for i in range(len(sources)):
        if newer(sources[i], targets[i]):
            n_sources.append(sources[i])
            n_targets.append(targets[i])
    return (n_sources, n_targets)
# newer_pairwise ()
# NOTE(review): duplicate of newer_group() above (unresolved merge conflict).
# Garbled ' | ' artifacts in the docstring and one comment were repaired.
def newer_group (sources, target, missing='error'):
    """Return true if 'target' is out-of-date with respect to any file
    listed in 'sources'.  In other words, if 'target' exists and is newer
    than every file in 'sources', return false; otherwise return true.
    'missing' controls what we do when a source file is missing; the
    default ("error") is to blow up with an OSError from inside 'stat()';
    if it is "ignore", we silently drop any missing source files; if it is
    "newer", any missing source files make us assume that 'target' is
    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
    carry out commands that wouldn't work because inputs are missing, but
    that doesn't matter because you're not actually going to run the
    commands).
    """
    # If the target doesn't even exist, then it's definitely out-of-date.
    if not os.path.exists(target):
        return 1
    # Otherwise we have to find out the hard way: if *any* source file
    # is more recent than 'target', then 'target' is out-of-date and
    # we can immediately return true.  If we fall through to the end
    # of the loop, then 'target' is up-to-date and we return false.
    from stat import ST_MTIME
    target_mtime = os.stat(target)[ST_MTIME]
    for source in sources:
        if not os.path.exists(source):
            if missing == 'error':      # blow up when we stat() the file
                pass
            elif missing == 'ignore':   # missing source dropped from
                continue                # target's dependency list
            elif missing == 'newer':    # missing source means target is
                return 1                # out-of-date
        source_mtime = os.stat(source)[ST_MTIME]
        if source_mtime > target_mtime:
            return 1
    else:
        return 0
# newer_group ()
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
=======
"""distutils.dep_util
Utility functions for simple, timestamp-based dependency of files
and groups of files; also, function based entirely on such
timestamp dependency analysis."""
import os
from distutils.errors import DistutilsFileError
# NOTE(review): third copy of newer() in this file -- another remnant of the
# unresolved merge conflict; the duplicates should be collapsed to one.
def newer (source, target):
    """Return true if 'source' exists and is more recently modified than
    'target', or if 'source' exists and 'target' doesn't. Return false if
    both exist and 'target' is the same age or younger than 'source'.
    Raise DistutilsFileError if 'source' does not exist.
    """
    if not os.path.exists(source):
        raise DistutilsFileError("file '%s' does not exist" %
                                 os.path.abspath(source))
    if not os.path.exists(target):
        return 1
    from stat import ST_MTIME
    mtime1 = os.stat(source)[ST_MTIME]
    mtime2 = os.stat(target)[ST_MTIME]
    return mtime1 > mtime2
# newer ()
def newer_pairwise (sources, targets):
"""Walk two filename lists in parallel, testing if each source is newer
than its corresponding target. Return a pair of lists (sources,
targets |
lmazuel/azure-sdk-for-python | azure-servicebus/azure/servicebus/_common_serialization.py | Python | mit | 18,799 | 0.002766 | #-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
import ast
import base64
import sys
import types
import warnings
if sys.version_info < (3,):
from cStringIO import StringIO
from urllib2 import quote as url_quote
from urllib2 import unquote as url_unquote
else:
from io import StringIO
from urllib.parse import quote as url_quote
from urllib.parse import unquote as url_unquote
from datetime import datetime
from xml.sax.saxutils import escape as xml_escape
try:
from xml.etree import cElementTree as ETree
except ImportError:
from xml.etree import ElementTree as ETree
from ._common_conversion import (
_str,
)
from ._common_error import (
_ERROR_VALUE_SHOULD_BE_BYTES,
_WARNING_VALUE_SHOULD_BE_BYTES,
)
from ._common_models import (
Feed,
HeaderDict,
WindowsAzureData,
_Base64String,
_dict_of,
_list_of,
_scalar_list_of,
_unicode_type,
_xml_attribute,
)
_etree_entity_feed_namespaces = {
'atom': 'http://www.w3.org/2005/Atom',
'm': 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata',
'd': 'http://schemas.microsoft.com/ado/2007/08/dataservices',
}
def _make_etree_ns_attr_name(ns, name):
return '{' + ns + '}' + name
def _get_etree_tag_name_without_ns(tag):
val = tag.partition('}')[2]
return val
def _get_etree_text(element):
text = element.text
return text if text is not None else ''
def _get_readable_id(id_name, id_prefix_to_skip):
"""simplified an id to be more friendly for us people"""
# id_name is in the form 'https://namespace.host.suffix/name'
# where name may contain a forward slash!
pos = id_name.find('//')
if pos != -1:
pos += 2
if id_prefix_to_skip:
pos = id_name.find(id_prefix_to_skip, pos)
if pos != -1:
pos += len(id_prefix_to_skip)
pos = id_name.find('/', pos)
if pos != -1:
return id_name[pos + 1:]
return id_name
def _to_datetime(strtime):
return datetime.strptime(strtime, "%Y-%m-%dT%H:%M:%S.%f")
_KNOWN_SERIALIZATION_XFORMS = {
'last_modified': 'Last-Modified',
'cache_control': 'Cache-Control',
}
def _get_serialization_name(element_name):
"""converts a Python name into a serializable name"""
known = _KNOWN_SERIALIZATION_XFORMS.get(element_name)
if known is not None:
return known
if element_name.startswith('x_ms_'):
return element_name.replace('_', '-')
if element_name.endswith('_id'):
element_name = element_name.replace('_id', 'ID')
for name in ['content_', 'last_modified', 'if_', 'cache_control']:
if element_name.startswit | h(name):
element_name = element_name.replace('_', '-_')
return ''.join(name.capitalize() for name in element_ | name.split('_'))
def _convert_class_to_xml(source, xml_prefix=True):
    """Recursively serialize a WindowsAzureData object (or a list of them)
    into an XML string.

    source: None, a list, or a WindowsAzureData instance; anything else
        yields only the optional XML declaration.
    xml_prefix: prepend the '<?xml ...?>' declaration (top-level calls
        only; recursive calls pass False).
    """
    if source is None:
        return ''
    xmlstr = ''
    if xml_prefix:
        xmlstr = '<?xml version="1.0" encoding="utf-8"?>'
    if isinstance(source, list):
        # A bare list serializes to the concatenation of its items with no
        # wrapping element.
        for value in source:
            xmlstr += _convert_class_to_xml(value, False)
    elif isinstance(source, WindowsAzureData):
        # The element name is the Python class name; child order follows
        # vars(), so it is not guaranteed stable across interpreters.
        class_name = source.__class__.__name__
        xmlstr += '<' + class_name + '>'
        for name, value in vars(source).items():
            if value is not None:
                if isinstance(value, list) or \
                    isinstance(value, WindowsAzureData):
                    xmlstr += _convert_class_to_xml(value, False)
                else:
                    # Leaf value: mapped tag name, XML-escaped text.
                    xmlstr += ('<' + _get_serialization_name(name) + '>' +
                               xml_escape(str(value)) + '</' +
                               _get_serialization_name(name) + '>')
        xmlstr += '</' + class_name + '>'
    return xmlstr
def _set_continuation_from_response_headers(feeds, response):
    """Collect 'x-ms-continuation-*' response headers onto *feeds*.

    When any such header is present its suffix (the text after
    'x-ms-continuation-') is used as the key in a HeaderDict stored as
    feeds.x_ms_continuation; otherwise *feeds* is left untouched.
    """
    marker = 'x-ms-continuation'
    continuation = HeaderDict()
    for header_name, header_value in response.headers:
        if marker in header_name:
            suffix = header_name[len(marker) + 1:]
            continuation[suffix] = header_value
    if continuation:
        feeds.x_ms_continuation = continuation
def _get_request_body(request_body):
'''Converts an object into a request body. If it's None
we'll return an empty string, if it's one of our objects it'll
convert it to XML and return it. Otherwise we just use the object
directly'''
if request_body is None:
return b''
if isinstance(request_body, WindowsAzureData):
request_body = _convert_class_to_xml(request_body)
if isinstance(request_body, bytes):
return request_body
if isinstance(request_body, _unicode_type):
return request_body.encode('utf-8')
request_body = str(request_body)
if isinstance(request_body, _unicode_type):
return request_body.encode('utf-8')
return request_body
class _ETreeXmlToObject(object):
    @staticmethod
    def parse_response(response, return_type):
        '''
        Parse the HTTPResponse's body and fill all the data into a class of
        return_type.

        Returns None when the document root tag does not match the expected
        name (return_type._xml_name when defined, else the class name).
        '''
        root = ETree.fromstring(response.body)
        xml_name = getattr(return_type, '_xml_name', return_type.__name__)
        if root.tag == xml_name:
            return _ETreeXmlToObject._parse_response_body_from_xml_node(root, return_type)
        return None
    @staticmethod
    def parse_enum_results_list(response, return_type, resp_type, item_type):
        """resp_body is the XML we received
        resp_type is a string, such as Containers,
        return_type is the type we're constructing, such as ContainerEnumResults
        item_type is the type object of the item to be created, such as Container

        This function then returns a ContainerEnumResults object with the
        containers member populated with the results.
        """
        # parsing something like:
        # <EnumerationResults ... >
        #   <Queues>
        #       <Queue>
        #           <Something />
        #           <SomethingElse />
        #       </Queue>
        #   </Queues>
        # </EnumerationResults>
        return_obj = return_type()
        root = ETree.fromstring(response.body)
        items = []
        # resp_type[:-1] strips the plural 's': <Queues> contains <Queue>.
        for container_element in root.findall(resp_type):
            for item_element in container_element.findall(resp_type[:-1]):
                items.append(_ETreeXmlToObject.fill_instance_element(item_element, item_type))
        # Fill every scalar member of the result object from the root
        # element; the items list itself is assigned afterwards.
        for name, value in vars(return_obj).items():
            # queues, Queues, this is the list its self which we populated
            # above
            if name == resp_type.lower():
                # the list its self.
                continue
            value = _ETreeXmlToObject.fill_data_member(root, name, value)
            if value is not None:
                setattr(return_obj, name, value)
        setattr(return_obj, resp_type.lower(), items)
        return return_obj
@staticmethod
def parse_simple_list(response, return_type, item_type, list_name):
respbody = response.body
res = return_type()
res_items = []
root = ETree.fromstring(respbody)
type_name = type.__name__
item_name = item_type.__name__
for item in root.findall(item_name):
res_items.append(_ETreeXmlToObject.fill_instance_element(item, item_type))
setattr(res, list_name, res_items)
return res
@staticmethod
def convert_ |
TaskEvolution/Task-Coach-Evolution | taskcoach/taskcoachlib/domain/attachment/__init__.py | Python | gpl-3.0 | 878 | 0 | '''
Task Coach - Your friendly task manager
Copyright (C) 2004-2013 Task Coach developers <developers@taskcoach.org>
Task Coach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Task Coach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from attachment import *
from attachmentowner import AttachmentOwner
from attachmentlist import AttachmentList
from sorter import AttachmentSorter
|
Hazelwire/Hazelwire | src/Mainframe/ClientHandler/P2PSanityCheck.py | Python | gpl-3.0 | 3,894 | 0.00642 | # Copyright (c) 2011 The Hazelwire Team.
#
# This file is part of Hazelwire.
#
# Hazelwire is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Hazelwire is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Hazelwire. If not, see <http://www.gnu.org/licenses/>.
"""
The main class for the Peer-to-Peer sanity check.
Works by sending every VM a request to scan a given IP address on the given ports.
When all threads are finished return the list of results.
"""
import socket, threading, logging
class PeerToPeerSanityChecker:
    """Asks every VM to port-scan a target VM and collects the results.

    One worker thread is started per VM; results are accumulated in
    self.allresults under a lock.
    """

    def __init__(self, targetIP, clients, ports):
        """
        Initialises the PeerToPeerSanityChecker.

        @type targetIP: string
        @param targetIP: the IP of the target VM.
        @type clients: list
        @param clients: the list of VMs to send a Peer-to-Peer sanity check request to.
        @type ports: list
        @param ports: the list of ports to scan.
        """
        self.threads = []
        self.allresults = []
        self.VMs = clients
        self.ports = ports
        self.targetIP = targetIP
        # Serializes appends to allresults from the worker threads.
        self.writeLock = threading.Lock()

    def sendRequest(self, IP):
        """
        Send a Peer-to-Peer sanity check request to the given IP.  Receive
        the results and append them to the list of all results.

        @type IP: string
        @param IP: the IP to send the request to.
        """
        logging.info("[P2PCHECK] Asking " + IP + " to do a P2PCheck on " + self.targetIP)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(5)
        try:
            sock.connect((IP, 9998))
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are not swallowed; an unreachable client counts as a failure.
            logging.info("[P2PCHECK] Client " + IP + " is not running P2PRequestListener!")
            self.writeResults([{'port': '', 'fine': "False"}], IP)
            return
        msg = 'CHECK ' + str(self.targetIP) + '\n'
        msg += "PORT " + str(self.ports) + '\n'
        msg += "ENDPORTS\n"
        # No timeout while waiting for the scan to complete.
        sock.settimeout(None)
        # NOTE(review): Python 2 era — send() takes a str here; a Python 3
        # port needs sock.sendall(msg.encode()) and bytes handling below.
        sock.send(msg)
        results = []
        data = sock.recv(1024).strip()
        lines = data.split('\n')
        for line in lines:
            # Protocol reply lines look like: "RESULT <port> <True|False>".
            if line.startswith("RESULT"):
                results.append({'port': line.split(' ')[1], 'fine': line.split(' ')[2]})
        self.writeResults(results, IP)
        sock.close()

    def writeResults(self, results, IP):
        """
        Helper function to safely append the given results reported by the
        given IP to the list of all results.

        @type results: list
        @param results: the list of results; a result consists of a port
            number and a boolean indicating if the check was successful.
        @type IP: string
        @param IP: the IP of the VM that did the check.
        """
        # 'with' keeps the lock exception-safe (original used
        # acquire()/release(), which would deadlock on an exception).
        with self.writeLock:
            self.allresults.append({'results': results, 'IP': IP})

    def getResults(self):
        """
        Waits for all the checks to finish and returns the list of all the results.

        @rtype: list
        @return: the list of all results
        """
        for thread in self.threads:
            thread.join()
        return self.allresults

    def checkIP(self):
        """
        Check the IP given at initialization.
        Creates a thread for every VM to speed up the check.
        """
        for VM in self.VMs:
            self.threads.append(threading.Thread(target=self.sendRequest, args=[VM]))
            self.threads[-1].start()
|
qnub/omsk-weather | setup.py | Python | lgpl-3.0 | 5,263 | 0.00399 | #!/usr/bin/env python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2012-2013 qnub <qnub.ru@gmail.com>
# This file is distributed under the license LGPL version 3 or later
### END LICENSE
###################### DO NOT TOUCH THIS (HEAD TO THE SECOND PART) ######################
import os
import sys
try:
import DistUtilsExtra.auto
except ImportError:
print >> sys.stderr, 'To build omweather you need https://launchpad.net/python-distutils-extra'
sys.exit(1)
assert DistUtilsExtra.auto.__version__ >= '2.18', 'needs DistUtilsExtra.auto >= 2.18'
def update_config(libdir, values = {}):
    """Rewrite omweatherconfig.py in place, substituting the variables
    named in *values*; returns the previous values of those variables.

    Python 2 only ('file' builtin and 'except ..., e' syntax).
    """
    # NOTE(review): mutable default argument, but it is only read here, so
    # it is safe in practice.
    filename = os.path.join(libdir, 'omweather_lib/omweatherconfig.py')
    oldvalues = {}
    try:
        fin = file(filename, 'r')
        fout = file(filename + '.new', 'w')
        for line in fin:
            fields = line.split(' = ') # Separate variable from value
            if fields[0] in values:
                oldvalues[fields[0]] = fields[1].strip()
                line = "%s = %s\n" % (fields[0], values[fields[0]])
            fout.write(line)
        fout.flush()
        fout.close()
        fin.close()
        # Replace the original with the rewritten '.new' copy.
        os.rename(fout.name, fin.name)
    except (OSError, IOError), e:
        print ("ERROR: Can't find %s" % filename)
        sys.exit(1)
    return oldvalues
def move_desktop_file(root, target_data, prefix):
    """Relocate the installed .desktop file from install_data to prefix.

    The desktop file is rightly installed into install_data, but it must
    live under *prefix* so the system menu can find it.  For /opt installs
    (target_data != prefix + '/') it is also renamed to
    'extras-omweather.desktop'.

    Returns the final desktop-file path; exits the process when the
    expected file is missing or cannot be moved.
    """
    # Fix: stray ' | ' split markers had corrupted the
    # '/share/applications' literal and the 'target_data' identifier.
    old_desktop_path = os.path.normpath(root + target_data +
                                        '/share/applications')
    old_desktop_file = old_desktop_path + '/omweather.desktop'
    desktop_path = os.path.normpath(root + prefix + '/share/applications')
    desktop_file = desktop_path + '/omweather.desktop'
    if not os.path.exists(old_desktop_file):
        print ("ERROR: Can't find", old_desktop_file)
        sys.exit(1)
    elif target_data != prefix + '/':
        # This is an /opt install, so rename desktop file to use extras-
        desktop_file = desktop_path + '/extras-omweather.desktop'
        try:
            os.makedirs(desktop_path)
            os.rename(old_desktop_file, desktop_file)
            os.rmdir(old_desktop_path)
        except OSError as e:
            print ("ERROR: Can't rename", old_desktop_file, ":", e)
            sys.exit(1)
    return desktop_file
def update_desktop_file(filename, target_pkgdata, target_scripts):
    """Point the installed .desktop file's Icon= and Exec= lines at the
    actual install locations, preserving any Exec arguments.

    Python 2 only ('file' builtin and 'except ..., e' syntax).
    """
    try:
        fin = file(filename, 'r')
        fout = file(filename + '.new', 'w')
        for line in fin:
            if 'Icon=' in line:
                line = "Icon=%s\n" % (target_pkgdata + 'media/omweather.svg')
            elif 'Exec=' in line:
                # Keep everything after the executable path.
                cmd = line.split("=")[1].split(None, 1)
                line = "Exec=%s" % (target_scripts + 'omweather')
                if len(cmd) > 1:
                    line += " %s" % cmd[1].strip() # Add script arguments back
                line += "\n"
            fout.write(line)
        fout.flush()
        fout.close()
        fin.close()
        # Replace the original with the rewritten '.new' copy.
        os.rename(fout.name, fin.name)
    except (OSError, IOError), e:
        print ("ERROR: Can't find %s" % filename)
        sys.exit(1)
def compile_schemas(root, target_data):
    """Compile GSettings schemas for non-/usr installs.

    /usr installs are skipped (dpkg handles schema compilation there);
    otherwise, when both the schema directory and the
    glib-compile-schemas tool exist, the schemas are compiled in place.
    """
    if target_data == '/usr/':
        return
    compiler = '/usr/bin/glib-compile-schemas'
    schemadir = os.path.normpath(root + target_data + 'share/glib-2.0/schemas')
    if not os.path.isdir(schemadir):
        return
    if not os.path.isfile(compiler):
        return
    os.system('%s "%s"' % (compiler, schemadir))
class InstallAndUpdateDataDirectory(DistUtilsExtra.auto.install_auto):
    """Custom install command: after the stock install, rewrite the
    installed config and desktop files so runtime paths match the actual
    install locations (needed for /opt installs)."""
    def run(self):
        DistUtilsExtra.auto.install_auto.run(self)
        # Paths relative to the fakeroot, with leading/trailing slashes.
        target_data = '/' + os.path.relpath(self.install_data, self.root) + '/'
        target_pkgdata = target_data + 'share/omweather/'
        target_scripts = '/' + os.path.relpath(self.install_scripts, self.root) + '/'
        values = {'__omweather_data_directory__': "'%s'" % (target_pkgdata),
                  '__version__': "'%s'" % self.distribution.get_version()}
        update_config(self.install_lib, values)
        desktop_file = move_desktop_file(self.root, target_data, self.prefix)
        update_desktop_file(desktop_file, target_pkgdata, target_scripts)
        compile_schemas(self.root, target_data)
##################################################################################
###################### YOU SHOULD MODIFY ONLY WHAT IS BELOW ######################
##################################################################################
# DistUtilsExtra.auto discovers data files, icons and translations
# automatically; the custom install command rewrites paths afterwards.
DistUtilsExtra.auto.setup(
    name='omweather',
    version='0.5-public1',
    license='LGPL-3',
    author='qnub',
    author_email='qnub.ru@gmail.com',
    description='Tray widget with air temperature in Omsk, Russia',
    long_description='Show current air temperature in Russia, Omsk.',
    url='http://qnub.github.com/omsk-weather/',
    cmdclass={'install': InstallAndUpdateDataDirectory}
    )
|
GeoLabs/QgsWPSClient | streaming.py | Python | gpl-2.0 | 22,331 | 0.015674 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Client for streaming based WPS.
It exploits asynchronous capabilities of WPS and QGIS for visualizing
intermediate results from a WPS
-------------------
copyright : (C) 2012 by Germán Carrillo (GeoTux)
email : geotux_tuxman@linuxmail.org
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import QColor, QMessageBox
from PyQt4.QtNetwork import QNetworkRequest, QNetworkAccessManager
from qgis.core import (QgsNetworkAccessManager, QgsVectorLayer, QgsRasterLayer,
QgsMapLayerRegistry, QgsFeature, QgsGeometry)
from qgis.gui import QgsRubberBand, QgsVertexMarker
from wpslib.processdescription import getFileExtension,isMimeTypeVector,isMimeTypeRaster
from wpslib.executionresult import decodeBase64
from functools import partial
import apicompat
import tempfile
import os, platform
import glob
class Streaming(QObject):
""" Class for keeping track of stream chunks and
providing methods for handling and visualizing them
"""
# Define SIGNALS/SLOTS
playlistHandled = pyqtSignal(dict)
urlReady = pyqtSignal(str, int, str)
dataReady = pyqtSignal(str, int)
def __init__(self, parent, iface, chunks, playlistUrl, mimeType, encoding):
super(Streaming, self).__init__()
self.DEBUG = True
# Variables from other classes
self.parent = parent # For GUI access
self.iface = iface
self.chunks = chunks
self.playlistUrl = playlistUrl
self.mimeType = mimeType
self.encoding = encoding
# Internal variables
self.__endTag = "#PLAYLIST-END"
self.__exceptionTag = "#EXCEPTION"
self.__exceptionUrl = ""
self.__exceptionFound = False
self.__playlistFinished = False # Did the end tag appeared?
self.__bytesInlastReply = 0 # To compare last and current reply sizes
self.__loadedChunks = 0 # For keeping track of # of loaded (to local vars) chunks
self.__deliveredChunks = 0 # For keeping track of # of loaded (to the map) chunks
self.__bFirstChunk = True
self.__features = {} # {0:[f0,f1,f2], 1:[f0,f1]}
self.__bGeomMulti = False # Is the geometry multi{point|line|polygon}
self.__geometryType = "" # Values: "Point","LineString","Polygon","Unknown", "NoGeometry"
self.__tmpGeometry = {} # For visualization purposes {chunkId1: rb1, chunkId2: rb2 }
self.__memoryLayer = None # The whole merged data
# For rasters only
self.__legend = self.iface.legendInterface()
self.__groupIndex = 0
self.__chunksDir = None
self.__virtualFile = "" # Virtual raster file path
if isMimeT | ypeRaster(self.mimeType, True) != None:
self.__chunksDir = tempfile.mkdtemp(prefix="tmpChunks")
# | Other objects
self.timer = QTimer()
self.timer.setInterval(1 * 1000) # 1 second
self.QNAM4Playlist = QNetworkAccessManager()
self.QNAM4Chunks = QNetworkAccessManager()
self.QNAM4Exception = QNetworkAccessManager()
# SIGNAL/SLOT connections
self.playlistHandled.connect(self.fetchChunks)
self.urlReady.connect(self.fetchResult)
self.dataReady.connect(self.loadData)
self.timer.timeout.connect(partial(self.fetchPlaylist, self.playlistUrl))
self.QNAM4Playlist.finished.connect(self.handlePlaylist)
self.QNAM4Chunks.finished.connect(self.handleChunk)
self.QNAM4Exception.finished.connect(self.handleException)
#self.QNAM4Playlist = QgsNetworkAccessManager.instance()
#theReply2.error.connect(self.handleErrors)
# GUI
self.parent.progressBar.setRange(0,0)
self.parent.lblProcess.setText("Reading output playlist...")
def start(self):
""" Start fetching """
self.fetchPlaylist(self.playlistUrl) # First call
def stop(self):
""" Stop fetching """
self.timer.stop()
self.QNAM4Playlist.finished.disconnect(self.handlePlaylist)
self.QNAM4Chunks.finished.disconnect(self.handleChunk)
self.removeTempGeometry(self.__geometryType)
if self.DEBUG: print "Stop streaming!"
def validateCompletedStream(self):
""" Is the stream complete (Did the end tag appeared?) """
#return (self.__loadedChunks >= self.chunks and self.chunks != 0)
return self.__playlistFinished
def allChunksDelivered(self):
""" Are all chunks already loaded into the map? """
return ((self.__loadedChunks == self.__deliveredChunks and
self.__playlistFinished) or self.__exceptionFound)
def fetchPlaylist(self, playlistLink):
url = QUrl(playlistLink)
self.QNAM4Playlist.get(QNetworkRequest(url)) # SLOT: handlePlaylist
def handlePlaylist(self, reply):
""" Parse the chunk URLs and update the loadedChunks counter """
# Check if there is redirection
reDir = reply.attribute(QNetworkRequest.RedirectionTargetAttribute).toUrl()
if not reDir.isEmpty():
self.fetchPlaylist(reDir.toString())
return
# Parse URLs only if there is new data in the reply
if reply.bytesAvailable() > self.__bytesInlastReply:
if self.DEBUG: print " Parsing the playlist..."
startFrom = reply.bytesAvailable() - self.__bytesInlastReply # Delta in bytes
self.__bytesInlastReply = reply.bytesAvailable()
newURLs = self.parseURLs(reply, startFrom)
else:
if self.DEBUG: print " No new data in the playlist..."
newURLs = {}
# Store new URLs
if len(newURLs) > 0:
self.__loadedChunks += len(newURLs)
if self.chunks:
self.parent.progressBar.setRange(0,self.chunks)
if self.DEBUG: print str(self.__loadedChunks) + " chunks loaded" + ((" out of " + str(self.chunks)) if self.chunks else "")
# If not complete, make additional calls
if not self.validateCompletedStream():
if not self.timer.isActive():
self.timer.start()
if self.DEBUG: print "Timer started..."
else:
self.timer.stop()
self.QNAM4Playlist.finished.disconnect(self.handlePlaylist)
if self.DEBUG: print "Playlist finished!"
if self.allChunksDelivered():
self.finishLoading()
if self.__exceptionFound:
self.fetchException()
if len(newURLs) > 0:
self.playlistHandled.emit(newURLs) # SLOT: fetchChunks
def parseURLs(self, reply, startFrom):
""" Get a dict of new IDs:URLs from the current playlist (newURLs) """
newURLs = {} # {0:URL0, 1:URL1, ...}
count = 0
#Get the delta and start reading it
allData = reply.readAll()
allData = allData.right(startFrom) # Get rid of old data
response = QTextStream(allData, QIODevice.ReadOnly)
data = response.readLine()
# Parse
while (data):
|
pambot/SMSBeds | lib/jinja/tests.py | Python | gpl-2.0 | 3,403 | 0.000588 | # -*- coding: utf-8 -*-
"""
jinja.tests
~~~~~~~~~~~
Jinja test functions. Used with the "is" operator.
:copyright: 2007 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import re
number_re = re.compile(r'^-?\d+(\.\d+)?$')
regex_type = type(number_re)
def test_odd():
    """
    Return true if the variable is odd.
    """
    def check(environment, context, value):
        return value % 2 == 1
    return check
def test_even():
    """
    Return true if the variable is even.
    """
    def check(environment, context, value):
        return value % 2 == 0
    return check
def test_defined():
    """
    Return true if the variable is defined:

    .. sourcecode:: jinja

        {% if variable is defined %}
            value of variable: {{ variable }}
        {% else %}
            variable is not defined
        {% endif %}

    See also the ``default`` filter.
    """
    def check(env, context, value):
        # Defined means "not the environment's undefined singleton".
        return value is not env.undefined_singleton
    return check
def test_lower():
    """
    Return true if the variable is lowercase.
    """
    # basestring covers both str and unicode on Python 2; non-strings fail.
    return lambda e, c, v: isinstance(v, basestring) and v.islower()
def test_upper():
    """
    Return true if the variable is uppercase.
    """
    # basestring covers both str and unicode on Python 2; non-strings fail.
    return lambda e, c, v: isinstance(v, basestring) and v.isupper()
def test_numeric():
    """
    Return true if the variable is numeric.
    """
    # Accepts real numbers (int/long/float; Python 2 'long') as well as
    # strings that look like signed decimals per number_re.
    return lambda e, c, v: isinstance(v, (int, long, float)) or (
                           isinstance(v, basestring) and
                           number_re.match(v) is not None)
def test_sequence():
    """
    Return true if the variable is a sequence. Sequences are variables
    that are iterable.
    """
    def wrapped(environment, context, value):
        # Fix: a stray ' | ' split marker had corrupted the
        # '__getitem__' probe line below.
        # A sequence here must be sized and indexable.
        try:
            len(value)
            value.__getitem__
        except Exception:
            # Narrowed from a bare 'except:' so system-exiting exceptions
            # (KeyboardInterrupt, SystemExit) still propagate.
            return False
        return True
    return wrapped
def test_matching(regex):
    r"""
    Test if the variable matches the regular expression given. Note that
    you have to escape special chars using *two* backslashes, these are
    *not* raw strings.

    .. sourcecode:: jinja

        {% if var is matching @/^\d+$/ %}
            var looks like a number
        {% else %}
            var doesn't really look like a number
        {% endif %}
    """
    def wrapped(environment, context, value):
        # Pre-compiled patterns are used as-is; strings are compiled here
        # (unicode patterns with re.U — Python 2 era code).
        if type(regex) is regex_type:
            regex_ = regex
        else:
            if environment.disable_regexps:
                raise RuntimeError('regular expressions disabled.')
            if isinstance(regex, unicode):
                regex_ = re.compile(regex, re.U)
            elif isinstance(regex, str):
                regex_ = re.compile(regex)
            else:
                # Not a pattern at all: never matches.
                return False
        return regex_.search(value) is not None
    return wrapped
def test_sameas(other):
"""
Check if an object points to the same memory address than another
object:
.. sourcecode:: jinja
{% if foo.attribute is sameas(false) %}
the foo attribute really is the `False` singleton
{% endif %}
*New in Jinja 1.2*
"""
return lambda e, c, v: v is other
# Registry consumed by the template engine: maps the name used after the
# 'is' operator to the factory that builds the actual test callable.
TESTS = {
    'odd': test_odd,
    'even': test_even,
    'defined': test_defined,
    'lower': test_lower,
    'upper': test_upper,
    'numeric': test_numeric,
    'sequence': test_sequence,
    'matching': test_matching,
    'sameas': test_sameas
}
|
ObsidianBlk/GemRB--Unofficial- | gemrb/GUIScripts/DualClass.py | Python | gpl-2.0 | 19,206 | 0.036551 | # -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003-2004 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import GemRB
from GUIDefines import *
from ie_stats import *
from ie_restype import RES_2DA
import GUICommon
import Spellbook
import CommonTables
import LUSpellSelection
import LUProfsSelection
import LUSkillsSelection
#######################
# Module-level state shared between the dual-class windows and callbacks.
pc = 0                 # party index of the PC being dual-classed
OldClassName = 0       # row name of the current class in the classes table
OldKitName = 0         # current kit name (falls back to OldClassName)
ClassName = 0          # row name of the class being dualled into
NewMageSpells = 0      # nonzero when mage spells must be picked
NewPriestMask = 0      # nonzero when priest spells must be learned
NewClassId = 0         # numeric id of the new class
DualClassTable = 0     # 'dualclas' 2DA table of legal dual targets
#######################
# Main window and its controls; DCMainStep tracks the wizard stage.
DCMainWindow = 0
DCMainClassButton = 0
DCMainSkillsButton = 0
DCMainDoneButton = 0
DCMainStep = 0
#######################
# Class-selection sub-window state.
DCClassWindow = 0
DCClassDoneButton = 0
DCClass = 0
DCClasses = []
#######################
# Proficiency-selection sub-window state.
DCProfsWindow = 0
DCProfsDoneButton = 0
#######################
# Skill-selection sub-window state.
DCSkillsWindow = 0
DCSkillsDoneButton = 0
#######################
def DualClassWindow ():
	"""Opens the dual class selection window.

	Builds the main wizard window (class/skills/done/cancel/back buttons,
	portrait, warning text, current class title) and loads the 'dualclas'
	table of legal dual-class targets.  Selection state is kept in the
	module globals above.
	"""
	global pc, OldClassName, NewMageSpells, NewPriestMask, NewClassId, OldKitName, DualClassTable
	global DCMainWindow, DCMainClassButton, DCMainDoneButton, DCMainSkillsButton, DCMainStep
	# get our basic globals
	pc = GemRB.GameGetSelectedPCSingle ()
	DCMainStep = 1
	# make sure to nullify old values
	NewPriestMask = 0
	NewMageSpells = 0
	NewClassId = 0
	# set up our main window
	DCMainWindow = GemRB.LoadWindow (5)
	# done button (off)
	DCMainDoneButton = DCMainWindow.GetControl (2)
	DCMainDoneButton.SetText (11973)
	DCMainDoneButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, DCMainDonePress)
	DCMainDoneButton.SetState (IE_GUI_BUTTON_DISABLED)
	DCMainDoneButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# cancel button (on)
	DCMainCancelButton = DCMainWindow.GetControl (1)
	DCMainCancelButton.SetText (13727)
	DCMainCancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, DCMainCancelPress)
	DCMainCancelButton.SetState (IE_GUI_BUTTON_ENABLED)
	DCMainCancelButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# class button (on)
	DCMainClassButton = DCMainWindow.GetControl (3)
	DCMainClassButton.SetText (11959)
	DCMainClassButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, DCMainClassPress)
	DCMainClassButton.SetState (IE_GUI_BUTTON_ENABLED)
	DCMainClassButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# skills button (off)
	DCMainSkillsButton = DCMainWindow.GetControl (4)
	DCMainSkillsButton.SetText (17372)
	DCMainSkillsButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, DCMainSkillsPress)
	DCMainSkillsButton.SetState (IE_GUI_BUTTON_DISABLED)
	DCMainSkillsButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# back button (on)
	DCMainBackButton = DCMainWindow.GetControl (5)
	if GUICommon.GameIsBG2():
		DCMainBackButton.SetText (15416)
	DCMainBackButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, DCMainBackPress)
	DCMainBackButton.SetState (IE_GUI_BUTTON_ENABLED)
	DCMainBackButton.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
	# picture of character
	DCMainPictureButton = DCMainWindow.GetControl (6)
	DCMainPictureButton.SetState (IE_GUI_BUTTON_LOCKED)
	DCMainPictureButton.SetFlags (IE_GUI_BUTTON_NO_IMAGE | IE_GUI_BUTTON_PICTURE, OP_SET)
	DCMainPictureButton.SetPicture (GemRB.GetPlayerPortrait (pc, 0), "NOPORTMD")
	# text area warning
	DCTextArea = DCMainWindow.GetControl (7)
	DCTextArea.SetText (10811)
	# character name
	DCLabel = DCMainWindow.GetControl (0x10000008)
	DCLabel.SetText (GemRB.GetPlayerName (pc, 0))
	# class name
	Kit = GUICommon.GetKitIndex (pc)
	OldClassName = CommonTables.Classes.GetRowName (CommonTables.Classes.FindValue (5, GemRB.GetPlayerStat (pc, IE_CLASS) ) )
	if Kit:
		OldKitName = CommonTables.KitList.GetValue (Kit, 0, 0)
	else:
		# No kit: the kit name falls back to the plain class name.
		OldKitName = OldClassName
	DCLabel = DCMainWindow.GetControl (0x10000009)
	DCLabel.SetText (GUICommon.GetActorClassTitle (pc))
	# get the names of the classes we can dual to
	DualClassTable = GemRB.LoadTable ("dualclas")
	for i in range (DualClassTable.GetColumnCount ()):
		DCClasses.append (DualClassTable.GetColumnName (i))
	# show our window
	DCMainWindow.ShowModal (MODAL_SHADOW_GRAY)
	return
def DCMainDonePress ():
	"""Saves our dualclass changes and closes the window.

	This de-activates our old class and readjusts the basic actor stats, such as
	THAC0, spell casting, proficiencies, and so forth, to the new class."""
	import GUIREC
	# save our proficiencies
	LUProfsSelection.ProfsSave (pc, LUProfsSelection.LUPROFS_TYPE_DUALCLASS)
	# remove old class abilities
	KitIndex = GUICommon.GetKitIndex (pc)
	if KitIndex:
		ABTable = CommonTables.KitList.GetValue (str(KitIndex), "ABILITIES")
	else:
		ABTable = CommonTables.ClassSkills.GetValue (OldClassName, "ABILITIES")
	if ABTable != "*" and GemRB.HasResource (ABTable, RES_2DA, 1):
		GUICommon.RemoveClassAbilities (pc, ABTable, GemRB.GetPlayerStat (pc, IE_LEVEL))
	# remove old class casting
	if not NewMageSpells:
		for i in range (9):
			GemRB.SetMemorizableSpellsCount (pc, 0, IE_SPELL_TYPE_WIZARD, i)
	# NOTE(review): priest spells (levels 1-7) are wiped unconditionally
	# here, even when NewPriestMask is unset — confirm intended.
	Spellbook.RemoveKnownSpells (pc, IE_SPELL_TYPE_PRIEST, 1,7, 1)
	# apply our class abilities
	ABTable = CommonTables.ClassSkills.GetValue (ClassName, "ABILITIES")
	if ABTable != "*" and GemRB.HasResource (ABTable, RES_2DA, 1):
		GUICommon.AddClassAbilities (pc, ABTable)
	# learn our new priest spells
	if NewPriestMask:
		Spellbook.LearnPriestSpells (pc, 1, NewPriestMask)
		GemRB.SetMemorizableSpellsCount (pc, 1, IE_SPELL_TYPE_PRIEST, 0)
	# save our thief skills if we have them
	LUSkillsSelection.SkillsSave (pc)
	# save our new class and say was multi
	OldClassId = GemRB.GetPlayerStat (pc, IE_CLASS)
	# Combine old and new class bits, then look the multiclass id up.
	MultClassId = (1 << (NewClassId-1)) | (1 << (OldClassId-1))
	MultClassId = CommonTables.Classes.FindValue (4, MultClassId)
	MultClassId = CommonTables.Classes.GetValue (MultClassId, 5)
	GemRB.SetPlayerStat (pc, IE_CLASS, MultClassId)
	GemRB.SetPlayerStat (pc, IE_MC_FLAGS, CommonTables.Classes.GetValue (OldClassName, "MC_WAS_ID", 1))
	# update our levels and xp
	if GUICommon.IsDualSwap (pc):
		GemRB.SetPlayerStat (pc, IE_LEVEL2, 1)
	else:
		GemRB.SetPlayerStat (pc, IE_LEVEL2, GemRB.GetPlayerStat (pc, IE_LEVEL), 0)
	GemRB.SetPlayerStat (pc, IE_LEVEL, 1)
	GemRB.SetPlayerStat (pc, IE_XP, 0)
	# new thac0
	ThacoTable = GemRB.LoadTable ("THAC0")
	GemRB.SetPlayerStat (pc, IE_TOHIT, ThacoTable.GetValue (NewClassId-1, 0, 1))
	# new saves
	SavesTable = CommonTables.Classes.GetValue (CommonTables.Classes.FindValue (5, NewClassId), 3, 0)
	SavesTable = GemRB.LoadTable (SavesTable)
	for i in range (5):
		GemRB.SetPlayerStat (pc, IE_SAVEVSDEATH+i, SavesTable.GetValue (i, 0))
	# close our window
	if DCMainWindow:
		DCMainWindow.Unload ()
	GUIREC.UpdateRecordsWindow()
	return
def DCMainCancelPress ():
	"""Revert all changes and close the dual class window."""
	# Fix: a stray ' | ' split marker had corrupted the Unload() call.
	# simulate pressing the back button until we get back to DCMainStep = 1
	# to unset all things from the new class
	while DCMainStep > 1:
		DCMainBackPress ()
	# close our window
	if DCMainWindow:
		DCMainWindow.Unload ()
	return
def DCMainBackPress ():
"""Reverts all current changes and reverts b | ack to the previous step."""
global DCMainStep, DCClass, NewMageSpells
global NewPriestMask
if DCMainStep == 2: # class selected, wait to choose skills
# disable the skills button and re-enable the class button
# the class will be reset when the class button is clicked
DCMainSkillsButton.SetState (IE_GUI_BUTTON_DISABLED)
DCMainClassButton.SetState (IE_GUI_BUTTON_ENABLED)
# back a step
DCMainStep = 1
elif DCMainStep == 3: # skills selected
# re-enable our skills button
DCMainSkillsButton.SetState (IE_GUI_BUTTON_ENABLED)
# un-learn our spells and sk |
netdaniels/it_classified | itclassified/migrations/0020_itclog_date_update.py | Python | mit | 557 | 0.001795 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-25 00:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Adds ITCLog.date_update, a nullable timestamp defaulting to now."""
    # Fix: a stray ' | ' split marker before 'field=' made this class a
    # syntax error.

    dependencies = [
        ('itclassified', '0019_auto_20171024_2132'),
    ]

    operations = [
        migrations.AddField(
            model_name='itclog',
            name='date_update',
            # NOTE(review): help_text says "date of creation" for a field
            # named date_update — confirm the wording against the model.
            field=models.DateTimeField(default=django.utils.timezone.now, help_text='The date of creation', null=True),
        ),
    ]
|
stefanschleifer/kindle-newsfetch | newsfetch.py | Python | bsd-3-clause | 10,161 | 0.025096 | #!/usr/bin/env python
#
# newsfetch.py
#
# kindle-newsfetch is a simple Python script which fetches calibre recipes,
# turns them into Kindle newspapers using 'ebook-convert' and sends them to
# the configured Kindle e-mail adress using 'calibre-smtp'.
#
# (c) 2011 Stefan Schleifer, see LICENSE-file
import sys, os
import ConfigParser
import subprocess
import glob
from datetime import datetime
import shutil
# full path to configuration file
CONFIGFILE = 'newsfetch.cfg'
# print help information
def usage():
print "\nUsage: %s <command> [options]\n" % sys.argv[0]
print "\tinit: Create configuration file."
print "\tall: Fetch and convert all configured items."
print "\tsection|-s <section_name>: Fetch and convert all items of given section."
print "\titem|-i <item_name>: Only fetch and convert item named <item_name>."
print "\tadd <recipe_name> <item_name> <section_name>: Add a new item <item_name> with recipe-id <recipe_name> to section <section_name>."
print "\tlist: Get a list of all configured items."
sys.exit(1)
# create configuraton file
def create_configuration():
try:
i = raw_input("I'm going to ask you a few questions and create %s, is this ok (y/n)? " % CONFIGFILE)
if i is not 'y':
print "Ok, not creating configuration file. Bye!"
sys.exit(1)
config = ConfigParser.SafeConfigParser()
config.add_section('config')
config.set('config', 'KINDLE_ADDR', raw_input("Please enter your Kindle e-mail address where you want the converted files to be delivered to: "))
recipes_path = raw_input("Please enter the absolute path to the directory where your recipes are stored [%s/recipes]: " % os.getcwd())
if not recipes_path: # user chose to use default value
recipes_path = "%s/recipes" % os.getcwd()
# create the directory if it does not exist
if not os.access(recipes_path, os.W_OK): os.mkdir(recipes_path)
config.set('config', 'RECIPES_PATH', recipes_path)
output_path = raw_input("Please enter the absolute path to the directory for storing the converted files [%s/tmp]: " % os.getcwd())
if not output_path: # user chose to use default value
output_path = "%s/tmp" % os.getcwd()
# create the directory if it does not exist
if not os.access(output_path, os.W_OK): os.mkdir(output_path)
config.set('config', 'OUTPUT_PATH', output_path)
config.set('config', 'SMTP_SERVER', raw_input("Please enter the address of your desired SMTP server: "))
config.set('config', 'SMTP_USER', raw_input("Please enter the username for the given server: "))
config.set('config', 'SMTP_PW', raw_input("Please enter the password for the given user (WILL BE STORED IN PLAINTEXT!): "))
config.set('config', 'SMTP_MAILADDR', raw_input("Please enter your mail address for this server: "))
ebook_convert = raw_input("Please enter the absolute path to 'ebook-convert' [/usr/bin/ebook-convert]: ")
if not ebook_convert:
ebook_convert = '/usr/bin/ebook-convert'
config.set('config', 'EBOOK_CONVERT', ebook_convert)
calibre_smtp = raw_input("Please enter the absolute path to 'calibre-smtp' [/usr/bin/calibre-smtp]: ")
if not calibre_smtp:
calibre_smtp = '/usr/bin/calibre-smtp'
config.set('config', 'CALIBRE-SMTP', calibre_smtp)
keep_backup = raw_input("Keep backup of converted newspapers (y/n)? ")
if 'y' == keep_backup:
backup_path = raw_input("Please enter the absolute path where to store the backup [%s/backup]: " % os.getcwd())
if not backup_path:
backup_path = "%s/backup" % os.getcwd()
if not os.access(backup_path, os.W_OK): os.mkdir(backup_path)
config.set('config', 'backup_path', backup_path)
config.set('config', 'backup', 'true')
else:
config.set('config', 'backup', 'false')
config.add_section('example')
config.set('example', 'nytimes', 'New York Times')
config.set('example', 'sueddeutsche', 'Sueddeutsche Zeitung')
with open(CONFIGFILE, 'w') as configfile:
config.write(configfile)
except Exception, e:
print "Could not create %s: %s" % (CONFIGFILE, e)
else:
print "Successfully created %s. We've added a few example entries too." % CONFIGFILE
sys.exit(0)
# list all configured items with their names
def list_all_items():
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
for section in config.sections():
# ignore config and example sections
if section != 'config' and section != 'example':
print "Section: %s" % section
for recipe, name in config.items(section):
print "\t%s (%s)" % (name, recipe)
# add a new configuration item
def add_item(recipe, name, section):
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
# check if section already exists
try:
config.add_section(section)
except ConfigParser.DuplicateSectionError, ValueError:
pass
# entry already exists, asking whether to replace it
if config.has_option(section, recipe):
i = raw_input("Recipe %s with name %s already exists in section %s, do you want to update it (y/n)? " % (recipe, config.get(section, recipe), section))
if i is not 'y':
raise Exception("Adding item aborted by user as the item already exists.")
config.set(section, recipe, name)
with open(CONFIGFILE, 'w') as configfile:
config.write(configfile)
print "Successfully added item %s. Please add the required %s.recipe in %s now." % (name, recipe, config.get('config', 'recipes_path'))
# return a list of unique recipe names which
# should be converted in the current run
def collect_recipes(section='all', item=None):
    """Return the unique recipe ids selected by *section*/*item*.

    With *item* set, *section* is ignored and every content section is
    searched for that id. Otherwise either all content sections
    (section='all') or one named section is collected.
    Duplicates are deliberately collapsed; order is unspecified.
    """
    config = ConfigParser.SafeConfigParser()
    config.read(CONFIGFILE)

    def _section_recipes(section_name):
        # recipe ids registered under one configuration section
        return [recipe for recipe, _name in config.items(section_name)]

    content_sections = [s for s in config.sections()
                        if s not in ('config', 'example')]
    recipes = []
    if item is not None:
        # specific entry: look for it in every content section
        for section_name in content_sections:
            recipes.extend(r for r in _section_recipes(section_name) if r == item)
        if not recipes:
            raise Exception("Recipe named %s could not be found, please check the name and your configuration." % item)
    elif 'all' == section:
        for section_name in content_sections:
            recipes.extend(_section_recipes(section_name))
    elif config.has_section(section):
        recipes.extend(_section_recipes(section))
    else:
        raise Exception("Section %s is not available in current configuration." % section)
    # Attention: We're removing duplicate entries here, user hopefully expect this behavior!
    return list(set(recipes))
# convert a list of recipes to .mobi-format using ebook-convert
def convert_recipes(recipes):
config = ConfigParser.SafeConfigParser()
config.read(CONFIGFILE)
recipes_path = config.get('config', 'recipes_path')
output_path = config.get('config', 'output_path')
ebook_convert = config.get('config', 'ebook_convert')
for recipe in recipes:
try:
retcode = subprocess.call([ebook_convert, os.path.join(recipes_path, recipe + ".recipe"), os.path.join(output_path, recipe + ".mobi"), "--output-profile=kindle"])
if 0 != retcode:
raise Exception("Err | or while converting recipe %s" % recipe)
except Exception ,e:
print "Could not convert %s: %s." % ( os.path.join(recipes_path, recipe + ".mobi" | ), e)
# send all .mobi-files in defined output-directory
# to user via calibre-smtp
def send_ebooks():
    """E-mail every .mobi file in the output directory to the Kindle address.

    Credentials and addresses come from the [config] section of CONFIGFILE;
    each file is sent as an attachment via the calibre-smtp binary.
    """
    config = ConfigParser.SafeConfigParser()
    config.read(CONFIGFILE)
    calibre_smtp = config.get('config', 'calibre-smtp')
    # get all .mobi-files in output-dir
    files = glob.glob(config.get('config', 'output_path') + "/*.mobi")
    for f in files:
        try:
            # one calibre-smtp invocation per file; -a attaches the ebook
            retcode = subprocess.call([calibre_smtp, '-r', config.get('config', 'smtp_server'), '-u', config.get('config', 'smtp_user'), '-p', config.get('config', 'smtp_pw'), '-s', 'Send to Kindle', '-a', f, '-vv', config.get('config', 'smtp_mailaddr'), config.get('config', 'kindle_addr'), 'Send to Kindle'])
            if 0 != retcode:
                raise Exception("Error while sending .mobi-files via calibre-smtp.")
        except Exception, e:
            print "Could not send convertes files via mail: %s" % e
# clean output direcotry
def cleanup():
config = ConfigParser.SafeConfig |
teracyhq/flask-boilerplate | app/datastore.py | Python | bsd-3-clause | 4,882 | 0.001639 | # -*- coding: utf-8 -*-
"""
datastore
~~~~~~~~~
base datastore module
How to use:
- with SQLAlchemy:
datastore = SQLAlchemyDatastore(db)
- with MongoDB: (TODO(hoatle): add this)
and use the methods provided from Datastore
"""
from abc import ABCMeta, abstractmethod
from sqlalchemy import desc
import inflection
from .utils import extract_dict, add_filters
class Datastore(object):
    """Abstract Datastore class.

    Defines the minimal persistence contract (put/delete, optional
    commit) that concrete backends implement.

    .. versionadded:: 0.1.0
    """
    __metaclass__ = ABCMeta

    def __init__(self, db):
        # db: backend-specific handle (e.g. a Flask-SQLAlchemy db object)
        self.db = db

    def commit(self):
        # no-op by default; transactional backends override this
        pass

    @abstractmethod
    def put(self, model):
        """Creates a new model or updates an existing model.

        .. versionadded:: 0.1.0

        :param model: the model.
        """
        pass

    @abstractmethod
    def delete(self, model):
        """Deletes an existing model from the database.

        .. versionadded:: 0.1.0

        :param model: the model.
        """
        pass
class SQLAlchemyDatastore(Datastore):
    """SQLAlchemyDatastore class.

    Generic CRUD datastore on top of Flask-SQLAlchemy. Models are looked
    up by their underscored class name (e.g. ``user_profile``).

    .. versionadded:: 0.1.0
    """

    def __init__(self, db):
        super(SQLAlchemyDatastore, self).__init__(db)
        # maps underscored model name -> mapped model class
        self.model_registry = {}
        classes, table_names = [], []
        for clazz in db.Model._decl_class_registry.values():
            try:
                table_names.append(clazz.__tablename__)
                classes.append(clazz)
            except Exception:  # TODO(hoatle): too broad exception
                # registry entries without __tablename__ are skipped
                pass
        for table in db.metadata.tables.items():
            if table[0] in table_names:
                model = classes[table_names.index(table[0])]
                self.model_registry[inflection.underscore(model.__name__)] = model

    def commit(self):
        self.db.session.commit()

    def put(self, model):
        self.db.session.add(model)
        # self.db.session.flush()  # TODO(hoatle): do we need this, performance impact?
        return model

    def delete(self, model):
        self.db.session.delete(model)

    def get_model_class(self, model_name):
        """Return the mapped class registered under *model_name* (or None)."""
        return self.model_registry.get(model_name)

    def get_model_instance(self, model_name, **fields):
        """Instantiate the model registered under *model_name* with *fields*."""
        return self.get_model_class(model_name)(**fields)

    # Base CRUD
    def find_by_model_name(self, model_name, q=None, accepted_filter_keys=None, filters=None,
                           sort=None, offset=None, limit=None, **kwargs):
        """Query *model_name* rows with optional filtering, sorting and paging.

        *sort* is a comma-separated list such as ``name,-created_at``; a
        leading ``-`` sorts descending, an optional ``+`` ascending.
        """
        model_class = self.get_model_class(model_name)
        query = model_class.query.from_self()
        if filters is not None and len(filters) > 0:
            query = add_filters(query, filters, accepted_filter_keys)
        # {key:value,} equality filters picked from kwargs
        filter_dict = extract_dict(kwargs, extracted_keys=accepted_filter_keys)
        if filter_dict is not None and len(filter_dict) > 0:
            query = query.filter_by(**filter_dict)
        if sort is not None:
            # sort is expected to be something like: name,-description,id,+email
            sort_args = [
                desc(v.strip()[1:]) if v.startswith('-') else
                v.strip()[1:] if v.startswith('+') else
                v.strip()
                for v in sort.split(',')]
            query = query.order_by(*sort_args)
        query = query.offset(offset).limit(limit)
        return query

    def create_by_model_name(self, model_name, accepted_keys, **fields):
        """Create and persist a new *model_name* row from whitelisted *fields*."""
        values = extract_dict(fields, extracted_keys=accepted_keys)
        model = self.get_model_instance(model_name, **values)
        self.put(model)
        self.commit()
        return model

    def read_by_model_name(self, model_name, pid, **kwargs):
        """Return the row with primary key *pid*, aborting with 404 if missing."""
        model_class = self.get_model_class(model_name)
        return model_class.query.get_or_404(pid)

    def update_by_model_name(self, model_name, pid, accepted_keys, pid_key='id', **kwargs):
        """Update whitelisted columns of the row matching *pid* and return it."""
        values = extract_dict(kwargs, extracted_keys=accepted_keys)
        self.get_model_class(model_name).query.filter_by(**{pid_key: pid}).update(values)
        self.commit()
        return self.read_by_model_name(model_name, pid)  # TODO(hoatle): avoid this?

    def delete_by_model_name(self, model_name, pid, **kwargs):
        """Delete the row matching *pid* (404 if missing)."""
        model = self.read_by_model_name(model_name, pid, **kwargs)
        self.delete(model)
        self.commit()
self.commit()
class MongoEngineDatastore(Datastore):
    """MongoEngineDatastore class.

    Thin wrappers around MongoEngine document methods; documents manage
    their own persistence, so no commit step is needed.

    .. versionadded:: 0.1.0
    """

    def put(self, model):
        """Saves the model to the database. If the model already exists,
        it will be updated, otherwise it will be created.

        .. versionadded:: 0.1.0

        :param model: the model.
        """
        model.save()
        return model

    def delete(self, model):
        """Deletes an existing model from the database.

        .. versionadded:: 0.1.0

        :param model: the model.
        """
        model.delete()
|
upcounsel/PyRankinity | setup.py | Python | mit | 396 | 0.005051 | from setuptools import setup
# Package metadata for PyRankinity (fixed stray fill-marker corruption
# in the author and install_requires entries).
setup(name='PyRankinity',
      version='0.1',
      description='Rankinity API Wrapper - See http://my.rankinity.com/api.en',
      author='UpCounsel',
      author_email='brad@upcounsel.com',
      url='https://www.github.com/upcounsel/pyrankinity',
      packages=['pyrankinity'],
      install_requires=[
          'requests',
      ],
      license='MIT'
      )
asajeffrey/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/browsers/sauce.py | Python | mpl-2.0 | 8,309 | 0.000963 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import glob
import os
import shutil
import subprocess
import tarfile
import tempfile
import time

import requests
from six.moves import cStringIO as StringIO

from .base import Browser, ExecutorBrowser, require_arg
from .base import get_timeout_multiplier  # noqa: F401
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executorselenium import (SeleniumTestharnessExecutor,  # noqa: F401
                                          SeleniumRefTestExecutor)  # noqa: F401
here = os.path.dirname(__file__)
# Number of seconds to wait between polling operations when detecting status of
# Sauce Connect sub-process.
sc_poll_period = 1
__wptrunner__ = {"product": "sauce",
"check_args": "check_args",
"browser": "SauceBrowser",
"executor": {"testharness": "SeleniumTestharnessExecutor",
"reftest": "SeleniumRefTestExecutor"},
"browser_kwargs": "browser_kwargs",
"executor_kwargs": "executor_kwargs",
"env_extras": "env_extras",
"env_options": "env_options",
"timeout_multiplier": "get_timeout_multiplier"}
def get_capabilities(**kwargs):
    """Build the Selenium capabilities dict for a Sauce Labs session."""
    browser_name = kwargs["sauce_browser"]
    platform = kwargs["sauce_platform"]
    version = kwargs["sauce_version"]
    # per-browser prerun scripts, uploaded to sauce-storage beforehand
    prerun_script = {
        "MicrosoftEdge": {
            "executable": "sauce-storage:edge-prerun.bat",
            "background": False,
        },
        "safari": {
            "executable": "sauce-storage:safari-prerun.sh",
            "background": False,
        }
    }
    return {
        "browserName": browser_name,
        "build": kwargs["sauce_build"],
        "disablePopupHandler": True,
        "name": "%s %s on %s" % (browser_name, version, platform),
        "platform": platform,
        "public": "public",
        "selenium-version": "3.3.1",
        "tags": kwargs["sauce_tags"],
        "tunnel-identifier": kwargs["sauce_tunnel_id"],
        "version": version,
        # browsers without a prerun script get None here
        "prerun": prerun_script.get(browser_name)
    }
def get_sauce_config(**kwargs):
    """Assemble the wptrunner Sauce configuration (hub URL + capabilities)."""
    # credentials are embedded in the local Sauce Connect hub URL
    hub_url = "%s:%s@localhost:4445" % (kwargs["sauce_user"], kwargs["sauce_key"])
    return {
        "url": "http://%s/wd/hub" % hub_url,
        "browserName": kwargs["sauce_browser"],
        "capabilities": get_capabilities(**kwargs)
    }
def check_args(**kwargs):
    """Fail fast unless all required --sauce-* options were supplied."""
    require_arg(kwargs, "sauce_browser")
    require_arg(kwargs, "sauce_platform")
    require_arg(kwargs, "sauce_version")
    require_arg(kwargs, "sauce_user")
    require_arg(kwargs, "sauce_key")


def browser_kwargs(test_type, run_info_data, config, **kwargs):
    """Keyword arguments used to construct SauceBrowser."""
    sauce_config = get_sauce_config(**kwargs)
    return {"sauce_config": sauce_config}


def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
                    **kwargs):
    """Keyword arguments for the Selenium-based executors."""
    executor_kwargs = base_executor_kwargs(test_type, server_config,
                                           cache_manager, run_info_data, **kwargs)
    executor_kwargs["capabilities"] = get_capabilities(**kwargs)
    return executor_kwargs


def env_extras(**kwargs):
    # the Sauce Connect tunnel is managed as an extra environment resource
    return [SauceConnect(**kwargs)]


def env_options():
    return {"supports_debugger": False}


def get_tar(url, dest):
    """Download a tarball from *url* and unpack it under *dest*.

    NOTE(review): the whole response is buffered in memory and
    extractall() trusts the archive's member paths -- acceptable only
    because the URL is a fixed saucelabs.com download.
    """
    resp = requests.get(url, stream=True)
    resp.raise_for_status()
    with tarfile.open(fileobj=StringIO(resp.raw.read())) as f:
        f.extractall(path=dest)
class SauceConnect():
    """Context manager owning the Sauce Connect tunnel subprocess.

    Instances are first invoked with the environment options/config
    (see __call__) and then entered; __enter__ downloads the sc binary
    if none was configured, starts the tunnel and blocks until it is
    ready or the init timeout expires.
    """

    def __init__(self, **kwargs):
        self.sauce_user = kwargs["sauce_user"]
        self.sauce_key = kwargs["sauce_key"]
        self.sauce_tunnel_id = kwargs["sauce_tunnel_id"]
        self.sauce_connect_binary = kwargs.get("sauce_connect_binary")
        self.sauce_connect_args = kwargs.get("sauce_connect_args")
        self.sauce_init_timeout = kwargs.get("sauce_init_timeout")
        self.sc_process = None   # running sc subprocess, if any
        self.temp_dir = None     # holds a downloaded sc binary, if any
        self.env_config = None   # set via __call__ before __enter__

    def __call__(self, env_options, env_config):
        self.env_config = env_config
        return self

    def __enter__(self):
        # Because this class implements the context manager protocol, it is
        # possible for instances to be provided to the `with` statement
        # directly. This class implements the callable protocol so that data
        # which is not available during object initialization can be provided
        # prior to this moment. Instances must be invoked in preparation for
        # the context manager protocol, but this additional constraint is not
        # itself part of the protocol.
        assert self.env_config is not None, 'The instance has been invoked.'
        if not self.sauce_connect_binary:
            # no binary configured: fetch a pinned sc release into a temp dir
            self.temp_dir = tempfile.mkdtemp()
            get_tar("https://saucelabs.com/downloads/sc-4.4.9-linux.tar.gz", self.temp_dir)
            self.sauce_connect_binary = glob.glob(os.path.join(self.temp_dir, "sc-*-linux/bin/sc"))[0]
        self.upload_prerun_exec('edge-prerun.bat')
        self.upload_prerun_exec('safari-prerun.sh')
        self.sc_process = subprocess.Popen([
            self.sauce_connect_binary,
            "--user=%s" % self.sauce_user,
            "--api-key=%s" % self.sauce_key,
            "--no-remove-colliding-tunnels",
            "--tunnel-identifier=%s" % self.sauce_tunnel_id,
            "--metrics-address=0.0.0.0:9876",
            "--readyfile=./sauce_is_ready",
            "--tunnel-domains",
            ",".join(self.env_config.domains_set)
        ] + self.sauce_connect_args)
        # poll until sc signals readiness via the readyfile, or dies
        tot_wait = 0
        while not os.path.exists('./sauce_is_ready') and self.sc_process.poll() is None:
            if not self.sauce_init_timeout or (tot_wait >= self.sauce_init_timeout):
                self.quit()
                raise SauceException("Sauce Connect Proxy was not ready after %d seconds" % tot_wait)
            time.sleep(sc_poll_period)
            tot_wait += sc_poll_period
        if self.sc_process.returncode is not None:
            # NOTE(review): the %s here is never interpolated -- the return
            # code is passed as a second exception argument, not formatted.
            raise SauceException("Unable to start Sauce Connect Proxy. Process exited with code %s", self.sc_process.returncode)

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.env_config = None
        self.quit()
        if self.temp_dir and os.path.exists(self.temp_dir):
            # best-effort cleanup of the downloaded binary
            try:
                shutil.rmtree(self.temp_dir)
            except OSError:
                pass

    def upload_prerun_exec(self, file_name):
        """Upload a prerun script from sauce_setup/ to sauce-storage."""
        auth = (self.sauce_user, self.sauce_key)
        url = "https://saucelabs.com/rest/v1/storage/%s/%s?overwrite=true" % (self.sauce_user, file_name)
        with open(os.path.join(here, 'sauce_setup', file_name), 'rb') as f:
            requests.post(url, data=f, auth=auth)

    def quit(self):
        """The Sauce Connect process may be managing an active "tunnel" to the
        Sauce Labs service. Issue a request to the process to close any tunnels
        and exit. If this does not occur within 5 seconds, force the process to
        close."""
        kill_wait = 5
        tot_wait = 0
        self.sc_process.terminate()
        while self.sc_process.poll() is None:
            time.sleep(sc_poll_period)
            tot_wait += sc_poll_period
            if tot_wait >= kill_wait:
                self.sc_process.kill()
                break
class SauceException(Exception):
    """Raised when the Sauce Connect tunnel cannot be started or stopped."""
    pass
class SauceBrowser(Browser):
init_timeout = 300
def __init__(self, logger, sauce_config):
Browser.__init__(self, logger)
self.sauce_config = sauce_config
def start(self, **kwargs):
pass
def stop(self, force=False):
pass
def pid(self):
return None
def is_alive(self):
# TODO: Should this check something about the connection?
return True
def cleanup( |
ilexius/odoo | addons/crm_partner_assign/__openerp__.py | Python | gpl-3.0 | 1,392 | 0.002874 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo module manifest (fixed stray fill-marker corruption inside the
# 'category' value and the description text).
{
    'name': 'Partner Assignation & Geolocation',
    'version': '1.0',
    'category': 'Customer Relationship Management',
    'description': """
This is the module used by Odoo SA to redirect customers to its partners, based on geolocation.
======================================================================================================

This modules lets you geolocate Leads, Opportunities and Partners based on their address.
Once the coordinates of the Lead/Opportunity is known, they can be automatically assigned
to an appropriate local partner, based on the distance and the weight that was assigned to the partner.
""",
    'depends': ['base_geolocalize', 'crm', 'account', 'portal'],
    'data': [
        'security/ir.model.access.csv',
        'res_partner_view.xml',
        'wizard/crm_forward_to_partner_view.xml',
        'wizard/crm_channel_interested_view.xml',
        'crm_lead_view.xml',
        'crm_partner_assign_data.xml',
        'crm_portal_view.xml',
        'portal_data.xml',
        'report/crm_lead_report_view.xml',
        'report/crm_partner_report_view.xml',
    ],
    'demo': [
        'res_partner_demo.xml',
        'crm_lead_demo.xml'
    ],
    'test': ['test/partner_assign.yml'],
    'installable': True,
    'auto_install': False,
}
|
pfmoore/blist | blist/test/list_tests.py | Python | bsd-3-clause | 19,502 | 0.005743 | # This file taken from Python, licensed under the Python License Agreement
from __future__ import print_function
"""
Tests common to list and UserList.UserList
"""
import sys
import os
from blist.test import unittest
from blist.test import test_support
from blist.test import seq_tests
from decimal import Decimal
def CmpToKey(mycmp):
    """Convert a cmp= function into a key= function."""
    class _Key(object):
        def __init__(self, value):
            self.obj = value

        def __lt__(self, other):
            # sorting only requires __lt__; "less than" iff mycmp says -1
            return mycmp(self.obj, other.obj) == -1

    return _Key
class CommonTest(seq_tests.CommonTest):
def test_init(self):
# Iterable arg is optional
self.assertEqual(self.type2test([]), self.type2test())
# Init clears previous values
a = self.type2test([1, 2, 3])
a.__init__()
self.assertEqual(a, self.type2test([]))
# Init overwrites previous values
a = self.type2test([1, 2, 3])
a.__init__([4, 5, 6])
self.assertEqual(a, self.type2test([4, 5, 6]))
# Mutables always return a new object
b = self.type2test(a)
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
def test_repr(self):
l0 = []
l2 = [0, 1, 2]
a0 = self.type2test(l0)
a2 = self.type2test(l2)
self.assertEqual(str(a0), 'blist(%s)' % str(l0))
self.assertEqual(repr(a0), 'blist(%s)' % repr(l0))
self.assertEqual(repr(a2), 'blist(%s)' % repr(l2))
self.assertEqual(str(a2), "blist([0, 1, 2])")
self.assertEqual(repr(a2), "blist([0, 1, 2])")
a2.append(a2)
a2.append(3)
self.assertEqual(str(a2), "blist([0, 1, 2, [...], 3])")
self.assertEqual(repr(a2), "blist([0, 1, 2, [...], 3])")
def test_print(self):
d = self.type2test(range(200))
d.append(d)
d.extend(range(200,400))
d.append(d)
d.append(400)
try:
fo = open(test_support.TESTFN, "w")
fo.write(str(d))
fo.close()
fo = open(test_support.TESTFN, "r")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
os.remove(test_support.TESTFN)
def test_set_subscript(self):
a = self.type2test(list(range(20)))
self.assertRaises(ValueError, a.__setitem__, slice(0, 10, 0), [1,2,3])
self.assertRaises(TypeError, a.__setitem__, slice(0, 10), 1)
self.assertRaises(ValueError, a.__setitem__, slice(0, 10, 2), [1,2])
self.assertRaises(TypeError, a.__getitem__, 'x', 1)
a[slice(2,10,3)] = [1,2,3]
self.assertEqual(a, self.type2test([0, 1, 1, 3, 4, 2, 6, 7, 3,
9, 10, 11, 12, 13, 14, 15,
16, 17, 18, 19]))
def test_reversed(self):
a = self.type2test(list(range(20)))
r = reversed(a)
self.assertEqual(list(r), self.type2test(list(range(19, -1, -1))))
if hasattr(r, '__next__'): # pragma: no cover
self.assertRaises(StopIteration, r.__next__)
else: # pragma: no cover
self.assertRaises(StopIteration, r.next)
self.assertEqual(list(reversed(self.type2test())),
self.type2test())
def test_setitem(self):
a = self.type2test([0, 1])
a[0] = 0
a[1] = 100
self.assertEqual(a, self.type2test([0, 100]))
a[-1] = 200
self.assertEqual(a, self.type2test([0, 200]))
a[-2] = 100
self.assertEqual(a, self.type2test([100, 200]))
self.assertRaises(IndexError, a.__setitem__, -3, 200)
self.assertRaises(IndexError, a.__setitem__, 2, 200)
a = self.type2test([])
self.assertRaises(IndexError, a.__setitem__, 0, 200)
self.assertRaises(IndexError, a.__setitem__, -1, 200)
self.assertRaises(TypeError, a.__setitem__)
a = self.type2test([0,1,2,3,4])
a[0] = 1
a[1] = 2
a[2] = 3
self.assertEqual(a, self.type2test([1,2,3,3,4]))
a[0] = 5
a[1] = 6
a[2] = 7
self.assertEqual(a, self.type2test([5,6,7,3,4]))
a[-2] = 88
a[-1] = 99
self.assertEqual(a, self.type2test([5,6,7,88,99]))
a[-2] = 8
a[-1] = 9
self.assertEqual(a, self.type2test([5,6,7,8,9]))
def test_delitem(self):
a = self.type2test([0, 1])
del a[1]
self.assertEqual(a, [0])
del a[0]
self.assertEqual(a, [])
a = self.type2test([0, 1])
del a[-2]
self.assertEqual(a, [1])
del a[-1]
self.assertEqual(a, [])
a = self.type2test([0, 1])
self.assertRaises(IndexError, a.__delitem__, -3)
self.assertRaises(IndexError, a.__delitem__, 2)
a = self.type2test([])
self.assertRaises(IndexError, a.__delitem__, 0)
self.assertRaises(TypeError, a.__delitem__)
def test_setslice(self):
l = [0, 1]
a = self.type2test(l)
for i in range(-3, 4):
a[:i] = l[:i]
sel | f.assertEqual(a, l)
a2 = a[:]
a2[:i] = a[:i]
| self.assertEqual(a2, a)
a[i:] = l[i:]
self.assertEqual(a, l)
a2 = a[:]
a2[i:] = a[i:]
self.assertEqual(a2, a)
for j in range(-3, 4):
a[i:j] = l[i:j]
self.assertEqual(a, l)
a2 = a[:]
a2[i:j] = a[i:j]
self.assertEqual(a2, a)
aa2 = a2[:]
aa2[:0] = [-2, -1]
self.assertEqual(aa2, [-2, -1, 0, 1])
aa2[0:] = []
self.assertEqual(aa2, [])
a = self.type2test([1, 2, 3, 4, 5])
a[:-1] = a
self.assertEqual(a, self.type2test([1, 2, 3, 4, 5, 5]))
a = self.type2test([1, 2, 3, 4, 5])
a[1:] = a
self.assertEqual(a, self.type2test([1, 1, 2, 3, 4, 5]))
a = self.type2test([1, 2, 3, 4, 5])
a[1:-1] = a
self.assertEqual(a, self.type2test([1, 1, 2, 3, 4, 5, 5]))
a = self.type2test([])
a[:] = tuple(range(10))
self.assertEqual(a, self.type2test(list(range(10))))
if sys.version_info[0] < 3:
self.assertRaises(TypeError, a.__setslice__, 0, 1, 5)
self.assertRaises(TypeError, a.__setslice__)
def test_delslice(self):
a = self.type2test([0, 1])
del a[1:2]
del a[0:1]
self.assertEqual(a, self.type2test([]))
a = self.type2test([0, 1])
del a[1:2]
del a[0:1]
self.assertEqual(a, self.type2test([]))
a = self.type2test([0, 1])
del a[-2:-1]
self.assertEqual(a, self.type2test([1]))
a = self.type2test([0, 1])
del a[-2:-1]
self.assertEqual(a, self.type2test([1]))
a = self.type2test([0, 1])
del a[1:]
del a[:1]
self.assertEqual(a, self.type2test([]))
a = self.type2test([0, 1])
del a[1:]
del a[:1]
self.assertEqual(a, self.type2test([]))
a = self.type2test([0, 1])
del a[-1:]
self.assertEqual(a, self.type2test([0]))
a = self.type2test([0, 1])
del a[-1:]
self.assertEqual(a, self.type2test([0]))
a = self.type2test([0, 1])
del a[:]
self.assertEqual(a, self.type2test([]))
def test_append(self):
a = self.type2test([])
a.append(0)
a.append(1)
a.append(2)
self.assertEqual(a, self.type2test([0, 1, 2]))
self.assertRaises(TypeError, a.append)
def test_extend(self):
a1 = self.type2test([0])
a2 = self.type2test((0, 1))
a = a1[:]
a.extend(a2)
self.assertEqual(a, a1 + a2)
a.extend(self.type2test([]))
self.assertEqual(a, a1 + a2)
a.extend(a)
self.assertEqual(a, self.type2test([0, 0, 1, 0, 0, 1]))
a = self.type2test("spam")
a.extend("eggs")
self.assertEqual(a, list("spameggs"))
self.assertRaises(TypeError, a.extend, None)
self.ass |
snakeleon/YouCompleteMe-x86 | third_party/ycmd/third_party/JediHTTP/vendor/jedi/jedi/api/helpers.py | Python | gpl-3.0 | 10,756 | 0.000372 | """
Helpers for the API
"""
import re
from collections import namedtuple
from jedi._compatibility import u
from jedi.evaluate.helpers import call_of_leaf
from jedi import parser
from jedi.parser import tokenize
from jedi.cache import time_cache
from jedi import common
CompletionParts = namedtuple('CompletionParts', ['path', 'has_dot', 'name'])
def sorted_definitions(defs):
    """Sort definitions by (module_path, line, column), tolerating None."""
    def _key(definition):
        # `or ''` / `or 0` because any of these attributes may be None
        return (definition.module_path or '',
                definition.line or 0,
                definition.column or 0)
    return sorted(defs, key=_key)
def get_on_completion_name(module, lines, position):
    """Return the partial identifier in front of *position* (may be '')."""
    leaf = module.get_leaf_for_position(position)
    if leaf is None or leaf.type in ('string', 'error_leaf'):
        # Inside strings, comments and error leafs the tree is useless;
        # fall back to scanning the raw source line for a trailing name.
        row, column = position
        prefix = lines[row - 1][:column]
        return re.search(r'(?!\d)\w+$|$', prefix).group(0)
    if leaf.type not in ('name', 'keyword'):
        return ''
    # Cut the leaf's value down to the part left of the cursor.
    return leaf.value[:position[1] - leaf.start_pos[1]]
def _get_code(code_lines, start_pos, end_pos):
    """Return the source text between *start_pos* and *end_pos*.

    Positions are (1-based line, 0-based column) tuples; the slice is
    trimmed to the exact columns at both ends.
    """
    selected = code_lines[start_pos[0] - 1:end_pos[0]]
    # trim the tail of the last line first, then the head of the first
    # (order matters when start and end share a line)
    selected[-1] = selected[-1][:end_pos[1]]
    selected[0] = selected[0][start_pos[1]:]
    return '\n'.join(selected)
class OnErrorLeaf(Exception):
    """Signals that the cursor sits on an error leaf that cannot be parsed."""

    @property
    def error_leaf(self):
        # the offending leaf is carried as the sole exception argument
        return self.args[0]
def _is_on_comment(leaf, position):
    """Return True if *position* falls inside a comment in *leaf*'s prefix."""
    # We might be on a comment.
    if leaf.type == 'endmarker':
        try:
            dedent = leaf.get_previous_leaf()
            if dedent.type == 'dedent' and dedent.prefix:
                # TODO This is needed because the fast parser uses multiple
                # endmarker tokens within a file which is obviously ugly.
                # This is so ugly that I'm not even commenting how it exactly
                # happens, but let me tell you that I want to get rid of it.
                leaf = dedent
        except IndexError:
            pass
    comment_lines = common.splitlines(leaf.prefix)
    difference = leaf.start_pos[0] - position[0]
    prefix_start_pos = leaf.get_start_pos_of_prefix()
    if difference == 0:
        # cursor is on the same line the leaf starts on
        indent = leaf.start_pos[1]
    elif position[0] == prefix_start_pos[0]:
        # cursor is on the first line of the prefix
        indent = prefix_start_pos[1]
    else:
        indent = 0
    # slice the prefix line up to the cursor and look for a comment marker
    line = comment_lines[-difference - 1][:position[1] - indent]
    return '#' in line
def _get_code_for_stack(code_lines, module, position):
    """Return the code before *position* that matters for stack recovery.

    Returns '' when the cursor starts a fresh context (start of file,
    after a dedent, inside a comment).

    :raises OnErrorLeaf: when the cursor sits on an error leaf or string,
        which cannot be re-parsed for completion.
    """
    leaf = module.get_leaf_for_position(position, include_prefixes=True)
    # It might happen that we're on whitespace or on a comment. This means
    # that we would not get the right leaf.
    if leaf.start_pos >= position:
        if _is_on_comment(leaf, position):
            return u('')
        # If we're not on a comment simply get the previous leaf and proceed.
        try:
            leaf = leaf.get_previous_leaf()
        except IndexError:
            return u('')  # At the beginning of the file.
    is_after_newline = leaf.type == 'newline'
    # skip over newline leaves to reach the last "real" token
    while leaf.type == 'newline':
        try:
            leaf = leaf.get_previous_leaf()
        except IndexError:
            return u('')
    if leaf.type in ('indent', 'dedent'):
        return u('')
    elif leaf.type == 'error_leaf' or leaf.type == 'string':
        # Error leafs cannot be parsed, completion in strings is also
        # impossible.
        raise OnErrorLeaf(leaf)
    else:
        if leaf == ';':
            user_stmt = leaf.parent
        else:
            user_stmt = leaf.get_definition()
        if user_stmt.parent.type == 'simple_stmt':
            user_stmt = user_stmt.parent
    if is_after_newline:
        if user_stmt.start_pos[1] > position[1]:
            # This means that it's actually a dedent and that means that we
            # start without context (part of a suite).
            return u('')
    # This is basically getting the relevant lines.
    return _get_code(code_lines, user_stmt.get_start_pos_of_prefix(), position)
def get_stack_at_position(grammar, code_lines, module, pos):
    """Return a Stack describing the parser state at *pos*.

    The relevant code is re-parsed with a sentinel word appended; the
    tokenizer aborts exactly when the sentinel is reached, leaving the
    parser stack frozen at the completion point.
    """
    class EndMarkerReached(Exception):
        pass

    def tokenize_without_endmarker(code):
        tokens = tokenize.source_tokens(code, use_exact_op_types=True)
        for token_ in tokens:
            if token_.string == safeword:
                # stop feeding the parser right where the user is typing
                raise EndMarkerReached()
            else:
                yield token_

    code = _get_code_for_stack(code_lines, module, pos)
    # We use a word to tell Jedi when we have reached the start of the
    # completion.
    # Use Z as a prefix because it's not part of a number suffix.
    safeword = 'ZZZ_USER_WANTS_TO_COMPLETE_HERE_WITH_JEDI'
    # Remove as many indents from **all** code lines as possible.
    code = code + safeword

    p = parser.ParserWithRecovery(grammar, code, start_parsing=False)
    try:
        p.parse(tokenizer=tokenize_without_endmarker(code))
    except EndMarkerReached:
        return Stack(p.stack)
    raise SystemError("This really shouldn't happen. There's a bug in Jedi.")
class Stack(list):
    """A pgen parser stack; each entry is (dfa, state, (node_number, nodes))."""

    def get_node_names(self, grammar):
        """Yield the grammar symbol name for every stack entry."""
        for _dfa, _state, (node_number, _nodes) in self:
            yield grammar.number2symbol[node_number]

    def get_nodes(self):
        """Yield every node collected on the stack, in order."""
        for _dfa, _state, (_node_number, nodes) in self:
            for node in nodes:
                yield node
def get_possible_completion_types(grammar, stack):
    """Return (keywords, grammar_labels) that could syntactically follow.

    Walks the pgen DFA arcs reachable from the current parser *stack*
    and collects reachable keywords and terminal token names.
    """
    def add_results(label_index):
        try:
            grammar_labels.append(inversed_tokens[label_index])
        except KeyError:
            try:
                keywords.append(inversed_keywords[label_index])
            except KeyError:
                # non-terminal: recurse into its first set
                t, v = grammar.labels[label_index]
                assert t >= 256
                # See if it's a symbol and if we're in its first set
                # NOTE(review): the next line is a no-op expression,
                # apparently left over from debugging.
                inversed_keywords
                itsdfa = grammar.dfas[t]
                itsstates, itsfirst = itsdfa
                for first_label_index in itsfirst.keys():
                    add_results(first_label_index)

    inversed_keywords = dict((v, k) for k, v in grammar.keywords.items())
    inversed_tokens = dict((v, k) for k, v in grammar.tokens.items())
    keywords = []
    grammar_labels = []

    def scan_stack(index):
        # examine the arcs leaving the current DFA state
        dfa, state, node = stack[index]
        states, first = dfa
        arcs = states[state]
        for label_index, new_state in arcs:
            if label_index == 0:
                # An accepting state, check the stack below.
                scan_stack(index - 1)
            else:
                add_results(label_index)

    scan_stack(-1)
    return keywords, grammar_labels
def evaluate_goto_definition(evaluator, leaf):
    """Evaluate the definition(s) that the given *leaf* refers to."""
    if leaf.type == 'name':
        # In case of a name we can just use goto_definition which does all the
        # magic itself.
        return evaluator.goto_definitions(leaf)

    node = None
    parent = leaf.parent
    if parent.type == 'atom':
        node = leaf.parent
    elif parent.type == 'trailer':
        # e.g. the leaf is part of a call/attribute chain
        node = call_of_leaf(leaf)

    if node is None:
        # nothing evaluable at this leaf
        return []
    return evaluator.eval_element(node)
# Details about an open call signature: the opening bracket leaf, the
# index of the active argument, and the keyword name (if the cursor is
# behind "name=").
CallSignatureDetails = namedtuple(
    'CallSignatureDetails',
    ['bracket_leaf', 'call_index', 'keyword_name_str']
)


def _get_index_and_key(nodes, position):
    """
    Returns the amount of commas and the keyword argument string.
    """
    nodes_before = [c for c in nodes if c.start_pos < position]
    if nodes_before[-1].type == 'arglist':
        # descend into the arglist so commas are counted per argument
        nodes_before = [c for c in nodes_before[-1].children if c.start_pos < position]

    key_str = None

    if nodes_before:
        last = nodes_before[-1]
        if last.type == 'argument' and last.children[1].end_pos <= position:
            # Checked if the argument
            key_str = last.children[0].value
        elif last == '=':
            # cursor right behind "name=" -> keyword is the previous node
            key_str = nodes_before[-2].value

    return nodes_before.count(','), key_str
|
priestc/autotip.io | autotip_io/autotip_io/admin.py | Python | mit | 731 | 0.005472 | from django.contrib import admin
from .models import Blog, GiveawaySubmission, Article
class GiveawaySubmissionAdmin(admin.ModelAdmin):
    """Admin for giveaway submissions; links each address to blockchain.info."""
    list_display = ('date_created', 'address', 'blockchain')

    def blockchain(self, obj):
        """Render an HTML link to the blockchain.info page for obj's address."""
        # NOTE: the source contained a stray dataset split marker inside the
        # signature ("self | , obj"); repaired to a valid parameter list.
        url = "https://blockchain.info/address/%s" % obj.address
        return "<a href='%s' target='_blank'>Link</a>" % url
    blockchain.allow_tags = True
class BlogAdmin(admin.ModelAdmin):
    # Columns shown in the blog changelist.
    list_display = ('date_created', 'title', 'author')
class ArticleAdmin(admin.ModelAdmin):
    # Columns shown in the article changelist.
    list_display = ('date_created', 'title', 'tagline', 'author')
# Register the models with their customised admin classes.
# (Repaired a dataset split marker that broke "register" in two.)
admin.site.register(GiveawaySubmission, GiveawaySubmissionAdmin)
admin.site.register(Blog, BlogAdmin)
admin.site.register(Article, ArticleAdmin)
|
rwgdrummer/maskgen | tests/test_utils.py | Python | bsd-3-clause | 288 | 0.020833 | from maskgen.jpeg import utils
import unittest
from test_support import TestSupport
class TestJpegUtils(TestSupport):
    """Tests for maskgen.jpeg.utils quality-factor estimation."""

    def test_load(self):
        # estimate_qf should report JPEG quality 91 for the fixture image.
        # (Repaired dataset split markers in the method name and the path.)
        self.assertEqual(91, utils.estimate_qf(self.locateFile('tests/images/test_project1.jpg')))
if __name__ == '__main__':
unittest.main()
|
PyBossa/app-darkskies | get_images.py | Python | agpl-3.0 | 1,468 | 0.001362 | # -*- coding: utf-8 -*-
# This file is part of PyBOSSA.
#
# PyBOSSA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBOSSA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBOSSA. If not, see <http://www.gnu.org/licenses/>.
import json
def get_iss_photos(start, n_photos, mission, size="small"):
    """
    Gets public photos from ISS missions.

    :arg integer start: First photo number to build links for.
    :arg integer n_photos: How many consecutive photo numbers to cover.
    :arg string mission: Mission identifier used in the URL path.
    :arg string size: Size of the image from ISS mission.
    :returns: A list of photos (dicts with ``link_small`` and ``link_big``).
    :rtype: list
    """
    url_template = "http://eol.jsc.nasa.gov/sseop/images/ESC/%s/%s/%s-E-%s.JPG"
    return [
        dict(
            link_small=url_template % (size, mission, mission, number),
            link_big=url_template % ('large', mission, mission, number),
        )
        for number in range(start, start + n_photos)
    ]
pinae/ctSESAM-python-memorizing | tests/test_Crypter.py | Python | gpl-3.0 | 2,162 | 0.006013 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import unittest
from crypter import Crypter
from base64 import b64encode, b64decode
class TestCrypter(unittest.TestCase):
    """Round-trip tests for Crypter AES encryption with a PBKDF-derived key.

    Two byte literals were corrupted by dataset split markers; they are
    reconstructed here from the identical intact literals elsewhere in the
    same tests.
    """

    def test_encrypt(self):
        # The message is deliberately longer than one AES block so that
        # several cipher blocks are exercised.
        message_string = "Important information with quite some length. " + \
                         "This message is as long as this because otherwise only one cipher block would " + \
                         "be encrypted. This long message insures that more than one block is needed."
        password = "secret"
        message = message_string.encode('utf-8')
        crypter = Crypter(Crypter.createIvKey(password.encode('utf-8'), "pepper".encode('utf-8'), iterations=3))
        ciphertext = crypter.encrypt(message)
        self.assertEqual(
            b'EFEgY5bexGnwjGSUQKK35TPD7fAjG66REq5m9N1eyFHrZQwzv+aLc7bVmJ9FzCyxbCnbyUnzDKiY505br' +
            b'oEb+KO41XKW668xJzh/JvOK0Cu/+bc4/zSFHZM6JsTYEVDIXgR39ZlypeB34jDVI2544w1ey+DmTWbe8n' +
            b'UbagjnmRkok6kOAq8Avsf9BVJMw3BnSn/4cCC+gOxOJY5fp4DecNDQnp0HyyUz2VMMh/JUYILS5+67fXq' +
            b'29CbIQ1DOTqDfqRPA62nkRVPY83cKIe/UXw==',
            b64encode(ciphertext))

    def test_decrypt(self):
        ciphertext = b'EFEgY5bexGnwjGSUQKK35TPD7fAjG66REq5m9N1eyFHrZQwzv+aLc7bVmJ9FzCyxbCnbyUnzDKiY505br' + \
                     b'oEb+KO41XKW668xJzh/JvOK0Cu/+bc4/zSFHZM6JsTYEVDIXgR39ZlypeB34jDVI2544w1ey+DmTWbe8n' + \
                     b'UbagjnmRkok6kOAq8Avsf9BVJMw3BnSn/4cCC+gOxOJY5fp4DecNDQnp0HyyUz2VMMh/JUYILS5+67fXq' + \
                     b'29CbIQ1DOTqDfqRPA62nkRVPY83cKIe/UXw=='
        # The base64-decoded ciphertext must be a whole number of AES blocks.
        self.assertEqual(0, len(b64decode(ciphertext)) % 16)
        password = "secret"
        crypter = Crypter(Crypter.createIvKey(password.encode('utf-8'), "pepper".encode('utf-8'), iterations=3))
        self.assertEqual(b'Important information with quite some length. ' +
                         b'This message is as long as this because otherwise only one cipher block would ' +
                         b'be encrypted. This long message insures that more than one block is needed.',
                         crypter.decrypt(b64decode(ciphertext)))
if __name__ == '__main__':
unittest.main()
|
HelloLily/hellolily | lily/cases/api/serializers.py | Python | agpl-3.0 | 9,977 | 0.001704 | import analytics
import anyjson
from channels import Group
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from lily.accounts.api.serializers import RelatedAccountSerializer
from lily.api.fields import SanitizedHtmlCharField
from lily.api.nested.mixins import RelatedSerializerMixin
from lily.api.nested.serializers import WritableNestedSerializer
from lily.api.serializers import ContentTypeSerializer
from lily.contacts.api.serializers import RelatedContactSerializer
from lily.contacts.models import Function
from lily.users.api.serializers import RelatedLilyUserSerializer, RelatedTeamSerializer
from lily.utils.api.serializers import RelatedTagSerializer
from lily.utils.request import is_external_referer
from ..models import Case, CaseStatus, CaseType
class CaseStatusSerializer(serializers.ModelSerializer):
    """
    Serializer for case status model.
    """
    class Meta:
        model = CaseStatus
        # Fields exposed through the REST API.
        fields = (
            'id',
            'name',
        )
class RelatedCaseStatusSerializer(RelatedSerializerMixin, CaseStatusSerializer):
    # CaseStatusSerializer with RelatedSerializerMixin behaviour, used when
    # a status is nested inside another serializer.
    pass
class CaseTypeSerializer(serializers.ModelSerializer):
    """
    Serializer for case type model.
    """
    class Meta:
        model = CaseType
        # Fields exposed through the REST API.
        fields = (
            'id',
            'is_archived',
            'name',
            'use_as_filter',
        )
class RelatedCaseTypeSerializer(RelatedSerializerMixin, CaseTypeSerializer):
    # CaseTypeSerializer with RelatedSerializerMixin behaviour, used when
    # a type is nested inside another serializer.
    pass
class CaseSerializer(WritableNestedSerializer):
"""
Serializer for the case model.
"""
# Set non mutable fields.
created_by = RelatedLilyUserSerializer(read_only=True)
content_type = ContentTypeSerializer(
read_only=True,
help_text='This is what the object is identified as in the back-end.',
)
# Related fields.
account = RelatedAccountSerializer(
required=False,
allow_null=True,
help_text='Account for which the case is being created.',
)
contact = RelatedContactSerializer(
required=False,
allow_null=True,
help_text='Contact for which the case is being created.',
)
assigned_to = RelatedLilyUserSerializer(
required=False,
allow_null=True,
assign_only=True,
help_text='Person which the case is assigned to.',
)
assigned_to_teams = RelatedTeamSerializer(
many=True,
required=False,
assign_only=True,
help_text='List of teams the case is assigned to.',
)
type = RelatedCaseTypeSerializer(
assign_only=True,
help_text='The type of case.',
)
status = RelatedCaseStatusSerializer(
assign_only=True,
help_text='Status of the case.',
)
tags = RelatedTagSerializer(
many=True,
required=False,
create_only=True,
help_text='Any tags used to further categorize the case.',
)
| description = SanitizedHtmlCharField(
help_text='Any extra text to describe the case (supports Markdown).',
)
# Show string versions of fields.
priority_display = serializers.CharField(
source='get_priority_display',
read_only=True,
help_text='Human readable value of the case\'s priority.',
)
def validate(self, data):
contact_id = data.get('contact', {})
if isinstance(contact_id, dict):
| contact_id = contact_id.get('id')
account_id = data.get('account', {})
if isinstance(account_id, dict):
account_id = account_id.get('id')
if contact_id and account_id:
if not Function.objects.filter(contact_id=contact_id, account_id=account_id).exists():
raise serializers.ValidationError({'contact': _('Given contact must work at the account.')})
# Check if we are related and if we only passed in the id, which means user just wants new reference.
errors = {
'account': _('Please enter an account and/or contact.'),
'contact': _('Please enter an account and/or contact.'),
}
if not self.partial:
# For POST or PUT we always want to check if either is set.
if not (account_id or contact_id):
raise serializers.ValidationError(errors)
else:
# For PATCH only check the data if both account and contact are passed.
if ('account' in data and 'contact' in data) and not (account_id or contact_id):
raise serializers.ValidationError(errors)
return super(CaseSerializer, self).validate(data)
def create(self, validated_data):
user = self.context.get('request').user
assigned_to = validated_data.get('assigned_to')
validated_data.update({
'created_by_id': user.pk,
})
if assigned_to:
Group('tenant-%s' % user.tenant.id).send({
'text': anyjson.dumps({
'event': 'case-assigned',
}),
})
if assigned_to.get('id') != user.pk:
validated_data.update({
'newly_assigned': True,
})
else:
Group('tenant-%s' % user.tenant.id).send({
'text': anyjson.dumps({
'event': 'case-unassigned',
}),
})
instance = super(CaseSerializer, self).create(validated_data)
# Track newly ceated accounts in segment.
if not settings.TESTING:
analytics.track(
user.id,
'case-created', {
'expires': instance.expires,
'assigned_to_id': instance.assigned_to_id if instance.assigned_to else '',
'creation_type': 'automatic' if is_external_referer(self.context.get('request')) else 'manual',
},
)
return instance
def update(self, instance, validated_data):
user = self.context.get('request').user
status_id = validated_data.get('status', instance.status_id)
assigned_to = validated_data.get('assigned_to')
if assigned_to:
assigned_to = assigned_to.get('id')
if isinstance(status_id, dict):
status_id = status_id.get('id')
status = CaseStatus.objects.get(pk=status_id)
# Automatically archive the case if the status is set to 'Closed'.
if status.name == 'Closed' and 'is_archived' not in validated_data:
validated_data.update({
'is_archived': True
})
# Check if the case being reassigned. If so we want to notify that user.
if assigned_to and assigned_to != user.pk:
validated_data.update({
'newly_assigned': True,
})
elif 'assigned_to' in validated_data and not assigned_to:
# Case is unassigned, so clear newly assigned flag.
validated_data.update({
'newly_assigned': False,
})
if (('status' in validated_data and status.name == 'Open') or
('is_archived' in validated_data and not validated_data.get('is_archived'))):
# Case is reopened or unarchived, so we want to notify the user again.
validated_data.update({
'newly_assigned': True,
})
if 'assigned_to' in validated_data or instance.assigned_to_id:
Group('tenant-%s' % user.tenant.id).send({
'text': anyjson.serialize({
'event': 'case-assigned',
}),
})
if (not instance.assigned_to_id or
instance.assigned_to_id and
'assigned_to' in validated_data and
not validated_data.get('assigned_to')):
Group('tenant-%s' % user.tenant.id).send({
'text': anyjson.serialize({
'event': 'case-unassigned',
}),
})
return super(CaseSerializer, self).update(instance, validated_data)
class Meta:
mo |
red-hood/calendarserver | twistedcaldav/storebridge.py | Python | apache-2.0 | 150,144 | 0.003743 | # -*- test-case-name: twistedcaldav.test.test_wrapping -*-
##
# Copyright (c) 2005-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import hashlib
import time
from urlparse import urlsplit, urljoin
import uuid
from pycalendar.datetime import DateTime
from twext.enterprise.locking import LockTimeout
from twext.python.log import Logger
from twisted.internet.defer import succeed, inlineCallbacks, returnValue, maybeDeferred
from twisted.internet.protocol import Protocol
from twisted.python.hashlib import md5
from twisted.python.util import FancyEqMixin
from twistedcaldav import customxml, carddavxml, caldavxml, ical
from twistedcaldav.caldavxml import (
caldav_namespace, MaxAttendeesPerInstance, MaxInstances, NoUIDConflict
)
from twistedcaldav.carddavxml import carddav_namespace, NoUIDConflict as NovCardUIDConflict
from twistedcaldav.config import config
from twistedcaldav.customxml import calendarserver_namespace
from twistedcaldav.ical import (
Component as VCalendar, Property as VProperty,
iCalendarProductID, Component
)
from twistedcaldav.instance import (
InvalidOverriddenInstanceError, TooManyInstancesError
)
from twistedcaldav.memcachelock import MemcacheLockTimeoutError
from twistedcaldav.notifications import NotificationCollectionResource, NotificationResource
from twistedcaldav.resource import CalDAVResource, DefaultAlarmPropertyMixin, \
requiresPermissions
from twistedcaldav.scheduling_store.caldav.resource import ScheduleInboxResource
from twistedcaldav.sharing import (
invitationBindStatusToXMLMap, invitationBindModeToXMLMap
)
from twistedcaldav.util import bestAcceptType, matchClientFixes
from twistedcaldav.vcard import Component as VCard, InvalidVCardDataError
from txdav.base.propertystore.base import PropertyName
from txdav.caldav.icalendarstore import (
QuotaExceeded, AttachmentStoreFailed,
AttachmentStoreValidManagedID, Attac | hmentRemoveFailed,
AttachmentDropboxNotAllowed, InvalidComponentTypeError,
TooManyAttendeesError, InvalidCalendarAccessError, ValidOrganizerError,
InvalidPerUserDataMerge,
AttendeeAllowedError, ResourceDeletedError, InvalidAttachmentOperation,
ShareeAllowedError, DuplicatePrivateCommentsError, InvalidSplit,
AttachmentSizeTooLarge, UnknownTime | zone, SetComponentOptions)
from txdav.carddav.iaddressbookstore import (
KindChangeNotAllowedError, GroupWithUnsharedAddressNotAllowedError
)
from txdav.common.datastore.sql_tables import (
_BIND_MODE_READ, _BIND_MODE_WRITE,
_BIND_MODE_DIRECT, _BIND_STATUS_ACCEPTED
)
from txdav.common.icommondatastore import (
NoSuchObjectResourceError,
TooManyObjectResourcesError, ObjectResourceTooBigError,
InvalidObjectResourceError, ObjectResourceNameNotAllowedError,
ObjectResourceNameAlreadyExistsError, UIDExistsError,
UIDExistsElsewhereError, InvalidUIDError, InvalidResourceMove,
InvalidComponentForStoreError, AlreadyInTrashError,
HomeChildNameAlreadyExistsError
)
from txdav.idav import PropertyChangeNotAllowedError
from txdav.who.wiki import RecordType as WikiRecordType
from txdav.xml import element as davxml, element
from txdav.xml.base import dav_namespace, WebDAVUnknownElement, encodeXMLName
from txweb2 import responsecode, http_headers, http
from txweb2.dav.http import ErrorResponse, ResponseQueue, MultiStatusResponse
from txweb2.dav.noneprops import NonePropertyStore
from txweb2.dav.resource import (
TwistedACLInheritable, AccessDeniedError, davPrivilegeSet
)
from txweb2.dav.util import parentForURL, allDataFromStream, joinURL, davXMLFromStream
from txweb2.filter.location import addLocation
from txweb2.http import HTTPError, StatusResponse, Response
from txweb2.http_headers import ETag, MimeType, MimeDisposition
from txweb2.iweb import IResponse
from txweb2.responsecode import (
FORBIDDEN, NO_CONTENT, NOT_FOUND, CREATED, CONFLICT, PRECONDITION_FAILED,
BAD_REQUEST, OK, INSUFFICIENT_STORAGE_SPACE, SERVICE_UNAVAILABLE
)
from txweb2.stream import ProducerStream, readStream, MemoryStream
from twistedcaldav.timezones import TimezoneException
"""
Wrappers to translate between the APIs in L{txdav.caldav.icalendarstore} and
L{txdav.carddav.iaddressbookstore} and those in L{twistedcaldav}.
"""
log = Logger()
class _NewStorePropertiesWrapper(object):
    """
    Wrap a new-style property store (a L{txdav.idav.IPropertyStore}) in the old-
    style interface for compatibility with existing code.
    """

    # FIXME: UID arguments on everything need to be tested against something.
    def __init__(self, newPropertyStore):
        """
        Initialize an old-style property store from a new one.

        @param newPropertyStore: the new-style property store.
        @type newPropertyStore: L{txdav.idav.IPropertyStore}
        """
        self._newPropertyStore = newPropertyStore

    @classmethod
    def _convertKey(cls, qname):
        """
        Convert an old-style C{(namespace, name)} tuple into the
        L{PropertyName} key used by the new store.
        """
        namespace, name = qname
        return PropertyName(namespace, name)

    def get(self, qname):
        """
        Return the property for C{qname}, translating a missing key into a
        404 L{HTTPError} as the old interface expects.
        """
        try:
            return self._newPropertyStore[self._convertKey(qname)]
        except KeyError:
            raise HTTPError(StatusResponse(
                NOT_FOUND,
                "No such property: %s" % (encodeXMLName(*qname),)
            ))

    def set(self, prop):
        """
        Store C{prop} under its qualified name, translating a refusal by
        the store into a 403 L{HTTPError}.
        """
        try:
            self._newPropertyStore[self._convertKey(prop.qname())] = prop
        except PropertyChangeNotAllowedError:
            raise HTTPError(StatusResponse(
                FORBIDDEN,
                "Property cannot be changed: %s" % (prop.sname(),)
            ))

    def delete(self, qname):
        """
        Remove the property named C{qname} if present.
        """
        try:
            del self._newPropertyStore[self._convertKey(qname)]
        except KeyError:
            # RFC 2518 Section 12.13.1 says that removal of
            # non-existing property is not an error.
            pass

    def contains(self, qname):
        """
        Whether a property named C{qname} exists in the store.
        """
        return (self._convertKey(qname) in self._newPropertyStore)

    def list(self):
        """
        Return all property names as old-style C{(namespace, name)} tuples.
        """
        return [(pname.namespace, pname.name) for pname in
                self._newPropertyStore.keys()]
class _NewStoreFileMetaDataHelper(object):
def exists(self):
return self._newStoreObject is not None
def name(self):
return self._newStoreObject.name() if self._newStoreObject is not None else self._name
def etag(self):
return succeed(ETag(self._newStoreObject.md5()) if self._newStoreObject is not None else None)
def contentType(self):
return self._newStoreObject.contentType() if self._newStoreObject is not None else None
def contentLength(self):
return self._newStoreObject.size() if self._newStoreObject is not None else None
def lastModified(self):
return self._newStoreObject.modified() if self._newStoreObject is not None else None
def creationDate(self):
return self._newStoreObject.created() if self._newStoreObject is not None else None
def newStoreProperties(self):
return self._newStoreObject.properties() if self._newStoreObject is not None else None
class _CommonHomeChildCollectionMixin(object):
"""
Methods for things which are like calendars.
"""
_childClass = None
def _initializeWithHomeChild(self, child, home):
"""
Initialize with a home child object.
@param child: the new store home child object.
@type calendar: L{txdav.common._.CommonHomeChild}
@param home: the home through which the given home child was accessed.
@type home: L{txdav.common._.CommonHome}
"""
self._newStoreObject = child
self._newStoreParentHome = home._newStoreHome
self._par |
michaelBenin/django-jinja | django_jinja/contrib/humanize/templatetags/_humanize.py | Python | bsd-3-clause | 624 | 0.001603 | # -*- coding: utf-8 -*-
from django.contrib.humanize.templatetags import humanize
from django_jinja import library

lib = library.Library()


# Each filter below simply proxies the corresponding Django
# django.contrib.humanize template filter into Jinja2.
# (A dataset split marker inside the import line and inside naturalday's
# body has been repaired.)

@lib.filter
def ordinal(source):
    """Proxy humanize's ``ordinal`` (1 -> "1st")."""
    return humanize.ordinal(source)


@lib.filter
def intcomma(source, use_l10n=True):
    """Proxy ``intcomma``; ``use_l10n`` toggles locale-aware separators."""
    return humanize.intcomma(source, use_l10n)


@lib.filter
def intword(source):
    """Proxy ``intword`` (1000000 -> "1.0 million")."""
    return humanize.intword(source)


@lib.filter
def apnumber(source):
    """Proxy ``apnumber`` (AP style: spell out 1-9)."""
    return humanize.apnumber(source)


@lib.filter
def naturalday(source, arg=None):
    """Proxy ``naturalday``; ``arg`` is the fallback date format."""
    return humanize.naturalday(source, arg)


@lib.filter
def naturaltime(source):
    """Proxy ``naturaltime`` ("3 minutes ago")."""
    return humanize.naturaltime(source)
niceguydave/wagtail-cookiecutter-foundation | {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/urls.py | Python | mit | 1,301 | 0.002306 | from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailsearch import urls as wagtailsearch_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
from wagtail.wagtailcore import urls as wagtail_urls
from feeds.feeds import BlogFeed
admin.autodiscover()


urlpatterns = patterns('',
    url(r'^django-admin/', include(admin.site.urls)),

    url(r'^admin/', include(wagtailadmin_urls)),
    url(r'^search/', include(wagtailsearch_urls)),
    url(r'^documents/', include(wagtaildocs_urls)),
    url(r'^blog/feed/$', BlogFeed(), name='blog_feed'),

    # For anything not caught by a more specific rule above, hand over to
    # Wagtail's serving mechanism
    url(r'', include(wagtail_urls)),
)


if settings.DEBUG:
    # Development-only routes: serve static/media files and the favicon
    # directly.  (Dataset split markers inside the import line and the
    # RedirectView.as_view call have been repaired.)
    from django.contrib.staticfiles.urls import staticfiles_urlpatterns
    from django.views.generic.base import RedirectView

    urlpatterns += staticfiles_urlpatterns()
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
    urlpatterns += [
        url(r'^favicon\.ico$', RedirectView.as_view(url=settings.STATIC_URL + 'favicon.ico', permanent=True))
    ]
hsk81/protobuf-rpc-js | example/server/py/rpc-server.py | Python | bsd-3-clause | 4,325 | 0.003699 | #!/usr/bin/env python
###############################################################################
import argparse, os
import tornado.web
import tornado.websocket
import tornado.ioloop
###############################################################################
###############################################################################
from protocol.rpc_pb2 import Rpc
from protocol import reflector_pb2 as Reflector
from protocol import calculator_pb2 as Calculator
###############################################################################
###############################################################################
def process(data):
    """Decode an Rpc.Request, dispatch to the matching service method and
    return the serialized Rpc.Response bytes.

    (Dataset split markers in the raise statement and in
    ``SerializeToString`` have been repaired.)

    :param data: wire bytes of a protocol.rpc_pb2 Rpc.Request.
    :raises Exception: when the request names an unknown service method.
    """
    rpc_req = Rpc.Request()
    rpc_req.ParseFromString(data)

    # Dispatch on the fully qualified method name.
    if rpc_req.name == '.Reflector.Service.ack':
        req = Reflector.AckRequest()
        req.ParseFromString(rpc_req.data)
        res = Reflector.AckResult()
        res.timestamp = req.timestamp
    elif rpc_req.name == '.Calculator.Service.add':
        req = Calculator.AddRequest()
        req.ParseFromString(rpc_req.data)
        res = Calculator.AddResult()
        res.value = req.lhs + req.rhs
    elif rpc_req.name == '.Calculator.Service.sub':
        req = Calculator.SubRequest()
        req.ParseFromString(rpc_req.data)
        res = Calculator.SubResult()
        res.value = req.lhs - req.rhs
    elif rpc_req.name == '.Calculator.Service.mul':
        req = Calculator.MulRequest()
        req.ParseFromString(rpc_req.data)
        res = Calculator.MulResult()
        res.value = req.lhs * req.rhs
    elif rpc_req.name == '.Calculator.Service.div':
        req = Calculator.DivRequest()
        req.ParseFromString(rpc_req.data)
        res = Calculator.DivResult()
        res.value = req.lhs / req.rhs
    else:
        raise Exception('{0}: not supported'.format(rpc_req.name))

    # Echo the request id back so the client can correlate the response.
    rpc_res = Rpc.Response()
    rpc_res.data = res.SerializeToString()
    rpc_res.id = rpc_req.id
    return rpc_res.SerializeToString()
###############################################################################
###############################################################################
class WebSocketHandler(tornado.websocket.WebSocketHandler):
    # Serves the RPC dispatcher over a WebSocket transport.
    def on_message(self, data):
        # Each frame carries one serialized Rpc.Request; the serialized
        # Rpc.Response is written back as a binary frame.
        if arguments.logging:
            print '[on:message]', repr(data)
        self.write_message(process(data), binary=True)

    def check_origin(self, origin):
        # Accept cross-origin WebSocket connections from any host.
        return True
ws_application = tornado.web.Application([(r'/', WebSocketHandler)])
###############################################################################
class XhrHandler(tornado.web.RequestHandler):
    # Serves the RPC dispatcher over plain XHR POSTs.
    def post(self):
        # The POST body carries one serialized Rpc.Request.
        if arguments.logging:
            print '[on:message]', repr(self.request.body)
        # latin-1 maps bytes 1:1 to code points, so the binary response
        # survives tornado's text write unchanged.
        self.write(process(self.request.body).decode('latin-1'))

    def set_default_headers(self):
        # Allow cross-origin XHR requests from any host.
        self.add_header('Access-Control-Allow-Origin', '*')
xhr_application = tornado.web.Application([(r'/', XhrHandler)])
###############################################################################
###############################################################################
if __name__ == "__main__":
    global arguments

    # Command-line / environment configuration for both transports.
    # NOTE(review): both ports default from the same RPC_PORT variable —
    # setting it forces WS and XHR onto the same port; confirm intended.
    parser = argparse.ArgumentParser(prog='RPC Server',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-v', '--version', action='version',
        version='%(prog)s 1.1.2')
    parser.add_argument('-l', '--logging',
        default=os.environ.get('LOGGING', False), action='store_true',
        help='Logging')
    parser.add_argument('--ws-port', metavar='WS_PORT', type=int,
        default=os.environ.get('RPC_PORT', 8089), nargs='?',
        help='WS Server Port')
    parser.add_argument('--xhr-port', metavar='XHR_PORT', type=int,
        default=os.environ.get('RPC_PORT', 8088), nargs='?',
        help='XHR Server Port')  # fixed: was a copy-paste of the WS help text
    parser.add_argument('--json',
        default=os.environ.get('JSON', False), action='store_true',
        help='JSON encoding [NOT SUPPORTED]')
    arguments = parser.parse_args()

    # Serve the same RPC dispatcher over both transports on one IO loop.
    ws_application.listen(arguments.ws_port)
    xhr_application.listen(arguments.xhr_port)
    tornado.ioloop.IOLoop.instance().start()
###############################################################################
###############################################################################
|
asifmadnan/protobuf2uml | bin/protobuf/ProtoParser.py | Python | apache-2.0 | 86,857 | 0.010868 | # Generated from java-escape by ANTLR 4.4
from antlr4 import *
from io import StringIO
package = globals().get("__package__", None)
ischild = len(package)>0 if package is not None else False
if ischild:
from .ProtoParserListener import ProtoParserListener
else:
from ProtoParserListener import ProtoParserListener
def serializedATN():
with StringIO() as buf:
buf.write("\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3,")
buf.write("\u0120\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\3\2\3\2\3\3")
buf.write("\3\3\5\3M\n\3\3\4\3\4\3\5\3\5\5\5S\n\5\3\6\3\6\3\6\3\6")
buf.write("\3\6\3\6\3\6\7\6\\\n\6\f\6\16\6_\13\6\3\6\3\6\3\7\3\7")
buf.write("\3\7\3\7\3\b\3\b\3\t\3\t\5\tk\n\t\3\t\3\t\3\t\3\n\3\n")
buf.write("\3\13\3\13\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3\f\7\f|\n")
buf.write("\f\f\f\16\f\177\13\f\3\f\3\f\3\r\3\r\3\r\3\r\3\16\3\16")
buf.write("\5\16\u0089\n\16\3\17\3\17\7\17\u008d\n\17\f\17\16\17")
buf.write("\u0090\13\17\3\17\3\17\3\20\3\20\3\20\3\20\3\21\3\21\3")
buf.write("\21\3\21\3\21\7\21\u009d\n\21\f\21\16\21\u00a0\13\21\5")
buf.write("\21\u00a2\n\21\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23")
buf.write("\3\24\3\24\7\24\u00ae\n\24\f\24\16\24\u00b1\13\24\3\25")
buf.write("\3\25\3\25\3\25\5\25\u00b7\n\25\3\25\3\25\3\26\3\26\3")
buf.write("\26\3\26\5\26\u00bf\n\26\3\26\3\26\3\27\3\27\3\30\3\30")
buf.write("\3\30\3\30\3\30\6\30\u00ca\n\30\r\30\16\30\u00cb\3\31")
buf.write("\3\31\3\31\3\31\3\31\3\31\5\31\u00d4\n\31\3\31\3\31\3")
buf.write("\32\3\32\3\32\3\32\3\32\5\32\u00dd\n\32\3\32\3\32\3\33")
buf.write("\3\33\3\33\3\33\5\33\u00e5\n\33\3\33\3\33\3\34\3\34\3")
buf.write("\35\3\35\3\35\3\35\6\35\u00ef\n\35\r\35\16\35\u00f0\3")
buf.write("\36\3\36\3\36\3\36\5\36\u00f7\n\36\3\36\3\36\3\37\3\37")
buf.write("\3 \3 \6 \u00ff\n \r \16 \u0100\3!\3!\3!\3!\3!\3!\3!\3")
buf.write("!\3!\3!\3!\7!\u010e\n!\f!\16!\u0111\13!\3!\3!\5!\u0115")
buf.write("\n!\3!\5!\u0118\n!\3\"\3\"\3#\3#\3$\3$\3$\2\2%\2\4\6\b")
buf.write("\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668")
buf.write(":<>@BDF\2\4\3\2#$\3\2\37\"\u0121\2H\3\2\2\2\4L\3\2\2\2")
buf.write("\6N\3\2\2\2\bR\3\2\2\2\n]\3\2\2\2\fb\3\2\2\2\16f\3\2\2")
buf.write("\2\20h\3\2\2\2\22o\3\2\2\2\24q\3\2\2\2\26w\3\2\2\2\30")
buf.write("\u0082\3\2\2\2\32\u0088\3\2\2\2\34\u008a\3\2\2\2\36\u0093")
buf.write("\3\2\2\2 \u00a1\3\2\2\2\"\u00a3\3\2\2\2$\u00a9\3\2\2\2")
buf.write("&\u00af\3\2\2\2(\u00b2\3\2\2\2*\u00ba\3\2\2\2,\u00c2\3")
buf.write("\2\2\2.\u00c9\3\2\2\2\60\u00cd\3\2\2\2\62\u00d7\3\2\2")
buf.write("\2\64\u00e0\3\2\2\2\66\u00e8\3\2\2\28\u00ee\3\2\2\2:\u00f2")
buf.write("\3\2\2\2<\u00fa\3\2\2\2>\u00fe\3\2\2\2@\u0102\3\2\2\2")
buf.write("B\u0119\3\2\2\2D\u011b\3\2\2\2F\u011d\3\2\2\2HI\t\2\2")
buf.write("\2I\3\3\2\2\2JM\7#\2\2KM\5\6\4\2LJ\3\2\2\2LK\3\2\2\2M")
buf.write("\5\3\2\2\2NO\t\3\2\2O\7\3\2\2\2PS\7\36\2\2QS\5\2\2\2R")
buf.write("P\3\2\2\2RQ\3\2\2\2S\t\3\2\2\2T\\\5\f\7\2U\\\5\20\t\2")
buf.write("V\\\5\24\13\2W\\\5\"\22\2X\\\5\64\33\2Y\\\5*\26\2Z\\\5")
buf.write(":\36\2[T\3\2\2\2[U\3\2\2\2[V\3\2\2\2[W\3\2\2\2[X\3\2\2")
buf.write("\2[Y\3\2\2\2[Z\3\2\ | 2\2\\_\3\2\2\2][\3\2\2\2]^\3\2\2\2")
buf.write("^`\3\2\2\2_]\3\2\2\2`a\7\2\2\3a\13\3\2\2\2bc\7\7\2\2c")
buf.write("d\5\16\b\2de\7\34\2\2e\r\3\2\2\2fg\5\2\2\2g\17\3\2\2\2")
buf.write("hj\7\b\2\2ik\7\6\2\2ji\3\2\2\2jk\3\2\2\2kl\3\2\2\2lm\5") |
buf.write("\22\n\2mn\7\34\2\2n\21\3\2\2\2op\7 \2\2p\23\3\2\2\2qr")
buf.write("\7\t\2\2rs\5 \21\2st\7\31\2\2tu\5\32\16\2uv\7\34\2\2v")
buf.write("\25\3\2\2\2wx\7\27\2\2x}\5\30\r\2yz\7\33\2\2z|\5\30\r")
buf.write("\2{y\3\2\2\2|\177\3\2\2\2}{\3\2\2\2}~\3\2\2\2~\u0080\3")
buf.write("\2\2\2\177}\3\2\2\2\u0080\u0081\7\30\2\2\u0081\27\3\2")
buf.write("\2\2\u0082\u0083\5 \21\2\u0083\u0084\7\31\2\2\u0084\u0085")
buf.write("\5\32\16\2\u0085\31\3\2\2\2\u0086\u0089\5\4\3\2\u0087")
buf.write("\u0089\5\34\17\2\u0088\u0086\3\2\2\2\u0088\u0087\3\2\2")
buf.write("\2\u0089\33\3\2\2\2\u008a\u008e\7\23\2\2\u008b\u008d\5")
buf.write("\36\20\2\u008c\u008b\3\2\2\2\u008d\u0090\3\2\2\2\u008e")
buf.write("\u008c\3\2\2\2\u008e\u008f\3\2\2\2\u008f\u0091\3\2\2\2")
buf.write("\u0090\u008e\3\2\2\2\u0091\u0092\7\24\2\2\u0092\35\3\2")
buf.write("\2\2\u0093\u0094\7#\2\2\u0094\u0095\7\32\2\2\u0095\u0096")
buf.write("\5\32\16\2\u0096\37\3\2\2\2\u0097\u00a2\7#\2\2\u0098\u0099")
buf.write("\7\25\2\2\u0099\u009a\5\2\2\2\u009a\u009e\7\26\2\2\u009b")
buf.write("\u009d\7%\2\2\u009c\u009b\3\2\2\2\u009d\u00a0\3\2\2\2")
buf.write("\u009e\u009c\3\2\2\2\u009e\u009f\3\2\2\2\u009f\u00a2\3")
buf.write("\2\2\2\u00a0\u009e\3\2\2\2\u00a1\u0097\3\2\2\2\u00a1\u0098")
buf.write("\3\2\2\2\u00a2!\3\2\2\2\u00a3\u00a4\7\n\2\2\u00a4\u00a5")
buf.write("\5$\23\2\u00a5\u00a6\7\23\2\2\u00a6\u00a7\5&\24\2\u00a7")
buf.write("\u00a8\7\24\2\2\u00a8#\3\2\2\2\u00a9\u00aa\7#\2\2\u00aa")
buf.write("%\3\2\2\2\u00ab\u00ae\5\24\13\2\u00ac\u00ae\5(\25\2\u00ad")
buf.write("\u00ab\3\2\2\2\u00ad\u00ac\3\2\2\2\u00ae\u00b1\3\2\2\2")
buf.write("\u00af\u00ad\3\2\2\2\u00af\u00b0\3\2\2\2\u00b0\'\3\2\2")
buf.write("\2\u00b1\u00af\3\2\2\2\u00b2\u00b3\7#\2\2\u00b3\u00b4")
buf.write("\7\31\2\2\u00b4\u00b6\7\37\2\2\u00b5\u00b7\5\26\f\2\u00b6")
buf.write("\u00b5\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00b8\3\2\2\2")
buf.write("\u00b8\u00b9\7\34\2\2\u00b9)\3\2\2\2\u00ba\u00bb\7\13")
buf.write("\2\2\u00bb\u00bc\5,\27\2\u00bc\u00be\7\23\2\2\u00bd\u00bf")
buf.write("\5.\30\2\u00be\u00bd\3\2\2\2\u00be\u00bf\3\2\2\2\u00bf")
buf.write("\u00c0\3\2\2\2\u00c0\u00c1\7\24\2\2\u00c1+\3\2\2\2\u00c2")
buf.write("\u00c3\7#\2\2\u00c3-\3\2\2\2\u00c4\u00ca\5\24\13\2\u00c5")
buf.write("\u00ca\5\60\31\2\u00c6\u00ca\5*\26\2\u00c7\u00ca\5\"\22")
buf.write("\2\u00c8\u00ca\5\62\32\2\u00c9\u00c4\3\2\2\2\u00c9\u00c5")
buf.write("\3\2\2\2\u00c9\u00c6\3\2\2\2\u00c9\u00c7\3\2\2\2\u00c9")
buf.write("\u00c8\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\u00c9\3\2\2\2")
buf.write("\u00cb\u00cc\3\2\2\2\u00cc/\3\2\2\2\u00cd\u00ce\7\35\2")
buf.write("\2\u00ce\u00cf\5\b\5\2\u00cf\u00d0\7#\2\2\u00d0\u00d1")
buf.write("\7\31\2\2\u00d1\u00d3\7\37\2\2\u00d2\u00d4\5\26\f\2\u00d3")
buf.write("\u00d2\3\2\2\2\u00d3\u00d4\3\2\2\2\u00d4\u00d5\3\2\2\2")
buf.write("\u00d5\u00d6\7\34\2\2\u00d6\61\3\2\2\2\u00d7\u00d8\7\r")
buf.write("\2\2\u00d8\u00d9\7\37\2\2\u00d9\u00dc\7\16\2\2\u00da\u00dd")
buf.write("\7\37\2\2\u00db\u00dd\7\17\2\2\u00dc\u00da\3\2\2\2\u00dc")
buf.write("\u00db\3\2\2\2\u00dd\u00de\3\2\2\2\u00de\u00df\7\34\2")
buf.write("\2\u00df\63\3\2\2\2\u00e0\u00e1\7\f\2\2\u00e1\u00e2\5")
buf.write("\66\34\2\u00e2\u00e4\7\23\2\2\u00e3\u00e5\58\35\2\u00e4")
buf.write("\u00e3\3\2\2\2\u00e4\u00e5\3\2\2\2\u00e5\u00e6\3\2\2\2")
buf.write("\u00e6\u00e7\7\24\2\2\u00e7\65\3\2\2\2\u00e8\u00e9\5\2")
buf.write("\2\2\u00e9\67\3\2\2\2\u00ea\u00ef\5\24\13\2\u00eb\u00ef")
buf.write("\5\60\31\2\u00ec\u00ef\5*\26\2\u00ed\u00ef\5\"\22\2\u00ee")
buf.write("\u00ea\3\2\2\2\u00ee\u00eb\3\2\2\2\u00ee\u00ec\3\2\2\2")
buf.write("\u00ee\u00ed\3\2\2\2\u00ef\u00f0\3\2\2\2\u00f0\u00ee\3")
buf.write("\2\2\2\u00f0\u00f1\3\2\2\2\u00f19\3\2\2\2\u00f2\u00f3")
buf.write("\7\20\2\2\u00f3\u00f4\5<\37\2\u00f4\u00f6\7\23\2\2\u00f5")
buf.write("\u00f7\5> |
0vercl0k/rp | src/third_party/beaengine/tests/0fc2.py | Python | mit | 11,629 | 0.005074 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# @author : beaengine@gmail.com
from headers.BeaEnginePython import *
from nose.tools import *
class TestSuite:
def test(self):
compare = [
'eq', 'lt', 'le', 'unord', 'neq', 'nlt', 'nle', 'ord',
"eq_uq", "nge", "ngt", "false", "neq_oq", "ge", "gt",
"true", "eq_os", "lt_oq", "le_oq", "unord_s", "neq_us",
"nlt_uq", "nle_uq", "ord_s", "eq_us", "nge_uq", "ngt_uq",
"false_os", "neq_os", "ge_oq", "gt_oq", "true_us"
]
# 66 0F C2 /r ib
# CMPPD xmm1, xmm2/m128, imm8
for i in range(0,8):
Buffer = bytes.fromhex('660fc220{:02x}'.format(i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(hex(myDisasm.infos.Instruction.Opcode), '0xfc2')
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'cmp{}pd'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'cmp{}pd xmm4, xmmword ptr [rax], {:02x}h'.format(compare[i], i))
# VEX.NDS.128.66.0F.WIG C2 /r ib
# VCMPPD xmm1, xmm2, xmm3/m128, imm8
for i in range(0,0x20):
myVEX = VEX('VEX.NDS.128.66.0F.WIG')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myVEX.c4(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}pd'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}pd xmm12, xmm15, xmmword ptr [r8], {:02X}h'.format(compare[i], i))
# VEX.NDS.256.66.0F.WIG C2 /r ib
# VCMPPD ymm1, ymm2, ymm3/m256, imm8
for i in range(0,0x20):
myVEX = VEX('VEX.NDS.256.66.0F.WIG')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myVEX.c4(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}pd'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}pd ymm12, ymm15, ymmword ptr [r8], {:02X}h'.format(compare[i], i))
# EVEX.NDS.128.66.0F.W1 C2 /r ib
# VCMPPD k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8
for i in range(0,0x20):
myEVEX = EVEX('EVEX.NDS.128.66.0F.W1')
myEVEX.R = 0
Buffer = bytes.fromhex('{}c220{:02x}'.format(myEVEX.prefix(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}pd'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}pd k?, xmm31, xmmword ptr [r8], {:02X}h'.format(compare[i], i))
# EVEX.NDS.256.66.0F.W1 C2 /r ib
# VCMPPD k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8
for i in range(0,0x20):
myEVEX = EVEX('EVEX.NDS.256.66.0F.W1')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myEVEX.prefix(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}pd'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}pd k?, ymm31, ymmword ptr [r8], {:02X}h'.format(compare[i], i))
# EVEX.NDS.512.66.0F.W1 C2 /r ib
# VCMPPD k1 {k2}, zmm2, zmm3/m512/m64bcst{sae}, imm8
for i in range(0,0x20):
myEVEX = EVEX('EVEX.NDS.512.66.0F.W1')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myEVEX.prefix(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}pd'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}pd k?, zmm31, zmmword ptr [r8], {:02X}h'.format(compare[i], i))
# NP 0F C2 /r ib
# CMPPS xmm1, xmm2/m128, imm8
for i in range(0,8):
Buffer = bytes.fromhex('0fc220{:02x}'.format(i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(hex(myDisasm.infos.Instruction.Opcode), '0xfc2')
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'cmp{}ps'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'cmp{}ps xmm4, xmmword ptr [rax], {:02x}h'.format(compare[i], i))
# VEX.NDS.128.0F.WIG C2 /r ib
# VCMPPS xmm1, xmm2, xmm3/m128, imm8
for i in range(0,0x20):
myVEX = VEX('VEX.NDS.128.0F.WIG')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myVEX.c4(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}ps'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}ps xmm12, xmm15, xmmword ptr [r8], {:02X}h'.format(compare[i], i))
# VEX.NDS.256.0F.WIG C2 /r ib
# VCMPPS ymm1, ymm2, ymm3/m256, imm8
for i in range(0,0x20):
myVEX = VEX('VEX.NDS.256.0F.WIG')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myVEX.c4(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}ps'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}ps ymm12, ymm15, ymmword ptr [r8], {:02X}h'.format(compare[i], i))
# EVEX.NDS.128.0F.W0 C2 /r ib
# VCMPPS k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8
for i in range(0,0x20):
myEVEX = EVEX('EVEX.NDS. | 128. | 0F.W0')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myEVEX.prefix(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}ps'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}ps k?, xmm31, xmmword ptr [r8], {:02X}h'.format(compare[i], i))
# EVEX.NDS.256.0F.W0 C2 /r ib
# VCMPPS k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8
for i in range(0,0x20):
myEVEX = EVEX('EVEX.NDS.256.0F.W0')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myEVEX.prefix(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}ps'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}ps k?, ymm31, ymmword ptr [r8], {:02X}h'.format(compare[i], i))
# EVEX.NDS.512.0F.W0 C2 /r ib
# VCMPPS k1 {k2}, zmm2, zmm3/m512/m32bcst{sae}, imm8
for i in range(0,0x20):
myEVEX = EVEX('EVEX.NDS.512.0F.W0')
Buffer = bytes.fromhex('{}c220{:02x}'.format(myEVEX.prefix(), i))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0xc2)
assert_equal(myDisasm.infos.Instruction.Mnemonic, 'vcmp{}ps'.format(compare[i]).encode())
assert_equal(myDisasm.repr(), 'vcmp{}ps k?, zmm |
ayust/kitnirc | kitnirc/__init__.py | Python | mit | 444 | 0 | """KitnIRC - A Python IRC Bot Framework."""
import logging
from kitnirc import client
from kitnirc import events
from kitnirc import modular
from kitnirc import user
__version__ = "0.3.1"
# Prevents output of "no handler found" if n | o other | log handlers are added
_log = logging.getLogger("kitnirc")
_log.addHandler(logging.NullHandler())
__all__ = [
"client",
"events",
"modular",
"user",
]
# vim: set ts=4 sts=4 sw=4 et:
|
saullocastro/pyNastran | pyNastran/bdf/dev_vectorized/cards/elements/solid/chexa20.py | Python | lgpl-3.0 | 13,197 | 0.001667 | from six.moves import zip
from numpy import arange, searchsorted, array, eye, ones
from numpy.linalg import norm
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.bdf_interface.assign_type import integer, integer_or_blank
from pyNastran.bdf.dev_vectorized.cards.elements.solid.chexa8 import quad_area_centroid, volume8
from pyNastran.bdf.dev_vectorized.cards.elements.solid.solid_element import SolidElement
class CHEXA20(SolidElement):
type = 'CHEXA20'
nnodes = 20
    def __init__(self, model):
        """
        Defines the CHEXA object.

        Parameters
        ----------
        model : BDF
            the BDF object
        """
        # The base class presumably allocates the vectorized storage
        # (element_id / property_id / node_ids arrays used below) --
        # confirm against SolidElement.
        SolidElement.__init__(self, model)
def add_card(self, card, comment=''):
i = self.i
eid = integer(card, 1, 'element_id')
if comment:
self.set_comment(eid, comment)
#: Element ID
self.element_id[i] = eid
#: Property ID
self.property_id[i] = integer(card, 2, 'property_id')
#: Node IDs
nids = array([
integer(card, 3, 'node_id_1'), integer(card, 4, 'node_id_2'),
integer(card, 5, 'node_id_3'), integer(card, 6, 'node_id_4'),
integer(card, 7, 'node_id_5'), integer(card, 8, 'node_id_6'),
integer(card, 9, 'node_id_7'), integer(card, 10, 'node_id_8'),
integer_or_blank(card, 11, 'node_id_9', 0),
integer_or_blank(card, 12, 'node_id_10', 0),
integer_or_blank(card, 13, 'node_id_11', 0),
integer_or_blank(card, 14, 'node_id_12', 0),
integer_or_blank(card, 15, 'node_id_13', 0),
integer_or_blank(card, 16, 'node_id_14', 0),
integer_or_blank(card, 17, 'node_id_15', 0),
integer_or_blank(card, 18, 'node_id_16', 0),
integer_or_blank(card, 19, 'node_id_17', 0),
integer_or_blank(card, 20, 'node_id_18', 0),
integer_or_blank(card, 21, 'node_id_19', 0),
integer_or_blank(card, 22, 'node_id_20', 0)
], dtype='int32')
self.node_ids[i, :] = nids
assert len(card) <= 23, 'len(CHEXA20 card) = %i\ncard=%s' % (len(card), card)
self.i += 1
def build(self):
if self.n:
i = self.element_id.argsort()
self.element_id = self.element_id[i]
self.property_id = self.property_id[i]
self.node_ids = self.node_ids[i, :]
self._cards = []
else:
self.element_id = array([], dtype='int32')
self.property_id = array([], dtype='int32')
def update(self, maps):
"""
maps = {
'node_id' : nid_map,
'property' : pid_map,
}
"""
if self.n:
eid_map = maps['element']
nid_map = maps['node']
pid_map = maps['property']
for i, (eid, pid, nids) in enumerate(zip(self.element_id, self.property_id, self.node_ids)):
print(self.print_card(i))
self.element_id[i] = eid_map[eid]
self.property_id[i] = pid_map[pid]
self.node_ids[i, 0] = nid_map[nids[0]]
self.node_ids[i, 1] = nid_map[nids[1]]
self.node_ids[i, 2] = nid_map[nids[2]]
self.node_ids[i, 3] = nid_map[nids[3]]
self.node_ids[i, 4] = nid_map[nids[4]]
self.node_ids[i, 5] = nid_map[nids[5]]
self.node_ids[i, 6] = nid_map[nids[6]]
self.node_ids[i, 7] = nid_map[nids[7]]
self.node_ids[i, 8] = nid_map[nids[8]]
self.node_ids[i, 9] = nid_map[nids[9]]
self.node_ids[i, 10] = nid_map[nids[10]]
self.node_ids[i, 11] = nid_map[nids[11]]
self.node_ids[i, 12] = nid_map[nids[12]]
self.node_ids[i, 13] = nid_map[nids[13]]
self.node_ids[i, 14] = nid_map[nids[14]]
self.node_ids[i, 15] = nid_map[nids[15]]
self.node_ids[i, 16] = nid_map[nids[16]]
self.node_ids[i, 17] = nid_map[nids[17]]
self.node_ids[i, 18] = nid_map[nids[18]]
self.node_ids[i, 19] = nid_map[nids[19]]
def get_mass_matrix(self, i, model, positions, index0s, is_lumped=True):
nnodes = 8
ndof = 3 * nnodes
pid = self.property_id[i]
rho = self.model.elements.properties_solid.psolid.get_density_by_property_id(pid)[0]
n0, n1, n2, n3, n4, n5, n6, n7 = self.node_ids[i, :]
V = volume8(positions[self.node_ids[i, 0]],
positions[self.node_ids[i, 1]],
positions[self.node_ids[i, 2]],
positions[self.node_ids[i, 3]],
positions[self.node_ids[i, 4]],
positions[self.node_ids[i, 5]],
positions[self.node_ids[i, 6]],
positions[self.node_ids[i, 7]],
)
mass = rho * V
if is_lumped:
mi = mass / 4.
nnodes = 4
M = eye(ndof, dtype='float32')
else:
mi = mass / 20.
M = ones((ndof, ndof), dtype='float32')
for i in range(nnodes):
j = i * 3
M[j:j+3, j:j+3] = 2.
M *= mi
dofs, nijv = self.get_dofs_nijv(index0s, n0, n1, n2, n3, n4, n5, n6, n7)
return M, dofs, nijv
def get_stiffness_matrix(self, i, model, positions, index0s):
return K, dofs, nijv
def get_dofs_nijv(self, index0s, n0, n1, n2, n3, n4, n5, n6, n7):
i0 = index0s[n0]
i1 = index0s[n1]
i2 = index0s[n2]
i3 = index0s[n3]
i4 = index0s[n4]
i5 = index0s[n5]
i6 = index0s[n6]
i7 = index0s[n7]
dofs = array([
i0, i0+1, i0+2,
i1, i1+1, i1+2,
i2, i2+1, i2+2,
i3, i3+1, i3+2,
i4, i4+1, i4+2,
i5, i5+1, i5+2,
i6, i6+1, i6+2,
i7, i7+1, i7+2,
], 'int32')
nijv = [
# translation
(n0, 1), (n0, 2), (n0, 3),
(n1, 1), (n1, 2), (n1, 3),
(n2, 1), (n2, 2), (n2, 3),
(n3, 1), (n3, 2), (n3, 3),
(n4, 1), (n4, 2), (n4, 3),
(n5, 1), (n5, 2), (n5, 3),
(n6, 1), (n6, 2), (n6, 3),
(n7, 1), (n7, 2), (n7, 3),
]
return dofs, nijv
    def _verify(self, xref=True):
        # NOTE(review): this looks like stale scalar-element code.  This
        # vectorized class stores `element_id`/`property_id` arrays and does
        # not appear to define `self.eid`, `self.Pid()`, `self.centroid()` or
        # `self.volume()` -- confirm against SolidElement before relying on it.
        eid = self.eid
        pid = self.Pid()
        nids = self.node_ids
        assert isinstance(eid, int)
        assert isinstance(pid, int)
        for i, nid in enumerate(nids):
            assert isinstance(nid, int), 'nid%i is not an integer; nid=%s' %(i, nid)
        if xref:
            c = self.centroid()
            v = self.volume()
            assert isinstance(v, float)
            for i in range(3):
                assert isinstance(c[i], float)
def get_node_indicies(self, i=None):
if i is None:
i1 = self.model.grid.get_n | ode_index_by_node_id(self.node_ids[:, 0])
i2 = self.model.grid.get_node_index_by_node_id(self.node_ids[:, 1])
i3 = self.model.grid.get_node_index_by_node_id(self.node_ids[:, 2])
i4 = self.model.grid.get_node_index_by_node_id(self.node_ids[:, 3])
i5 | = self.model.grid.get_node_index_by_node_id(self.node_ids[:, 4])
i6 = self.model.grid.get_node_index_by_node_id(self.node_ids[:, 5])
i7 = self.model.grid.get_node_index_by_node_id(self.node_ids[:, 6])
i8 = self.model.grid.get_node_index_by_node_id(self.node_ids[:, 7])
else:
i1 = self.model.grid.get_node_index_by_node_id(self.node_ids[i, 0])
i2 = self.model.grid.get_node_index_by_node_id(self.node_ids[i, 1])
i3 = self.model.grid.get_node_index_by_node_id(self.node_ids[i, 2])
i4 = self.model.grid.get_node_index_by_node_id(self.node_ids[i, 3])
i5 = self.model.grid.get_node_index_by_node_id(self.node_ids[i, 4])
i6 = self.model.grid.get_node_index_by_node_id(self.node_ids[i |
realms-team/solmanager | libs/smartmeshsdk-REL-1.3.0.1/libs/VManagerSDK/vmanager/models/notification.py | Python | bsd-3-clause | 4,320 | 0.000926 | # coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class Notification(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self):
        """
        Notification - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        # python attribute name -> swagger type (iterated by to_dict below)
        self.swagger_types = {
            'sys_time': 'datetime',
            'type': 'str'
        }
        # python attribute name -> JSON key; presumably consumed by the
        # generated API client's (de)serializer -- not referenced here
        self.attribute_map = {
            'sys_time': 'sysTime',
            'type': 'type'
        }
        # backing fields for the properties below
        self._sys_time = None
        self._type = None
    @property
    def sys_time(self):
        """
        Gets the sys_time of this Notification.
        Time of notification

        :return: The sys_time of this Notification.
        :rtype: datetime
        """
        return self._sys_time
    @sys_time.setter
    def sys_time(self, sys_time):
        """
        Sets the sys_time of this Notification.
        Time of notification

        :param sys_time: The sys_time of this Notification.
        :type: datetime
        """
        self._sys_time = sys_time
    @property
    def type(self):
        """
        Gets the type of this Notification.
        Notification type

        :return: The type of this Notification.
        :rtype: str
        """
        return self._type
    @type.setter
    def type(self, type):
        """
        Sets the type of this Notification.
        Notification type
        Raises ValueError when the value is not one of the allowed
        notification types below.

        :param type: The type of this Notification.
        :type: str
        """
        # NOTE(review): "pathAlert" appears twice in this generated list;
        # harmless for membership testing, but flagged for upstream.
        allowed_values = ["netStarted", "pathStateChanged", "pathAlert", "moteStateChanged", "joinFailed", "pingResponse", "invalidMIC", "dataPacketReceived", "ipPacketReceived", "packetSent", "cmdFinished", "configChanged", "configLoaded", "alarmOpened", "alarmClosed", "deviceHealthReport", "neighborHealthReport", "discoveryHealthReport", "rawMoteNotification", "serviceChanged", "apStateChanged", "managerStarted", "managerStopping", "optPhase", "pathAlert", "moteTrace", "frameCapacity", "apGpsSyncChanged"]
        if type not in allowed_values:
            raise ValueError(
                "Invalid value for `type`, must be one of {0}"
                .format(allowed_values)
            )
        self._type = type
    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # walk the declared swagger attributes and recursively convert
        # nested models (anything with its own to_dict) and lists of them
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # NOTE(review): compares raw attribute dicts; raises AttributeError
        # for objects without __dict__ (e.g. ints) rather than returning False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
mlperf/training_results_v0.7 | Fujitsu/benchmarks/resnet/implementations/implementation_open/mxnet/tests/python/unittest/test_contrib_operator.py | Python | apache-2.0 | 21,277 | 0.007943 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
from __future__ import print_function
import numpy as np
import mxnet as mx
import random
import itertools
from numpy.testing import assert_allclose, assert_array_equal
from mxnet.test_utils import *
from common import with_seed
import unittest
def test_box_nms_op():
def test_box_nms_forward(data, expected, thresh=0.5, valid=0, topk=-1, coord=2, score=1, cid=0, bid=-1,
force=False, in_format='corner', out_format='corner'):
for dtype in ['float16', 'float32', 'float64']:
data = mx.nd.array(data, dtype=dtype)
out = mx.contrib.nd.box_nms(data, overlap_thresh=thresh, valid_thresh=valid, topk=topk,
coord_start=coord, score_index=score, id_index=cid, background_id=bid,
force_suppress=force, in_format=in_format, out_format=out_format)
assert_almost_equal(out.asnumpy(), expected.astype(dtype), rtol=1e-3, atol=1e-3)
    def test_box_nms_backward(data, grad, expected, thresh=0.5, valid=0, topk=-1, coord=2, score=1,
                              cid=0, bid=-1, force=False, in_format='corner', out_format='corner'):
        """Backprop ``grad`` through box_nms and check the input gradient
        equals ``expected``."""
        in_var = mx.sym.Variable('data')
        arr_data = mx.nd.array(data)
        # gradient buffer filled in by exe.backward below
        arr_grad = mx.nd.empty(arr_data.shape)
        op = mx.contrib.sym.box_nms(in_var, overlap_thresh=thresh, valid_thresh=valid, topk=topk,
                                    coord_start=coord, score_index=score, id_index=cid, background_id=bid,
                                    force_suppress=force, in_format=in_format, out_format=out_format)
        exe = op.bind(ctx=default_context(), args=[arr_data], args_grad=[arr_grad])
        exe.forward(is_train=True)
        exe.backward(mx.nd.array(grad))
        assert_almost_equal(arr_grad.asnumpy(), expected)
def corner_to_center(data):
out = np.reshape(data, (-1, 6)).copy()
out[:, 2] = (data[:, 2] + data[:, 4]) / 2.0
out[:, 3] = (data[:, 3] + data[:, 5]) / 2.0
out[:, 4] = data[:, 4] - data[:, 2]
out[:, 5] = data[:, 5] - data[:, 3]
invalid = np.where(data[:, 0] < 0)[0]
out[invalid, :] = -1
return out
def center_to_corner(data):
data = np.reshape(data, (-1, 6)).copy()
out[:, 2] = data[:, 2] - data[:, 4] / 2.0
out[:, 3] = data[:, 3] - data[:, 5] / 2.0
out[:, 4] = data[:, 2] + data[:, 4] / 2.0
out[:, 5] = data[:, 3] + data[:, 5] / 2.0
invalid = np.where(data[:, 0] < 0)[0]
out[invalid, :] = -1
return out
    def swap_position(data, expected, coord=2, score=1, cid=0, new_col=0):
        """Rearrange the per-box columns (coords/score/id) into random new
        positions, optionally padding with ``new_col`` extra columns, and
        return (new_data, new_expected, new_coord, new_score, new_cid).

        NOTE: uses the global `random` module without seeding, so the
        chosen layout differs between runs."""
        data = np.reshape(data, (-1, 6))
        expected = np.reshape(expected, (-1, 6))
        # pick a start column for the 4 coordinates, then place score/id
        # in two of the remaining columns
        new_coord = random.randint(0, 6 + new_col - 4)
        others = list(range(new_coord)) + list(range(new_coord + 4, 6 + new_col))
        random.shuffle(others)
        new_score = others[0]
        new_cid = others[1]
        # unused columns stay at -1 (the "invalid" filler value)
        new_data = np.full((data.shape[0], data.shape[1] + new_col), -1.0)
        new_expected = np.full((expected.shape[0], expected.shape[1] + new_col), -1.0)
        new_data[:, new_coord:new_coord+4] = data[:, coord:coord+4]
        new_data[:, new_score] = data[:, score]
        new_data[:, new_cid] = data[:, cid]
        new_expected[:, new_coord:new_coord+4] = expected[:, coord:coord+4]
        new_expected[:, new_score] = expected[:, score]
        new_expected[:, new_cid] = expected[:, cid]
        return new_data, new_expected, new_coord, new_score, new_cid
# manually set up test cases
boxes = [[0, 0.5, 0.1, 0.1, 0.2, 0.2], [1, 0.4, 0.1, 0.1, 0.2, 0.2],
[0, 0.3, 0.1, 0.1, 0.14, 0.14], [2, 0.6, 0.5, 0.5, 0.7, 0.8]]
# case1
force = True
thresh = 0.5
expected = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[0, 0.3, 0.1, 0.1, 0.14, 0.14], [-1, -1, -1, -1, -1, -1]]
grad = np.random.rand(4, 6)
expected_in_grad = grad[(1, 3, 2, 0), :]
expected_in_grad[1, :] = 0
test_box_nms_forward(np.array(boxes), np.array(expected), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes), grad, expected_in_grad, force=force, thresh=thresh)
# case2: multi batch
boxes2 = [boxes] * 3
expected2 = [expected] * 3
grad2 = np.array([grad.tolist()] * 3)
expected_in_grad2 = np.array([expected_in_grad.tolist()] * 3)
test_box_nms_forward(np.array(boxes2), np.array(expected2), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes2), grad2, expected_in_grad2, force=force, thresh=thresh)
# another new dim
boxes2 = [boxes2] * 2
expected2 = [expected2] * 2
grad2 = np.array([grad2.tolist()] * 2)
expected_in_grad2 = np.array([expected_in_grad2.tolist()] * 2)
test_box_nms_forward(np.array(boxes2), np.array(expected2), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes2), grad2, expected_in_grad2, force=force, thresh=thresh)
# case3: thresh
thresh = 0.1
boxes3 = boxes
expected3 = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, -1, -1]]
grad3 = np.random.rand(4, 6)
expected_in_gra | d3 = grad3[(1, 3, 2, 0), :]
expected_in_grad3[(1, 2), :] = 0
test_box_nms_forward(np.array(boxes3), np.array(expected3), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes3), grad3, expected_in_grad3, force=force, thresh=th | resh)
# case4: non-force
boxes4 = boxes
force = False
expected4 = [[2, 0.6, 0.5, 0.5, 0.7, 0.8], [0, 0.5, 0.1, 0.1, 0.2, 0.2],
[1, 0.4, 0.1, 0.1, 0.2, 0.2], [-1, -1, -1, -1, -1, -1]]
grad4 = np.random.rand(4, 6)
expected_in_grad4 = grad4[(1, 2, 3, 0), :]
expected_in_grad4[2, :] = 0
test_box_nms_forward(np.array(boxes4), np.array(expected4), force=force, thresh=thresh)
test_box_nms_backward(np.array(boxes4), grad4, expected_in_grad4, force=force, thresh=thresh)
# case5: different coding
boxes5 = corner_to_center(np.array(boxes4))
test_box_nms_forward(np.array(boxes5), np.array(expected4), force=force, thresh=thresh,
in_format='center')
expected5 = corner_to_center(np.array(expected4))
test_box_nms_forward(np.array(boxes4), np.array(expected5), force=force, thresh=thresh,
out_format='center')
test_box_nms_forward(np.array(boxes5), np.array(expected5), force=force, thresh=thresh,
in_format='center', out_format='center')
# case6: different position
boxes6, expected6, new_coord, new_score, new_id = swap_position(np.array(boxes4),
np.array(expected4), new_col=2)
test_box_nms_forward(np.array(boxes6), np.array(expected6), force=force, thresh=thresh,
coord=new_coord, score=new_score, cid=new_id)
# case7: no id, should be same with force=True
force = False
thresh = 0.5
test_box_nms_forward(np.array(boxes), np.array(expected), force=force, thresh=thresh, cid=-1)
# case8: multi-batch thresh + topk
boxes8 = [[[1, 1, 0, 0, 10, 10], [1, 0.4, 0, 0, 10, 10], [1, 0.3, 0, 0, 10, 10]],
[[2, 1, 0, 0, 10, 10], [2, 0.4, 0, 0, 10, 10], [2, 0.3, 0, 0, 10, 10]],
[[3, 1, 0, 0, 10, 10], [3, 0.4, 0, 0, 10, 10], [3, 0.3, 0, 0, 10, 10]]]
expected8 = [[[1, 1, 0, 0, 10, 10], [-1, -1, -1, -1, -1, -1], [-1, -1, -1, -1, |
Coelhon/MasterRepo.repository | plugin.video.psychoxxx/resources/lib/modules/client.py | Python | gpl-2.0 | 14,581 | 0.01015 | # -*- coding: utf-8 -*-
import re,sys,cookielib,urllib,urllib2,urlparse,HTMLParser,time,random,base64
from resources.lib.modules import cache
from resources.lib.modules import workers
def request(url, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, limit=None, referer=None, cookie=None, output='', timeout='30'):
    """Fetch ``url`` with urllib2 (Python 2), transparently retrying through
    Cloudflare ("cf-browser-verification") and Sucuri JS challenges.

    ``output`` selects what is returned: '' (body, default), 'cookie',
    'geturl', 'headers', 'chunk' (first 16 KiB of large responses only),
    or 'extended' (body, code, response headers, request headers, cookie).
    ``limit`` caps the body read in KiB ('0' means 224 KiB).  Any failure
    returns None because of the outermost bare except.
    """
    try:
        handlers = []
        if not proxy == None:
            handlers += [urllib2.ProxyHandler({'http':'%s' % (proxy)}), urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        # cookie jar is needed whenever cookies must be reported or reused
        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        try:
            # disable SSL certificate verification where supported (2.7.9+)
            if sys.version_info < (2, 7, 9): raise Exception()
            import ssl; ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            handlers += [urllib2.HTTPSHandler(context=ssl_context)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        except:
            pass
        # NOTE(review): headers.update(headers) is a no-op on a dict; the
        # net effect of this try/except is only to turn headers=None into {}
        try: headers.update(headers)
        except: headers = {}
        if 'User-Agent' in headers:
            pass
        elif not mobile == True:
            #headers['User-Agent'] = agent()
            headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'Referer' in headers:
            pass
        elif referer == None:
            # default referer: the origin of the requested URL
            headers['Referer'] = '%s://%s/' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
        else:
            headers['Referer'] = referer
        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'
        if 'Cookie' in headers:
            pass
        elif not cookie == None:
            headers['Cookie'] = cookie
        if redirect == False:
            # opener that passes 3xx responses through unchanged
            class NoRedirection(urllib2.HTTPErrorProcessor):
                def http_response(self, request, response): return response
            opener = urllib2.build_opener(NoRedirection)
            opener = urllib2.install_opener(opener)
            try: del headers['Referer']
            except: pass
        request = urllib2.Request(url, data=post, headers=headers)
        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            if response.code == 503:
                # Cloudflare challenge: solve it (cached 168h) and retry once
                if 'cf-browser-verification' in response.read(5242880):
                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
                    ua = headers['User-Agent']
                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)
                    headers['Cookie'] = cf
                    request = urllib2.Request(url, data=post, headers=headers)
                    response = urllib2.urlopen(request, timeout=int(timeout))
                elif error == False:
                    return
            elif error == False:
                return
        if output == 'cookie':
            try: result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: result = cf
            except: pass
            if close == True: response.close()
            return result
        elif output == 'geturl':
            result = response.geturl()
            if close == True: response.close()
            return result
        elif output == 'headers':
            result = response.headers
            if close == True: response.close()
            return result
        elif output == 'chunk':
            # only sample large responses (>= 2 MiB); otherwise return None
            try: content = int(response.headers['Content-Length'])
            except: content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
            if close == True: response.close()
            return result
        if limit == '0':
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)
        # Sucuri cloudproxy challenge: compute the cookie and retry once
        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)
            headers['Cookie'] = su
            request = urllib2.Request(url, data=post, headers=headers)
            response = urllib2.urlopen(request, timeout=int(timeout))
            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)
        if output == 'extended':
            response_headers = response.headers
            response_code = str(response.code)
            try: cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: cookie = cf
            except: pass
            if close == True: response.close()
            return (result, response_code, response_headers, headers, cookie)
        else:
            if close == True: response.close()
            return result
    except:
        # deliberate best-effort: every failure yields None
        return
def parseDOM(html, name=u"", attrs={}, ret=False):
# Copyright (C) 2010-2011 Tobias Ussing And Henrik Mosgaard Jensen
if isinstance(html, str):
try:
html = [html.decode("utf-8")]
except:
html = [html]
elif isinstance(html, unicode):
html = [html]
elif not isinstance(html, list):
return u""
if not name.strip():
return u""
ret_lst = []
for item in html:
temp_item = re.compile('(<[^>]*?\n[^>]*?>)').findall(item)
for match in temp_item:
item = item.replace(match, match.replace("\n", " "))
lst = []
for key in attrs:
lst2 = re.compile('(<' + name + '[^>]*?(?:' + key + '=[\'"]' + attrs[key] + '[\'"].*?>))', re.M | re.S).findall(item)
if len(lst2) == 0 and attrs[key].find(" ") == -1:
lst2 = re.compile('(<' + name + '[^>]*?(?:' + key + '=' + attrs[key] + '.*?>))', re.M | re.S).findall(item)
if len(lst) == 0:
lst = lst2
lst2 = []
else:
test = range(len(lst))
test.reverse()
for i in test:
if not lst[i] in lst2:
del(lst[i])
if len(lst) == 0 and attrs == {}:
lst = re.compile('(<' + name + '>)', re.M | re.S).findall(item)
if len(lst) == 0:
lst = re.compile('(<' + name + ' .*?>)', re.M | re.S).findall(item)
if isinstance(ret, str):
lst2 = []
for match in lst:
attr_lst = re.compile('<' + name + '.*?' + ret + '=([\'"].[^>]*?[\'"])>', re.M | re.S).findall(match)
if len(attr_lst) == 0:
attr_lst = re.compile('<' + name + '.*?' + ret + '=(.[^>]*?)>', re.M | re.S).findall(match)
for tmp in attr_lst:
cont_char = tmp[0]
if cont_char in "'\"":
if tmp.find('=' + cont_char, tmp.find(cont_char, 1)) > -1:
tmp = tmp[:tmp.find('=' + cont_char, tmp.find(cont_char, 1))]
if tmp.rfind(cont_char, 1) > -1:
tmp = tmp[1:tmp.rfind(cont_char)]
else:
if tmp.find(" ") > 0:
tmp = tmp[:tmp.find(" ")]
elif tmp.find("/") > 0:
tmp = tmp[:tmp.find("/")]
elif tmp.find(">") > 0:
tmp = tmp[:tmp.find(">")]
lst2.append(tmp.strip())
|
mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/numpy/core/defchararray.py | Python | mit | 67,393 | 0.000401 | """
This module contains a set of functions for vectorized string
operations and methods.
.. note::
The `chararray` class exists for backwards compatibility with
Numarray, it is not recommended for new development. Starting from numpy
1.4, if one needs arrays of strings, it is recommended to use arrays of
`dtype` `object_`, `string_` or `unicode_`, and use the free functions
in the `numpy.char` module for fast vectorized string operations.
Some methods will only be available if the corresponding string method is
available in your version of Python.
The preferred alias for `defchararray` is `numpy.char`.
"""
from __future__ import division, absolute_import, print_function
import sys
from .numerictypes import string_, unicode_, integer, object_, bool_, character
from .numeric import ndarray, compare_chararrays
from .numeric import array as narray
from numpy.core.multiarray import _vec_string
from numpy.compat import asbytes, long
import numpy
__all__ = [
'chararray', 'equal', 'not_equal', 'greater_equal', 'less_equal',
'greater', 'less', 'str_len', 'add', 'multiply', 'mod', 'capitalize',
'center', 'count', 'decode', 'encode', 'endswith', 'expandtabs',
'find', 'index', 'isalnum', 'isalpha', 'isdigit', 'islower', 'isspace',
'istitle', 'isupper', 'join', 'ljust', 'lower', 'lstrip', 'partition',
'replace', 'rfind', 'rindex', 'rjust', 'rpartition', 'rsplit',
'rstrip', 'split', 'splitlines', 'startswith', 'strip', 'swapcase',
'title', 'translate', 'upper', 'zfill', 'isnumeric', 'isdecimal',
'array', 'asarray'
]
_globalvar = 0

# Python 2/3 compatibility aliases for the text/bytes types used below.
if sys.version_info[0] >= 3:
    _unicode = str
    _bytes = bytes
else:
    _unicode = unicode
    _bytes = str
# local alias so `len` can be shadowed safely inside this module
_len = len
def _use_unicode(*args):
    """
    Helper function for determining the output type of some string
    operations.

    For an operation on two ndarrays, if at least one is unicode, the
    result should be unicode.
    """
    if any(isinstance(arg, _unicode)
           or issubclass(numpy.asarray(arg).dtype.type, unicode_)
           for arg in args):
        return unicode_
    return string_
def _to_string_or_unicode_array(result):
    """
    Convert an object array used as an intermediate result back into a
    proper string or unicode array of the appropriate dtype.
    """
    as_list = result.tolist()
    return numpy.asarray(as_list)
def _clean_args(*args):
    """
    Truncate an argument list at the first None.

    Python string methods signal "use the default" by omitting the
    argument rather than passing None, so None and everything after it
    must be dropped before delegating.
    """
    cleaned = []
    for arg in args:
        if arg is None:
            return cleaned
        cleaned.append(arg)
    return cleaned
def _get_num_chars(a):
    """
    Return the per-element character count of a string/unicode array.

    Unicode arrays store 4 bytes per character, so their itemsize is
    divided by 4; byte-string arrays map one byte per character.
    """
    itemsize = a.itemsize
    if issubclass(a.dtype.type, unicode_):
        return itemsize // 4
    return itemsize
def equal(x1, x2):
    """
    Return (x1 == x2) element-wise.

    Unlike `numpy.equal`, trailing whitespace is stripped from each
    string before comparing (numarray-compatible behaviour).

    Parameters
    ----------
    x1, x2 : array_like of str or unicode
        Input arrays of the same shape.

    Returns
    -------
    out : ndarray or bool
        Output array of bools, or a single bool if x1 and x2 are scalars.

    See Also
    --------
    not_equal, greater_equal, less_equal, greater, less
    """
    result = compare_chararrays(x1, x2, '==', True)
    return result
def not_equal(x1, x2):
    """
    Return (x1 != x2) element-wise.

    Unlike `numpy.not_equal`, trailing whitespace is stripped from each
    string before comparing (numarray-compatible behaviour).

    Parameters
    ----------
    x1, x2 : array_like of str or unicode
        Input arrays of the same shape.

    Returns
    -------
    out : ndarray or bool
        Output array of bools, or a single bool if x1 and x2 are scalars.

    See Also
    --------
    equal, greater_equal, less_equal, greater, less
    """
    result = compare_chararrays(x1, x2, '!=', True)
    return result
def greater_equal(x1, x2):
    """
    Return (x1 >= x2) element-wise.

    Unlike `numpy.greater_equal`, trailing whitespace is stripped from
    each string before comparing (numarray-compatible behaviour).

    Parameters
    ----------
    x1, x2 : array_like of str or unicode
        Input arrays of the same shape.

    Returns
    -------
    out : ndarray or bool
        Output array of bools, or a single bool if x1 and x2 are scalars.

    See Also
    --------
    equal, not_equal, less_equal, greater, less
    """
    result = compare_chararrays(x1, x2, '>=', True)
    return result
def less_equal(x1, x2):
    """
    Return (x1 <= x2) element-wise.

    Unlike `numpy.less_equal`, trailing whitespace is stripped from each
    string before comparing (numarray-compatible behaviour).

    Parameters
    ----------
    x1, x2 : array_like of str or unicode
        Input arrays of the same shape.

    Returns
    -------
    out : ndarray or bool
        Output array of bools, or a single bool if x1 and x2 are scalars.

    See Also
    --------
    equal, not_equal, greater_equal, greater, less
    """
    result = compare_chararrays(x1, x2, '<=', True)
    return result
def greater(x1, x2):
    """
    Return (x1 > x2) element-wise.

    Unlike `numpy.greater`, trailing whitespace is stripped from each
    string before comparing (numarray-compatible behaviour).

    Parameters
    ----------
    x1, x2 : array_like of str or unicode
        Input arrays of the same shape.

    Returns
    -------
    out : ndarray or bool
        Output array of bools, or a single bool if x1 and x2 are scalars.

    See Also
    --------
    equal, not_equal, greater_equal, less_equal, less
    """
    result = compare_chararrays(x1, x2, '>', True)
    return result
def less(x1, x2):
    """
    Return (x1 < x2) element-wise.

    Unlike `numpy.less`, trailing whitespace is stripped from each
    string before comparing (numarray-compatible behaviour).

    Parameters
    ----------
    x1, x2 : array_like of str or unicode
        Input arrays of the same shape.

    Returns
    -------
    out : ndarray or bool
        Output array of bools, or a single bool if x1 and x2 are scalars.

    See Also
    --------
    equal, not_equal, greater_equal, less_equal, greater
    """
    result = compare_chararrays(x1, x2, '<', True)
    return result
def str_len(a):
    """
    Return len(a) element-wise.

    Parameters
    ----------
    a : array_like of str or unicode

    Returns
    -------
    out : ndarray
        Output array of integers.

    See also
    --------
    __builtin__.len
    """
    lengths = _vec_string(a, integer, '__len__')
    return lengths
def add(x1, x2):
    """
    Return element-wise string concatenation for two arrays of str or unicode.

    Arrays `x1` and `x2` must have the same shape.

    Parameters
    ----------
    x1 : array_like of str or unicode
        Input array.
    x2 : array_like of str or unicode
        Input array.

    Returns
    -------
    add : ndarray
        Output array of `string_` or `unicode_` (depending on input
        types) with the same shape as `x1` and `x2`.
    """
    left = numpy.asarray(x1)
    right = numpy.asarray(x2)
    # The output field must be wide enough to hold both operands.
    width = _get_num_chars(left) + _get_num_chars(right)
    out_dtype = _use_unicode(left, right)
    return _vec_string(left, (out_dtype, width), '__add__', (right,))
def multiply(a, i):
"""
Return (a * i), that is string multiple concatenation,
element-wise.
Values in `i` of less than 0 are treated a |
teknologkoren/Strequelistan | password_reset_email/migrations/0001_initial.py | Python | mpl-2.0 | 946 | 0.002114 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-11-01 19:27
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the PasswordResetEmail
    # table, which stores one-time password-reset keys tied to a user.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='PasswordResetEmail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('key', models.CharField(max_length=100)),
                # auto_now_add: stamped once when the reset key is created.
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('used', models.BooleanField(default=False)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
just-kir/just-kir.github.io | game_test.py | Python | mit | 295 | 0.00678 | import pygame
# Minimal pygame smoke test: opens a window and reports when the "1"
# key is pressed.
pygame.init()
WIDTH = 600
HEIGHT = 480
SCREEN = pygame.display.set_mode((WIDTH, HEIGHT))

running = True
while running:
    for event in pygame.event.get():
        # BUG FIX: the original loop never handled pygame.QUIT, so the
        # window could not be closed and the process had to be killed.
        if event.type == pygame.QUIT:
            running = False
        elif event.type == pygame.KEYDOWN and event.key == pygame.K_1:
            print('this DOES work! :)')

pygame.quit()
|
Hbrinj/mraa | tests/mock/i2c_checks_read.py | Python | mit | 2,420 | 0.005372 | #!/usr/bin/env python
# Author: Alex Tereschenko <alext.mkrs@gmail.com>
# Copyright (c) 2016 Alex Tereschenko.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import mraa as m
import unittest as u
from i2c_checks_shared import *
class I2cChecksRead(u.TestCase):
    """Read-path checks for the mock I2C bus."""

    def setUp(self):
        self.i2c = m.I2c(MRAA_I2C_BUS_NUM)

    def tearDown(self):
        del self.i2c

    def test_i2c_read_full_reg_range(self):
        self.i2c.address(MRAA_MOCK_I2C_ADDR)
        # Every register starts out holding the init byte.
        expected = bytearray([MRAA_MOCK_I2C_DATA_INIT_BYTE]) * MRAA_MOCK_I2C_DATA_LEN
        actual = self.i2c.read(MRAA_MOCK_I2C_DATA_LEN)
        self.assertEqual(actual, expected, "I2C read() of full register range returned unexpected data")

    def test_i2c_read_part_reg_range(self):
        self.i2c.address(MRAA_MOCK_I2C_ADDR)
        length = MRAA_MOCK_I2C_DATA_LEN - 1
        expected = bytearray([MRAA_MOCK_I2C_DATA_INIT_BYTE]) * length
        actual = self.i2c.read(length)
        self.assertEqual(actual, expected, "I2C read() of partial register range returned unexpected data")

    def test_i2c_read_invalid_addr(self):
        # Reading from a non-existent device address must fail.
        self.i2c.address(MRAA_MOCK_I2C_ADDR - 1)
        self.assertRaises(IOError, self.i2c.read, MRAA_MOCK_I2C_DATA_LEN)

    def test_i2c_read_invalid_len_bigger_than_max(self):
        # Reading past the end of the register file must fail.
        self.i2c.address(MRAA_MOCK_I2C_ADDR)
        self.assertRaises(IOError, self.i2c.read, MRAA_MOCK_I2C_DATA_LEN + 1)
if __name__ == "__main__":
u.main()
|
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Ops/PyScripts/pythonPreCompiler.py | Python | unlicense | 87 | 0.011494 |
# Byte-compile every Python file found under the DSZ resources directory.
import compileall

import dsz.lp

resources_dir = dsz.lp.GetResourcesDirectory()
compileall.compile_dir(resources_dir)
jkwill87/mapi | mapi/endpoints.py | Python | mit | 13,575 | 0.000221 | # coding=utf-8
from re import match
from mapi.exceptions import (
MapiNetworkException,
MapiNotFoundException,
MapiProviderException,
)
from mapi.utils import clean_dict, request_json
__all__ = [
"omdb_search",
"omdb_title",
"tmdb_find",
"tmdb_movies",
"tmdb_search_movies",
"tvdb_episodes_id",
"tvdb_login",
"tvdb_refresh_token",
"tvdb_search_series",
"tvdb_series_id",
"tvdb_series_id_episodes",
"tvdb_series_id_episodes_query",
]
OMDB_MEDIA_TYPES = {"episode", "movie", "series"}
OMDB_PLOT_TYPES = {"short", "long"}
TVDB_LANGUAGE_CODES = [
"cs",
"da",
"de",
"el",
"en",
"es",
"fi",
"fr",
"he",
"hr",
"hu",
"it",
"ja",
"ko",
"nl",
"no",
"pl",
"pt",
"ru",
"sl",
"sv",
"tr",
"zh",
]
def omdb_title(
    api_key,
    id_imdb=None,
    media_type=None,
    title=None,
    season=None,
    episode=None,
    year=None,
    plot=None,
    cache=True,
):
    """
    Lookup media using the Open Movie Database.

    Exactly one of *id_imdb* or *title* must be given.

    Online docs: http://www.omdbapi.com/#parameters
    """
    # Exactly one lookup key is required: reject none-given and both-given.
    if bool(title) == bool(id_imdb):
        raise MapiProviderException("either id_imdb or title must be specified")
    if media_type and media_type not in OMDB_MEDIA_TYPES:
        raise MapiProviderException(
            "media_type must be one of %s" % ",".join(OMDB_MEDIA_TYPES)
        )
    if plot and plot not in OMDB_PLOT_TYPES:
        raise MapiProviderException(
            "plot must be one of %s" % ",".join(OMDB_PLOT_TYPES)
        )
    url = "http://www.omdbapi.com"
    # clean_dict drops the unset parameters before the request is made.
    parameters = clean_dict({
        "apikey": api_key,
        "i": id_imdb,
        "t": title,
        "y": year,
        "season": season,
        "episode": episode,
        "type": media_type,
        "plot": plot,
    })
    status, content = request_json(url, parameters, cache=cache)
    error = content.get("Error") if isinstance(content, dict) else None
    if status == 401:
        raise MapiProviderException("invalid API key")
    if status != 200 or not isinstance(content, dict):
        raise MapiNetworkException("OMDb down or unavailable?")
    if error:
        raise MapiNotFoundException(error)
    return content
def omdb_search(api_key, query, year=None, media_type=None, page=1, cache=True):
    """
    Search for media using the Open Movie Database.

    Online docs: http://www.omdbapi.com/#parameters.

    :raises MapiProviderException: on bad parameters or an invalid API key
    :raises MapiNotFoundException: when the search returns no results
    :raises MapiNetworkException: when OMDb cannot be reached
    """
    if media_type and media_type not in OMDB_MEDIA_TYPES:
        raise MapiProviderException(
            "media_type must be one of %s" % ",".join(OMDB_MEDIA_TYPES)
        )
    # BUG FIX: the original test `1 > page > 100` can never be true, so
    # out-of-range pages were silently accepted. OMDb pages run 1..100.
    if page < 1 or page > 100:
        raise MapiProviderException("page must be between 1 and 100")
    url = "http://www.omdbapi.com"
    parameters = {
        "apikey": api_key,
        "s": query,
        "y": year,
        "type": media_type,
        "page": page,
    }
    parameters = clean_dict(parameters)
    status, content = request_json(url, parameters, cache=cache)
    if status == 401:
        raise MapiProviderException("invalid API key")
    elif content and not content.get("totalResults"):
        raise MapiNotFoundException()
    elif not content or status != 200:  # pragma: no cover
        raise MapiNetworkException("OMDb down or unavailable?")
    return content
def tmdb_find(
    api_key, external_source, external_id, language="en-US", cache=True
):
    """
    Search for The Movie Database objects using another DB's foreign key.

    Note: language codes aren't checked on this end or by TMDb, so if you
    enter an invalid language code your search itself will succeed, but
    certain fields like synopsis will just be empty.

    Online docs: developers.themoviedb.org/3/find.
    """
    sources = ["imdb_id", "freebase_mid", "freebase_id", "tvdb_id", "tvrage_id"]
    if external_source not in sources:
        raise MapiProviderException("external_source must be in %s" % sources)
    if external_source == "imdb_id" and not match(r"tt\d+", external_id):
        raise MapiProviderException("invalid imdb tt-const value")
    # BUG FIX: the original `"…/find/" + external_id or ""` parsed as
    # `("…/find/" + external_id) or ""`, so a None external_id raised
    # TypeError instead of falling back to the empty string.
    url = "https://api.themoviedb.org/3/find/" + (external_id or "")
    parameters = {
        "api_key": api_key,
        "external_source": external_source,
        "language": language,
    }
    # Result buckets TMDb may populate; at least one must be non-empty.
    keys = [
        "movie_results",
        "person_results",
        "tv_episode_results",
        "tv_results",
        "tv_season_results",
    ]
    status, content = request_json(url, parameters, cache=cache)
    if status == 401:
        raise MapiProviderException("invalid API key")
    elif status != 200 or not any(content.keys()):  # pragma: no cover
        raise MapiNetworkException("TMDb down or unavailable?")
    elif status == 404 or not any(content.get(k, {}) for k in keys):
        raise MapiNotFoundException
    return content
def tmdb_movies(api_key, id_tmdb, language="en-US", cache=True):
    """
    Lookup a movie item using The Movie Database.

    Online docs: developers.themoviedb.org/3/movies.
    """
    try:
        movie_id = int(id_tmdb)
    except ValueError:
        raise MapiProviderException("id_tmdb must be numeric")
    url = "https://api.themoviedb.org/3/movie/%d" % movie_id
    parameters = {"api_key": api_key, "language": language}
    status, content = request_json(url, parameters, cache=cache)
    if status == 401:
        raise MapiProviderException("invalid API key")
    if status == 404:
        raise MapiNotFoundException
    if status != 200 or not any(content.keys()):  # pragma: no cover
        raise MapiNetworkException("TMDb down or unavailable?")
    return content
def tmdb_search_movies(
    api_key, title, year=None, adult=False, region=None, page=1, cache=True
):
    """
    Search for movies using The Movie Database.

    Online docs: developers.themoviedb.org/3/search/search-movies.
    """
    url = "https://api.themoviedb.org/3/search/movie"
    if year:
        try:
            year = int(year)
        except ValueError:
            raise MapiProviderException("year must be numeric")
    parameters = {
        "api_key": api_key,
        "query": title,
        "page": page,
        "include_adult": adult,
        "region": region,
        "year": year,
    }
    status, content = request_json(url, parameters, cache=cache)
    if status == 401:
        raise MapiProviderException("invalid API key")
    if status != 200 or not any(content.keys()):  # pragma: no cover
        raise MapiNetworkException("TMDb down or unavailable?")
    if status == 404 or status == 422 or not content.get("total_results"):
        raise MapiNotFoundException
    return content
def tvdb_login(api_key):
    """
    Log into TVDb with the given API key and return a JWT token.

    Note: You can register for a free TVDb key at thetvdb.com/?tab=apiregister

    Online docs: api.thetvdb.com/swagger#!/Authentication/post_login.
    """
    status, content = request_json(
        "https://api.thetvdb.com/login", body={"apikey": api_key}, cache=False
    )
    if status == 401:
        raise MapiProviderException("invalid api key")
    if status != 200 or not content.get("token"):  # pragma: no cover
        raise MapiNetworkException("TVDb down or unavailable?")
    return content["token"]
def tvdb_refresh_token(token):
    """
    Refresh a TVDb JWT token and return the new one.

    Online docs: api.thetvdb.com/swagger#!/Authentication/get_refresh_token.
    """
    auth_header = {"Authorization": "Bearer %s" % token}
    status, content = request_json(
        "https://api.thetvdb.com/refresh_token", headers=auth_header, cache=False
    )
    if status == 401:
        raise MapiProviderException("invalid token")
    if status != 200 or not content.get("token"):  # pragma: no cover
        raise MapiNetworkException("TVDb down or unavailable?")
    return content["token"]
def tvdb_episodes_id(token, id_tvdb, lang="en", cache=True):
"""
Returns the full information for a given episode id.
Online docs: https://api.thetvdb.com/swagger#!/Episodes.
"""
if lang not in TVDB_LANGUAGE_CODES:
raise MapiProviderException(
"'lang' must be one of |
franciscod/python-telegram-bot | telegram/location.py | Python | gpl-2.0 | 1,534 | 0 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2016
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains a object that represents a Telegram Location."""
from telegram import TelegramObject
class Location(TelegramObject):
    """This object represents a Telegram Location.

    Attributes:
        longitude (float):
        latitude (float):

    Args:
        longitude (float):
        latitude (float):
    """

    def __init__(self, longitude, latitude, **kwargs):
        # Required fields; coerced to float so numeric strings also work.
        self.longitude = float(longitude)
        self.latitude = float(latitude)

    @staticmethod
    def de_json(data):
        """
        Args:
            data (str):

        Returns:
            telegram.Location:
        """
        return Location(**data) if data else None
|
jkereako/flask-skeleton | app/templates/__init__.py | Python | mit | 242 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Templates
~~~~~~~~~
View files

:author: Jeff Kereakoglow
:date: 2014-11-09
:copyright: (c) 2014 by Alexis Digital
:license: MIT, see LICENSE for more details
"""
|
brightchen/h2o-3 | h2o-py/tests/testdir_hdfs/pyunit_HDFS_kmeans.py | Python | apache-2.0 | 1,795 | 0.015599 | #----------------------------------------------------------------------
# Purpose: This tests k-means on a large dataset.
#----------------------------------------------------------------------
import sys
sys.path.insert(1, "../../")
import h2o, tests
def hdfs_kmeans():
# Check if we are running inside the H2O network by seeing if we can touch
# the namenode.
running_inside_h2o = tests.is_running_internal_to_h2o()
if running_inside_h2o:
hdfs_name_node = tests.get_h2o_internal_hdfs_name_node()
hdfs_iris_file = "/datasets/runit/iris_wheader.csv"
hdfs_covtype_file = "/datasets/runit/covtype.data"
print "Import iris_wheader.csv from HDFS"
url = "hdfs://{0}{1}".format(hdfs_name_node, hdfs_iris_file)
iris_h2o = h2o.import_file(url)
n = iris_h2o.nrow
print "rows: {0}".format(n)
assert n == 150, "Wrong number of rows. Got {0}. Should have got {1}".format(n, 150)
print "Running KMeans on iris"
iris_km = h2o.kmeans(training_frame = iris_h2o, k = 3, x = iris_h2o[0:4], max_iterations = 10)
print iris_km
print "Importing covtype.data from HDFS"
url = "hdfs://{0}{1}".format(hdfs_name_node, hdfs_covtype_file)
covtype_h2o = h2o.im | port_file(url)
n = covtype_h2o.nrow
print "rows: {0}".format(n)
assert n == 581012, "Wrong number of rows. Got {0}. Should have got {1}".format(n, 581012)
print "Running KMeans on covtype"
covtype_km = h2o.kmeans(training_frame = covtype_h2o, x = covtype_h2o[0:55], k = 8, max_iterations = 10)
print c | ovtype_km
else:
print "Not running on H2O internal network. No access to HDFS."
if __name__ == "__main__":
tests.run_test(sys.argv, hdfs_kmeans)
|
peret/visualize-bovw | util/classifierloader.py | Python | gpl-2.0 | 1,518 | 0.003294 | from sklearn.externals import joblib
import os
import time

from . import folder_name
class ClassifierLoader(object):
    """Handles saving and loading of trained classifiers transparently.

    Mixin-style helper: expects the host class to provide ``self.logger``,
    ``self.datamanager`` (with a ``PATHS["CLASSIFIER"]`` entry) and, for
    ``load_object``'s default, ``self.classifier``.
    """

    def __init__(self):
        super(ClassifierLoader, self).__init__()

    def dump_object(self, obj, classifier, category="", fname=None, **kwargs):
        """Serialize *obj* under the classifier's folder as *fname*.

        BUG FIX: the original body referenced an undefined ``fname``
        variable, raising NameError on every call; it is now an explicit
        keyword argument that must be supplied.
        """
        if fname is None:
            raise ValueError("fname must be specified")
        self.logger.info("Writing object to disk")
        start = time.time()
        try:
            folder = folder_name(self.datamanager.PATHS["CLASSIFIER"], category, classifier)
            if not os.path.isdir(folder):
                os.makedirs(folder)
            joblib.dump(obj, os.path.join(folder, fname), compress=3)
        except Exception as e:
            # Best-effort persistence: log and continue, as before.
            self.logger.error("Joblib failed: %s" % e)
        self.logger.info("%f seconds\n" % (time.time() - start))

    def load_object(self, fname, category="", classifier=None):
        """Load a previously dumped object, or None if missing/unreadable."""
        self.logger.info("Reading object from disk")
        start = time.time()
        if classifier is None:
            classifier = self.classifier
        try:
            folder = folder_name(self.datamanager.PATHS["CLASSIFIER"], category, classifier)
            if not os.path.isdir(folder):
                self.logger.info("Object's path doesn't exist")
                return None
            obj = joblib.load(os.path.join(folder, fname))
            self.logger.info("%f seconds\n" % (time.time() - start))
            return obj
        except Exception as e:
            self.logger.error("Joblib failed: %s" % e)
            return None
jamslevy/gsoc | app/django/forms/forms.py | Python | apache-2.0 | 17,780 | 0.00225 | """
Form classes
"""
from copy import deepcopy
from django.utils.dat | astructures import SortedDict
from django.utils.html import esca | pe
from django.utils.encoding import StrAndUnicode, smart_unicode, force_unicode
from django.utils.safestring import mark_safe
from fields import Field, FileField
from widgets import Media, media_property, TextInput, Textarea
from util import flatatt, ErrorDict, ErrorList, ValidationError
__all__ = ('BaseForm', 'Form')
NON_FIELD_ERRORS = '__all__'
def pretty_name(name):
    "Converts 'first_name' to 'First name'"
    if not name:
        # BUG FIX: the empty string used to raise IndexError on name[0].
        return u''
    name = name[0].upper() + name[1:]
    return name.replace('_', ' ')
def get_declared_fields(bases, attrs, with_base_fields=True):
    """
    Create a list of form field instances from the passed in 'attrs', plus any
    similar fields on the base classes (in 'bases'). This is used by both the
    Form and ModelForm metclasses.

    If 'with_base_fields' is True, all fields from the bases are used.
    Otherwise, only fields in the 'declared_fields' attribute on the bases are
    used. The distinction is useful in ModelForm subclassing.
    Also integrates any additional media definitions
    """
    # Pop every Field attribute out of 'attrs' and restore declaration
    # order using creation_counter (assigned when each Field was built).
    fields = [(field_name, attrs.pop(field_name)) for field_name, obj in attrs.items() if isinstance(obj, Field)]
    fields.sort(lambda x, y: cmp(x[1].creation_counter, y[1].creation_counter))
    # If this class is subclassing another Form, add that Form's fields.
    # Note that we loop over the bases in *reverse*. This is necessary in
    # order to preserve the correct order of fields.
    if with_base_fields:
        for base in bases[::-1]:
            if hasattr(base, 'base_fields'):
                fields = base.base_fields.items() + fields
    else:
        for base in bases[::-1]:
            if hasattr(base, 'declared_fields'):
                fields = base.declared_fields.items() + fields
    return SortedDict(fields)
class DeclarativeFieldsMetaclass(type):
    """
    Metaclass that converts Field attributes to a dictionary called
    'base_fields', taking into account parent class 'base_fields' as well.
    """
    def __new__(cls, name, bases, attrs):
        # Collect declared (and inherited) Field attributes into
        # 'base_fields' before the class object itself is created.
        attrs['base_fields'] = get_declared_fields(bases, attrs)
        new_class = super(DeclarativeFieldsMetaclass,
                     cls).__new__(cls, name, bases, attrs)
        # Only synthesize a media property when the class did not define one.
        if 'media' not in attrs:
            new_class.media = media_property(new_class)
        return new_class
class BaseForm(StrAndUnicode):
# This is the main implementation of all the Form logic. Note that this
# class is different than Form. See the comments by the Form class for more
# information. Any improvements to the form API should be made to *this*
# class, not to the Form class.
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=':',
                 empty_permitted=False):
        # A form is "bound" when it was given data (or files) to validate.
        self.is_bound = data is not None or files is not None
        self.data = data or {}
        self.files = files or {}
        self.auto_id = auto_id
        self.prefix = prefix
        self.initial = initial or {}
        self.error_class = error_class
        self.label_suffix = label_suffix
        self.empty_permitted = empty_permitted
        self._errors = None # Stores the errors after clean() has been called.
        self._changed_data = None
        # The base_fields class attribute is the *class-wide* definition of
        # fields. Because a particular *instance* of the class might want to
        # alter self.fields, we create self.fields here by copying base_fields.
        # Instances should always modify self.fields; they should not modify
        # self.base_fields.
        self.fields = deepcopy(self.base_fields)
    def __unicode__(self):
        # Default string rendering is the <tr>-based table layout.
        return self.as_table()
    def __iter__(self):
        # Yield each field wrapped in a BoundField, which ties the field
        # definition to this form's data for rendering.
        for name, field in self.fields.items():
            yield BoundField(self, field, name)
def __getitem__(self, name):
"Returns a BoundField with the given name."
try:
field = self.fields[name]
except KeyError:
raise KeyError('Key %r not found in Form' % name)
return BoundField(self, field, name)
    def _get_errors(self):
        "Returns an ErrorDict for the data provided for the form"
        # Validation is lazy: run full_clean() only on first access.
        if self._errors is None:
            self.full_clean()
        return self._errors
    errors = property(_get_errors)
def is_valid(self):
"""
Returns True if the form has no errors. Otherwise, False. If errors are
being ignored, returns False.
"""
return self.is_bound and not bool(self.errors)
def add_prefix(self, field_name):
"""
Returns the field name with a prefix appended, if this Form has a
prefix set.
Subclasses may wish to override.
"""
return self.prefix and ('%s-%s' % (self.prefix, field_name)) or field_name
    def add_initial_prefix(self, field_name):
        """
        Add a 'initial' prefix for checking dynamic initial values
        """
        # Hidden "initial-<name>" inputs let changed-data detection compare
        # submitted values against dynamic initial values.
        return u'initial-%s' % self.add_prefix(field_name)
def _html_output(self, normal_row, error_row, row_ender, help_text_html, errors_on_separate_row):
"Helper function for outputting HTML. Used by as_table(), as_ul(), as_p()."
top_errors = self.non_field_errors() # Errors that should be displayed above all fields.
output, hidden_fields = [], []
for name, field in self.fields.items():
bf = BoundField(self, field, name)
bf_errors = self.error_class([escape(error) for error in bf.errors]) # Escape and cache in local variable.
if bf.is_hidden:
if bf_errors:
top_errors.extend([u'(Hidden field %s) %s' % (name, force_unicode(e)) for e in bf_errors])
hidden_fields.append(unicode(bf))
else:
if errors_on_separate_row and bf_errors:
output.append(error_row % force_unicode(bf_errors))
if bf.label:
label = escape(force_unicode(bf.label))
# Only add the suffix if the label does not end in
# punctuation.
if self.label_suffix:
if label[-1] not in ':?.!':
label += self.label_suffix
label = bf.label_tag(label) or ''
else:
label = ''
if field.help_text:
help_text = help_text_html % force_unicode(field.help_text)
else:
help_text = u''
output.append(normal_row % {'errors': force_unicode(bf_errors), 'label': force_unicode(label), 'field': unicode(bf), 'help_text': help_text})
if top_errors:
output.insert(0, error_row % force_unicode(top_errors))
if hidden_fields: # Insert any hidden fields in the last row.
str_hidden = u''.join(hidden_fields)
if output:
last_row = output[-1]
# Chop off the trailing row_ender (e.g. '</td></tr>') and
# insert the hidden fields.
if not last_row.endswith(row_ender):
# This can happen in the as_p() case (and possibly others
# that users write): if there are only top errors, we may
# not be able to conscript the last row for our purposes,
# so insert a new, empty row.
last_row = normal_row % {'errors': '', 'label': '', 'field': '', 'help_text': ''}
output.append(last_row)
output[-1] = last_row[:-len(row_ender)] + str_hidden + row_ender
else:
# If there aren't any rows in the output, just append the
# hidden fields.
output.append(str_hidden)
return mark_safe(u'\n'.join(output))
def as_table(self):
"Returns this form rendered as HTML <tr>s -- excluding the <table></table>."
return self._html_output(u'<tr><th>%(label)s</th><td>%(err |
mitsuhiko/babel | tests/test_smoke.py | Python | bsd-3-clause | 1,162 | 0 | # -- encoding: UTF-8 --
"""
These tests do not verify any results and should not be run when
looking at improving test coverage. They just verify that basic
operations don't fail due to odd corner cases on any locale that
we ship. |
"""
from datetime import datetime
import pytest
from babel import Locale
from babel import dates
from babel import numbers
from babel._compat import decimal
@pytest.mark.all_locales
def test_smoke_dates(locale):
    # Formatting "now" must not raise for any shipped locale/width combo.
    locale = Locale.parse(locale)
    instant = datetime.now()
    formatters = (dates.format_date, dates.format_datetime, dates.format_time)
    for width in ("full", "long", "medium", "short"):
        for formatter in formatters:
            assert formatter(instant, format=width, locale=locale)
@pytest.mark.all_locales
def test_smoke_numbers(locale):
    # Each sample exercises a different numeric corner case.
    locale = Locale.parse(locale)
    samples = (
        decimal.Decimal("-33.76"),  # Negative Decimal
        decimal.Decimal("13.37"),  # Positive Decimal
        1.2 - 1.0,  # Inaccurate float
        10,  # Plain old integer
        0,  # Zero
    )
    for number in samples:
        assert numbers.format_decimal(number, locale=locale)
|
wasit7/book_pae | pae/forcast/src/csv/CS_table_No2_No4.py | Python | mit | 1,865 | 0.017158 | # -*- coding: utf-8 -*-
"""
Created on Wed Sep 09 14:51:02 2015
@author: Methinee
"""
import pandas as pd
import numpy as np
from collections import defaultdict
from astropy.table import Table, Column
df = pd.read_csv('../data/CS_table_No2_No4_new.csv',delimiter=";", skip_blank_lines = True,
error_bad_lines=False)
headers=list(df.columns.values)
subjects = {'courseId':[]}
students = {'studentId':[]}
years = [52,53,54,55,56]
semester = [1,2]
key_sub = defaultdict(list)
key_std = defaultdict(list)
key=[]
countSub = 0
countStd = 0
#Create dictionary of list subjects
for sub in df[headers[4]]:
if sub not in subjects['courseId']:
subjects['courseId'].append(sub)
countSub = countSub+1
for keyCol in subjects['courseId']:
key_sub[countSub] = keyCol
#print subjects["courseId"]
#print "number of subjects are ",countSub
print "-----------------------------------------------"
print | key_sub
print "-----------------------------------------------"
#Create dictionary of list students
for std in df[headers[0]]:
if std not in students['studentId']:
students['studentId'].append(std)
countStd = countStd+1
# for keyRow in students['studentId']:
# for y in years:
# students['studentId'].append(y)
#print students['studentId']
#print "number of students are ",countStd
print "------------------------------------------ | -----"
#create table row are stdId+years+semester, column is key of subjects
column = key_sub
t = Table(column , names=(subjects['courseId']))
firstCol = students
t = Table(firstCol, names=(firstCol))
print t
"""table_No2_No4_out = pd.DataFrame(subjects)
writer = pd.ExcelWriter("table_No2_No4_fomat.xlsx")
table_No2_No4_out.to_excel(writer,"grade")
writer.save()"""
|
mcasl/AMORE | pyAmore/tests/cython_tests/test_network_predict_strategies.py | Python | gpl-3.0 | 2,946 | 0.001358 | import unittest
from pyAmore.cython.interface import *
from pyAmore.cython.network_predict_strategies import NetworkPredictStrategy
from pyAmore.cython.materials import *
from pyAmore.cython.network_predict_strategies import MlpNetworkPredictStrategy
class TestNetworkPredictStrategy(unittest.TestCase):
    """The abstract base predict strategy must reject direct use."""

    def test_call(self):
        factory = AdaptiveGradientDescentMaterialsFactory()
        network = factory.make_primitive_network()
        strategy = factory.make_network_predict_strategy(network)
        self.assertRaises(NotImplementedError, NetworkPredictStrategy.__call__, strategy)

    def test_activate_neurons(self):
        factory = AdaptiveGradientDescentMaterialsFactory()
        network = factory.make_primitive_network()
        strategy = factory.make_network_predict_strategy(network)
        self.assertRaises(NotImplementedError, NetworkPredictStrategy.activate_neurons, strategy)
class TestMlpPredictStrategy(unittest.TestCase):
    """Unit tests for MlpNetworkPredictStrategy."""

    def test_init(self):
        factory = AdaptiveGradientDescentMaterialsFactory()
        neural_network = factory.make_primitive_network()
        predict_strategy = MlpNetworkPredictStrategy(neural_network)
        self.assertEqual(predict_strategy.neural_network, neural_network)

    def test_call_wrong_dimensions(self):
        # Feeding 4 columns into a 3-input network must be rejected.
        neural_network = mlp_network([3, 2, 1], 'tanh', 'tanh')
        predict_strategy = neural_network.predict_strategy
        input_data = np.random.rand(4, 4)
        self.assertRaises(ValueError, predict_strategy, input_data)

    def test_call__(self):
        # Predicting a batch must match the manual poke/activate/pick loop.
        neural_network = mlp_network([3, 2, 1], 'tanh', 'tanh')
        input_data = np.random.rand(4, 3)
        result = np.zeros((4, 1))
        for row, data in enumerate(input_data):
            neural_network.poke_inputs(data)
            neural_network.predict_strategy.activate_neurons()
            result[row, :] = neural_network.pick_outputs()
        # BUG FIX: the original asserted `(…).all` (a bound method, always
        # truthy), making the test vacuous; it must be called.
        self.assertTrue((neural_network(input_data) == result).all())

    def test_activate_neurons(self):
        neural_network = mlp_network([3, 2, 1], 'tanh', 'tanh')
        input_data = np.random.rand(2, 3)
        neural_network.poke_inputs(input_data[1, :])
        neural_network.predict_strategy.activate_neurons()
        hidden_layer = neural_network.layers[1]
        output_layer = neural_network.layers[2]
        # Reading the hidden neurons exercises their __call__ path even
        # though only the output neuron's value is asserted.
        hidden_layer[0]()
        hidden_layer[1]()
        result = output_layer[0]()
        self.assertEqual(neural_network.pick_outputs(), [result])
if __name__ == '__main__':
unittest.main()
|
PyThaiNLP/pythainlp | pythainlp/tag/orchid.py | Python | apache-2.0 | 3,490 | 0 | # -*- coding: utf-8 -*-
"""
Data preprocessing for ORCHID corpus
"""
from typing import List, Tuple
# defined strings for special characters,
# from Table 4 in ORCHID paper
CHAR_TO_ESCAPE = {
" ": "<space>",
"+": "<plus>",
"-": "<minus>",
"=": "<equal>",
",": "<comma>",
"$": "<dollar>",
".": "<full_stop>",
"(": "<left_parenthesis>",
")": "<right_parenthesis>",
'"': "<quotation>",
"@": "<at_mark>",
"&": "<ampersand>",
"{": "<left_curly_bracket>",
"^": "<circumflex_accent>",
"?": "<question_mark>",
"<": "<less_than>",
">": "<greater_than>",
"!": "<exclamation>",
"’": "<apostrophe>",
":": "<colon>",
"*": "<asterisk>",
";": "<semi_colon>",
"/": "<slash>",
}
ESCAPE_TO_CHAR = dict((v, k) for k, v in CHAR_TO_ESCAPE.items())
# map from ORCHID POS tag to Universal POS tag
# from Korakot Chaovavanich
TO_UD = {
"": "",
# NOUN
"NOUN": "NOUN",
"NCMN": "NOUN",
"NTTL": "NOUN",
"CNIT": "NOUN",
"CLTV": "NOUN",
"CMTR": "NOUN",
"CFQC": "NOUN",
"CVBL": "NOUN",
# VERB
"VACT": "VERB",
"VSTA": "VERB",
# PROPN
"PROPN": "PROPN",
"NPRP": "PROPN",
# ADJ
"ADJ": "ADJ",
"NONM": "ADJ",
"VATT": "ADJ",
"DONM": "ADJ",
# ADV
"ADV": "ADV",
"ADVN": "ADV",
"ADVI": "ADV",
"ADVP": "ADV",
"ADVS": "ADV",
# INT
| "INT": "INTJ",
# PRON
"PRON": "PRON",
"PPRS": "PRON",
"PDMN": "PRON",
"PNTR": "PRON",
# DET
"DET": "DET",
"DDAN": "DET",
"DDAC": "DET",
"DDBQ": "DET",
"DDAQ": "DET",
"DIAC": "DET",
"DIBQ": "DET",
"DIAQ": "DET",
# NUM
"NUM": "NUM",
"NCNM": "NUM",
"NLBL": "NUM",
"DCNM": "NUM",
# AUX
"AUX": "AUX",
"XVBM": "AUX",
"XVAM": "AUX",
"XVMM": "AUX",
"XVBB": "AUX",
"XVA | E": "AUX",
# ADP
"ADP": "ADP",
"RPRE": "ADP",
# CCONJ
"CCONJ": "CCONJ",
"JCRG": "CCONJ",
# SCONJ
"SCONJ": "SCONJ",
"PREL": "SCONJ",
"JSBR": "SCONJ",
"JCMP": "SCONJ",
# PART
"PART": "PART",
"FIXN": "PART",
"FIXV": "PART",
"EAFF": "PART",
"EITT": "PART",
"AITT": "PART",
"NEG": "PART",
# PUNCT
"PUNCT": "PUNCT",
"PUNC": "PUNCT",
}
def ud_exception(w: str, tag: str) -> str:
    """Override the mapped Universal POS tag for exceptional Thai words.

    The nominalizing prefixes "การ" and "ความ" are always nouns in
    Universal Dependencies, regardless of the ORCHID tag they carry;
    every other word keeps the tag it was given.
    """
    return "NOUN" if w in ("การ", "ความ") else tag
def pre_process(words: List[str]) -> List[str]:
    """
    Convert signs and symbols with their defined strings.
    This function is to be used as a preprocessing step,
    before the actual POS tagging.

    :param words: tokenized words
    :return: words with each special character replaced by its ORCHID
        escape string (e.g. " " -> "<space>"); other words unchanged
    """
    # dict.get() with the word itself as fallback replaces the explicit
    # "materialize keys(), test membership, then index" dance.
    return [CHAR_TO_ESCAPE.get(word, word) for word in words]
def post_process(
    word_tags: List[Tuple[str, str]], to_ud: bool = False
) -> List[Tuple[str, str]]:
    """
    Convert defined strings back to corresponding signs and symbols.
    This function is to be used as a post-processing step,
    after the actual POS tagging.

    :param word_tags: list of (word, tag) pairs produced by the tagger
    :param to_ud: when True, also map ORCHID tags to Universal POS tags
    :return: list of (word, tag) pairs with escape strings unescaped
    :raises KeyError: if ``to_ud`` is True and a tag is not in ``TO_UD``
    """
    if not to_ud:
        return [
            (ESCAPE_TO_CHAR.get(word, word), tag) for word, tag in word_tags
        ]
    # NOTE: as in the original implementation, ud_exception() receives the
    # still-escaped token; only the returned word is unescaped.
    return [
        (ESCAPE_TO_CHAR.get(word, word), ud_exception(word, TO_UD[tag]))
        for word, tag in word_tags
    ]
|
elmiko/data-goblin | datagoblin/datagoblin/wsgi.py | Python | mit | 397 | 0 | """
WSGI | config for datagoblin project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https: | //docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "datagoblin.settings")
application = get_wsgi_application()
|
ezralanglois/arachnid | arachnid/core/parallel/setup.py | Python | gpl-2.0 | 412 | 0.007282 | ''' Setup for core modules
'''
def configuration(parent | _package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('parallel', parent_package, top_path)
config.set_options(quiet=True)
config.add_subpackage('core')
return config
if __name__ == '__main__':
fro | m numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
Jwely/pivpr | py/controler/__init__.py | Python | mit | 203 | 0 | __author_ | _ = 'Jwely'
from build_tex_figs_by_run import *
from build_tex_tables import *
from synthesize_piv_uncertainty_images import *
from test_piv_dynam | ic_plots import *
from test_piv_plots import *
|
jacksonwilliams/arsenalsuite | python/scripts/conference_logger.py | Python | gpl-2.0 | 1,265 | 0.058498 | #!/usr/bin/python
# -*- coding: koi8-r -*-
import blur.Stone
import PyQt4.QtCore
from xmpp import *
import time,os,sys
app = PyQt4.QtCore.QCoreApplication(sys.argv)
blur.Stone.initConfig( 'conference_logger.ini', 'it@conference.blur.com.log' )
blur.RedirectOutputToLog()
#BOT=(botjid,password)
BOT=('thepipe@jabber.blur.com','thePIpe')
#CONF=(confjid,password)
CONF=('it@conference.jabber.blur.com','')
def LOG(stanza,nick,text):
    # Log one conference event to stdout. *stanza* is unused here; it is
    # kept so the signature matches what the XMPP handlers pass along.
    print nick, text
def messageCB(sess,mess):
    # Groupchat message handler: the resource part of the room JID is the
    # sender's nickname; log nickname plus message body.
    fro = mess.getFrom()
    print fro
    nick=fro.getResource()
    text=mess.getBody()
    LOG(mess,nick,text)
roster=[]
def presenceCB(sess,pr | es):
nick=pres.getFrom().getResource()
text=''
if pres.getType()=='unavailable':
if nick in roster:
text=nick+' offline | '
roster.remove(nick)
else:
if nick not in roster:
text=nick+' online'
roster.append(nick)
if text: LOG(pres,nick,text)
if 1:
cl=Client(JID(BOT[0]).getDomain(),debug=[])
cl.connect()
cl.RegisterHandler('message',messageCB)
cl.RegisterHandler('presence',presenceCB)
cl.auth(JID(BOT[0]).getNode(),BOT[1])
p=Presence(to='%s/logger'%CONF[0])
p.setTag('x',namespace=NS_MUC).setTagData('password',CONF[1])
p.getTag('x').addChild('history',{'maxchars':'0','maxstanzas':'0'})
cl.send(p)
while 1:
cl.Process(1)
|
fred49/linshare-cli | linsharecli/user/shared_space_audit.py | Python | gpl-3.0 | 7,635 | 0.001048 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""TODO"""
# This file is part of Linshare cli.
#
# LinShare cli is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LinShare cli is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with LinShare cli. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2019 Frédéric MARTIN
#
# Contributors list :
#
# Frédéric MARTIN frederic.martin.fma@gmail.com
#
import copy
import json
from argparse import RawTextHelpFormatter
from linshareapi.cache import Time
from vhatable.cell import CellBuilder
from vhatable.cell import ComplexCell
from vhatable.cell import ComplexCellBuilder
from vhatable.filters import PartialOr
from linsharecli.user.core import DefaultCommand as Command
from linsharecli.common.core import add_list_parser_options
from linsharecli.common.cell import ActorCell
from linsharecli.common.cell import AuthUserCell
from linsharecli.common.tables import TableBuilder
class DefaultCommand(Command):
    """Base command for the shared-space audit sub-commands.

    Mainly provides argcomplete helpers that list shared space uuids.
    """
    IDENTIFIER = "name"
    MSG_RS_UPDATED = "The shared space member '%(account)s' (%(uuid)s) was successfully updated."
    MSG_RS_CREATED = "The shared space member '%(account)s' (%(uuid)s) was successfully created."
    CFG_DELETE_MODE = 1
    CFG_DELETE_ARG_ATTR = "ss_uuid"
    def complete(self, args, prefix):
        # Shell completion for the default positional argument: yields the
        # uuids of all shared spaces that start with *prefix*.
        super(DefaultCommand, self).__call__(args)
        json_obj = self.ls.shared_spaces.list()
        return (v.get(self.RESOURCE_IDENTIFIER)
                for v in json_obj if v.get(self.RESOURCE_IDENTIFIER).startswith(prefix))
    def complete_shared_spaces(self, args, prefix):
        """Yield shared space uuids starting with *prefix* (argcomplete helper)."""
        super(DefaultCommand, self).__call__(args)
        json_obj = self.ls.shared_spaces.list()
        return (v.get(self.RESOURCE_IDENTIFIER)
                for v in json_obj if v.get(self.RESOURCE_IDENTIFIER).startswith(prefix))
class SharedSpaceCompleter(object):
    """Argcomplete completer that proposes shared space uuids."""
    # pylint: disable=too-few-public-methods
    def __init__(self, config):
        self.config = config
    def __call__(self, prefix, **kwargs):
        # Imported lazily: argcomplete is only available when completion runs.
        from argcomplete import debug
        try:
            debug("\n------------ SharedSpaceCompleter -----------------")
            debug("Kwargs content :")
            for i, j in list(kwargs.items()):
                debug("key : " + str(i))
                debug("\t - " + str(j))
            debug("\n------------ SharedSpaceCompleter -----------------\n")
            args = kwargs.get('parsed_args')
            cmd = DefaultCommand(self.config)
            return cmd.complete_shared_spaces(args, prefix)
        # pylint: disable=broad-except
        except Exception as ex:
            # argcomplete swallows exceptions silently; surface them through
            # its debug channel so completion failures can be diagnosed.
            debug("\nERROR:An exception was caught :" + str(ex) + "\n")
            import traceback
            traceback.print_exc()
            debug("\n------\n")
            return ["comlete-error"]
class ResourceCell(ComplexCell):
    """Table cell rendering an audited resource as a short readable label.

    Dispatches on the audit row's 'action' and 'type' fields; unhandled
    combinations fall back to a 'Missing format.' label.
    """
    _format_filter = '{uuid}'
    def __unicode__(self):
        if self.raw:
            return str(self.value)
        if self.value is None:
            return self.none
        action = self.row['action']
        resource_type = self.row['type']
        # Defaults used when no action/type branch below matches.
        fmt = 'Missing format. {raw}'
        data = {}
        data['action'] = action
        data['raw'] = "?"
        if self.extended:
            # In extended mode, dump the whole payload as pretty JSON.
            fmt = 'Missing format.\n{raw}'
            data['raw'] = json.dumps(
                copy.deepcopy(self.value),
                sort_keys=True, indent=2
            )
        if resource_type == "WORKGROUP":
            if action == "CREATE":
                fmt = 'New workGroup : {name} ({uuid:.8})'
                data.update(self.value)
        elif resource_type == "WORKGROUP_MEMBER":
            if action == "CREATE":
                # Vertical display gets the full uuid, tabular a short one.
                fmt = 'New member : {name} ({uuid:.8})'
                if self.vertical:
                    fmt = 'New member : {name} ({uuid})'
                data.update(self.value['user'])
        elif resource_type == "WORKGROUP_FOLDER":
            if action == "CREATE":
                fmt = 'New folder : {name} ({uuid:.8})'
                if self.vertical:
                    fmt = 'New folder : {name} ({uuid})'
                data.update(self.value)
        elif resource_type == "WORKGROUP_DOCUMENT":
            if action == "CREATE":
                fmt = 'New document : {name} ({uuid:.8})'
                if self.vertical:
                    fmt = 'New document : {name} ({uuid})'
                data.update(self.value)
        elif resource_type == "WORKGROUP_DOCUMENT_REVISION":
            if action == "CREATE":
                fmt = 'New version : {name} ({uuid:.8})'
                if self.vertical:
                    fmt = 'New version : {name} ({uuid})'
                data.update(self.value)
        return fmt.format(**data)
class ListCommand(Command):
""" List all Jwt token."""
IDENTIFIER = "creationDate" |
RESOURCE_IDENTIFIER = "uuid"
@Time('linsharecli.shared_spaces.audit', label='Global time : %(time)s')
def __call__(self, args):
super(ListCommand, self).__call__(args)
en | dpoint = self.ls.shared_spaces.audit
tbu = TableBuilder(self.ls, endpoint, self.DEFAULT_SORT)
tbu.load_args(args)
tbu.add_filters(
PartialOr(self.IDENTIFIER, args.identifiers, True),
PartialOr(self.RESOURCE_IDENTIFIER, args.uuids, True, match_raw=True),
PartialOr("resource", [args.resource], True, match_raw=False),
)
tbu.add_custom_cell("actor", ActorCell)
tbu.add_custom_cell("authUser", AuthUserCell)
tbu.add_custom_cell("uuid", CellBuilder('{value:.8}', '{value}'))
tbu.add_custom_cell("resource", ResourceCell)
tbu.add_custom_cell(
"workGroup",
ComplexCellBuilder(
'{name}\n({uuid:.8})',
'{name} ({uuid:})',
'{name}',
)
)
table = tbu.build().load_v2(endpoint.list(args.ss_uuid))
table.align['resource'] = "l"
return table.render()
def complete_fields(self, args, prefix):
"""TODO"""
# pylint: disable=unused-argument
super(ListCommand, self).__call__(args)
cli = self.ls.shared_spaces.audit
return cli.get_rbu().get_keys(True)
def add_parser(subparsers, name, desc, config):
    """Register the shared-space audit sub-command and its 'list' action."""
    parser_tmp = subparsers.add_parser(name, help=desc)
    parser_tmp.add_argument(
        'ss_uuid',
        help="shared_space uuid"
    ).completer = SharedSpaceCompleter(config)
    subparsers2 = parser_tmp.add_subparsers()
    # command : list
    parser = subparsers2.add_parser(
        'list',
        formatter_class=RawTextHelpFormatter,
        help="list shared space audit traces")
    parser.add_argument('identifiers', nargs="*", help="filter by fragments of date")
    parser.add_argument('-u', '--uuid', dest="uuids", action="append",
                        help="Filter by uuid fragments.")
    parser.add_argument('-e', '--resource', action="store",
                        help="Filter by resource uuid")
    add_list_parser_options(parser, cdate=True)
    parser.set_defaults(__func__=ListCommand(config))
|
GaretJax/storm-erp | cmd.py | Python | mit | 125 | 0.008 | #!/usr/bin/python3
| import sys
import os
sys.path.insert(0, os.getcwd())
from storm.cli import main
mai | n(prog_name='storm')
|
chromium/chromium | content/test/gpu/gpu_tests/maps_integration_test.py | Python | bsd-3-clause | 7,116 | 0.007729 | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
im | port os
import sys
from gpu_tests import common_browser_args as cba
from gpu_tests import color_profile_manager
from gpu_tests import gpu_integration_test
from gpu_tests import pixel_test_pages
from gpu_tests import expected_col | or_test
import gpu_path_util
from py_utils import cloud_storage
from telemetry.util import image_util
_MAPS_PERF_TEST_PATH = os.path.join(gpu_path_util.TOOLS_PERF_DIR, 'page_sets',
'maps_perf_test')
_DATA_PATH = os.path.join(gpu_path_util.GPU_DIR, 'gpu_tests')
_TEST_NAME = 'Maps_maps'
class MapsIntegrationTest(expected_color_test.ExpectedColorTest):
  """Google Maps pixel tests.
  This is an expected color test instead of a regular pixel test because the
  captured image is incredibly noisy.
  Note: this test uses the same WPR as the smoothness.maps benchmark
  in tools/perf/benchmarks. See src/tools/perf/page_sets/maps.py for
  documentation on updating the WPR archive.
  """
  @classmethod
  def Name(cls):
    return 'maps'
  @classmethod
  def SetUpProcess(cls):
    """Forces an sRGB color profile, fetches the WPR data and starts Chrome."""
    options = cls.GetParsedCommandLineOptions()
    color_profile_manager.ForceUntilExitSRGB(
        options.dont_restore_color_profile_after_test)
    super(MapsIntegrationTest, cls).SetUpProcess()
    cls.CustomizeBrowserArgs([
        cba.ENSURE_FORCED_COLOR_PROFILE,
        cba.FORCE_BROWSER_CRASH_ON_GPU_CRASH,
        cba.FORCE_COLOR_PROFILE_SRGB,
    ])
    cloud_storage.GetIfChanged(
        os.path.join(_MAPS_PERF_TEST_PATH, 'load_dataset'),
        cloud_storage.PUBLIC_BUCKET)
    cls.SetStaticServerDirs([_MAPS_PERF_TEST_PATH])
    cls.StartBrowser()
  @classmethod
  def TearDownProcess(cls):
    # NOTE(review): argument order looks swapped (conventionally
    # super(MapsIntegrationTest, cls)); behaves the same while cls is
    # MapsIntegrationTest itself, but would raise for a subclass -- confirm.
    super(cls, MapsIntegrationTest).TearDownProcess()
    cls.StopWPRServer()
  @classmethod
  def GenerateGpuTests(cls, options):
    cls.SetParsedCommandLineOptions(options)
    # The maps_pixel_expectations.json contain the actual image expectations. If
    # the test fails, with errors greater than the tolerance for the run, then
    # the logs will report the actual failure.
    #
    # There will also be a Skia Gold Triage link, this will be used to store the
    # artifact of the failure to help with debugging. There are no accepted
    # positive baselines recorded in Skia Gold, so its diff will not be
    # sufficient to debugging the failure.
    yield ('Maps_maps', 'file://performance.html', ())
  def RunActualGpuTest(self, test_path, *args):
    """Loads the Maps page, waits for it to settle, and validates pixels."""
    tab = self.tab
    action_runner = tab.action_runner
    action_runner.Navigate(test_path)
    action_runner.WaitForJavaScriptCondition('window.startTest != undefined')
    action_runner.EvaluateJavaScript('window.startTest()')
    action_runner.WaitForJavaScriptCondition('window.testDone', timeout=320)
    # Wait for the page to process immediate work and load tiles.
    action_runner.EvaluateJavaScript("""
        window.testCompleted = false;
        requestIdleCallback(
            () => window.testCompleted = true,
            { timeout : 10000 })""")
    action_runner.WaitForJavaScriptCondition('window.testCompleted', timeout=30)
    expected = _ReadPixelExpectations('maps_pixel_expectations.json')
    page = _GetMapsPageForUrl(test_path, expected)
    # Special case some tests on Fuchsia that need to grab the entire contents
    # in the screenshot instead of just the visible portion due to small screen
    # sizes.
    if (MapsIntegrationTest.browser.platform.GetOSName() == 'fuchsia'
        and page.name in pixel_test_pages.PROBLEMATIC_FUCHSIA_TESTS):
      screenshot = tab.FullScreenshot(5)
    else:
      screenshot = tab.Screenshot(5)
    if screenshot is None:
      self.fail('Could not capture screenshot')
    dpr = tab.EvaluateJavaScript('window.devicePixelRatio')
    print("Maps' devicePixelRatio is %s" % dpr)
    # The bottom corners of Mac screenshots have black triangles due to the
    # rounded corners of Mac windows. So, crop the bottom few rows off now to
    # get rid of those. The triangles appear to be 5 pixels wide and tall
    # regardless of DPI, so 10 pixels should be sufficient. However, when
    # running under Python 3, 10 isn't quite enough for some reason, so use
    # 20 instead.
    if self.browser.platform.GetOSName() == 'mac':
      img_height = image_util.Height(screenshot)
      img_width = image_util.Width(screenshot)
      screenshot = image_util.Crop(screenshot, 0, 0, img_width, img_height - 20)
    # Trim the uniform white border before sampling pixels.
    x1, y1, x2, y2 = _GetCropBoundaries(screenshot)
    screenshot = image_util.Crop(screenshot, x1, y1, x2 - x1, y2 - y1)
    self._ValidateScreenshotSamplesWithSkiaGold(tab, page, screenshot, dpr)
  @classmethod
  def ExpectationsFiles(cls):
    return [
        os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'test_expectations',
            'maps_expectations.txt')
    ]
]
def _ReadPixelExpectations(expectations_file):
  """Loads a JSON pixel-expectations file stored next to the GPU tests.

  Args:
    expectations_file: Filename (not a path) of the expectations JSON file.

  Returns:
    The parsed JSON content of the file.
  """
  path = os.path.join(_DATA_PATH, expectations_file)
  with open(path, 'r') as json_file:
    return json.load(json_file)
def _GetMapsPageForUrl(url, expected_colors):
  """Builds the expected-color test page description for the Maps test."""
  page = expected_color_test.ExpectedColorPixelTestPage(
      url=url,
      name=_TEST_NAME,
      # Exact test_rect is arbitrary, just needs to encapsulate all pixels
      # that are tested.
      test_rect=[0, 0, 1000, 800],
      tolerance=10,
      expected_colors=expected_colors)
  return page
def _GetCropBoundaries(screenshot):
  """Returns the boundaries to crop the screenshot to.

  Specifically, we look for the boundaries where the white background
  transitions into the (non-white) content we care about.

  Args:
    screenshot: A screenshot returned by Tab.Screenshot() (numpy ndarray?)

  Returns:
    A 4-tuple (x1, y1, x2, y2) denoting the top left and bottom right
    coordinates to crop to.
  """
  height = image_util.Height(screenshot)
  width = image_util.Width(screenshot)

  def IsWhite(col, row):
    pixel = image_util.GetPixelColor(screenshot, col, row)
    return pixel.r == 255 and pixel.g == 255 and pixel.b == 255

  def RowIsWhite(row):
    return all(IsWhite(col, row) for col in range(width))

  def ColumnIsWhite(col):
    return all(IsWhite(col, row) for row in range(height))

  # First non-white column/row from the top-left; fall back to the full
  # image when no content is found (matching the original defaults).
  x1 = next((c for c in range(width) if not ColumnIsWhite(c)), 0)
  y1 = next((r for r in range(height) if not RowIsWhite(r)), 0)
  # First fully-white column/row after the content starts.
  x2 = next((c for c in range(x1 + 1, width) if ColumnIsWhite(c)), width)
  y2 = next((r for r in range(y1 + 1, height) if RowIsWhite(r)), height)
  return x1, y1, x2, y2
def load_tests(loader, tests, pattern):
  """unittest load_tests protocol hook: collect all tests in this module."""
  del loader, tests, pattern  # Unused.
  return gpu_integration_test.LoadAllTestsInModule(sys.modules[__name__])
|
NorThanapon/dict-definition | definition/readers/wordnet.py | Python | gpl-3.0 | 10,980 | 0 | from neobunch import Bunch
import os
from nltk.corpus import wordnet as wn
import re
from itertools import izip
from definition.words.word_sampler import lemmatize_all
class NLTKWordNetParser(object):
def __init__(self, opt):
self.opt = opt
def to_list(self, entry,
order_keys=['word', 'pos', 'sense_id',
'wn_id', 'proper_noun',
'lemma_freq', 'definition']):
output = [u'{}'.format(entry[k]) for k in order_keys]
return output
    def parse_synset_name(self, name):
        # A synset name looks like "dog.n.01": lemma, POS letter, sense
        # number. Indexing from the end keeps lemmas containing dots intact.
        parts = name.split('.')
        return Bunch(pos=parts[-2],
                     sense_id=int(parts[-1]), wn_id=name)
    def get_entry(self, word, sense):
        """Build a flat entry (Bunch) for *word* from one NLTK synset."""
        synset = self.parse_synset_name(sense.name())
        synset.word = word
        synset.definition = sense.definition()
        synset.proper_noun = self.is_proper_noun(sense)
        # Sum the corpus counts of all lemmas as a proxy for sense frequency.
        freq = 0
        for lemma in sense.lemmas():
            freq += lemma.count()
        synset.lemma_freq = freq
        return synset
def is_proper_noun(self, sense):
cap = 0
for lemma in sense.lemmas():
if lemma.name() != lemma.name().lower():
cap += 1
return cap == len(sense.lemmas())
    def get_entries(self, word):
        """Return all WordNet entries for *word*, most frequent sense first."""
        entries = []
        query = word
        senses = wn.synsets(query)
        for sense in senses:
            entries.append(self.get_entry(word, sense))
        entries.sort(key=lambda x: x.lemma_freq, reverse=True)
        return entries
def one_sense_per_pos(self, entries, pref=['v', 'a', 's', 'n', 'r']):
new_entries = []
for p in pref:
for entry in entries:
if entry.pos == p:
new_entries.append(entry)
break
return new_entries
def select_top_entry(self, entries):
if len(entries) < 2:
return entries
if entries[0].lemma_freq > entries[1].lemma_freq:
return [entries[0]]
| top_freq = entries[0].lemma_freq
entries = filter(lambda e: e.lemma_freq == top_freq, entries)
entries = self.one_sense_per_pos(entries)
return [entries[0]]
def remove_self_ref(self, word, entries):
new_entries = []
p = re.compile(r' ' + word + r'[ ,:;"\']')
for entry in entries:
if p.search(entry.definition) is | None:
new_entries.append(entry)
return new_entries
def preprocess(self, ifp, ofp):
for line in ifp:
word = line.strip()
entries = self.get_entries(word)
entries = self.remove_self_ref(word, entries)
if self.opt.only_first_sense and len(entries) > 1:
entries = self.select_top_entry(entries)
for entry in entries:
ofp.write(u'{}\n'.format(u'\t'.join(self.to_list(entry))))
class DBWordNetParser(object):
def __init__(self, opt):
self.opt = opt
self.dir = opt.wndb_dir
self.lexnames = DBWordNetParser.read_lexname(self.dir)
self.idx = DBWordNetParser.read_index_files(self.dir)
self.data = DBWordNetParser._read_data_files(self.dir, self.lexnames)
self.sense_numbers = DBWordNetParser.read_sense_file(self.dir)
_POS_FILE_MAP_ = {'n': 'noun', 'v': 'verb', 'a': 'adj', 'r': 'adv'}
_SYNSET_TYPE_MAP_ = [None, 'n', 'v', 'a', 'r', 's']
_PROPER_NOUN_REGEX_ = re.compile(r'^(([A-Z]|_(for|the|of|to)_)[^_]+_?)+$')
def to_list(self, word, entry, inflected=False, maybe_proper_noun=False):
source = 'lemma'
# proper_noun = 'regular_word'
if inflected:
source = 'inflection'
# if maybe_proper_noun:
# proper_noun = 'could_be_proper_nound'
if not inflected:
for eword in entry.words:
if eword.lower() == word:
word = eword
break
else:
if entry.lemma == word[:-1] and word[-1] == 's':
for eword in entry.words:
if eword.lower() == entry.lemma:
word = eword + 's'
break
output = [word, entry.lemma, entry.synset_type,
str(entry.sense_number), entry.pos, source,
','.join(entry.words), entry.gloss]
return output
def get_idx_entries(self, word, try_lemma=True):
if word in self.idx:
return self.idx[word], False
if try_lemma:
out_entries = []
tagged_lemmas = lemmatize_all(word)
for lemma, pos in tagged_lemmas:
for e in self.idx.get(lemma, []):
if e.pos == pos and lemma != word:
out_entries.append(e)
return out_entries, True
def get_entries(self, idx_entries):
out_entries = []
for idx_entry in idx_entries:
for synset_offset in idx_entry.synset_offsets:
data_entry = self.data[synset_offset]
sense_key = '{}-{}-{}'.format(
idx_entry.lemma, data_entry.synset_type, data_entry.offset)
entry = Bunch(lemma=idx_entry.lemma,
pos=idx_entry.pos,
synset_type=data_entry.synset_type,
sense_number=self.sense_numbers[sense_key],
gloss=data_entry.gloss,
words=[e.word for e in data_entry.words])
out_entries.append(entry)
return out_entries
def preprocess(self, ifp, ofp):
for line in ifp:
word = line.strip()
idx_entries, inflected = self.get_idx_entries(word)
entries = self.get_entries(idx_entries)
maybe_proper_noun = False
for entry in entries:
for other_word in entry.words:
if DBWordNetParser.is_proper_noun(other_word):
maybe_proper_noun = True
break
for entry in entries:
if inflected and DBWordNetParser.is_entry_proper_noun(entry):
continue
ofp.write(u'{}\n'.format(
u'\t'.join(self.to_list(
word, entry, inflected, maybe_proper_noun))))
@staticmethod
def is_entry_proper_noun(entry):
for word in entry.words:
if not DBWordNetParser.is_proper_noun(word):
return False
return True
    @staticmethod
    def is_proper_noun(word):
        # Matches underscore-joined capitalized tokens, allowing the lowercase
        # connectors _for_/_the_/_of_/_to_ (e.g. "Statue_of_Liberty").
        m = DBWordNetParser._PROPER_NOUN_REGEX_.match(word)
        return m is not None
    @staticmethod
    def read_lexname(wndb_path):
        """Parse the WordNet 'lexnames' file into {file_number: lexname}."""
        lexnames = {}
        lexname_path = os.path.join(wndb_path, 'lexnames')
        with open(lexname_path) as ifp:
            for line in ifp:
                # Lines starting with a space are license/header text.
                if line.startswith(' '):
                    continue
                part = line.strip().split('\t')
                lexnames[part[0]] = part[1]
        return lexnames
    @staticmethod
    def read_sense_file(wndb_path):
        """Parse 'index.sense' into {'lemma-synset_type-offset': sense_number}."""
        sense_numbers = {}
        idx_sense_path = os.path.join(wndb_path, 'index.sense')
        with open(idx_sense_path) as ifp:
            for line in ifp:
                # Lines starting with a space are license/header text.
                if line.startswith(' '):
                    continue
                part = line.strip().split(' ')
                # The sense key is "lemma%k..."; the first digit k of the key
                # encodes the synset type (1=n, 2=v, 3=a, 4=r, 5=s).
                lemma, key = part[0].split('%')
                synset_type = DBWordNetParser._SYNSET_TYPE_MAP_[int(key[0])]
                offset = part[1]
                number = int(part[2])
                sense_key = '{}-{}-{}'.format(lemma, synset_type, offset)
                sense_numbers[sense_key] = number
        return sense_numbers
@staticmethod
def read_index_files(wndb_path, pos_files=['noun', 'verb', 'adj', 'adv']):
entries = {}
for pos_file in pos_files:
idx_path = os.path.join(wndb_path, 'index.' + pos_file)
with open(idx_path) as ifp:
for line in ifp:
if line.startswith(' '):
|
mlampros/textTinyPy | setup.py | Python | gpl-3.0 | 3,293 | 0.01822 |
from setuptools import setup, Extension
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#------------------------------------------------------------------------
# http://stackoverflow.com/questions/8106258/cc1plus-warning-command-line-option-wstrict-prototypes-is-valid-for-ada-c-o
import os
from distutils.sysconfig import get_config_vars
(opt,) = get_config_vars('OPT')
os.environ['OPT'] = " ".join(
flag for flag in opt.split() if flag != '-Wstrict-prototypes'
)
#------------------------------------------------------------------------
# open readme-file
def readme():
    """Return the contents of README.rst, used as the package long description."""
    with open('README.rst') as readme_file:
        return readme_file.read()
#------------------------------------------------------------------------
# specify compiling-linking arguments
tmp_comp_args = ["-std=c++11", "-lboost_locale", "-lboost_system", "-fopenmp"]
tmp_link_args = ["-std=c++11", "-lboost_locale", "-lboost_system", "-fopenmp"]
#------------------------------------------------------------------------
ext = Extension("*",
sources=["textTinyPy/textTinyPy.pyx", "textTinyPy/cpp_src/batch_tokenization.cpp", "textTinyPy/cpp_src/ngram_stemmer.cpp",
"textTinyPy/cpp_src/porter2_stemmer.cpp", "textTin | yPy/cpp_src/term_matrix.cpp", "textTinyPy/cpp_src/token_big_files.cpp",
"textTinyPy/cpp_src/tokenization.cpp", "textTinyPy/cpp_src/token_stats.cpp"] | ,
extra_compile_args = tmp_comp_args,
extra_link_args = tmp_link_args,
libraries = ['armadillo'],
language="c++",
include_dirs = ['.'])
setup(name="textTinyPy",
version = '0.0.4',
author = 'Lampros Mouselimis',
author_email = 'mouselimislampros@gmail.com',
url='https://github.com/mlampros/textTinyPy',
description = 'text processing functions for small or big data files',
long_description=readme(),
license = 'GNU General Public License (GPL) + COPYRIGHTS.txt',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
ext_modules=cythonize(ext),
packages=['textTinyPy'], # add the next three lines for the package data [ https://docs.python.org/3/distutils/setupscript.html#installing-package-data ]
package_dir={'textTinyPy': 'textTinyPy', 'test': 'test'},
package_data={'textTinyPy': ['stopwords/*.txt', 'locale/locale_stopword_encoding.csv'], 'test' : ['tests_load_folder/*.txt', 'tests_load_folder/*.xml',
'tests_save_folder/*.txt', 'parse_loader/*.txt', 'VOCAB/*.txt', 'VOCAB_token_stats/*.txt', 'tests_load_folder/term_matrix_file.csv']},
setup_requires=['pytest-runner', "Cython >= 0.23.5"],
tests_require=['pytest'],
install_requires=[ "Cython >= 0.23.5", "pandas >= 0.21.0", "scipy >= 0.13.0", "numpy >= 1.11.2", "future >= 0.15.2" ],)
|
yelizariev/addons-yelizariev | ir_attachment_url/tests/test_product_tmpl_image.py | Python | lgpl-3.0 | 3,397 | 0.002355 | # Copyright 2019 Rafis Bikbov <https://it-projects.info/team/RafiZz>
# Copyright 2019 Alexandr Kolushov <https://it-projects.info/team/KolushovAlexandr>
# Copyright 2019 Eugene Molotov <https://it-projects.info/team/em230418>
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
import logging
from odoo import api, conf
from odoo.tests.common import HttpCase, tagged
_logger = logging.getLogger(__name__)
@tagged("post_install", "-at_install")
class TestProductTmplImage(HttpCase):
    def _get_original_image_url(self, px=1024):
        # Wikimedia thumbnail URL for a fixed reference image, scaled to
        # *px* pixels wide; used as an externally-hosted attachment URL.
        return "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1e/Gullfoss%2C_an_iconic_waterfall_of_Iceland.jpg/{}px-Gullfoss%2C_an_iconic_waterfall_of_Iceland.jpg".format(
            px
        )
    def _get_odoo_image_url(self, model, record_id, field):
        # Standard Odoo /web/image controller URL serving a binary field.
        return "/web/image?model={}&id={}&field={}".format(model, record_id, field)
def test_getting_product_variant_image_fields_urls(self):
assert (
"ir_attachment_url" in conf.server_wide_modules
), "ir_attachment_url is not in server_wide_modules. Please add it via --load parameter"
env = api.Environment(self.registry.test_cr, self.uid, {})
env["ir.config_parameter"].set_param("ir_attachment_url.storage", "url")
product_tmpl = env["product.template"].create(
{
"name": "Test template",
"image": self._get_original_image_url(1024),
"image_medium": self._get_original_image_url(128),
"image_small": self._get_original_image_url(64),
}
)
product_product = env["product.product"].create(
{
| "name": "Test product",
"image": False,
"image_medium": | False,
"image_small": False,
"product_tmpl_id": product_tmpl.id,
}
)
odoo_image_url = self._get_odoo_image_url(
"product.product", product_product.id, "image"
)
odoo_image_medium_url = self._get_odoo_image_url(
"product.product", product_product.id, "image_medium"
)
odoo_image_small_url = self._get_odoo_image_url(
"product.product", product_product.id, "image_small"
)
product_tmpl_image_attachment = env["ir.http"].find_field_attachment(
env, "product.template", "image", product_tmpl
)
product_tmpl_image_medium_attachment = env["ir.http"].find_field_attachment(
env, "product.template", "image_medium", product_tmpl
)
product_tmpl_image_small_attachment = env["ir.http"].find_field_attachment(
env, "product.template", "image_small", product_tmpl
)
self.assertTrue(product_tmpl_image_attachment)
self.assertTrue(product_tmpl_image_medium_attachment)
self.assertTrue(product_tmpl_image_small_attachment)
self.authenticate("demo", "demo")
self.assertEqual(
self.url_open(odoo_image_url).url, product_tmpl_image_attachment.url
)
self.assertEqual(
self.url_open(odoo_image_medium_url).url,
product_tmpl_image_medium_attachment.url,
)
self.assertEqual(
self.url_open(odoo_image_small_url).url,
product_tmpl_image_small_attachment.url,
)
|
mycodeday/crm-platform | website_forum/tests/common.py | Python | gpl-3.0 | 3,442 | 0.000291 | # -*- coding: utf-8 -*-
from openerp.tests import common
KARMA = {
'ask': 5, 'ans': 10,
'com_own': 5, 'com_all': 10,
'com_conv_all': 50,
'upv': 5, 'dwv': 10,
'edit_own': 10, 'edit_all': 20,
'close_own': 10, 'close_all': 20,
'unlink_own': 10, 'unlink_all': 20,
'gen_que_new': 1, 'gen_que_upv': 5, 'gen_que_dwv': -1 | 0,
'gen_ans_upv': 10, 'gen_ans_dwv': -20,
}
class TestForumCommon(common.TransactionCase):
    """Shared fixture for website_forum tests.

    Creates three users (employee, portal, public), a forum whose karma
    thresholds come from the module-level ``KARMA`` map, and one question
    with one answer.
    """

    def setUp(self):
        super(TestForumCommon, self).setUp()
        Forum = self.env['forum.forum']
        Post = self.env['forum.post']

        # Test users -- one per access level. ``no_reset_password`` avoids
        # the signup-email side effect during tests.
        TestUsersEnv = self.env['res.users'].with_context({'no_reset_password': True})
        group_employee_id = self.ref('base.group_user')
        group_portal_id = self.ref('base.group_portal')
        group_public_id = self.ref('base.group_public')
        self.user_employee = TestUsersEnv.create({
            'name': 'Armande Employee',
            'login': 'Armande',
            'alias_name': 'armande',
            'email': 'armande.employee@example.com',
            'karma': 0,
            'groups_id': [(6, 0, [group_employee_id])]
        })
        self.user_portal = TestUsersEnv.create({
            'name': 'Beatrice Portal',
            'login': 'Beatrice',
            'alias_name': 'beatrice',
            'email': 'beatrice.employee@example.com',
            'karma': 0,
            'groups_id': [(6, 0, [group_portal_id])]
        })
        self.user_public = TestUsersEnv.create({
            'name': 'Cedric Public',
            'login': 'Cedric',
            'alias_name': 'cedric',
            'email': 'cedric.employee@example.com',
            'karma': 0,
            'groups_id': [(6, 0, [group_public_id])]
        })

        # Test forum. Answer-accept karma is set prohibitively high (9999)
        # so accept rights never interfere with the scenarios under test.
        self.forum = Forum.create({
            'name': 'TestForum',
            'karma_ask': KARMA['ask'],
            'karma_answer': KARMA['ans'],
            'karma_comment_own': KARMA['com_own'],
            'karma_comment_all': KARMA['com_all'],
            'karma_answer_accept_own': 9999,
            'karma_answer_accept_all': 9999,
            'karma_upvote': KARMA['upv'],
            'karma_downvote': KARMA['dwv'],
            'karma_edit_own': KARMA['edit_own'],
            'karma_edit_all': KARMA['edit_all'],
            'karma_close_own': KARMA['close_own'],
            'karma_close_all': KARMA['close_all'],
            'karma_unlink_own': KARMA['unlink_own'],
            'karma_unlink_all': KARMA['unlink_all'],
            'karma_comment_convert_all': KARMA['com_conv_all'],
            'karma_gen_question_new': KARMA['gen_que_new'],
            'karma_gen_question_upvote': KARMA['gen_que_upv'],
            'karma_gen_question_downvote': KARMA['gen_que_dwv'],
            'karma_gen_answer_upvote': KARMA['gen_ans_upv'],
            'karma_gen_answer_downvote': KARMA['gen_ans_dwv'],
            'karma_gen_answer_accept': 9999,
            'karma_gen_answer_accepted': 9999,
        })

        # One question with one tag, plus an answer linked via parent_id.
        self.post = Post.create({
            'name': 'TestQuestion',
            'content': 'I am not a bird.',
            'forum_id': self.forum.id,
            'tag_ids': [(0, 0, {'name': 'Tag0', 'forum_id': self.forum.id})]
        })
        self.answer = Post.create({
            'name': 'TestAnswer',
            'content': 'I am an anteater.',
            'forum_id': self.forum.id,
            'parent_id': self.post.id,
        })
|
OCA/vertical-isp | connector_equipment_service/models/agreement_serviceprofile.py | Python | agpl-3.0 | 7,516 | 0 | # Copyright (C) 2019 Open Source Integrators
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, models
class AgreementServiceProfile(models.Model):
    """Keep managed equipment in sync with Service Profile (SP) lifecycle.

    Creation, updates and stage moves are translated into
    add/update/activate/suspend/remove service commands pushed to the
    managed equipment through ``maintenance.equipment._connect()``.
    """
    _inherit = ['agreement.serviceprofile']

    # (XML ID suffix, label returned by get_stage, label returned by
    # get_next_stage) for each serviceprofile stage record. Note the two
    # methods historically return *different* labels for renew/close.
    _STAGE_MAP = [
        ('servpro_stage_draft', 'draft', 'draft'),
        ('servpro_stage_progress', 'in_progress', 'in_progress'),
        ('servpro_stage_suspend', 'suspend', 'suspend'),
        ('servpro_stage_renew', 'to_renew', 'renew'),
        ('servpro_stage_close', 'closed', 'close'),
        ('servpro_stage_cancel', 'cancel', 'cancel'),
    ]

    @api.model
    def create(self, vals):
        # If SP is created with a managed equipment, register the new
        # service on it immediately.
        if vals.get('equipment_id', False):
            equipment = self.env['maintenance.equipment'].browse(
                vals.get('equipment_id'))
            equipment._connect('add_service', serviceprofiles=self)
        return super().create(vals)

    @api.multi
    def write(self, vals):
        for sp in self:
            equip_id = sp.get_equip(vals)
            # Add Service: equipment was empty and is now managed, or the
            # profile moves (back) to draft.
            if equip_id and (not sp.equipment_id or
                             sp.get_next_stage(vals) == 'draft'):
                equip_id._connect('add_service', serviceprofiles=sp)
                sp.message_post(body=_('Added Service'))
            # Update Service: SP changed, but not when stage_id is all
            # that changed.
            if equip_id and (len(vals) > 1 or 'stage_id' not in vals):
                # If equipment was changed, handle the old one first.
                if vals.get('equipment_id', False):
                    sp.equip_changed(vals)
                sp.equipment_id._connect('update_service',
                                         serviceprofiles=sp)
                sp.message_post(body=_('Updated Service'))
            # Activate Service: stage -> In Progress on managed equipment.
            if sp.get_next_stage(vals) == 'in_progress' and equip_id:
                equip_id._connect('activate_service', serviceprofiles=sp)
                sp.message_post(body=_('Activated Service'))
            # Suspend Service: stage -> Suspend on managed equipment.
            if sp.get_next_stage(vals) == 'suspend' and equip_id:
                equip_id._connect('suspend_service', serviceprofiles=sp)
                sp.message_post(body=_('Suspended Service'))
            # Suspend then Remove Service: stage -> Closed or Cancelled.
            if sp.get_next_stage(vals) in ['close', 'cancel'] and equip_id:
                equip_id._connect('suspend_service', serviceprofiles=sp)
                equip_id._connect('remove_service', serviceprofiles=sp)
                sp.message_post(body=_('Suspended Service'))
                sp.message_post(body=_('Removed Service'))
        return super().write(vals)

    def equip_changed(self, vals):
        """Suspend or remove the service on the *previous* equipment when
        the SP is re-pointed at a different equipment."""
        # Only act when the old equipment was managed.
        if self.equipment_id.managed:
            # SP is (or is becoming) In Progress / To Renew -> suspend.
            if self.get_stage(vals) in ['in_progress', 'to_renew']:
                self.equipment_id._connect('suspend_service',
                                           serviceprofiles=self)
                self.message_post(body=_('Previous Service Suspended'))
            # Otherwise the service is gone for good -> remove.
            else:
                self.equipment_id._connect('remove_service',
                                           serviceprofiles=self)
                self.message_post(body=_('Previous Service Removed'))

    def get_equip(self, vals):
        """Return the managed equipment that results from applying ``vals``.

        Prefer a managed equipment passed in ``vals``; otherwise fall back
        to the currently-linked managed equipment; return False when
        neither is managed.
        """
        equip = vals.get('equipment_id', False)
        if equip:
            equip = self.env['maintenance.equipment'].browse(equip)
            if equip.managed:
                return equip
        elif self.equipment_id.managed:
            return self.equipment_id
        return False

    def get_stage(self, vals):
        """Return the label of the effective stage.

        Uses the stage set in ``vals`` when present, else the current
        stage; returns '' when nothing matches.
        """
        new_stage = vals.get('stage_id', False)
        label = ''
        for xmlid, current_label, _next_label in self._STAGE_MAP:
            stage_id = self.env.ref('agreement_serviceprofile.%s' % xmlid).id
            if new_stage == stage_id or (not new_stage and
                                         self.stage_id.id == stage_id):
                label = current_label
        return label

    def get_next_stage(self, vals):
        """Return the label of the stage being set in ``vals``, or False
        when the stage is not being changed."""
        new_stage = vals.get('stage_id', False)
        for xmlid, _current_label, next_label in self._STAGE_MAP:
            if new_stage == self.env.ref(
                    'agreement_serviceprofile.%s' % xmlid).id:
                return next_label
        return False
|
novoid/Memacs | memacs/tests/photos_test.py | Python | gpl-3.0 | 888 | 0 | # -*- coding: utf-8 -*-
# Time-stamp: <2014-05-03 17:46:44 vk>
import os
import unittest
from memacs.photos import PhotosMemacs
class TestPhotoMemacs(unittest.TestCase):
    """Checks that PhotosMemacs emits the expected org-mode entry for the
    EXIF-tagged sample photo shipped in tests/data."""

    def test_from_file(self):
        test_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'data'
        )
        # -s: suppress output file, -f: folder to scan.
        argv = "-s -f " + test_path
        memacs = PhotosMemacs(argv=argv.split())
        data = memacs.test_get_entries()

        filename = 'fujifilm-finepix40i.jpg'
        path = os.path.join(test_path, filename)
        # Heading: EXIF timestamp plus an org link to the photo file.
        self.assertEqual(
            data[0],
            "** <2000-08-04 Fri 18:22> [[%s][%s]]" % (path, filename)
        )
        self.assertEqual(data[1], " :PROPERTIES:")
        # The ID is the stable hash memacs derives for deduplication.
        self.assertEqual(
            data[2],
            " :ID: c2833ac1c683dea5b600ac4f303a572d2148e1e7"
        )
        self.assertEqual(data[3], " :END:")
|
hassaanm/stock-trading | src/pybrain/structure/networks/swiping.py | Python | apache-2.0 | 6,210 | 0.004992 | __author__ = 'Tom Schaul, tom@idsia.ch'
from pybrain.structure.networks.feedforward import FeedForwardNetwork
from pybrain.structure.connections.shared import MotherConnection, SharedFullConnection
from pybrain.utilities import iterCombinations
# TODO: special treatment for multi-dimensional lstm cells: identity connections on state buffers
class SwipingNetwork(FeedForwardNetwork):
""" A network architecture that establishes shared connections between ModuleMeshes (of identical dimensions)
so that the behavior becomes equivalent to one unit (in+hidden+out components at the same coordinate) swiping
over a multidimensional input space and producing a multidimensional output. """
# if all dimensions should be considered symmetric, their weights are shared
symmetricdimensions = True
# should the forward and backward directions be symmetric (for each dimension)?
symmetricdirections = True
# dimensions of the swiping grid
dims = None
def __init__(self, inmesh=None, hiddenmesh=None, outmesh=None, predefined=None, **args):
if predefined != None:
self.predefined = predefined
else:
self.predefined = {}
super(SwipingNetwork, self).__init__(**args)
# determine the dimensions
if inmesh != None:
self.setArgs(dims=inmesh.dims)
elif self.dims == None:
raise Exception('No dimensions specified, or derivable')
self.swipes = 2 ** len(self.dims)
if inmesh != None:
self._buildSwipingStructure(inmesh, hiddenmesh, outmesh)
self.sortModules()
def _verifyDimensions(self, inmesh, hiddenmesh, outmesh):
""" verify dimension matching between the meshes """
assert self.dims == inmesh.dims
assert outmesh.dims == self.dims
assert tuple(hiddenmesh.dims[:-1]) == self.dims, '%s <-> %s' % (
hiddenmesh.dims[:-1], self.dims)
assert hiddenmesh.dims[-1] == self.swipes
assert min(self.dims) > 1
def _buildSwipingStructure(self, inmesh, hiddenmesh, outmesh):
"""
:key inmesh: a mesh of input units
:key hiddenmesh: a mesh of hidden units
:key outmesh: a mesh of output units
"""
self._verifyDimensions(inmesh, hiddenmesh, outmesh)
# add the modules
for c in inmesh:
self.addInputModule(c)
for c in outmesh:
self.addOutputModule(c)
for c in hiddenmesh:
self.addModule(c)
# create the motherconnections if they are not provided
if 'inconn' not in self.predefined:
self.predefined['inconn'] = MotherConnection(in | mesh.componentOutdim * hiddenmesh.componentIndim, name='inconn')
if 'outconn' not in self.predefined:
self.predefined['outconn'] = MotherConnection(outmesh.componentIndim * hiddenmesh.co | mponentOutdim, name='outconn')
if 'hconns' not in self.predefined:
self.predefined['hconns'] = {}
for s in range(len(self.dims)):
if self.symmetricdirections:
if s > 0 and self.symmetricdimensions:
self.predefined['hconns'][s] = self.predefined['hconns'][0]
else:
self.predefined['hconns'][s] = MotherConnection(hiddenmesh.componentIndim *
hiddenmesh.componentOutdim, name='hconn' + str(s))
else:
for dir in ['-', '+']:
if s > 0 and self.symmetricdimensions:
self.predefined['hconns'][(s, dir)] = self.predefined['hconns'][(0, dir)]
else:
self.predefined['hconns'][(s, dir)] = MotherConnection(hiddenmesh.componentIndim *
hiddenmesh.componentOutdim, name='hconn' + str(s) + dir)
# establish the connections
for unit in self._iterateOverUnits():
for swipe in range(self.swipes):
hunit = tuple(list(unit) + [swipe])
self.addConnection(SharedFullConnection(self.predefined['inconn'], inmesh[unit], hiddenmesh[hunit]))
self.addConnection(SharedFullConnection(self.predefined['outconn'], hiddenmesh[hunit], outmesh[unit]))
# one swiping connection along every dimension
for dim, maxval in enumerate(self.dims):
# determine where the swipe is coming from in this direction:
# swipe directions are towards higher coordinates on dim D if the swipe%(2**D) = 0
# and towards lower coordinates otherwise.
previousunit = list(hunit)
if (swipe / 2 ** dim) % 2 == 0:
previousunit[dim] -= 1
dir = '+'
else:
previousunit[dim] += 1
dir = '-'
if self.symmetricdirections:
hconn = self.predefined['hconns'][dim]
else:
hconn = self.predefined['hconns'][(dim, dir)]
previousunit = tuple(previousunit)
if previousunit[dim] >= 0 and previousunit[dim] < maxval:
self.addConnection(SharedFullConnection(hconn, hiddenmesh[previousunit], hiddenmesh[hunit]))
def _iterateOverUnits(self):
""" iterate over the coordinates defines by the ranges of self.dims. """
return iterCombinations(self.dims)
def _printPredefined(self, dic=None, indent=0):
""" print the weights of the Motherconnections in the self.predefined dictionary (recursively)"""
if dic == None:
dic = self.predefined
for k, val in sorted(dic.items()):
print ' ' * indent, k,
if isinstance(val, dict):
print ':'
self._printPredefined(val, indent + 2)
elif isinstance(val, MotherConnection):
print val.params
else:
print val
|
itsff/Robinhood | example.py | Python | mit | 826 | 0.01816 | from Robinhood import Robinhood
# Example usage of the Robinhood API wrapper (demo script).
#Setup
my_trader = Robinhood(username="YOUR_USERNAME", password="YOUR_PASSWORD");

#Get stock information
#Note: Sometimes more than one instrument may be returned for a given stock symbol
stock_instrument = my_trader.instruments("GEVO")[0]

#Get a stock's quote
my_trader.print_quote("AAPL")

#Prompt for a symbol
my_trader.print_quote();

#Print multiple symbols
my_trader.print_quotes(stocks=["BBRY", "FB", "MSFT"])

#View all data for a given stock ie. Ask price and size, bid price and size, previous close, adjusted previous close, etc.
quote_info = my_trader.quote_data("GEVO")
print(quote_info);

#Place a buy order (uses market bid price)
buy_order = my_trader.place_buy_order(stock_instrument, 1)

#Place a sell order
sell_order = my_trader.place_sell_order(stock_instrument, 1)
|
jmanday/Master | SIGE/Practicas/Practica2/resizeImages.py | Python | apache-2.0 | 1,028 | 0.001946 | import sys, os, re, traceback
from PIL import Image
from skimage.io import imread, imsave
from resizeimage import resizeimage
# Walk ./imagenes and resize images to 256x256 (cover crop), overwriting
# each file in place.
cwd = os.getcwd()
rootDir = cwd + '/imagenes'

for file_name in os.listdir(rootDir):
    folderDir = rootDir + '/' + file_name
    if os.path.isdir(folderDir):
        fileImages = os.listdir(folderDir)
        for fImage in fileImages:  # for each image in the folder
            # only JPEGs are resized inside sub-folders
            if os.path.splitext(fImage)[1] == '.jpg':
                nameFileDir = folderDir + '/' + fImage
                # resize the image to 256x256
                print(nameFileDir)
                with open(nameFileDir, 'r+b') as f:
                    with Image.open(f) as image:
                        cover = resizeimage.resize_cover(image, [256, 256])
                        cover.save(nameFileDir, image.format)
    else:
        # top-level entry is a plain file: resize it directly
        # (no .jpg filter here -- original behavior preserved)
        with open(folderDir, 'r+b') as f:
            with Image.open(f) as image:
                cover = resizeimage.resize_cover(image, [256, 256])
                cover.save(folderDir, image.format)
|
nke001/attention-lvcsr | libs/blocks/tests/bricks/test_bricks.py | Python | mit | 15,113 | 0 | import numpy
import six
import theano
from numpy.testing import assert_allclose, assert_raises
from theano import tensor
from blocks.bricks import (Identity, Linear, Maxout, LinearMaxout, MLP, Tanh,
Sequence, Random)
from blocks.bricks.base import application, Brick, lazy, NoneAllocation
from blocks.bricks.parallel import Parallel, Fork
from blocks.filter import get_application_call, get_brick
from blocks.initialization import Constant
from blocks.utils import shared_floatx
class TestBrick(Brick):
    """Minimal brick used throughout these tests: a lazily-allocated
    config, several @application methods, property overrides and a
    delegated application."""

    @lazy(allocation=['config'])
    def __init__(self, config, **kwargs):
        super(TestBrick, self).__init__(**kwargs)
        self.config = config

    @application
    def apply(self, x, y=1, **kwargs):
        # Lists are unwrapped so tagging tests can exercise that path.
        if isinstance(x, list):
            x = x[0]
        return [x, y] + list(kwargs.values())

    @application(inputs=['x'], outputs=['y'])
    def second_apply(self, x):
        return x + 1

    @second_apply.property('all')
    def second_apply_all(self):
        return self.second_apply.inputs + self.second_apply.outputs

    @application
    def delegated_apply(self, x, w):
        pass

    @delegated_apply.delegate
    def delegate(self):
        # delegated_apply inherits inputs/outputs from second_apply
        return self.second_apply

    @application
    def access_application_call(self, x, application_call):
        # Exercises the implicit application_call argument by attaching
        # an auxiliary variable to it.
        application_call.add_auxiliary_variable(shared_floatx(numpy.ones((1,)),
                                                              name='test_val'))
        return x
class ParentBrick(Brick):
    """Brick with one child, used to test config push and delegation.

    NOTE(review): when ``child`` is None a default ``TestBrick(0)`` is put
    into ``children`` but ``self.child`` stays None -- the tests only call
    ``apply`` with an explicit child, so this asymmetry is harmless;
    confirm before relying on ``self.child``.
    """
    def __init__(self, child=None, **kwargs):
        super(ParentBrick, self).__init__(**kwargs)
        self.child = child
        if child is None:
            child = TestBrick(0)
        self.children = [child]

    @application
    def apply(self, *args, **kwargs):
        return self.child.apply(*args, **kwargs)

    @application
    def second_apply(self, x):
        return x - 1

    @second_apply.property('inputs')
    def second_apply_inputs(self):
        return self.child.second_apply.all

    @second_apply.delegate
    def second_apply_delegate(self):
        return self.child.delegated_apply
class BrokenAllocateBrick(Brick):
    """Test double whose allocation hooks always blow up, used to check
    that a failed allocate() leaves the lifecycle flags unset."""

    def _push_allocation_config(self):
        raise AttributeError()

    def _allocate(self):
        raise AttributeError()
class BrokenInitializeBrick(Brick):
    """Test double whose initialization hook always blows up."""

    def _initialize(self):
        raise AttributeError()
class ParameterBrick(Brick):
    """Brick that allocates a single 10x10 zero-valued shared parameter."""

    def _allocate(self):
        zeros = numpy.zeros((10, 10), dtype=theano.config.floatX)
        self.parameters.append(theano.shared(zeros))
def test_super():
    # Fresh brick: auto-generated name, no children, all lifecycle flags off.
    brick = TestBrick()
    assert isinstance(brick.name, six.string_types)
    assert brick.children == []
    assert not any([brick.allocated, brick.allocation_config_pushed,
                    brick.initialized, brick.initialization_config_pushed])

    # ParentBrick registers its (default) child.
    parent_brick = ParentBrick()
    assert len(parent_brick.children) == 1

    # An explicit name overrides the auto-generated one.
    brick = TestBrick(name='test_name')
    assert brick.name == 'test_name'


def test_repr():
    # repr() carries the lowercase brick name and object identity;
    # str() proxies to repr().
    brick = TestBrick()
    assert 'name=testbrick' in repr(brick)
    assert hex(id(brick)) in repr(brick)
    assert str(brick) == repr(brick)


def test_lazy():
    # Lazily-allocated constructor args are tracked and default to the
    # NoneAllocation sentinel; passing the same arg twice is an error.
    linear = Linear()
    assert linear.allocation_args == ['input_dim', 'output_dim']
    brick = TestBrick()
    assert brick.config is NoneAllocation
    brick = TestBrick(config='config')
    assert brick.config == 'config'
    assert_raises(ValueError, TestBrick, 'config', config='config')


def test_allocate():
    # Allocation sets the lifecycle flags on the brick itself...
    brick = TestBrick(0)
    brick.allocate()
    assert brick.allocated
    assert brick.allocation_config_pushed

    # ...and recurses into children.
    parent_brick = ParentBrick()
    parent_brick.allocate()
    assert parent_brick.children[0].allocated
    assert parent_brick.children[0].allocation_config_pushed

    # Re-allocating resets parameter values to their allocated state.
    parameter_brick = ParameterBrick()
    assert not hasattr(parameter_brick, 'parameters')
    parameter_brick.allocate()
    assert len(parameter_brick.parameters) == 1
    parameter_brick.parameters[0].set_value(
        numpy.ones((10, 10), dtype=theano.config.floatX))
    parameter_brick.allocate()
    assert numpy.all(parameter_brick.parameters[0].get_value() == 0)

    # A failing child leaves the parent's flags unset (checked twice to
    # make sure the failure does not poison subsequent attempts).
    broken_parent_brick = ParentBrick(BrokenAllocateBrick())
    assert_raises(AttributeError, broken_parent_brick.allocate)
    assert not broken_parent_brick.allocation_config_pushed
    assert not broken_parent_brick.allocated

    broken_parent_brick = ParentBrick(BrokenAllocateBrick())
    assert_raises(AttributeError, broken_parent_brick.allocate)
    assert not broken_parent_brick.allocation_config_pushed
    assert not broken_parent_brick.allocated


def test_initialize():
    # initialize() succeeds on plain bricks and parents, and propagates a
    # child's failure.
    brick = TestBrick(0)
    brick.initialize()
    parent_brick = ParentBrick()
    parent_brick.initialize()
    broken_parent_brick = ParentBrick(BrokenInitializeBrick())
    assert_raises(AttributeError, broken_parent_brick.initialize)
    broken_parent_brick = ParentBrick(BrokenInitializeBrick())
    assert_raises(AttributeError, broken_parent_brick.initialize)
def test_tagging():
    # Output variables must be annotated with the application call (and
    # so with the owning brick) one level deep into the graph.
    brick = TestBrick(0)
    x = tensor.vector('x')
    y = tensor.vector('y')
    z = tensor.vector('z')

    def check_output_variable(o):
        assert get_application_call(o).application.brick is brick
        assert (get_application_call(o.owner.inputs[0]).application.brick
                is brick)

    # Case 1: both positional arguments are provided.
    u, v = brick.apply(x, y)
    for o in [u, v]:
        check_output_variable(o)

    # Case 2: `b` is given as a keyword argument.
    u, v = brick.apply(x, y=y)
    for o in [u, v]:
        check_output_variable(o)

    # Case 3: two positional and one keyword argument.
    u, v, w = brick.apply(x, y, z=z)
    for o in [u, v, w]:
        check_output_variable(o)

    # Case 4: one positional argument.
    u, v = brick.apply(x)
    check_output_variable(u)
    assert v == 1

    # Case 5: variable was wrapped in a list. We can not handle that.
    u, v = brick.apply([x])
    assert_raises(AttributeError, check_output_variable, u)


def test_apply_not_child():
    # Calling a child's apply when it is not registered as a child raises.
    child = TestBrick()
    parent = ParentBrick(child)
    parent.children = []
    assert_raises(ValueError, parent.apply, tensor.matrix())


def test_request_unknown_dimension():
    # get_dim on an unknown name raises.
    brick = TestBrick()
    assert_raises(ValueError, brick.get_dim, 'unknown')


def test_application():
    # Per-instance input/output lists start from the decorator defaults
    # and can be overridden; delegated applications follow the delegate.
    brick = TestBrick()
    assert brick.second_apply.inputs == ['x']
    assert brick.second_apply.outputs == ['y']
    assert brick.delegated_apply.inputs == ['x']
    assert brick.delegated_apply.outputs == ['y']
    assert brick.second_apply.all == ['x', 'y']
    brick.second_apply.inputs = ['x', 'z']
    assert brick.second_apply.inputs == ['x', 'z']
    assert brick.second_apply.all == ['x', 'z', 'y']
    brick.delegated_apply.outputs = ['z']
    assert brick.delegated_apply.outputs == ['z']
    assert brick.delegated_apply.inputs == ['x', 'z']
    parent_brick = ParentBrick(brick)
    parent_brick.second_apply.inputs = ['x', 'z', 'y']
    # NOTE(review): the next line is a bare comparison with no effect --
    # it looks like a missing `assert`; confirm the intended expectation
    # before adding it, since asserting it as-is might fail.
    parent_brick.second_apply.inputs == ['x', 'z']
    assert_raises(AttributeError, setattr, TestBrick.second_apply, 'all', 'w')
    # Class-level assignments become the default for new instances, but
    # instance-level assignments do not leak back to the class.
    TestBrick.delegated_apply.inputs = ['w']
    assert TestBrick.delegated_apply.inputs == ['w']
    test_brick = TestBrick()
    assert test_brick.delegated_apply.inputs == ['w']
    test_brick.delegated_apply.inputs = ['x']
    assert test_brick.delegated_apply.inputs == ['x']
    assert TestBrick.delegated_apply.inputs == ['w']


def test_apply():
    # Unbound application call; on Python 2 calling without a brick
    # instance is a TypeError.
    brick = TestBrick(0)
    assert TestBrick.apply(brick, [0]) == [0, 1]
    if six.PY2:
        assert_raises(TypeError, TestBrick.apply, [0])


def test_rng():
    # rng is seeded lazily; an explicit seed is reproducible and two
    # un-seeded bricks get distinct seeds.
    linear = Linear()
    assert isinstance(linear.rng, numpy.random.RandomState)
    linear = Linear(seed=1)
    assert linear.rng.rand() == numpy.random.RandomState(1).rand()
    linear = Linear()
    linear2 = Linear()
    assert linear.seed != linear2.seed


def test_random_brick():
    random = Random()
    # This makes sure that a Random brick doesn't instantiate more than one
    # Theano RNG during its lifetime (see PR #485 on Github)
    assert random.theano_rng is random.theano_rng
def test_linear():
x = tensor.ma |
ttm/gmaneLegacy | gmaneLegacy/networkDrawer.py | Python | unlicense | 7,293 | 0.029073 | import sys
import collections as c
from scipy import special, stats
import numpy as n, pylab as p, networkx as x
class NetworkDrawer:
    """Lay out and render interaction networks with graphviz (pygraphviz
    via networkx).

    Agents are sectioned into periphery / intermediary / hubs either by a
    percentile split (80% / 95%) on the chosen centrality metric or by an
    explicit network partitioning.
    """
    # count of drawers created. NOTE(review): the '+=' in __init__ creates
    # an instance attribute shadowing this class attribute, so the class
    # counter never actually grows -- confirm intent before changing.
    drawer_count = 0

    def __init__(self, metric="strength"):
        self.drawer_count += 1
        metric_ = self.standardizeName(metric)
        self.metric_ = metric_
        self.draw_count = 0

    def standardizeName(self, name):
        """Map an English/Portuguese metric alias to 's' (strength) or
        'd' (degree). NOTE(review): an unknown alias leaves ``name_``
        unbound and raises UnboundLocalError."""
        if name in (["s", "strength", "st"] + ["f", "força", "forca", "fo"]):
            name_ = "s"
        if name in (["d", "degree", "dg"] + ["g", "grau", "gr"]):
            name_ = "d"
        return name_

    def makeLayout(self, network_measures, network_partitioning=None):
        """Delivers a sequence of user_ids and (x,y) pos."""
        self.network_measures = network_measures
        if self.metric_ == "s":
            measures_ = network_measures.strengths
        elif self.metric_ == "d":
            measures_ = network_measures.degrees
        else:
            print("not known metric to make layout")
        # sort agents by increasing centrality
        self.ordered_measures = ordered_measures = c.OrderedDict(
            sorted(measures_.items(), key=lambda x: x[1]))
        self.measures = measures = list(ordered_measures.values())
        self.authors = authors = list(ordered_measures.keys())
        total = network_measures.N
        if not network_partitioning:
            # percentile split: 80% periphery, next 15% intermediary, rest hubs
            self.k1 = k1 = round(total*.80)
            self.k2 = k2 = round(total*.95)
            self.periphery = authors[:k1]
            self.intermediary = authors[k1:k2]
            self.hubs = authors[k2:]
        else:
            sectors = network_partitioning.sectorialized_agents__
            self.k1 = k1 = len(sectors[0])
            self.k2 = k2 = k1 + len(sectors[1])
            self.periphery, self.intermediary, self.hubs = sectors
        print("fractions ={:0.4f}, {:0.4f}, {:0.4f}".format(
            k1/total, (k2-k1)/total, 1-k2/total))
        self.makeXY()

    def drawNetwork(self, network, network_measures, filename="example.png",
                    label="auto", network_partitioning=None):
        p.clf()
        if self.metric_ == "s":
            measures_ = network_measures.strengths
        elif self.metric_ == "d":
            # NOTE(review): makeLayout reads .degrees here, this method
            # reads .degree -- confirm which attribute the measures
            # object actually exposes.
            measures_ = network_measures.degree
        else:
            print("not known metric to make layout")
        ordered_measures = c.OrderedDict(
            sorted(measures_.items(), key=lambda x: x[1]))
        measures = list(ordered_measures.values())
        authors = list(ordered_measures.keys())
        total = network_measures.N
        if not network_partitioning:
            k1 = round(total*.80)
            k2 = round(total*.95)
            periphery = authors[:k1]
            intermediary = authors[k1:k2]
            hubs = authors[k2:]
        else:
            sectors = network_partitioning.sectorialized_agents__
            k1 = len(sectors[0])
            k2 = k1 + len(sectors[1])
            periphery, intermediary, hubs = (set(iii) for iii in sectors)
        # node width/height scale with in/out strength
        in_measures = network_measures.in_strengths
        min_in = max(in_measures.values())/3 + 0.1
        out_measures = network_measures.out_strengths
        min_out = max(out_measures.values())/3 + .1
        self.clustering = clustering = network_measures.weighted_clusterings
        A = x.drawing.nx_agraph.to_agraph(network.g)
        A.node_attr['style'] = 'filled'
        A.graph_attr["bgcolor"] = "black"
        A.graph_attr["pad"] = .1
        #A.graph_attr["size"]="9.5,12"
        A.graph_attr["fontsize"] = "25"
        if label == "auto":
            label = self.makeLabel()
        A.graph_attr["label"] = label
        A.graph_attr["fontcolor"] = "white"
        cm = p.cm.Reds(range(2**10))  # color table
        self.cm = cm
        nodes = A.nodes()
        self.colors = colors = []
        self.inds = inds = []
        self.poss = poss = []
        for node in nodes:
            n_ = A.get_node(node)
            ind_author = self.authors.index(n_)
            # FIX: original appended the list itself (inds.append(inds));
            # record the author's layout index instead.
            inds.append(ind_author)
            # fill color encodes weighted clustering coefficient
            colors.append('#%02x%02x%02x' % tuple(
                [int(255*i) for i in cm[int(clustering[n_]*255)][:-1]]))
            n_.attr['fillcolor'] = colors[-1]
            n_.attr['fixedsize'] = True
            n_.attr['width'] = abs(.6*(in_measures[n_]/min_in + .05))
            n_.attr['height'] = abs(.6*(out_measures[n_]/min_out + .05))
            # shape encodes the sector
            if n_ in hubs:
                n_.attr["shape"] = "hexagon"
            elif n_ in intermediary:
                pass
            else:
                n_.attr["shape"] = "diamond"
            pos = "%f,%f" % tuple(self.posXY[ind_author])
            poss.append(pos)
            n_.attr["pos"] = pos
            n_.attr["pin"] = True
            n_.attr["fontsize"] = 25
            n_.attr["fontcolor"] = "white"
            n_.attr["label"] = ""
        weights = [s[2]["weight"] for s in network_measures.edges]
        self.weights = weights
        max_weight = max(weights)
        self.max_weight = max_weight
        self.weights_ = []
        edges = A.edges()
        for e in edges:
            factor = float(e.attr['weight'])
            self.weights_.append(factor)
            e.attr['penwidth'] = .34*factor
            e.attr["arrowsize"] = 1.5
            e.attr["arrowhead"] = "lteeoldiamond"
            w = factor/max_weight  # normalized weight in [0-1]
            cor = p.cm.Spectral(int(w*255))
            self.cor = cor
            cor256 = 255*n.array(cor[:-1])
            # same digits the original built by hand with hex()/divmod
            corRGB = '#%02x%02x%02x' % tuple(int(v) for v in cor256)
            e.attr["color"] = corRGB
        A.draw(filename, prog="neato")  # alternatives: twopi, circo
        ################
        self.A = A
        self.draw_count += 1

    def makeLabel(self):
        """Build the graph caption from window/step metadata when present."""
        label = ""
        if "window_size" in dir(self):
            label += "w: {}, ".format(self.window_size)
        if "step_size" in dir(self):
            label += "m: {} ,".format(self.draw_count*self.step_size+self.offset)
        else:
            # FIX: original used "%i".format(...), which rendered the
            # literal text "m: %i, " instead of the count.
            label += "m: {}, ".format(self.draw_count)
        label += "N = %i, E = %i" % (self.network_measures.N,
                                     self.network_measures.E)
        return label

    def updateNetwork(self, network, networkMeasures=None):
        # placeholder for incremental updates; intentionally empty
        pass

    def makeXY(self):
        """Compute (x, y) positions: hubs on an inner half-circle,
        intermediary on an outer arc, periphery on a diagonal band."""
        size_periphery = self.k1
        size_intermediary = self.k2-self.k1
        size_hubs = self.network_measures.N-self.k2
        # keep the hub count even by borrowing one agent from intermediary
        if size_hubs % 2 == 1:
            size_hubs += 1
            size_intermediary -= 1
        xh = n.linspace(0, 0.5, size_hubs, endpoint=False)[::-1]
        thetah = 2*n.pi*xh
        yh = n.sin(thetah)
        xi = n.linspace(1, 0.5, size_intermediary, endpoint=True)
        thetai = 2*n.pi*xi
        yi = n.sin(thetai)
        xp = n.linspace(.95, 0.4, size_periphery)[::-1]
        yp = n.linspace(.1, 1.25, size_periphery)[::-1]
        self.pos = ((xp, yp), (xi, yi), (xh, yh))
        XFACT = 7
        YFACT = 3
        self.posX = posX = n.hstack((xp, xi, xh))*XFACT
        self.posY = posY = n.hstack((yp, yi, yh))*YFACT
        self.posXY = n.vstack((posX.T, posY.T)).T
|
jonathanendersby/SimpleChat | SimpleChat/SimpleChat/settings.py | Python | gpl-2.0 | 2,172 | 0.00046 | """
Django settings for SimpleChat project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# (key literal restored from garbled dump; rotate it for any real deploy)
SECRET_KEY = '3o-kw!!=*b7o3mz6nmbllne##wiu7m_lzk%9j&p@@(ecsue&f7'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'chat',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'SimpleChat.urls'

WSGI_APPLICATION = 'SimpleChat.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'

BASE_URL = "http://192.168.0.60:9000"

# Optional machine-local overrides; absence is fine.
try:
    from local_settings import *
except ImportError:
    pass
procangroup/edx-platform | lms/djangoapps/certificates/api.py | Python | agpl-3.0 | 21,092 | 0.002134 | """Certificates API
This is a Python API for generating certificates asynchronously.
Other Django apps should use the API functions defined in this module
rather than importing Django models directly.
"""
import logging
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db.models import Q
from opaque_keys.edx.django.models import CourseKeyField
from opaque_keys.edx.keys import CourseKey
from branding import api as branding_api
from lms.djangoapps.certificates.models import (
CertificateGenerationConfiguration,
CertificateGenerationCourseSetting,
CertificateInvalidation,
CertificateStatuses,
CertificateTemplate,
CertificateTemplateAsset,
ExampleCertificateSet,
GeneratedCertificate,
certificate_status_for_student
)
from lms.djangoapps.certificates.queue import XQueueCertInterface
from eventtracking import tracker
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from util.organizations_helpers import get_course_organization_id
from xmodule.modulestore.django import modulestore
log = logging.getLogger("edx.certificate")
# Certificate modes (honor/verified/...) re-exported for API consumers.
MODES = GeneratedCertificate.MODES
def is_passing_status(cert_status):
    """Return whether ``cert_status`` counts as passing the course.

    Thin proxy over ``CertificateStatuses.is_passing_status`` so callers
    can use this API module instead of importing the model directly.
    """
    return CertificateStatuses.is_passing_status(cert_status)
def format_certificate_for_user(username, cert):
"""
Helper function to serialize an user certificate.
Arguments:
username (unicode): The identifier of the user.
cert (GeneratedCertificate): a user certificate
Returns: dict
"""
return {
"username": username,
"course_key": cert.course_id,
"type": cert.mode,
"status": cert.status,
"grade": cert.grade,
"created": cert.created_date,
"modified": cert.modified_date,
"is_passing": is_passing_status(cert.status),
# NOTE: the download URL is not currently being set for webview certificates.
# In the future, we can update this to construct a URL to the webview certificate
# for courses that have this feature enabled.
"download_url": (
cert.download_url or get_certificate_url(cert.user.id, cert.course_id)
if cert.status == CertificateStatuses.downloadable
else None
),
}
def get_certificates_for_user(username):
"""
Retrieve certificate information for a particular user.
Arguments:
username (unicode): The identifier of the user.
Returns: list
Example Usage:
>>> get_certificates_for_user("bob")
[
{
"username": "bob",
"course_key": CourseLocator('edX', 'DemoX', 'Demo_Course', None, None),
"type": "verified",
"status": "downloadable",
"download_url": "http://www.example.com/cert.pdf",
"grade": "0.98",
"created": 2015-07-31T00:00:00Z,
"modified": 2015-07-31T00:00:00Z
}
]
"""
return [
format_certificate_for_user(username, cert)
for cert in GeneratedCertificate.eligible_certificates.filter(user__username=username).order_by("course_id")
]
def get_certificate_for_user(username, course_key):
"""
Retrieve certificate information for a particular user for a specific course.
Arguments:
username (unicode): The identifier of the user.
course_key (CourseKey): A Course Key.
Returns: dict
"""
try:
cert = GeneratedCertificate.eligible_certificates.get(
user__username=username,
course_id=course_key
)
except GeneratedCertificate.DoesNotExist:
return None
return format_certificate_for_user(username, cert)
def generate_user_certificates(student, course_key, course=None, insecure=False, generation_mode='batch',
forced_grade=None):
"""
It will add the add-cert request into the xqueue.
A new record will be created to track the certificate
generation task. If an error occurs while adding the certificate
to the queue, the task will have status 'error'. It also emits
`edx.certificate.created` event for analytics.
Args:
student (User)
course_key (CourseKey)
Keyword Arguments:
course (Course): Optionally provide the course object; if not provided
it will be loaded.
insecure - (Boolean)
generation_mode - who has requested certificate generation. Its value should `batch`
in case of django command and `self` if student initiated the request.
forced_grade - a string indicating to replace grade parameter. if present grading
will be skipped.
"""
xqueue = XQueueCertInterface()
if insecure:
xqueue.use_https = False
if not course:
course = modulestore().get_course(course_key, depth=0)
generate_pdf = not has_html_certificates_enabled(course)
cert = xqueue.add_cert(
student,
course_key,
course=course,
generate_pdf=generate_pdf,
forced_grade=forced_grade
)
# If cert_status is not present in certificate valid_statuses (for example unverified) then
# add_cert returns None and raises AttributeError while accesing cert attributes.
if cert is None:
return
if CertificateStatuses.is_passing_status(cert.status):
emit_certificate_event('created', student, course_key, course, {
'user_id': student.id,
'course_id': unicode(course_key),
'certificate_id': cert.verify_uuid,
'enrollment_mode': cert.mode,
'generation_mode': generation_mode
})
return cert.status
def regenerate_user_certificates(student, course_key, course=None,
forced_grade=None, template_file=None, insecure=False):
"""
It will add the regen-cert request into the xqueue.
A new record will be created to trac | k the certificate
generation task. If an error occurs while adding the certificate |
to the queue, the task will have status 'error'.
Args:
student (User)
course_key (CourseKey)
Keyword Arguments:
course (Course): Optionally provide the course object; if not provided
it will be loaded.
grade_value - The grade string, such as "Distinction"
template_file - The template file used to render this certificate
insecure - (Boolean)
"""
xqueue = XQueueCertInterface()
if insecure:
xqueue.use_https = False
if not course:
course = modulestore().get_course(course_key, depth=0)
generate_pdf = not has_html_certificates_enabled(course)
return xqueue.regen_cert(
student,
course_key,
course=course,
forced_grade=forced_grade,
template_file=template_file,
generate_pdf=generate_pdf
)
def certificate_downloadable_status(student, course_key):
"""
Check the student existing certificates against a given course.
if status is not generating and not downloadable or error then user can view the generate button.
Args:
student (user object): logged-in user
course_key (CourseKey): ID associated with the course
Returns:
Dict containing student passed status also download url, uuid for cert if available
"""
current_status = certificate_status_for_student(student, course_key)
# If the certificate status is an error user should view that status is "generating".
# On the back-end, need to monitor those errors and re-submit the task.
response_data = {
'is_downloadable': False,
'is_generating': True if current_status['status'] in [CertificateStatuses.generating,
CertificateStatuses.error] else False,
'is_unverified': True if current_status['status'] == CertificateStatuses.unverified else False,
|
barbour-em/osf.io | tests/test_views.py | Python | apache-2.0 | 162,417 | 0.001102 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Views tests for the OSF.'''
from __future__ import absolute_import
import unittest
import json
import datetime as dt
import mock
import httplib as http
from nose.tools import * # noqa PEP8 asserts
from tests.test_features import requires_search
from modularodm import Q
from dateutil.parser import parse as parse_date
from framework import auth
from framework.exceptions import HTTPError
from framework.auth import User, Auth
from framework.auth.utils import impute_names_model
from framework.auth.exceptions import InvalidTokenError
from website import mailchimp_utils
from website.views import _rescale_ratio
from website.util import permissions
from website.models import Node, Pointer, NodeLog
from website.project.model import ensure_schemas, has_anonymous_link
from website.project.views.contributor import (
send_claim_email,
deserialize_contributors,
send_claim_registered_email,
)
from website.profile.utils import add_contributor_json, serialize_unregistered
from website.profile.views import fmt_date_or_none
from website.util import api_url_for, web_url_for
from website import mails, settings
from website.util import rubeus
from website.project.views.node import _view_project, abbrev_authors, _should_show_wiki_widget
from website.project.views.comment import serialize_comment
from website.project.decorators import check_can_access
from website.addons.github.model import AddonGitHubOauthSettings
from tests.base import (
OsfTestCase,
fake,
capture_signals,
assert_is_redirect,
assert_datetime_equal,
)
from tests.factories import (
UserFactory, ApiKeyFactory, ProjectFactory, WatchConfigFactory,
NodeFactory, NodeLogFactory, AuthUserFactory, UnregUserFactory,
RegistrationFactory, CommentFactory, PrivateLinkFactory, UnconfirmedUserFactory, DashboardFactory, FolderFactory,
ProjectWithAddonFactory,
)
from website.settings import ALL_MY_REGISTRATIONS_ID, ALL_MY_PROJECTS_ID
class TestViewingProjectWithPrivateLink(OsfTestCase):
def setUp(self):
super(TestViewingProjectWithPrivateLink, self).setUp()
self.user = AuthUserFactory() # Is NOT a contributor
self.project = ProjectFactory(is_public=False)
self.link = PrivateLinkFactory()
self.link.nodes.append(self.project)
self.link.save()
self.project_url = self.project.web_url_for('view_project')
def test_not_anonymous_for_public_project(self):
anonym | ous_link = PrivateLinkFactory(anonymous=True)
anonymous_link.nodes.append(self.project)
anonymous_link.save()
self.project.set_privacy('public')
self.project.save()
self.project.reload()
| auth = Auth(user=self.user, private_key=anonymous_link.key)
assert_false(has_anonymous_link(self.project, auth))
def test_has_private_link_key(self):
res = self.app.get(self.project_url, {'view_only': self.link.key})
assert_equal(res.status_code, 200)
def test_not_logged_in_no_key(self):
res = self.app.get(self.project_url, {'view_only': None})
assert_is_redirect(res)
res = res.follow(expect_errors=True)
assert_equal(res.status_code, 301)
assert_equal(
res.request.path,
'/login'
)
def test_logged_in_no_private_key(self):
res = self.app.get(self.project_url, {'view_only': None}, auth=self.user.auth,
expect_errors=True)
assert_equal(res.status_code, http.FORBIDDEN)
def test_logged_in_has_key(self):
res = self.app.get(
self.project_url, {'view_only': self.link.key}, auth=self.user.auth)
assert_equal(res.status_code, 200)
@unittest.skip('Skipping for now until we find a way to mock/set the referrer')
def test_prepare_private_key(self):
res = self.app.get(self.project_url, {'key': self.link.key})
res = res.click('Registrations')
assert_is_redirect(res)
res = res.follow()
assert_equal(res.status_code, 200)
assert_equal(res.request.GET['key'], self.link.key)
def test_check_can_access_valid(self):
contributor = AuthUserFactory()
self.project.add_contributor(contributor, auth=Auth(self.project.creator))
self.project.save()
assert_true(check_can_access(self.project, contributor))
def test_check_user_access_invalid(self):
noncontrib = AuthUserFactory()
with assert_raises(HTTPError):
check_can_access(self.project, noncontrib)
def test_check_user_access_if_user_is_None(self):
assert_false(check_can_access(self.project, None))
class TestProjectViews(OsfTestCase):
def setUp(self):
super(TestProjectViews, self).setUp()
ensure_schemas()
self.user1 = AuthUserFactory()
self.user1.save()
self.consolidate_auth1 = Auth(user=self.user1)
self.auth = self.user1.auth
self.user2 = UserFactory()
# A project has 2 contributors
self.project = ProjectFactory(
title="Ham",
description='Honey-baked',
creator=self.user1
)
self.project.add_contributor(self.user2, auth=Auth(self.user1))
self.project.save()
def test_can_view_nested_project_as_admin(self):
self.parent_project = NodeFactory(
title='parent project',
category='project',
parent=self.project,
is_public=False
)
self.parent_project.save()
self.child_project = NodeFactory(
title='child project',
category='project',
parent=self.parent_project,
is_public=False
)
self.child_project.save()
url = self.child_project.web_url_for('view_project')
res = self.app.get(url, auth=self.auth)
assert_not_in('Private Project', res.body)
assert_in('parent project', res.body)
def test_edit_description(self):
url = "/api/v1/project/{0}/edit/".format(self.project._id)
self.app.post_json(url,
{"name": "description", "value": "Deep-fried"},
auth=self.auth)
self.project.reload()
assert_equal(self.project.description, "Deep-fried")
def test_project_api_url(self):
url = self.project.api_url
res = self.app.get(url, auth=self.auth)
data = res.json
assert_equal(data['node']['category'], 'Project')
assert_equal(data['node']['node_type'], 'project')
assert_equal(data['node']['title'], self.project.title)
assert_equal(data['node']['is_public'], self.project.is_public)
assert_equal(data['node']['is_registration'], False)
assert_equal(data['node']['id'], self.project._primary_key)
assert_equal(data['node']['watched_count'], 0)
assert_true(data['user']['is_contributor'])
assert_equal(data['node']['description'], self.project.description)
assert_equal(data['node']['url'], self.project.url)
assert_equal(data['node']['tags'], [t._primary_key for t in self.project.tags])
assert_in('forked_date', data['node'])
assert_in('watched_count', data['node'])
assert_in('registered_from_url', data['node'])
# TODO: Test "parent" and "user" output
def test_api_get_folder_pointers(self):
dashboard = DashboardFactory(creator=self.user1)
project_one = ProjectFactory(creator=self.user1)
project_two = ProjectFactory(creator=self.user1)
url = dashboard.api_url_for("get_folder_pointers")
dashboard.add_pointer(project_one, auth=self.consolidate_auth1)
dashboard.add_pointer(project_two, auth=self.consolidate_auth1)
res = self.app.get(url, auth=self.auth)
pointers = res.json
assert_in(project_one._id, pointers)
assert_in(project_two._id, pointers)
assert_equal(len(pointers), 2)
def test_api_get_folder_pointers_from_non_folder(self):
project_one = ProjectFactory(creator=self.user1)
project_two = P |
kvoss/lsystem | example-plant.py | Python | bsd-2-clause | 787 | 0.012706 | import turtle
from lsystem import LSystem
plant = dict(
axiom = 'X',
productions = {'X': 'F-[[X]+X]+F[+FX]-X', 'F':'FF'})
plantL = LSystem(**plant)
q = []
def restore():
pos, angl = q.pop()
turtle.up()
turtle.setposition(pos)
turtle.seth(angl)
turtle.down()
methods = {
'F': lambda: turtle.fd(3),
'-': lambda: turtle.left(25),
'+': lambda: turtle.right(25),
'[': la | mbda: q.append((turtle.pos(), turtle.heading())),
']': restore,
}
turtle.screensize(8 | 00,1200)
turtle.ht()
turtle.pencolor('green')
turtle.delay(0)
turtle.seth(75)
for c in plantL[6]:
try:
methods[c]()
except KeyError:
pass
ts = turtle.getscreen()
ts.getcanvas().postscript(file='plant6.eps')
#turtle.exitonclick()
|
joegomes/deepchem | examples/uv/UV_tf_singletask.py | Python | mit | 3,308 | 0.013301 | """
Script that trains Tensorflow Multitask models on UV dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import numpy as np
import tempfile
import shutil
import deepchem as dc
from UV_datasets import load_uv
###Load data###
shard_size = 2000
num_trials = 1
print("About to load UV data.")
UV_tasks, datasets, transformers = load_uv(shard_size=shard_size)
train_dataset, valid_dataset, test_dataset = datasets
print("Number of compounds in train set")
print(len(train_dataset))
print("Number of compounds in validation set")
print(len(valid_dataset))
print("Number of compounds in test set")
print(len(test_dataset))
metric = dc.metrics.Metric(dc.metrics.pearson_r2_score, task_averager=np.mean)
###Create model###
n_layers = 3
nb_epoch = 30
n_features = train_dataset.get_data_shape()[0]
def task_model_builder(m_dir):
return dc.models.TensorflowMultiTaskRegressor(
n_tasks=1, n_features=n_features, logdir=m_dir,
layer_sizes=[1000]*n_layers, dropouts=[.25]*n_layers,
weight_init_stddevs=[.02]*n_layers, bias_init_consts=[1.]*n_layers,
learning_rate=.0003, penalty=.0001, penalty_type="l2", optimizer="adam",
batch_size=100)
all_results = []
for trial in range(num_trials):
model = dc.models.SingletaskToMultitask(UV_tasks, task_model_builder,
model_dir="UV_tf_singletask")
print("Fitting Model")
model.fit(train_dataset, nb_epoch=nb_epoch)
print("Evaluating models")
train_score, train_task_scores = model.evaluate(
train_dataset, [metric], transformers, per_task_metrics=True)
valid_score, valid_task_scores = model.evaluate(
valid_dataset, [metric], transformers, per_task_metrics=True)
test_score, test_task_scores = model.evaluate(
test_dataset, [metric], transformers, per_task_metrics=True)
all_results.append((train_score, train_task_scores,
valid_score, valid_task_scores,
test_score, test_task_scores))
print("----------------------------------------------------------------")
print("Scores for trial %d" % trial)
print("----------------------------------------------------------------")
print("train_task_scores")
print(train_task_scores)
print("Mean Train score")
print(train_score)
print("valid_task_scores")
print(valid_task_scores)
print("Mean Validation score")
print(valid_score)
print(" | test_task_scores")
print(test_task_scores)
prin | t("Mean Test score")
print(test_score)
print("####################################################################")
for trial in range(num_trials):
(train_score, train_task_scores, valid_score, valid_task_scores,
test_score, test_task_scores) = all_results[trial]
print("----------------------------------------------------------------")
print("Scores for trial %d" % trial)
print("----------------------------------------------------------------")
print("train_task_scores")
print(train_task_scores)
print("Mean Train score")
print(train_score)
print("valid_task_scores")
print(valid_task_scores)
print("Mean Validation score")
print(valid_score)
print("test_task_scores")
print(test_task_scores)
print("Mean Test score")
print(test_score)
|
alanrogers/ldpsiz | src/ini.py | Python | gpl-2.0 | 4,407 | 0.003857 | ###
# @file ini.py
# @page ini
# @author Alan R. Rogers
# @brief Functions for objects of class Ini, which reads parameters
# from an initialization file
#
# @copyright Copyright (c) 2014, Alan R. Rogers
# <rogers@anthro.utah.edu>. This file is released under the Internet
# Systems Consortium License, which can be found in file "LICENSE".
# Parameters Used in
#
# PopHist fitld predld
# blocksize eld
# bootfilename eld fitld
# bootreps eld
# c_per_nucleotide eld fitld
# confidence fitld
# doEquilibria predld
# hiC predld
# loC predld
# methods fitld predld
# nbins eld predld
# nthreads eld fitld
# samplingInterval eld
# twoNsmp fitld predld
# u fitld predld
# verbose eld
# windowsize_cm eld
#
# @param[in] ifname A string, the name of the input file. For the
# format of this file, see ldpsiz.ini.
#
# @returns A dictionary containing the names and values of all
# assigned variables.
import sys
def isfinite(x):
return (x < 1+x)
### Represents an epoch of population history, within which the
### population's characteristics do not change.
class Epoch:
def __init__(self, line):
"""
Input should be a list with two entries, each of which is a string.
The first of these it interpreted as the length, t,
of the Epoch in generations. The second is the number, twoN,
of gene copies within the population during that Epoch.
"""
if(len(line) != 2):
print "Epoch: bad input: len(line)=%d" % len(line)
sys.exit(1)
### length of epoch in generations
self.t = float(line[0])
### haploid population size during epoch
self.twoN = float(line[1])
def __str__(self):
s = "[t=%g, twoN=%g]" % (self.t, self.twoN)
return s
def readIni(ifname):
inPopHist = False
ifp = open(ifname, "r")
ph = []
assignments = {}
for lineno, line in enumerate(ifp):
# strip comment
i = line.find('#')
if i >= 0:
line = line[0:i]
# strip blank lines
line = line.strip()
if len(line) == 0:
continue
# remove tabs and convert to lower case
line = line.replace('\t',' ')
if inPopHist:
line = line.split()
if len(line) != 2:
print "ERR@%s:%d:PopHist lines must contain exactly two entries" % \
(ifname, lineno+1)
print "line:", line
print "len(line):", len(line)
sys.exit(1)
ph.append(Epoch(line))
elif line.find('=') >= 0: # assignment statement
line = line.split('=')
if len(line) != 2:
print "Broken assignment @%s:%d" % (ifname, lineno+1)
print "line:", line
sys.exit(1)
line[0] = line[0].strip()
line[1] = line[1].strip()
assignments[line[0]] = line[1]
else:
line = line.split(' ')
line[0] = line[0].strip()
i | f len(line) != 1:
print "Broken command @%s:%d. inPopHist=%d" % (ifname, lineno+1, inPopHist)
sys.exit( | 1)
if line[0] == "PopHist":
inPopHist = True
else:
assignments[line[0]] = "1"
return assignments, ph
if __name__ == '__main__':
a, ph = readIni("ldpsiz.ini")
print "basepairs:", a["basepairs"]
print "blocksize:", a["blocksize"]
print "recombination:", a["recombination"]
print "bootfilename:", a["bootfilename"]
print "bootreps:", a["bootreps"]
print "mutation:", a["mutation"]
print "confidence:", a["confidence"]
print "loCm:", a["loCm"]
print "hiCm:", a["hiCm"]
print "hiCm:", a["hiCm"]
print "methods:", a["methods"]
print "nbins:", a["nbins"]
print "nthreads:", a["nthreads"]
print "samplingInterval:", a["samplingInterval"]
print "twoNsmp:", a["twoNsmp"]
print "windowCm:", a["windowCm"]
print "PopHist:"
for i in range(len(ph)):
s = "Epoch %2d:" % i
print s, ph[i]
# Local Variables:
# mode: python
# End:
|
shubhamgupta123/erpnext | erpnext/accounts/utils.py | Python | gpl-3.0 | 31,708 | 0.025483 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, erpnext
import frappe.defaults
from frappe.utils import nowdate, cstr, flt, cint, now, getdate
from frappe import throw, _
from frappe.utils import formatdate, get_number_format_info
from six import iteritems
# imported to enable erpnext.accounts.utils.get_account_currency
from erpnext.accounts.doctype.account.account import get_account_currency
class FiscalYearError(frappe.ValidationError): pass
@frappe.whitelist()
def get_fiscal_year(date=None, fiscal_year=None, label="Date", verbose=1, company=None, as_dict=False):
return get_fiscal_years(date, fiscal_year, label, verbose, company, as_dict=as_dict)[0]
def get_fiscal_years(transaction_date=None, fiscal_year=None, label="Date", verbose=1, company=None, as_dict=False):
fiscal_years = frappe.cache().hget("fiscal_years", company) or []
if not fiscal_years:
# if year start date is 2012-04-01, year end date should be 2013-03-31 (hence subdate)
cond = ""
if fiscal_year:
cond += " and fy.name = {0}".format(frappe.db.escape(fiscal_year))
if company:
cond += """
and (not exists (select name
from `tabFiscal Year Company` fyc
where fyc.parent = fy.name)
or exists(select company
from `tabFiscal Year Company` fyc
where fyc.parent = fy.name
and fyc.company=%(company)s)
)
"""
fiscal_years = frappe.db.sql("""
select
fy.name, fy.year_start_date, fy.year_end_date
from
`tabFiscal Year` fy
where
disabled = 0 {0}
order by
fy.year_start_date desc""".format(cond), {
"company": company
}, as_dict=True)
frappe.cache().hset("fiscal_years", company, fiscal_years)
if transaction_date:
transaction_date = getdate(transaction_date)
for fy in fiscal_years:
matched = False
if fiscal_year and fy.name == fiscal_year:
matched = True
if (transaction_date and getdate(fy.year_start_date) <= transaction_date
and getdate(fy.year_end_date) >= transaction_date):
matched = True
if matched:
if as_dict:
return (fy,)
else:
return ((fy.name, fy.year_start_date, fy.year_end_date),)
error_msg = _("""{0} {1} not in any active Fiscal Year.""").format(label, formatdate(transaction_date))
if verbose==1: frappe.msgprint(error_msg)
raise FiscalYearError(error_msg)
def validate_fiscal_year(date, fiscal_year, company, label="Date", doc=None):
years = [f[0] for f in get_fiscal_years(date, label=_(label), company=company)]
if fiscal_year not in years:
if doc:
doc.fiscal_year = years[0]
else:
throw(_("{0} '{1}' not in Fiscal Year {2}").format(label, formatdate(date), fiscal_year))
@frappe.whitelist()
def get_balance_on(account=None, date=None, party_type=None, party=None, company=None, in_account_currency=True, cost_center=None):
if not account and frappe.form_dict.get("account"):
account = frappe.form_dict.get("account")
if not date and frappe.form_dict.get("date"):
date = frappe.form_dict.get("date")
if not party_type and frappe.form_dict.get("party_type"):
party_type = frappe.form_dict.get("party_type")
if not party and frappe.form_dict.get("party"):
party = frappe.form_dict.get("party")
if not | cost_center and frappe.form_dict.get("cost_center"):
cost_center = frappe.form_dict.get("cost_center")
cond = []
if date:
cond.append("posting_date <= '%s'" % frappe.db.escape(cstr(date)))
else:
# get balance of all entries that exist
date = nowdate()
if account:
acc = frappe.get_doc("Account", account)
try:
year_start_date = get_fiscal_year(date, verbose=0)[1]
except FiscalYearError:
if getdate(date) > getdate(nowdate()):
# if fiscal year not found and the date is gr | eater than today
# get fiscal year for today's date and its corresponding year start date
year_start_date = get_fiscal_year(nowdate(), verbose=1)[1]
else:
# this indicates that it is a date older than any existing fiscal year.
# hence, assuming balance as 0.0
return 0.0
allow_cost_center_in_entry_of_bs_account = get_allow_cost_center_in_entry_of_bs_account()
if account:
report_type = acc.report_type
else:
report_type = ""
if cost_center and (allow_cost_center_in_entry_of_bs_account or report_type =='Profit and Loss'):
cc = frappe.get_doc("Cost Center", cost_center)
if cc.is_group:
cond.append(""" exists (
select 1 from `tabCost Center` cc where cc.name = gle.cost_center
and cc.lft >= %s and cc.rgt <= %s
)""" % (cc.lft, cc.rgt))
else:
cond.append("""gle.cost_center = "%s" """ % (frappe.db.escape(cost_center, percent=False), ))
if account:
if not frappe.flags.ignore_account_permission:
acc.check_permission("read")
if report_type == 'Profit and Loss':
# for pl accounts, get balance within a fiscal year
cond.append("posting_date >= '%s' and voucher_type != 'Period Closing Voucher'" \
% year_start_date)
# different filter for group and ledger - improved performance
if acc.is_group:
cond.append("""exists (
select name from `tabAccount` ac where ac.name = gle.account
and ac.lft >= %s and ac.rgt <= %s
)""" % (acc.lft, acc.rgt))
# If group and currency same as company,
# always return balance based on debit and credit in company currency
if acc.account_currency == frappe.get_cached_value('Company', acc.company, "default_currency"):
in_account_currency = False
else:
cond.append("""gle.account = "%s" """ % (frappe.db.escape(account, percent=False), ))
if party_type and party:
cond.append("""gle.party_type = "%s" and gle.party = "%s" """ %
(frappe.db.escape(party_type), frappe.db.escape(party, percent=False)))
if company:
cond.append("""gle.company = "%s" """ % (frappe.db.escape(company, percent=False)))
if account or (party_type and party):
if in_account_currency:
select_field = "sum(debit_in_account_currency) - sum(credit_in_account_currency)"
else:
select_field = "sum(debit) - sum(credit)"
bal = frappe.db.sql("""
SELECT {0}
FROM `tabGL Entry` gle
WHERE {1}""".format(select_field, " and ".join(cond)))[0][0]
# if bal is None, return 0
return flt(bal)
def get_count_on(account, fieldname, date):
cond = []
if date:
cond.append("posting_date <= '%s'" % frappe.db.escape(cstr(date)))
else:
# get balance of all entries that exist
date = nowdate()
try:
year_start_date = get_fiscal_year(date, verbose=0)[1]
except FiscalYearError:
if getdate(date) > getdate(nowdate()):
# if fiscal year not found and the date is greater than today
# get fiscal year for today's date and its corresponding year start date
year_start_date = get_fiscal_year(nowdate(), verbose=1)[1]
else:
# this indicates that it is a date older than any existing fiscal year.
# hence, assuming balance as 0.0
return 0.0
if account:
acc = frappe.get_doc("Account", account)
if not frappe.flags.ignore_account_permission:
acc.check_permission("read")
# for pl accounts, get balance within a fiscal year
if acc.report_type == 'Profit and Loss':
cond.append("posting_date >= '%s' and voucher_type != 'Period Closing Voucher'" \
% year_start_date)
# different filter for group and ledger - improved performance
if acc.is_group:
cond.append("""exists (
select name from `tabAccount` ac where ac.name = gle.account
and ac.lft >= %s and ac.rgt <= %s
)""" % (acc.lft, acc.rgt))
else:
cond.append("""gle.account = "%s" """ % (frappe.db.escape(account, percent=False), ))
entries = frappe.db.sql("""
SELECT name, posting_date, account, party_type, party,debit,credit,
voucher_type, voucher_no, against_voucher_type, against_voucher
FROM `tabGL Entry` gle
WHERE {0}""".format(" and ".join(cond)), as_dict=True)
count = 0
for gle in entries:
if fieldname not in ('invoiced_amount','payables'):
count += 1
else:
dr_or_cr = "debit" if fieldname == "invoiced_amount" else "credit"
cr_or_dr = "credit" if fieldname == "invoiced_amount" else "debit"
select_fields = "ifnull(sum(credit-debit),0)" \
if fi |
dschien/energy-aggregator | ep/tests/test_docker.py | Python | mit | 1,639 | 0.00061 | import unittest
import json
import time
from celery import current_app
from django.conf import settings
from django.utils import timezone
from ep.models import DPMeasurements, DeviceParameter
from ep.tasks import send_msg
from django.test import TestCase, modify_settings, override_settings
from ep.tests.static_factories import SiteFactory
from ep_secure_importer.controllers.secure_client import secure_site_name
__author__ = 'schien'
@override_setting | s(IODICUS_MESSAGING_HOST='messaging.iodicus.net')
class TaskTest(TestCase):
def test_messaging(self):
print(settings.IODICUS_MESSAGING_HOST)
# print(settings.BROKER_URL)
self.assertTrue(send_msg.delay(json.dumps({'test': 1})))
class LocalTaskTest(TestCase):
def test_messaging(self):
print(settings.IODICUS_MESSAGING_HOST)
print(settings.BROKER_URL)
self.assertTrue(send_msg.delay(json.dumps({'te | st': 1})))
# @override_settings(INFLUXDB_HOST='52.49.171.8')
class InfluxDBTest(TestCase):
@classmethod
def setUpTestData(cls):
SiteFactory.create(name=secure_site_name)
# @unittest.skip
def test_simple_add(self):
print(settings.INFLUXDB_HOST)
m = DPMeasurements(device_parameter=DeviceParameter.objects.first())
before = len(list(m.all()))
print(before)
m.add(time=timezone.now(), value=255)
m.add(time=timezone.now(), value=0)
m.add(time=timezone.now(), value=20.5)
time.sleep(5)
after = len(list(m.all()))
print(after)
self.assertTrue(before + 3 == after)
if __name__ == '__main__':
unittest.main()
|
valhallasw/phabricator-tools | py/phl/phlsys_signal.py | Python | apache-2.0 | 1,522 | 0 | """Helpers for process signals."""
# =============================================================================
# CONTENTS
# -----------------------------------------------------------------------------
# phlsys_signal
#
# Public Functions:
# set_exit_on_sigterm
#
# -----------------------------------------------------------------------------
# (this contents block is generated, edits will be lost)
# =============================================================================
from __future__ import absolute_import
import signal
import sys
def set_exit_on_sigterm():
def HandleSigterm(unused1, unused2):
# raises 'SystemExit' exception, which will allow us to clean up
sys.exit(1)
signal.signal(signal.SIGTERM, HandleSigterm)
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:/ | /www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writi | ng, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
|
gloaec/trifle | src/trifle/managers/server.py | Python | gpl-3.0 | 3,973 | 0.001007 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from trifle.managers.command import Command
from trifle.managers.option import Option
from trifle.managers.group import Group
class Server(Command):
    """Development-server command: wraps ``app.run()``.

    Constructor arguments set the defaults for the matching
    command-line options:

    :param host: interface to bind the server to
    :param port: TCP port to listen on
    :param use_debugger: enable the Werkzeug debugger by default; the
        command-line flag ``-d`` then means ``--no-debug``, otherwise
        it means ``--debug``.
    :param use_reloader: enable the auto-reloader by default; the
        command-line flag ``-r`` then means ``--no-reload``, otherwise
        it means ``--reload``.
    :param threaded: handle each request in its own thread
    :param processes: number of worker processes to spawn
    :param passthrough_errors: disable error catching so the server dies
        on errors -- useful when hooking a debugger in (pdb etc.)
    :param options: extra keyword arguments forwarded to
        :func:`werkzeug.run_simple` through ``app.run``.
    """

    help = description = 'Runs the Flask development server i.e. app.run()'

    def __init__(self, host='0.0.0.0', port=8080, use_debugger=True,
                 use_reloader=True, threaded=False, processes=1,
                 passthrough_errors=False, **options):
        # Stash every default; get_options() turns them into CLI options.
        self.host = host
        self.port = port
        self.use_debugger = use_debugger
        self.use_reloader = use_reloader
        self.threaded = threaded
        self.processes = processes
        self.passthrough_errors = passthrough_errors
        self.server_options = options

    def get_options(self):
        """Build the CLI option tuple from the configured defaults."""
        opts = [
            Option('-t', '--host', dest='host', default=self.host),
            Option('-p', '--port', dest='port', type=int,
                   default=self.port),
            Option('--threaded', dest='threaded', action='store_true',
                   default=self.threaded),
            Option('--processes', dest='processes', type=int,
                   default=self.processes),
            Option('--passthrough-errors', action='store_true',
                   dest='passthrough_errors',
                   default=self.passthrough_errors),
        ]
        # The spelling of the -d / -r flags flips with the configured
        # default, so passing the flag always inverts the behaviour.
        if self.use_debugger:
            opts.append(Option('-d', '--no-debug', action='store_false',
                               dest='use_debugger',
                               default=self.use_debugger))
        else:
            opts.append(Option('-d', '--debug', action='store_true',
                               dest='use_debugger',
                               default=self.use_debugger))
        if self.use_reloader:
            opts.append(Option('-r', '--no-reload', action='store_false',
                               dest='use_reloader',
                               default=self.use_reloader))
        else:
            opts.append(Option('-r', '--reload', action='store_true',
                               dest='use_reloader',
                               default=self.use_reloader))
        return tuple(opts)

    def handle(self, app, host, port, use_debugger, use_reloader,
               threaded, processes, passthrough_errors):
        """Start the development server with the parsed option values."""
        # No request context is needed just to start the server,
        # so call app.run() directly.
        app.run(host=host,
                port=port,
                debug=use_debugger,
                use_debugger=use_debugger,
                use_reloader=use_reloader,
                threaded=threaded,
                processes=processes,
                passthrough_errors=passthrough_errors,
                **self.server_options)
|
neurophysik/periodicitytest | tests/periodicitytest_test.py | Python | bsd-3-clause | 4,691 | 0.056065 | #! /usr/bin/env python
# Tests the Python module.
from __future__ import print_function, division
from math import *
import numpy as np
from periodicitytest import periodicitytest
import random
def pertest_noise_wrapper(T, max_tau):
    """Run periodicitytest on T after adding uniform noise of amplitude nu.

    ``nu`` is a module-level global set by the driver loop; it is both the
    noise amplitude and the tolerance handed to the test.
    """
    noise = np.random.uniform(0.0, nu, len(T))
    return periodicitytest(T + noise, max_tau, nu)
def is_problematic(period, n):
    """Return True if this period length could make the test harness
    itself (not the module under test) faulty due to rounding errors.

    A period is problematic when ``period*n*8`` lies within 1e-8 of an
    integer.  (Converted from a lambda assignment to a ``def`` per
    PEP 8 E731.)
    """
    return not (1e-8 < period * n * 8 % 1 < 1 - 1e-8)
def closest_fractions (x, n):
p,q,r,s = 0,1,1,0
while p+r < n:
if p+r < x*(q+s):
p += r
q += s
else:
r += p
s += q
return p,q,r,s
def contains_Interval(A, B):
    """True iff the rational interval A contains the rational interval B.

    Intervals are 4-tuples (p, q, r, s) meaning [p/q, r/s]; the bounds
    are compared by cross-multiplication (assumes positive denominators)
    to avoid floating-point division.
    """
    lower_ok = A[0] * B[1] <= B[0] * A[1]   # A.lower <= B.lower
    upper_ok = B[2] * A[3] <= A[2] * B[3]   # B.upper <= A.upper
    return lower_ok and upper_ok
def nasty_function(t):
    """
    A 2-pi-periodic, piecewise-linear function that looks like this:
         _
    2   | /| /\ /|
    _   |/ | __ __/ \__/ | __
    1   |  | /  |   /    |/
    _   |  |/   |  /
    0   ---------------------------
        |    /\     |
        0    pi     2pi
    """
    # Rescale one 2*pi period onto [0, 8) and dispatch per segment.
    phase = 4/pi * (t % (2*pi))
    if phase < 1:
        return phase            # rising edge 0 -> 1
    if phase < 2 or 3 <= phase < 4 or 6 <= phase < 7:
        return 1                # the three flat plateaus at height 1
    if phase < 3:
        return phase - 2        # dip to 0 and climb back to 1
    if phase < 5:
        return phase - 3        # climb from 1 to 2
    if phase < 6:
        return -phase + 7       # fall from 2 back to 1
    return phase - 6            # final climb from 1 to 2
def mostly_constant_function(t):
    """2*pi-periodic step function: 1 on a narrow initial window, 0 elsewhere.

    The period is rescaled to [0, 8); the function is 1 only while the
    rescaled phase is below 0.283 (roughly 3.5% of the period), which
    stresses the periodicity test with a mostly-constant signal.

    (Removed a commented-out duplicate of the return line that was left
    behind in the original.)
    """
    x = 4/pi * (t % (2*pi))
    return 1 if x < 0.283 else 0
def test_half_timeseries_length(f):
    """Periods just under n/2 must be detected for samples of f.

    For random series lengths n and random periods in (n/2 - 1, n/2),
    the (noisy) test must report periodicity and the returned rational
    interval must bracket the true period via closest_fractions.
    """
    max_n = 2000
    for n in np.random.randint(100, max_n, 10):
        for period in np.random.uniform(n//2 - 1, n//2, 5):
            if is_problematic(period, n):
                continue  # skip periods prone to harness rounding errors
            samples = np.array([f(i*2*pi/period) for i in range(max_n)])
            result = pertest_noise_wrapper(samples[:n], n - 1)
            assert result[0]
            assert contains_Interval(result[1], closest_fractions(period, n))
def test_varying_periods(f):
    """Random periods across the admissible range must be detected.

    Samples f with 20 random periods in (28.3, n-1); each run must be
    reported periodic with an interval bracketing the true period.
    """
    n = 1000
    for period in np.random.uniform(28.3, n - 1, 20):
        if is_problematic(period, n):
            continue  # skip periods prone to harness rounding errors
        samples = np.array([f(i*2*pi/period) for i in range(n)])
        result = pertest_noise_wrapper(samples[:n], n - 1)
        assert result[0]
        assert contains_Interval(result[1], closest_fractions(period, n))
def test_random_timeseries():
    """White noise must not be classified as periodic."""
    n = 1000
    noise_series = np.random.rand(n)
    result = pertest_noise_wrapper(noise_series, n//2)
    assert not result[0]
def test_constant_timeseries():
    """A constant series is periodic with every period length.

    The reported interval must be exactly (2, 1, max_tau, 1), i.e. the
    whole admissible range [2, max_tau].
    """
    n = 1000
    level = np.random.rand()
    T = level * np.ones(n)
    for max_tau in np.random.randint(5, n, 30):
        result = pertest_noise_wrapper(T, max_tau)
        assert result[0]
        assert result[1] == (2, 1, max_tau, 1)
def test_period_k_timeseries(k):
    """A series repeating every k samples must be detected.

    Builds a period-k series from shuffled distinct levels (spaced at
    least 2*nu apart so noise cannot merge them) and checks the reported
    interval matches k or one of its sub-periods k/i.
    """
    max_n = 2000
    levels = np.arange(0, 100, max(0.01, 2*nu))
    for n in np.random.randint(100, max_n, 20):
        np.random.shuffle(levels)
        T = np.array([levels[i % k] for i in range(n)])
        result = pertest_noise_wrapper(T, n - 1)
        assert result[0]

        def matches(i):
            # Exact divisors of k give the fraction k/i exactly;
            # otherwise bracket k/i rationally for series length n.
            if k % i == 0:
                return contains_Interval(result[1], (k//i, 1, k/i, 1))
            return contains_Interval(result[1], closest_fractions(k/i, n))

        assert any(matches(i) for i in range(1, k))
def test_varying_amplitude(f):
    """A slow linear amplitude drift must break strict periodicity."""
    eps = 0.001
    n = 10000
    T = np.array([(1 + eps*i) * f(i) for i in range(n)])
    assert not pertest_noise_wrapper(T, n//2)[0]
def test_varying_frequency(f):
    """A slow linear frequency drift (chirp) must break strict periodicity."""
    eps = 0.00001
    n = 10000
    T = np.array([f((1 + eps*i) * i) for i in range(n)])
    assert not pertest_noise_wrapper(T, n//2)[0]
# Run the whole suite for several noise amplitudes nu (read as a global
# by the test functions); 1e-100 exercises the effectively noise-free path.
for nu in list(np.arange(0, 0.1, 0.02)) + [1e-100]:
    test_half_timeseries_length(sin)
    test_half_timeseries_length(cos)
    test_half_timeseries_length(nasty_function)
    test_half_timeseries_length(mostly_constant_function)
    test_half_timeseries_length(lambda x: x % (2*pi))
    test_half_timeseries_length(lambda x: round(100*sin(x)))
    test_half_timeseries_length(lambda x: sin(x) + sin(2*x))
    test_varying_periods(sin)
    test_varying_periods(cos)
    test_varying_periods(lambda x: tan(x/2))
    test_varying_periods(nasty_function)
    test_varying_periods(mostly_constant_function)
    test_varying_periods(lambda x: x % (2*pi))
    test_varying_periods(lambda x: round(100*sin(x)))
    test_varying_periods(lambda x: sin(x) + sin(2*x))
    test_random_timeseries()
    test_constant_timeseries()
    # BUG FIX: the loop previously called test_period_k_timeseries(20)
    # on every iteration, leaving the loop variable unused and testing
    # only period 20; pass k so all periods 2..19 are exercised.
    for k in range(2, 20):
        test_period_k_timeseries(k)
    test_varying_amplitude(sin)
    test_varying_amplitude(cos)
    test_varying_amplitude(nasty_function)
    test_varying_amplitude(lambda x: x % (2*pi))
    test_varying_amplitude(lambda x: round(100*sin(x)))
    test_varying_amplitude(lambda x: sin(x) + sin(2*x))
    test_varying_frequency(sin)
    test_varying_frequency(cos)
    test_varying_frequency(nasty_function)
    test_varying_frequency(lambda x: x % (2*pi))
    test_varying_frequency(lambda x: round(100*sin(x)))
    test_varying_frequency(lambda x: sin(x) + sin(2*x))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.