repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
eyecatchup/gomoz | win/winsetup.py | Python | gpl-3.0 | 689 | 0.063861 | from distutils.core import setup
import py2exe
opts = {
"py2exe": {
"compressed": 1,
"optimize": 2,
"ascii": 1,
"bundle_files": 1,
"packages": ["encodings"],
"dist_dir": "dist"
}
}
setup (name = "Gomoz",
fullname = | "Gomoz web scanner",
version = "1.0.1",
description = "Gomoz scanner web application",
author = "Handrix",
au | thor_email = "securfox@gmail.com",
url = "http://www.sourceforge.net/projects/gomoz/",
license = "GPL",
keywords = ["scanner", "web application", "securfox", "wxPython"],
windows = [{"script": "gomoz"}],
options = opts,
zipfile = None
)
|
plannerstack/testset | mmri/test_runner.py | Python | mit | 2,465 | 0.004462 | #!/usr/bin/env python
#
# Test a trip planner
import sys
import argparse
from importlib import import_module
import logging
from color_logging import ColoredFormatter
PROVIDER_CHOICES = ['otp', 'hpjp']
logger = logging.getLogger(__name__)
def setup_logging(options):
SUCCESS_LEVEL_NUM = 9
logging.addLevelName(SUCCESS_LEVEL_NUM, "SUCCESS")
def success(self, message, *args, **kws):
# Yes, logger takes its '*args' as 'args'.
self._log(SUCCESS_LEVEL_NUM, message, args, **kws)
logging.Logger.success = success
logger.setLevel(logging.DEBUG if options.debug else logging.INFO)
console = logging.StreamHandler()
console.setFormatter(ColoredFormatter('%(message)s'))
# console.setFormatter(ColoredFormatter('%(name)s: %(message)s (%(filename)s:%(lineno)d)'))
logger.addHandler(console)
def parse_args(args=None):
parser = argparse.ArgumentParser(
description='Test a trip planner and time requests.')
parser.add_argument('-t', '--test', metavar='TEST',
help='id of the test to run (default: all tests)')
parser.add_argument('-p', '--provider', metavar='PROVIDER',
choices=PROVIDER_CHOICES, default='otp',
help='provider to run the tests for (default: otp)')
parser.add_argument('input', metavar='INPUT',
help='file to read test input from')
parser.add_argument('expected_output', metavar='EXPECTED',
help='file to read expected output from')
parser.add_argument('output', metavar='OUTPUT',
help='file to write actual output to')
parser.add_argument('benchmark_output', metavar='TIMING_OUTPUT',
help='file to write timing data to')
parser.add_argument('-u', '--url', metavar='URL',
| help='planner URL (optional based on planner)')
parser.add_argument('-d', '--debug', action='store_true',
help='show debugging output')
parser.add_argument('-s', '--stop_on_error', action='store_true',
help='stop on first test error')
return parser.parse_args(args)
def main():
options = parse_args()
setup_logging(options)
try:
provider = __import__("test_%s" % options.provi | der)
except ImportError:
provider = import_module("mmri.test_%s" % options.provider)
test_class = provider.TestClass(options, logger=logger)
test_class.run_tests(run_test_id=options.test)
if __name__ == '__main__':
main()
|
valentin-krasontovitsch/ansible | lib/ansible/modules/cloud/amazon/elb_classic_lb.py | Python | gpl-3.0 | 53,523 | 0.002111 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: elb_classic_lb
description:
- Returns information about the load balancer.
- Will be marked changed when called only if state is changed.
short_description: Creates or destroys Amazon ELB.
version_added: "2.4"
author:
- "Jim Dalton (@jsdalton)"
options:
state:
description:
- Create or destroy the ELB
choices: ["present", "absent"]
required: true
name:
description:
- The name of the ELB
required: true
listeners:
description:
- List of ports/protocols for this ELB to listen on (see example)
purge_listeners:
description:
- Purge existing listeners on ELB that are not found in listeners
type: bool
default: 'yes'
instance_ids:
description:
- List of instance ids to attach to this ELB
version_added: "2.1"
purge_instance_ids:
description:
- Purge existing instance ids on ELB that are not found in instance_ids
type: bool
default: 'no'
version_added: "2.1"
zones:
description:
- List of availability zones to enable on this ELB
purge_zones:
description:
- Purge existing availability zones on ELB that are not found in zones
type: bool
default: 'no'
security_group_ids:
description:
- A list of security groups to apply to the elb
version_added: "1.6"
security_group_names:
description:
- A li | st of security group names to apply to t | he elb
version_added: "2.0"
health_check:
description:
- An associative array of health check configuration settings (see example)
access_logs:
description:
- An associative array of access logs configuration settings (see example)
version_added: "2.0"
subnets:
description:
- A list of VPC subnets to use when creating ELB. Zones should be empty if using this.
version_added: "1.7"
purge_subnets:
description:
- Purge existing subnet on ELB that are not found in subnets
type: bool
default: 'no'
version_added: "1.7"
scheme:
description:
- The scheme to use when creating the ELB. For a private VPC-visible ELB use 'internal'.
If you choose to update your scheme with a different value the ELB will be destroyed and
recreated. To update scheme you must use the option wait.
choices: ["internal", "internet-facing"]
default: 'internet-facing'
version_added: "1.7"
validate_certs:
description:
- When set to C(no), SSL certificates will not be validated for boto versions >= 2.6.0.
type: bool
default: 'yes'
version_added: "1.5"
connection_draining_timeout:
description:
- Wait a specified timeout allowing connections to drain before terminating an instance
version_added: "1.8"
idle_timeout:
description:
- ELB connections from clients and to servers are timed out after this amount of time
version_added: "2.0"
cross_az_load_balancing:
description:
- Distribute load across all configured Availability Zones
type: bool
default: 'no'
version_added: "1.8"
stickiness:
description:
- An associative array of stickiness policy settings. Policy will be applied to all listeners ( see example )
version_added: "2.0"
wait:
description:
- When specified, Ansible will check the status of the load balancer to ensure it has been successfully
removed from AWS.
type: bool
default: 'no'
version_added: "2.1"
wait_timeout:
description:
- Used in conjunction with wait. Number of seconds to wait for the elb to be terminated.
A maximum of 600 seconds (10 minutes) is allowed.
default: 60
version_added: "2.1"
tags:
description:
- An associative array of tags. To delete all tags, supply an empty dict.
version_added: "2.1"
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Basic provisioning example (non-VPC)
- elb_classic_lb:
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
proxy_protocol: True
- protocol: https
load_balancer_port: 443
instance_protocol: http # optional, defaults to value of protocol setting
instance_port: 80
# ssl certificate required for https or ssl
ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/company/servercerts/ProdServerCert"
delegate_to: localhost
# Internal ELB example
- elb_classic_lb:
name: "test-vpc"
scheme: internal
state: present
instance_ids:
- i-abcd1234
purge_instance_ids: true
subnets:
- subnet-abcd1234
- subnet-1a2b3c4d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
delegate_to: localhost
# Configure a health check and the access logs
- elb_classic_lb:
name: "test-please-delete"
state: present
zones:
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
health_check:
ping_protocol: http # options are http, https, ssl, tcp
ping_port: 80
ping_path: "/index.html" # not required for tcp or ssl
response_timeout: 5 # seconds
interval: 30 # seconds
unhealthy_threshold: 2
healthy_threshold: 10
access_logs:
interval: 5 # minutes (defaults to 60)
s3_location: "my-bucket" # This value is required if access_logs is set
s3_prefix: "logs"
delegate_to: localhost
# Ensure ELB is gone
- elb_classic_lb:
name: "test-please-delete"
state: absent
delegate_to: localhost
# Ensure ELB is gone and wait for check (for default timeout)
- elb_classic_lb:
name: "test-please-delete"
state: absent
wait: yes
delegate_to: localhost
# Ensure ELB is gone and wait for check with timeout value
- elb_classic_lb:
name: "test-please-delete"
state: absent
wait: yes
wait_timeout: 600
delegate_to: localhost
# Normally, this module will purge any listeners that exist on the ELB
# but aren't specified in the listeners parameter. If purge_listeners is
# false it leaves them alone
- elb_classic_lb:
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
purge_listeners: no
delegate_to: localhost
# Normally, this module will leave availability zones that are enabled
# on the ELB alone. If purge_zones is true, then any extraneous zones
# will be removed
- elb_classic_lb:
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
purge_zones: yes
delegate_to: localhost
# Creates a ELB and assigns a list of subnets to it.
- elb_classic_lb:
state: present
name: 'New ELB'
security_group_ids: 'sg-123456, sg-67890'
region: us-west-2
subnets: 'subnet-123456,subnet-67890'
purge_subnets: yes
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
delegate_to: localhost
# Create an ELB with connection draining, increased idle timeout and cross availability
# zone load balancing
- elb_classic_lb:
name: "New ELB"
state: present
connection_draining_timeout: 60
idle_timeout: 300
cross_az_load_balancing: "yes"
region: us-e |
anhstudios/swganh | data/scripts/templates/object/mobile/shared_dressed_theed_palace_chamberlain.py | Python | mit | 457 | 0.045952 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.tem | plate = "object/mobile/shared_dressed_theed_palace_chamberlain.iff"
result.attribute_template_id = 9
result.stfName("npc_name","human_base_male")
#### BEGIN MODIFICATIONS ####
#### END MODIFI | CATIONS ####
return result |
Daniel-CA/odoomrp-wip-public | procurement_service/models/product_product.py | Python | agpl-3.0 | 903 | 0 | # -*- coding: utf-8 -*-
# Copyright © 201 | 7 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, api
class ProductProduct(models.Model):
_inherit = 'product.product'
@api.multi
def _is_service_buy_make_to_order(self):
for product in self:
if (product.type == 'service' and len(product.route_ids) == 2 and
self.env.ref('stock.route_warehouse0_mto').id in
product.route_ids.ids and
s | elf.env.ref('purchase.route_warehouse0_buy').id in
product.route_ids.ids):
return True
return False
@api.multi
def need_procurement(self):
for product in self:
if product._is_service_buy_make_to_order():
return True
return super(ProductProduct, self).need_procurement()
|
reeshupatel/demo | keystone/identity/core.py | Python | apache-2.0 | 42,880 | 0.000023 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Main entry point into the Identity service."""
import abc
import functools
import os
import uuid
|
from oslo.config import cfg
import six
from keystone import clean
from keystone.common import dependency
from keystone.common import driver_hints
from keystone.common import manager
from keystone import config
from keystone import exception
from keystone.i18n import _
from keystone.identity.mapping_backends import mapping
from keystone import | notifications
from keystone.openstack.common import importutils
from keystone.openstack.common import log
CONF = config.CONF
LOG = log.getLogger(__name__)
DOMAIN_CONF_FHEAD = 'keystone.'
DOMAIN_CONF_FTAIL = '.conf'
def filter_user(user_ref):
"""Filter out private items in a user dict.
'password', 'tenants' and 'groups' are never returned.
:returns: user_ref
"""
if user_ref:
user_ref = user_ref.copy()
user_ref.pop('password', None)
user_ref.pop('tenants', None)
user_ref.pop('groups', None)
user_ref.pop('domains', None)
try:
user_ref['extra'].pop('password', None)
user_ref['extra'].pop('tenants', None)
except KeyError:
pass
return user_ref
class DomainConfigs(dict):
"""Discover, store and provide access to domain specific configs.
The setup_domain_drivers() call will be made via the wrapper from
the first call to any driver function handled by this manager. This
setup call it will scan the domain config directory for files of the form
keystone.<domain_name>.conf
For each file, the domain_name will be turned into a domain_id and then
this class will:
- Create a new config structure, adding in the specific additional options
defined in this config file
- Initialise a new instance of the required driver with this new config.
"""
configured = False
driver = None
def _load_driver(self, assignment_api, domain_id):
domain_config = self[domain_id]
domain_config['driver'] = (
importutils.import_object(
domain_config['cfg'].identity.driver, domain_config['cfg']))
domain_config['driver'].assignment_api = assignment_api
def _load_config(self, assignment_api, file_list, domain_name):
try:
domain_ref = assignment_api.get_domain_by_name(domain_name)
except exception.DomainNotFound:
LOG.warning(
_('Invalid domain name (%s) found in config file name'),
domain_name)
return
# Create a new entry in the domain config dict, which contains
# a new instance of both the conf environment and driver using
# options defined in this set of config files. Later, when we
# service calls via this Manager, we'll index via this domain
# config dict to make sure we call the right driver
domain = domain_ref['id']
self[domain] = {}
self[domain]['cfg'] = cfg.ConfigOpts()
config.configure(conf=self[domain]['cfg'])
self[domain]['cfg'](args=[], project='keystone',
default_config_files=file_list)
self._load_driver(assignment_api, domain)
def setup_domain_drivers(self, standard_driver, assignment_api):
# This is called by the api call wrapper
self.configured = True
self.driver = standard_driver
conf_dir = CONF.identity.domain_config_dir
if not os.path.exists(conf_dir):
LOG.warning(_('Unable to locate domain config directory: %s'),
conf_dir)
return
for r, d, f in os.walk(conf_dir):
for fname in f:
if (fname.startswith(DOMAIN_CONF_FHEAD) and
fname.endswith(DOMAIN_CONF_FTAIL)):
if fname.count('.') >= 2:
self._load_config(assignment_api,
[os.path.join(r, fname)],
fname[len(DOMAIN_CONF_FHEAD):
-len(DOMAIN_CONF_FTAIL)])
else:
LOG.debug(('Ignoring file (%s) while scanning domain '
'config directory'),
fname)
def get_domain_driver(self, domain_id):
if domain_id in self:
return self[domain_id]['driver']
def get_domain_conf(self, domain_id):
if domain_id in self:
return self[domain_id]['cfg']
def reload_domain_driver(self, assignment_api, domain_id):
# Only used to support unit tests that want to set
# new config values. This should only be called once
# the domains have been configured, since it relies on
# the fact that the configuration files have already been
# read.
if self.configured:
if domain_id in self:
self._load_driver(assignment_api, domain_id)
else:
# The standard driver
self.driver = self.driver()
self.driver.assignment_api = assignment_api
def domains_configured(f):
"""Wraps API calls to lazy load domain configs after init.
This is required since the assignment manager needs to be initialized
before this manager, and yet this manager's init wants to be
able to make assignment calls (to build the domain configs). So
instead, we check if the domains have been initialized on entry
to each call, and if requires load them,
"""
@functools.wraps(f)
def wrapper(self, *args, **kwargs):
if (not self.domain_configs.configured and
CONF.identity.domain_specific_drivers_enabled):
self.domain_configs.setup_domain_drivers(
self.driver, self.assignment_api)
return f(self, *args, **kwargs)
return wrapper
def exception_translated(exception_type):
"""Wraps API calls to map to correct exception."""
def _exception_translated(f):
@functools.wraps(f)
def wrapper(self, *args, **kwargs):
try:
return f(self, *args, **kwargs)
except exception.PublicIDNotFound as e:
if exception_type == 'user':
raise exception.UserNotFound(user_id=e.message)
elif exception_type == 'group':
raise exception.GroupNotFound(group_id=e.message)
elif exception_type == 'assertion':
raise AssertionError(_('Invalid user / password'))
else:
raise
return wrapper
return _exception_translated
@dependency.provider('identity_api')
@dependency.optional('revoke_api')
@dependency.requires('assignment_api', 'credential_api', 'id_mapping_api',
'token_api')
class Manager(manager.Manager):
"""Default pivot point for the Identity backend.
See :mod:`keystone.common.manager.Manager` for more details on how this
dynamically calls the backend.
This class also handles the support of domain specific backends, by using
the DomainConfigs class. The setup call for DomainConfigs is called
from with the @domains_configured wrapper in a lazy loading fashion
to get around the fact that we can't satisfy the assignment api it needs
from within our __init__() function since the assignment driver is not
itself yet initialized.
Each of the identity calls are pre-processed here to choos |
DarioGT/OMS-PluginXML | org.modelsphere.sms/lib/jython-2.2.1/Lib/email/MIMEBase.py | Python | gpl-3.0 | 780 | 0.001282 | # Copyright (C) 2001,2002 Python Software Foundation
# Author: barry@zope.com (Barry Warsaw)
"""Base class for MIME specializations.
"""
from email import Message
class MIMEBase(Message.Message):
"""Base class for MIME specializations."""
def __init__(self, _maintype, _subtype, **_params):
"""This constructor adds a Content-Type: and a MIME-Version: header.
The Content-Type: header is taken from the _ma | intype and _subtype
arguments. Additional parameters for this header are taken from the
keyword arguments.
"""
Message.Message.__init__(self)
ctype = '%s/%s' % (_maintype, _subtype)
self.add_header('C | ontent-Type', ctype, **_params)
self['MIME-Version'] = '1.0'
|
turtledb/0install | setup.py | Python | lgpl-2.1 | 3,194 | 0.020664 | from distutils import log
from distutils.core import setup
from distutils.core import Command
from distutils.command.build_py import build_py
from distutils.command.install import install
import os
import zeroinstall
class adjust_scripts_for_home(Command):
"""setup.py install --home puts libraries in ~/lib/python, but Python doesn't look there.
If we're installing with --home, modify the scripts to add this to sys.path.
Don't do this otherwise; the system copy mustn't conflict with the copy in $HOME.
"""
description = "(used internally when using --home)"
user_options = [
('scripts-dir=', 'd', "directory to install scripts to"),
('lib-dir=', 'd', "directory libraries install to"),
]
def initialize_options (self):
self.scripts_dir = None
self.lib_dir = None
def finalize_options (self):
self.set_undefined_options('install',
('install_scripts', 'scripts_dir'),
('install_lib', 'lib_dir'),
)
def run(self):
for script in self.distribution.scripts:
outfile = os.path.join(self.scripts_dir, os.path.basename(script))
stream = open(outfile)
code = stream.read()
stream.close()
code = code.replace('## PATH ##', '''
import os, sys
sys.path.insert(0, %s)''' % repr(self.lib_dir))
stream = open(outfile, 'w')
stream.write(code)
stream.close()
class build_with_data(build_py):
"""Python < 2.4 doesn't support package_data_files, so add it manually."""
package_data_files = [
"zeroinstall/gtkui/desktop.ui",
]
def run(self):
old = log.set_threshold(log.ERROR) # Avoid "__init__.py not found" warning
# Copy .py files and build, as usual
build_py.run(self)
log.set_threshold(old)
# Copy data files
for data_file in self.package_data_files:
outfile = os.path.join(self.build_lib, data_file)
self.copy_file(data_file, outfile, preserve_mode=0)
executable = (os.stat(data_file).st_mode & 0o111) != 0
if executable:
os.chmod(outfile, os.stat(outfile).st_mode | 0o111)
class my_install(install):
def run(self):
install.run(self) # super.run()
if self.home:
self.run_command('adjust_scripts_for_home')
setup(name="zeroinstall-injector",
version=zeroinstall.version,
description="The Zero Install Injector (0launch)",
author="Thomas Leonard",
author_em | ail="zero-install-devel@lists.sourceforge.net",
url="http://0install.net",
scripts=['0install-python-fallback', '0alias'],
license='LGPL',
cmdclass={
'build_py': build_with_data,
'adjust_scripts_for_home': adjust_scripts_for_home,
'install': my_install,
},
long_descript | ion="""\
A running process is created by combining many different libraries (and other
components). In the Zero Install world, we have all versions of each library
available at all times. The problem then is how to choose which versions to
use.
The injector solves this problem by selecting components to meet a program's
requirements, according to a policy you give it. The injector finds out which
versions are available, and downloads and runs the ones you choose.""",
packages=["zeroinstall", "zeroinstall.support", "zeroinstall.injector", "zeroinstall.gtkui", "zeroinstall.cmd"])
|
chiaki-yu/PyYu | videos/models.py | Python | lgpl-3.0 | 3,730 | 0.00256 | # coding: utf-8
from ckeditor_uploader.fields import RichTextUploadingField
from django.contrib.auth.models import User
from django.db import models
class Column(models.Model):
name = models.CharField(max_length=64, unique=True, verbose_name=u'栏目名称')
ename = models.CharField(max_length=64, unique=True, verbose_name=u'栏目标示(唯一)')
description = models.TextField(max_length=512, verbose_name=u'栏目描述')
is_show = models.BooleanField(default=True, verbose_name=u'是否显示')
sort = models.IntegerField(default=0, verbose_name=u'排序')
created_time = models.DateTimeField(auto_now_add=True, verbose_name=u'创建时间')
def __unicode__(self):
return self.name
def get_absolute_url(self):
return '/{}'.format(self.ename)
class Meta:
verbose_name_plural = verbose_name = u'视频栏目'
ordering = ['-sort', '-created_time']
class Category(models.Model):
name = models.CharField(max_length=64, unique=True, verbose_name=u'类别名称')
ename = models.CharField(max_length=64, unique=True, verbose_name=u'类别标示(唯一)')
parent = models.ForeignKey('self', default=None, blank=True, null=True, verbose_name=u'父级类别')
column = models.ForeignKey(Column, verbose_name=u'所属栏目')
sort = models.IntegerField(default=0, verbose_name=u'排序')
is_show = models.BooleanField(default=True, verbose_name=u'是否显示')
created_time = models.DateTimeField(auto_now_add=True, verbose_name=u'创建时间')
def __unicode__(self):
if self.parent:
return '%s-->%s' % (self.parent.name, self.name)
else:
return self.name
def get_videos(self):
data_list = self.video_set.filter(is_show=True)[:12]
return data_list
def get_video_num(self):
video_num = self.video_set.filter(is_show=True).count()
return video_num
def get_absolute_url(self):
return '/{}/{}'.format(self.column.ename, self.ename)
class Meta:
verbose_name_plural = verbose_name = u'视频类别'
ordering = ['-sort', '-created_time']
class Video(models.Model):
title = models.CharField(max_length=64, verbose_name=u'标题')
summary = models.TextField(max_length=512, blank=True, verbose_name=u'简介')
url = models.CharField(max_length=256, blank=True, null=True, verbose_name=u'视频地址')
content = RichTextUploadingField(blank=True, null=True, verbose_name=u'内容')
column = models.ForeignKey(Column, verbose_name=u'所属栏目')
category = models.ForeignKey(Category, verbose_name=u'所属类别')
author = models.ForeignKey(User, verbose_name=u'作者')
featured_image = models.CharField(max_length=512, verbose_name=u'标题图')
sort = models.IntegerField(default=0, verbose_name=u'排序')
is_original = models.BooleanField(default=True, verbose_name=u'是否原创')
is_top = models.BooleanField(default=False, verbose_name=u'是否置顶')
is_show = models.Boole | anFi | eld(default=True, verbose_name=u'是否显示')
created_time = models.DateTimeField(auto_now_add=True, verbose_name=u'创建时间')
def __unicode__(self):
return self.title
def author_name(self):
blog_author_name = self.author.username
if self.author.first_name or self.author.last_name:
blog_author_name = ''.join((self.author.first_name, self.author.last_name))
return blog_author_name
def get_absolute_url(self):
return '/{}/v-{}'.format(self.column.ename, self.id)
class Meta:
verbose_name_plural = verbose_name = u'视频'
ordering = ['-is_top', '-sort','-created_time']
|
nuclear-wizard/moose | python/MooseDocs/common/read.py | Python | lgpl-2.1 | 1,466 | 0.00955 | #* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserve | d, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
"""Utiliti | es for reading files."""
import sys
import codecs
import os
def read(filename):
"""
Reads file using utf-8 encoding.
This function exists simply for convenience and not needing to remember to use "codecs" when
reading files.
Additionally, it handles the MOOSE headers automatically. The prism.js package syntax
highlighting messes up with the headers, so this makes them sane.
Inputs:
filename[str]: The filename to open.
"""
with codecs.open(filename, encoding='utf-8') as fid:
content = fid.read()
return content
def write(filename, content):
"""
Write utf-8 file.
"""
with codecs.open(filename, 'w', encoding='utf-8') as fid:
fid.write(content)
def get_language(filename):
"""
Auto detect the source code language, this is to allow for additions to be propagated to
all MooseDocs stuff that needs language.
Inputs:
filename[str]: The filename to examine.
"""
_, ext = os.path.splitext(filename)
if ext in ['.C', '.h', '.cpp', '.hpp']:
return 'cpp'
elif ext == '.py':
return 'python'
return 'text'
|
nickpack/django-project-template | docs/source/conf.py | Python | mit | 9,716 | 0.005867 | # -*- coding: utf-8 -*-
#
# {{project_name}} documentation build configuration file, created by
# sphinx-quickstart on Fri Sep 20 20:45:07 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'{{project_name}}'
copyright = u'2013, Nick Pack'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'agogo'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#ht | ml_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None |
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = '{{project_name}}doc'
# -- Options for LaTeX output --------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'project_name.tex', u'\\{\\{project\\_name\\}\\} Documentation',
u'Nick Pack', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'project_name', u'{{project_name}} Documentation',
[u'Nick Pack'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'project_name', u'{{project_name}} Documentation',
u'Nick Pack', 'project_name', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ---------------- |
heyglen/netobj | nettool/nettest.py | Python | mit | 14,175 | 0.001199 | # -*- coding: utf-8 -*-
import re
import ipaddress
from unidecode import unidecode
class NetTest(object):
wildcards = [ipaddress.IPv4Interface(
unicode('0.0.0.0/{}'.format(x))).network.hostmask.exploded for x in range(0, 33)]
netmasks = [ipaddress.IPv4Interface(
unicode('255.255.255.255/{}'.format(x)))
.network.network_address.exploded for x in range(0, 33)]
class validate(object):
""" Network validation """
        @staticmethod
        def _port(port, raise_exception=False):
            # Shared TCP/UDP check: accept ints or digit strings in 1-65535.
            # Python 2 code (`basestring`).
            if isinstance(port, basestring) and port.isdigit():
                port = int(port)
            elif type(port) is not int:
                if raise_exception:
                    message = 'Invalid port type \'{}\'.'
                    raise TypeError(message.format(type(port)))
                return False
            valid = port > 0 and port < 65536
            if not valid and raise_exception:
                message = 'Invalid port number \'{}\'. Must be between {}-{}'
                raise ValueError(message.format(port, 1, 65535))
            return valid
        @classmethod
        def tcp_port(cls, port, raise_exception=False):
            """ Layer 4 TCP port validation """
            return cls._port(port, raise_exception=raise_exception)
        @classmethod
        def udp_port(cls, port, raise_exception=False):
            """ Layer 4 UDP port validation """
            return cls._port(port, raise_exception=raise_exception)
        @staticmethod
        def netmask(netmask, raise_exception=False):
            """ Network subnet mask validation """
            # Valid masks are precomputed in NetTest.netmasks.
            valid = netmask in NetTest.netmasks
            if not valid and raise_exception:
                raise ValueError('Invalid netmask {}'.format(netmask))
            return valid
        @staticmethod
        def wildcard(wildcard, raise_exception=False):
            """ Network wildcard mask validation """
            # Valid wildcards are precomputed in NetTest.wildcards.
            valid = wildcard in NetTest.wildcards
            if not valid and raise_exception:
                raise ValueError('Invalid wildcard {}'.format(wildcard))
            return valid
        @staticmethod
        def prefix(prefix, raise_exception=False):
            """ CIDR prefix length validation """
            # IPv4 only: prefix lengths 0-32.
            valid = prefix in range(0, 33)
            if not valid and raise_exception:
                raise ValueError('Invalid prefix {}'.format(prefix))
            return valid
        @classmethod
        def _get_network_object(cls, network):
            # Validate, then normalize to an ipaddress.IPv4Network.
            # NOTE(review): the 'or' repeats the identical call — presumably
            # one side was meant to be cls.network(...); confirm.
            cls.ip(network, raise_exception=True) or cls.ip(network, raise_exception=True)
            network = NetTest.convert.string.cidr(network)
            network = ipaddress.IPv4Interface(network).network
            return network
        @classmethod
        def is_subnet(cls, subnet, supernet):
            """ Network is a subnet of the given supernet """
            subnet = cls._get_network_object(subnet)
            supernet = cls._get_network_object(supernet)
            return subnet.subnet_of(supernet)
        @classmethod
        def ip(cls, value, raise_exception=False):
            """ IP address validation """
            # A /32 network string also counts as a plain host address.
            if cls.network(value):
                value = NetTest.convert.string.cidr(value)
                if value.endswith('/32'):
                    value = value.split('/')[0]
                else:
                    return False
            if not isinstance(value, basestring):
                if raise_exception:
                    raise TypeError('Invalid type \'{}\''.format(type(value)))
                return False
            else:
                value = unicode(value)
            try:
                # Defer the real parsing to the ipaddress module.
                ipaddress.IPv4Address(value)
            except (ValueError, TypeError):
                if raise_exception:
                    raise
                return False
            return True
@classmethod
def network(cls, value, raise_exception=False):
""" Network address validation """
if not isinstance(value, basestring):
if raise_exception:
raise TypeError('Invalid type \'{}\''.format(type(value)))
return False
terms = value.split()
if len(terms) is 2:
if cls.ip(terms[0]):
if cls.netmask(terms[1]):
terms[1] = NetTest.convert.netmask.prefix(terms[1])
elif cls.wildcard(terms[1]):
terms[1] = NetTest.convert.wildcard.prefix(terms[1])
terms[1] = unicode(terms[1])
value = u'/'.join(terms)
try:
value = unicode(value)
ipaddress.IPv4Interface(value)
except (ValueError, TypeError):
if raise_exception:
raise
return False
return True
        @staticmethod
        def _host_base_checks(value, raise_exception=False):
            # Reject non-strings, non-ASCII text, and any character outside
            # [0-9a-z-] (checked case-insensitively).
            if not isinstance(value, basestring):
                if raise_exception:
                    raise TypeError('Invalid type \'{}\''.format(type(value)))
                return False
            try:
                if isinstance(value, unicode):
                    str(value)
                else:
                    unicode(value, encoding='ascii')
            except UnicodeDecodeError as e:
                # Recover the offending character from the exception text so
                # the error message is actionable.
                value = unicode(value, 'utf-8')
                position = re.search(r'in position (\d+):', str(e)).group(1)
                invalid_character = value[int(position)]
                error_message = u"'{}' invalid character '{}'. Must use ASCII characters"
                error_message = error_message.format(value, invalid_character)
                if raise_exception:
                    raise ValueError(error_message)
                return False
            invalid_character_match = re.search(r'([^0-9a-z\-])', value.lower())
            if invalid_character_match:
                if raise_exception:
                    message = "'{}' invalid character \'{}\'."
                    message = message.format(value, invalid_character_match.group(1))
                    raise ValueError(message)
                return False
            return True
        @staticmethod
        def host(value, raise_exception=False):
            # Single DNS label: ASCII [0-9a-z-] and length 1-63.
            if not isinstance(value, basestring):
                if raise_exception:
                    raise TypeError('Invalid type \'{}\''.format(type(value)))
                return False
            if not NetTest.validate._host_base_checks(value, raise_exception=raise_exception):
                return False
            if len(value) < 1:
                if raise_exception:
                    message = "'{}' host too short. Hostname be between 1-63 characters long"
                    message = message.format(value)
                    raise ValueError(message)
                return False
            if len(value) > 63:
                if raise_exception:
                    message = "'{}' host too long. Hostname be between 1-63 characters long"
                    message = message.format(value)
                    raise ValueError(message)
                return False
            return True
@staticmethod
def hostname(value, raise_exception=False):
""" DNS hostname validation """
if not isinstance(value, basestring):
if raise_exception:
raise TypeError('Invalid type \'{}\''.format(type(value)))
return False
if len(value) > 253:
if raise_exception:
message = "'{}' is too long. FQDN must be less than 254 characters"
message = message.format(value)
raise ValueError(message)
return False
for domain_level in value.split('.'):
if not NetTest.validate.host(domain_level):
if raise_exception:
message = "Inalid domain level name '{}' in hostname '{}'."
message = message.format(domain_level, value)
raise |
release-engineering/product-definition-center | examples/find-duplicated-contacts.py | Python | mit | 1,148 | 0 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Find components defining more than one contact with the same role.
In older versions of PDC, it was possible to have multiple contacts with the
same role. When limits for cardinality of this relationship were introduced, we
need to find all components that would not pass the new rules.
This script does exactly that. It iterates over all global and release
components and prints details about any component with duplicate contacts.
"""
import pdc_client
client = pdc_client.PDCClient('prod')
def run(resource):
    # Scan every component under the given PDC endpoint and report any
    # component that defines the same contact role more than once.
    # NOTE: Python 2 syntax (print statements).
    print 'Running tests for {}'.format(resource)
    # client[resource]._ pages lazily through the REST endpoint.
    for component in pdc_client.get_paged(client[resource]._):
        pk = component['id']
        name = component['name']
        # Global components have no 'release' key; label them '[global]'.
        release = component.get('release', {}).get('release_id', '[global]')
        seen_roles = set()
        for contact in component['contacts']:
            # A role already seen within this component is a duplicate.
            if contact['contact_role'] in seen_roles:
                print 'Duplicated roles for {}:{}/{}'.format(pk, release, name)
            seen_roles.add(contact['contact_role'])
    print ''
# Check both global and release-specific components.
run('global-components')
run('release-components')
|
oser-cs/oser-website | tests/test_users/test_user.py | Python | gpl-3.0 | 2,826 | 0 | """Users tests."""
from django.contrib.auth import get_user_model, authenticate
from django.test import TestCase
from django.test import RequestFactory
from tests.utils import ModelTestCase
from users.factory import UserFactory
User = get_user_model()
class EmailAuthenticationTest(TestCase):
    """Verify users authenticate with email and password, never with username."""

    def setUp(self):
        self.credentials = dict(
            email='john.doe@email.net',
            password='secretpassword',
        )

    def create_user(self, **extra):
        """Create a user from the test credentials plus any extra fields."""
        return User.objects.create_user(**self.credentials, **extra)

    def test_authenticate_with_email_succeeds(self):
        self.create_user()
        self.assertTrue(self.client.login(**self.credentials))

    def test_authenticate_with_username_fails(self):
        # Give the user a username, then try to log in with it (no email).
        self.credentials['username'] = 'johndoe'
        self.create_user()
        del self.credentials['email']
        self.assertFalse(self.client.login(**self.credentials))

    def test_authenticate_with_django_authenticate(self):
        self.create_user()
        request = RequestFactory().get('/test')
        self.assertIsNotNone(authenticate(request=request, **self.credentials))
class UserModelTest(ModelTestCase):
    """Test the user model: field attributes, uniqueness rules, relations."""
    model = User
    # Expected per-field attributes, checked by the ModelTestCase machinery.
    field_tests = {
        'username': {
            'unique': False,
            'blank': True,
            'null': True,
        },
        'email': {
            'unique': True,
            'blank': False,
            'null': False,
        },
        'profile_type': {
            'verbose_name': 'type de profil',
            'blank': False,
            'null': True,
            'choices': (
                (User.PROFILE_STUDENT, 'Lycéen'),
                (User.PROFILE_TUTOR, 'Tuteur'),
            )
        },
        'phone_number': {
            'blank': True,
            'null': True,
        }
    }
    model_tests = {
        'verbose_name': 'utilisateur',
    }
    @classmethod
    def setUpTestData(cls):
        # FIX: a classmethod's first argument is the class; it was
        # misleadingly named `self`. Behavior is unchanged.
        cls.obj = UserFactory.create()
    def test_get_absolute_url(self):
        """The user's absolute URL resolves (200) for a logged-in user."""
        self.client.force_login(self.obj)
        url = self.obj.get_absolute_url()
        response = self.client.get(url)
        self.assertEqual(200, response.status_code)
    def test_two_users_with_same_username_allowed(self):
        """Usernames are not unique: two users may share one."""
        UserFactory.create(username='foo')
        UserFactory.create(username='foo')
    def test_two_users_with_same_email_not_allowed(self):
        """Emails are unique: get_or_create returns the existing user."""
        same_email = 'same.email@example.net'
        UserFactory.create(email=same_email)
        _, created = User.objects.get_or_create(email=same_email)
        self.assertFalse(created)
    def test_visits_relationship(self):
        """A freshly created user has no associated visits."""
        self.assertEqual(self.obj.visit_set.all().count(), 0)
moumoutte/django-perf-rec | django_perf_rec/orm.py | Python | mit | 3,345 | 0.000897 | # -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import django
import patchy
from django.db.models.deletion import get_candidate_relations_to_delete
from django.db.models.query import QuerySet
from django.db.models.query_utils import Q
from django.db.models.sql.query import Query
def patch_ORM_to_be_deterministic():
    """
    Django's ORM is non-deterministic with regards to the queries it outputs
    for e.g. OR clauses. We need it to be deterministic so that we can compare
    queries between runs, so we make a couple patches to its internals to do
    this. Mostly this is done by adding sorted() in some places so we're not
    affected by the vagaries of random dict iteration order.
    There is no undo for this, but it doesn't make the ORM much slower or
    anything bad.
    """
    # Idempotence guard: applying the source patches twice would fail, so
    # bail out on repeat calls.
    if patch_ORM_to_be_deterministic.have_patched:
        return
    patch_ORM_to_be_deterministic.have_patched = True
    patch_QuerySet()
    patch_Query()
    patch_Q()
    # patch_delete only targets (and only applies cleanly to) the
    # Django 1.8/1.9 implementation of get_candidate_relations_to_delete.
    version = django.get_version()
    if version.startswith('1.8') or version.startswith('1.9'):
        patch_delete()
# Function attribute doubling as the module-level "already patched" flag.
patch_ORM_to_be_deterministic.have_patched = False
def patch_QuerySet():
    # Sort **kwargs before merging so annotation SQL order is stable.
    patchy.patch(QuerySet.annotate, """\
@@ -17,7 +17,7 @@
             except (AttributeError, TypeError):
                 raise TypeError("Complex annotations require an alias")
             annotations[arg.default_alias] = arg
-        annotations.update(kwargs)
+        annotations.update(sorted(kwargs.items()))
        clone = self._clone()
        names = self._fields
""")
def patch_Query():
    # Sort extra(select=...) entries so their SQL order is stable.
    patchy.patch(Query.add_extra, """\
@@ -13,7 +13,7 @@
            param_iter = iter(select_params)
        else:
            param_iter = iter([])
-        for name, entry in select.items():
+        for name, entry in sorted(select.items()):
            entry = force_text(entry)
            entry_params = []
            pos = entry.find("%s")
""")
def patch_Q():
    # This one can't be done by patchy since __init__ is different in Python 3,
    # maybe one day https://github.com/adamchainz/patchy/issues/31 will be
    # fixed.
    def __init__(self, *args, **kwargs):
        super(Q, self).__init__(children=list(args) + sorted(kwargs.items()))
    Q.__init__ = __init__
def patch_delete():
    # Replace the unordered set of candidate fields with an insertion-ordered
    # mapping so deletion queries come out in a stable order.
    patchy.patch(get_candidate_relations_to_delete, """\
@@ -4,9 +4,12 @@ def get_candidate_relations_to_delete(opts):
    candidate_models = {opts}
    candidate_models = candidate_models.union(opts.concrete_model._meta.proxied_children)
    # For each model, get all candidate fields.
-    candidate_model_fields = set(chain.from_iterable(
-        opts.get_fields(include_hidden=True) for opts in candidate_models
-    ))
+    from collections import OrderedDict
+    candidates_dict = OrderedDict()
+    for opts in candidate_models:
+        for field in opts.get_fields(include_hidden=True):
+            candidates_dict[field.name] = field
+    candidate_model_fields = candidates_dict.values()
    # The candidate relations are the ones that come from N-1 and 1-1 relations.
    # N-N (i.e., many-to-many) relations aren't candidates for deletion.
    return (
""")
|
klahnakoski/MoHg | setup.py | Python | mpl-2.0 | 2,261 | 0.008846 | # encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from distutils.util import convert_path
import os
from setuptools import setup
root = | os.path.abspath(os.path.dirname(__file__))
path = lambda *p: os.path.join(root, *p)
try:
long_desc = open(path('README.txt')).read()
except Exception:
long_desc = "<Missing README.txt>"
print("Missing README.txt")
def find_packages(where='.', lib_prefix='', exclude=()):
    """Return the dotted names of all Python packages found under *where*.

    SNAGGED FROM distribute-0.6.49-py2.7.egg/setuptools/__init__.py

    Parameters
    ----------
    where : str
        Directory searched breadth-first for packages.
    lib_prefix : str
        Prefix prepended to every discovered package name.
    exclude : iterable of str
        fnmatch-style patterns; matching names are dropped. 'ez_setup'
        and 'distribute_setup' are always excluded.

    Returns
    -------
    list of str
    """
    # FIX: hoisted out of the exclusion loop below (it used to be
    # re-imported on every pattern iteration).
    from fnmatch import fnmatchcase
    # ROBUSTNESS: distutils was removed in Python 3.12; fall back to a
    # minimal stand-in (POSIX needs no conversion at all).
    try:
        from distutils.util import convert_path
    except ImportError:
        def convert_path(pathname):
            return pathname if os.sep == '/' else pathname.replace('/', os.sep)
    out = []
    stack = [(convert_path(where), lib_prefix)]
    # Breadth-first walk: a directory is a package iff its name contains
    # no dot and it holds an __init__.py.
    while stack:
        where, prefix = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where, name)
            if ('.' not in name and os.path.isdir(fn) and
                    os.path.isfile(os.path.join(fn, '__init__.py'))):
                out.append(prefix + name)
                stack.append((fn, prefix + name + '.'))
    for pat in list(exclude) + ['ez_setup', 'distribute_setup']:
        out = [item for item in out if not fnmatchcase(item, pat)]
    return out
# Package metadata; version numbers are pinned in lock-step across the
# mo-* family of libraries.
setup(
    name='mo-hg',
    version="2.18.18240",
    description='Fast cache for Mozilla\'s Mercurial repository',
    long_description=long_desc,
    author='Kyle Lahnakoski',
    author_email='kyle@lahnakoski.com',
    url='https://github.com/klahnakoski/mo-hg',
    license='MPL 2.0',
    packages=find_packages(),
    install_requires=["beautifulsoup4","mo-collections>=2.18.18240","mo-dots>=2.18.18240","mo-future>=2.18.18240","mo-kwargs>=2.18.18240","mo-logs>=2.18.18240","mo-math>=2.18.18240","mo-threads>=2.18.18240","mo-times>=2.18.18240","pyLibrary"],
    include_package_data=True,
    zip_safe=False,
    classifiers=[ #https://pypi.python.org/pypi?%3Aaction=list_classifiers
        "Development Status :: 4 - Beta",
        "Topic :: Software Development :: Libraries",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
    ]
)
|
tienhm-p4u/shorten_url | endpoint/__init__.py | Python | gpl-3.0 | 1,393 | 0 | """Endpoint to handle URL shorten request"""
from hashids import Hashids
from flask import Blueprint, request, jsonify, current_app
import models
from lib.url_validate import is_valid_url
from lib import error
api = Blueprint(__name__, "url")
@api.route("/url", methods=["POST"])
def shorten():
    """Shorten a pure URL"""
    url = request.values.get("url")
    if not is_valid_url(url):
        raise error.InvalidURL(url)
    # Reuse the existing row if this URL was already shortened.
    existed = models.URL.find_by_url(url)
    if existed:
        url_id = existed.id
    else:
        created = models.URL.create({"url": url})
        url_id = created.id
    # The short token is the hashid of the database primary key.
    hashid = Hashids().encode(url_id)
    short_url = "http://%s%s" % (current_app.config["SHORTEN_PREFIX"], hashid)
    return jsonify({"shorten_url": short_url})
@api.route("/url/<hashid>", methods=["GET"])
def decode(hashid):
    """Translate from shorten URL to pure URL"""
    # NOTE(review): Hashids.decode() returns a tuple of ints; the whole
    # tuple is passed to find_by_id below — confirm the model layer expects
    # that (or whether this should be item_id[0]).
    item_id = Hashids().decode(hashid)
    if not item_id:
        raise error.NotFound(hashid)
    saved_url = models.URL.find_by_id(item_id)
    if not saved_url:
        raise error.NotFound(hashid)
    return jsonify({"url": saved_url.url})
@api.errorhandler(error.BaseError)
def handle_error(e: error.BaseError):
    """
    Handle known error
    :param e: error
    :return: response
    """
    # TODO: Add sentry to capture exception
    return jsonify(e.to_dict()), error.to_status_code(e)
|
smashwilson/strac | strac/rootnode.py | Python | mit | 2,306 | 0.00477 | # Copyright (C) 2009 Ashley J. Wilson
# This software is licensed as described in the file COPYING in the root
# directory of this distribution.
from storenode import StoreNode
from bundlenode import BundleNode
from packagenode import PackageNode
class RootNode(StoreNode):
    """The virtual root of a Store repository.
    This node has as its children the BundleNode and/or set of PackageNodes,
    that are specified by the trac.ini file as the interesting part of
    the Store repository. RootNodes have only the special revision 'ONLY' and
    no previous or next changeset.
    """
    def __init__(self, repos, bundle_desc, package_desc):
        """Create a RootNode that will report the interesting subset of the Store
        repository as its children.
        bundle_desc is expected to be either an exact bundle name from Store, or a
        comma-separated list of the same. Similarly, package_desc is either one or
        many package prefixes. Either parameter may be 'ALL', which will return every
        such entity in the repository, or '', which will return nothing of that type.
        """
        StoreNode.__init__(self, '/', 'ONLY', StoreNode.DIRECTORY, repos)
        # Both descriptors share the same comma-separated syntax; parse them
        # with one helper instead of duplicating the split/strip logic.
        self.bundle_names = self._parse_desc(bundle_desc)
        self.package_prefixes = self._parse_desc(package_desc)
    @staticmethod
    def _parse_desc(desc):
        """Split a comma-separated descriptor into stripped names.
        A falsy descriptor ('' or None) yields an empty list.
        """
        if not desc:
            return []
        return [part.strip() for part in desc.split(',')]
    def get_entries(self):
        """Generator method for the PackageNodes and/or BundleNodes contained
        within this view of the repository.
        """
        for bundle_name in self.bundle_names:
            if bundle_name == 'ALL':
                for bnode in BundleNode.all(self.repos):
                    yield bnode
            elif bundle_name is not None:  # idiom fix: identity test (was != None)
                yield BundleNode.with_name(self.repos, bundle_name)
        for package_prefix in self.package_prefixes:
            if package_prefix == 'ALL':
                for pkg in PackageNode.all(self.repos):
                    yield pkg
            elif package_prefix is not None:
                # '%' is the SQL LIKE wildcard: prefix match.
                for pkg in PackageNode.named_like(self.repos, package_prefix + '%'):
                    yield pkg
|
tlake/advent-of-code | 2016/day07_internet_protocol_version_7/python/src/part2.py | Python | mit | 2,010 | 0.000995 | #!/usr/bin/env python
"""Docstring."""
import re
from functools import reduce
from collections import Counter
from common import get_input
class SSLTester:
    """Check which IPv7 addresses support SSL (AoC 2016 day 7, part 2)."""
    def __init__(self, input_list=None):
        """Initialize.

        BUG FIX: the default used to be a mutable ``[]`` shared across all
        instances; use None as the sentinel instead.
        """
        self.input_list = input_list if input_list is not None else []

    def find_aba(self, seq):
        """Return list: all 'aba's found in the string <seq>."""
        return [
            seq[i:i + 3]
            for i in range(len(seq) - 2)
            if seq[i] == seq[i + 2] and seq[i] != seq[i + 1]
        ]

    def batch_find_aba(self, seqs):
        """Return list: all 'aba's found from all strings in <seqs>."""
        return [aba for seq in seqs for aba in self.find_aba(seq)]

    def has_corresponding_bab(self, input_key, seqs):
        """Return bool: the 'bab' matching <input_key> occurs in any of <seqs>."""
        bab = input_key[1] + input_key[0] + input_key[1]
        return any(bab in seq for seq in seqs)

    def ip_supports_ssl(self, ip_string):
        """Return bool: <ip_string> supports SSL.

        True iff some unbracketed segment contains an 'aba' pattern AND some
        bracketed segment contains the matching 'bab' pattern.
        """
        all_segments = re.split(r"\[(\w+)\]", ip_string)
        bracketed = re.findall(r"\[(\w+)\]", ip_string)
        unbracketed = [x for x in all_segments if x not in bracketed]
        return any(self.has_corresponding_bab(aba, bracketed)
                   for aba in self.batch_find_aba(unbracketed))

    def count_supported_ips(self, input_list=None):
        """Return int: number of items in <input_list> which support SSL."""
        input_list = input_list if input_list else self.input_list
        return sum(1 for ip in input_list if self.ip_supports_ssl(ip))
if __name__ == "__main__":
    # get_input() (from the local `common` module) supplies the puzzle input.
    tester = SSLTester(get_input())
    print("Out of {} IPs, {} support SSL.".format(
        len(tester.input_list),
        tester.count_supported_ips()
    ))
|
Taywee/makerestapiclient | makerestapiclient/__init__.py | Python | mit | 313 | 0.003205 | #!/usr/bin/env python
# -*- coding: utf | -8 -*-
# Copyright © 2016 Taylor C. Richberger <taywee@gmx.com>
# This code is released under the license described in the LICENSE file
from __future__ imp | ort absolute_import, division, print_function, unicode_literals
from .makerestapiclient import make_rest_api_client
|
betogulliver/JupyterWorkflow | jupyterworkflow/data.py | Python | mit | 1,112 | 0.009892 | import os
from urllib import urlretrieve
import pandas as pd
FREEMONT_URL = "https://data.seattle.gov/api/views/65db-xm6k/rows.csv?accessType=DOWNLOAD"
def get_freemont_data(filename="Freemont.csv", url=None,
                      force_download=False) :
    """Download and cache the Fremont Bridge bicycle-count data.

    Parameters
    ----------
    filename : string (optional)
        location to save the data
    url : string (optional)
        web location of the data (defaults to FREEMONT_URL)
    force_download : bool (optional)
        if True, force redownload of data

    Returns
    -------
    data : pandas.DataFrame
        The Fremont bridge data indexed by timestamp, with 'West', 'East'
        and computed 'Total' columns.
    """
    if force_download or not os.path.exists(filename) :
        if url is None:
            # Resolved lazily so defining/calling the function without a
            # download never touches the module-level constant.
            url = FREEMONT_URL
        # The top-level ``from urllib import urlretrieve`` is Python-2 only;
        # prefer the Python 3 location and fall back gracefully.
        try:
            from urllib.request import urlretrieve
        except ImportError:
            from urllib import urlretrieve
        urlretrieve(url, filename)
    # BUG FIX: read the file we actually downloaded/cached (``filename``)
    # instead of the hard-coded "Freemont.csv".
    data = pd.read_csv(filename, index_col="Date")
    try:
        # Explicit format: much faster than letting pandas guess per row.
        data.index = pd.to_datetime(data.index, format="%m/%d/%Y %I:%M:%S %p")
    except (TypeError, ValueError):
        # ROBUSTNESS: a format mismatch raises ValueError, which the original
        # TypeError-only handler let escape; fall back to flexible parsing.
        data.index = pd.to_datetime(data.index)
    data.columns = ['West', "East"]
    data['Total'] = data["West"] + data["East"]
    return data
|
blaze/hdfs3 | hdfs3/core.py | Python | bsd-3-clause | 34,058 | 0.000352 | # -*- coding: utf-8 -*-
"Main module defining filesystem and file classes"
from __future__ import absolute_import
import ctypes
import logging
import os
import posixpath
import re
import warnings
import operator
import functools
from collections import deque
from .compatibility import FileNotFoundError, ConnectionError, PY3
from .conf import conf
from .utils import (read_block, seek_delimiter, ensure_bytes, ensure_string,
ensure_trailing_slash, MyNone)
logger = logging.getLogger(__name__)
_lib = None
DEFAULT_READ_BUFFER_SIZE = 2 ** 16
DEFAULT_WRITE_BUFFER_SIZE = 2 ** 26
def _nbytes(buf):
    # Byte length of any buffer-protocol object. Python 2 memoryviews lack
    # .nbytes, so fall back to itemsize * product(shape).
    buf = memoryview(buf)
    if PY3:
        return buf.nbytes
    return buf.itemsize * functools.reduce(operator.mul, buf.shape)
class HDFileSystem(object):
""" Connection to an HDFS namenode
>>> hdfs = HDFileSystem(host='127.0.0.1', port=8020) # doctest: +SKIP
"""
_first_pid = None
    def __init__(self, host=MyNone, port=MyNone, connect=True, autoconf=True,
                 pars=None, **kwargs):
        """
        Parameters
        ----------
        host: str; port: int
            Overrides which take precedence over information in conf files and
            other passed parameters
        connect: bool (True)
            Whether to automatically attempt to establish a connection to the
            name-node.
        autoconf: bool (True)
            Whether to use the configuration found in the conf module as
            the set of defaults
        pars : {str: str}
            any parameters for hadoop, that you can find in hdfs-site.xml,
            https://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/hdfs-default.xml
            This dict looks exactly like the one produced by conf - you can,
            for example, remove any problematic entries.
        kwargs: key/value
            Further override parameters.
            These are applied after the default conf and pars; the most typical
            things to set are:
            host : str (localhost)
                namenode hostname or IP address, in case of HA mode it is name
                of the cluster that can be found in "fs.defaultFS" option.
            port : int (8020)
                namenode RPC port usually 8020, in HA mode port must be None
            user, ticket_cache, token, effective_user : str
                kerberos things
        """
        # Configuration layering: library defaults (conf), then explicit
        # `pars`, then keyword overrides, then the host/port arguments.
        self.conf = conf.copy() if autoconf else {}
        if pars:
            self.conf.update(pars)
        self.conf.update(kwargs)
        # MyNone sentinel distinguishes "not given" from a legitimate None
        # (HA mode requires port=None).
        if host is not MyNone:
            self.conf['host'] = host
        if port is not MyNone:
            self.conf['port'] = port
        self._handle = None
        # Kerberos: ticket cache and delegation token are mutually exclusive.
        if self.conf.get('ticket_cache') and self.conf.get('token'):
            m = "It is not possible to use ticket_cache and token at same time"
            raise RuntimeError(m)
        if connect:
            self.connect()
    def __getstate__(self):
        # Drop the ctypes connection handle: it cannot be pickled.
        d = self.__dict__.copy()
        del d['_handle']
        logger.debug("Serialize with state: %s", d)
        return d
    def __setstate__(self, state):
        self.__dict__.update(state)
        # Re-establish the (unpicklable) connection after deserialization.
        self._handle = None
        self.connect()
    def connect(self):
        """ Connect to the name node
        This happens automatically at startup
        """
        get_lib()
        conf = self.conf.copy()
        if self._handle:
            # Already connected; connect() is idempotent.
            return
        # libhdfs3 is not fork-safe: remember the first PID that used it and
        # warn if a different (child) process tries to connect.
        if HDFileSystem._first_pid is None:
            HDFileSystem._first_pid = os.getpid()
        elif HDFileSystem._first_pid != os.getpid():
            warnings.warn("Attempting to re-use hdfs3 in child process %d, "
                          "but it was initialized in parent process %d. "
                          "Beware that hdfs3 is not fork-safe and this may "
                          "lead to bugs or crashes."
                          % (os.getpid(), HDFileSystem._first_pid),
                          RuntimeWarning, stacklevel=2)
        # Build the connection via the libhdfs3 builder API; known options
        # are popped from the conf copy, everything left over is passed as a
        # generic hadoop config string below.
        o = _lib.hdfsNewBuilder()
        _lib.hdfsBuilderSetNameNode(o, ensure_bytes(conf.pop('host')))
        port = conf.pop('port', None)
        if port is not None:
            _lib.hdfsBuilderSetNameNodePort(o, port)
        user = conf.pop('user', None)
        if user is not None:
            _lib.hdfsBuilderSetUserName(o, ensure_bytes(user))
        effective_user = ensure_bytes(conf.pop('effective_user', None))
        ticket_cache = conf.pop('ticket_cache', None)
        if ticket_cache is not None:
            _lib.hdfsBuilderSetKerbTicketCachePath(o, ensure_bytes(ticket_cache))
        token = conf.pop('token', None)
        if token is not None:
            _lib.hdfsBuilderSetToken(o, ensure_bytes(token))
        for par, val in conf.items():
            # hdfsBuilderConfSetStr returns 0 on success.
            if not _lib.hdfsBuilderConfSetStr(o, ensure_bytes(par),
                                              ensure_bytes(val)) == 0:
                warnings.warn('Setting conf parameter %s failed' % par)
        fs = _lib.hdfsBuilderConnect(o, effective_user)
        _lib.hdfsFreeBuilder(o)
        if fs:
            logger.debug("Connect to handle %d", fs.contents.filesystem)
            self._handle = fs
        else:
            # Surface only the first line of the (often very long) C error.
            msg = ensure_string(_lib.hdfsGetLastError()).split('\n')[0]
            raise ConnectionError('Connection Failed: {}'.format(msg))
    def delegate_token(self, user=None):
        """Generate delegate auth token.
        Parameters
        ----------
        user: bytes/str
            User to pass to delegation (defaults to user supplied to instance);
            this user is the only one that can renew the token.
        """
        # NOTE(review): self.user and self.token are read here but are not set
        # in the visible __init__ (config lives in self.conf) — confirm they
        # are assigned elsewhere in the class.
        if user is None and self.user is None:
            raise ValueError('Delegation requires a user')
        user = user or self.user
        out = _lib.hdfsGetDelegationToken(self._handle, ensure_bytes(user))
        if out:
            # Remember the token so later renew/cancel calls default to it.
            self.token = out
            return out
        else:
            raise RuntimeError('Token delegation failed')
    def renew_token(self, token=None):
        """
        Renew delegation token
        Parameters
        ----------
        token: str or None
            If None, uses the instance's token. It is an error to do that if
            there is no token.
        Returns
        -------
        New expiration time for the token
        """
        token = token or self.token
        if token is None:
            raise ValueError('There is no token to renew')
        return _lib.hdfsRenewDelegationToken(self._handle, ensure_bytes(token))
    def cancel_token(self, token=None):
        """
        Revoke delegation token
        Parameters
        ----------
        token: str or None
            If None, uses the instance's token. It is an error to do that if
            there is no token.
        """
        token = token or self.token
        if token is None:
            raise ValueError('There is no token to cancel')
        # Non-zero return indicates failure.
        out = _lib.hdfsCancelDelegationToken(self._handle, ensure_bytes(token))
        if out:
            raise RuntimeError('Token cancel failed')
        if token == self.token:
            # now our token is invalid - this FS may not work
            self.token = None
    def disconnect(self):
        """ Disconnect from name node """
        if self._handle:
            logger.debug("Disconnect from handle %d",
                         self._handle.contents.filesystem)
            _lib.hdfsDisconnect(self._handle)
        self._handle = None
def open(self, path, mode='rb', replication=0, buff=0, block_size=0):
""" Open a file for reading or writing
Parameters
----------
path: string
Path of file on HDFS
mode: string
One of 'rb', 'wb', or 'ab'
replication: int
Replication factor; if zero, use system default (only on write)
buf: int (=0)
Client buffer size (bytes); if 0, use default.
block_size: int
Size of data-node blocks if writing
"""
if not self._handle:
raise IOError("Filesystem not connected")
if block_size and mode != 'wb':
raise |
Ultimaker/Cura | plugins/GCodeGzReader/GCodeGzReader.py | Python | lgpl-3.0 | 1,298 | 0.010786 | # Copyright (c) 2020 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import gzip
from UM.Mesh.MeshReader import MeshReader #The class we're extending/implementing.
from UM.MimeTypeDatabase import MimeTypeDatabase, MimeType #To add the .gcode.gz files to the MIME type database.
from UM.PluginRegistry import PluginRegistry
class GCodeGzReader(MeshReader):
    """A file reader that reads gzipped g-code.

    If you're zipping g-code, you might as well use gzip!
    """

    def __init__(self) -> None:
        super().__init__()
        # Register the .gcode.gz suffix so Cura recognises compressed g-code.
        compressed_gcode_mime = MimeType(
            name = "application/x-cura-compressed-gcode-file",
            comment = "Cura Compressed G-code File",
            suffixes = ["gcode.gz"]
        )
        MimeTypeDatabase.addMimeType(compressed_gcode_mime)
        self._supported_extensions = [".gcode.gz"]

    def _read(self, file_name):
        # Decompress the whole file, then delegate to the plain GCodeReader.
        with open(file_name, "rb") as compressed_file:
            compressed_data = compressed_file.read()
        gcode_text = gzip.decompress(compressed_data).decode("utf-8")
        registry = PluginRegistry.getInstance()
        registry.getPluginObject("GCodeReader").preReadFromStream(gcode_text)
        return registry.getPluginObject("GCodeReader").readFromStream(gcode_text, file_name)
|
ikki407/stacking | examples/Santander/scripts/ikki_NN_1.py | Python | mit | 44,881 | 0.016757 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#Genaral packages
import os, sys
import pandas as pd
import numpy as np
sys.path.append(os.getcwd())
#os.chdir('/Users/IkkiTanaka/Documents/kaggle/Santander/')
#各種PATH
from stacking.base import FOLDER_NAME, PATH, INPUT_PATH, OUTPUT_PATH, ORIGINAL_TRAIN_FORMAT, SUBMIT_FORMAT
np.random.seed(407)
#keras
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
from keras.utils import np_utils
from keras.layers.advanced_activations import LeakyReLU, PReLU
from keras.layers.normalization import BatchNormalization
from keras.regularizers import l1, l2, l1l2, activity_l2
#base_ver2 utils
from stacking.base import load_data, save_pred_as_submit_format, create_cv_id
#classifiers
from stacking.base import BaseModel, XGBClassifier, KerasClassifier
########### First stage ###########
# FEATURE LISTS in Stage 1.
FEATURE_LIST_stage1 = {
'train':('data/output/features/ikki_features_train_NN_ver3.csv',
'data/output/features/ikki_one_hot_encoder_train_ver3.csv',
),#target is in 'train'
'test':('data/output/features/ikki_features_test_NN_ver3.csv',
'data/output/features/ikki_one_hot_encoder_test_ver3.csv',
),
}
X,y,test = load_data(flist=FEATURE_LIST_stage1, drop_duplicates=True)
assert((False in X.columns == test.columns) == False)
nn_input_dim_NN = X.shape[1]
del X, y, test
# Models in Stage 1
PARAMS_V1 = {
'batch_size':256,
'nb_epoch':35,
'verbose':1,
'callbacks':[],
'validation_split':0.,
'validation_data':None,
'shuffle':True,
'show_accuracy':True,
'class_weight':None,#{0:0.0396, 1:0.9604},
'sample_weight':None,
'normalize':True,
'categorize_y':True
}
class ModelV1(BaseModel):
def build_model(self):
model = Sequential()
model.add(Dropout(0.2, input_shape=(nn_input_dim_NN,)))
model.add(Dense(input_dim=nn_input_dim_NN, output_dim=120, init='uniform'))
model.add(LeakyReLU(alpha=.00001))
model.add(BatchNormalization())
model.add(Dropout(0.5))
model.add(Dense(input_dim=120,output_dim=280, init='uniform'))
model.add(LeakyReLU(alpha=.00001))
model.add(BatchNormalization())
model.add(Dropout(0.5))
model.add(Dense(input_dim=280,output_dim=100, init='uniform', activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(0.4))
model.add(Dense(input_dim=100,output_dim=2, init='uniform', activation='softmax'))
#model.add(Activation('softmax'))
sgd = SGD(lr=0.015, decay=1e-6, mome | ntum=0.9, nesterov=True)
model.compile(optimizer=sgd, loss='binary_crossentropy',class_mode='binary')
return KerasClassifier(nn=model,**self.params)
PARAMS_V2 = {
'batch_size':512,
'nb_epoch':70,
'verbose':1,
'callbacks':[],
'validation_split':0.,
'validation_data':None,
'shuffle':Tru | e,
'show_accuracy':True,
'class_weight':None,#{0:0.0396, 1:0.9604},
'sample_weight':None,
'normalize':True,
'categorize_y':True
}
class ModelV2(BaseModel):
def build_model(self):
model = Sequential()
model.add(Dropout(0.1, input_shape=(nn_input_dim_NN,)))
model.add(Dense(input_dim=nn_input_dim_NN, output_dim=112, init='he_normal'))
model.add(LeakyReLU(alpha=.00001))
model.add(BatchNormalization())
model.add(Dropout(0.5))
model.add(Dense(input_dim=112,output_dim=128, init='he_normal'))
model.add(LeakyReLU(alpha=.00001))
model.add(BatchNormalization())
model.add(Dropout(0.5))
model.add(Dense(input_dim=128,output_dim=68, init='he_normal'))
model.add(LeakyReLU(alpha=.00003))
model.add(BatchNormalization())
model.add(Dropout(0.3))
model.add(Dense(input_dim=68,output_dim=2, init='he_normal'))
model.add(Activation('softmax'))
sgd = SGD(lr=0.01, decay=1e-10, momentum=0.99, nesterov=True)
model.compile(optimizer=sgd, loss='binary_crossentropy',class_mode='binary')
return KerasClassifier(nn=model,**self.params)
PARAMS_V3 = {
'batch_size':128,
'nb_epoch':72,
'verbose':1,
'callbacks':[],
'validation_split':0.,
'validation_data':None,
'shuffle':True,
'show_accuracy':True,
'class_weight':None,#{0:0.0396, 1:0.9604},
'sample_weight':None,
'normalize':True,
'categorize_y':True
}
class ModelV3(BaseModel):
def build_model(self):
model = Sequential()
model.add(Dropout(0.1, input_shape=(nn_input_dim_NN,)))
model.add(Dense(input_dim=nn_input_dim_NN, output_dim=310, init='he_normal'))
model.add(LeakyReLU(alpha=.001))
model.add(BatchNormalization())
model.add(Dropout(0.6))
model.add(Dense(input_dim=310,output_dim=252, init='he_normal'))
model.add(PReLU(init='zero'))
model.add(BatchNormalization())
model.add(Dropout(0.5))
model.add(Dense(input_dim=252,output_dim=128, init='he_normal'))
model.add(LeakyReLU(alpha=.001))
model.add(BatchNormalization())
model.add(Dropout(0.4))
model.add(Dense(input_dim=128,output_dim=2, init='he_normal', activation='softmax'))
#model.add(Activation('softmax'))
sgd = SGD(lr=0.02, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(optimizer=sgd, loss='binary_crossentropy',class_mode='binary')
return KerasClassifier(nn=model,**self.params)
PARAMS_V4 = {
'batch_size':128,
'nb_epoch':56,
'verbose':1,
'callbacks':[],
'validation_split':0.,
'validation_data':None,
'shuffle':True,
'show_accuracy':True,
'class_weight':None,#{0:0.0396, 1:0.9604},
'sample_weight':None,
'normalize':True,
'categorize_y':True
}
class ModelV4(BaseModel):
def build_model(self):
model = Sequential()
model.add(Dropout(0.1, input_shape=(nn_input_dim_NN,)))
model.add(Dense(input_dim=nn_input_dim_NN, output_dim=62, init='he_normal'))
model.add(LeakyReLU(alpha=.001))
model.add(Dropout(0.3))
model.add(Dense(input_dim=62,output_dim=158, init='he_normal'))
model.add(LeakyReLU(alpha=.001))
model.add(Dropout(0.25))
model.add(Dense(input_dim=158,output_dim=20, init='he_normal'))
model.add(PReLU(init='zero'))
model.add(Dropout(0.2))
model.add(Dense(input_dim=20,output_dim=2, init='he_normal', activation='softmax'))
#model.add(Activation('softmax'))
sgd = SGD(lr=0.05, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(optimizer=sgd, loss='binary_crossentropy',class_mode='binary')
return KerasClassifier(nn=model,**self.params)
PARAMS_V5 = {
'batch_size':216,
'nb_epoch':90,
'verbose':1,
'callbacks':[],
'validation_split':0.,
'validation_data':None,
'shuffle':True,
'show_accuracy':True,
'class_weight':None,#{0:0.0396, 1:0.9604},
'sample_weight':None,
'normalize':True,
'categorize_y':True
}
class ModelV5(BaseModel):
def build_model(self):
model = Sequential()
|
jjas0nn/solvem | tensorflow/lib/python2.7/site-packages/numpy/core/setup.py | Python | mit | 41,471 | 0.001495 | from __future__ import division, print_function
import os
import sys
import pickle
import copy
import warnings
from os.path import join
from numpy.distutils import log
from distutils.dep_util import newer
from distutils.sysconfig import get_config_var
from numpy._build_utils.apple_accelerate import (
uses_accelerate_framework, get_sgemv_fix
)
from numpy.compat import npy_load_module
from setup_common import *
# Set to True to enable relaxed strides checking. This (mostly) means
# that `strides[dim]` is ignored if `shape[dim] == 1` when setting flags.
NPY_RELAXED_STRIDES_CHECKING = (os.environ.get('NPY_RELAXED_STRIDES_CHECKING', "1") != "0")
# XXX: ugly, we use a class to avoid calling twice some expensive functions in
# config.h/numpyconfig.h. I don't see a better way because distutils force
# config.h generation inside an Extension class, and as such sharing
# configuration informations between extensions is not easy.
# Using a pickled-based memoize does not work because config_cmd is an instance
# method, which cPickle does not like.
#
# Use pickle in all cases, as cPickle is gone in python3 and the difference
# in time is only in build. -- Charles Harris, 2013-03-30
class CallOnceOnly(object):
def __init__(self):
self._check_types = None
self._check_ieee_macros = None
self._check_complex = None
def check_types(self, *a, **kw):
if self._check_types is None:
out = check_types(*a, **kw)
self._check_types = pickle.dumps(out)
else:
out = copy.deepcopy(pickle.loads(self._check_types))
return out
def check_ieee_macros(self, *a, **kw):
if self._check_ieee_macros is None:
out = check_ieee_macros(*a, **kw)
self._check_ieee_macros = pickle.dumps(out)
else:
out = copy.deepcopy(pickle.loads(self._check_ieee_macros))
return out
def check_complex(self, *a, **kw):
if self._check_complex is None:
out = check_complex(*a, **kw)
self._check_complex = pickle.dumps(out)
else:
out = copy.deepcopy(pickle.loads(self._check_complex))
return out
PYTHON_HAS_UNICODE_WIDE = True
def pythonlib_dir():
"""return path where libpython* is."""
if sys.platform == 'win32':
return os.path.join(sys.prefix, "libs")
else:
return get_config_var('LIBDIR')
def is_npy_no_signal():
"""Return True if the NPY_NO_SIGNAL symbol must be defined in configuration
header."""
return sys.platform == 'win32'
def is_npy_no_smp():
"""Return True if the NPY_NO_SMP symbol must be defined in public
header (when SMP support cannot be reliably enabled)."""
# Perhaps a fancier check is in order here.
# so that threads are only enabled if there
# are actually multiple CPUS? -- but
# threaded code can be nice even on a single
# CPU so that long-calculating code doesn't
# block.
return 'NPY_NOSMP' in os.environ
def win32_checks(deflist):
from numpy.distutils.misc_util import get_build_architecture
a = get_build_architecture()
# Distutils hack on AMD64 on windows
print('BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' %
(a, os.name, sys.platform))
if a == 'AMD64':
deflist.append('DISTUTILS_USE_SDK')
# O | n win32, force long d | ouble format string to be 'g', not
# 'Lg', since the MS runtime does not support long double whose
# size is > sizeof(double)
if a == "Intel" or a == "AMD64":
deflist.append('FORCE_NO_LONG_DOUBLE_FORMATTING')
def check_math_capabilities(config, moredefs, mathlibs):
def check_func(func_name):
return config.check_func(func_name, libraries=mathlibs,
decl=True, call=True)
def check_funcs_once(funcs_name):
decl = dict([(f, True) for f in funcs_name])
st = config.check_funcs_once(funcs_name, libraries=mathlibs,
decl=decl, call=decl)
if st:
moredefs.extend([(fname2def(f), 1) for f in funcs_name])
return st
def check_funcs(funcs_name):
# Use check_funcs_once first, and if it does not work, test func per
# func. Return success only if all the functions are available
if not check_funcs_once(funcs_name):
# Global check failed, check func per func
for f in funcs_name:
if check_func(f):
moredefs.append((fname2def(f), 1))
return 0
else:
return 1
#use_msvc = config.check_decl("_MSC_VER")
if not check_funcs_once(MANDATORY_FUNCS):
raise SystemError("One of the required function to build numpy is not"
" available (the list is %s)." % str(MANDATORY_FUNCS))
# Standard functions which may not be available and for which we have a
# replacement implementation. Note that some of these are C99 functions.
# XXX: hack to circumvent cpp pollution from python: python put its
# config.h in the public namespace, so we have a clash for the common
# functions we test. We remove every function tested by python's
# autoconf, hoping their own test are correct
for f in OPTIONAL_STDFUNCS_MAYBE:
if config.check_decl(fname2def(f),
headers=["Python.h", "math.h"]):
OPTIONAL_STDFUNCS.remove(f)
check_funcs(OPTIONAL_STDFUNCS)
for h in OPTIONAL_HEADERS:
if config.check_func("", decl=False, call=False, headers=[h]):
moredefs.append((fname2def(h).replace(".", "_"), 1))
for tup in OPTIONAL_INTRINSICS:
headers = None
if len(tup) == 2:
f, args, m = tup[0], tup[1], fname2def(tup[0])
elif len(tup) == 3:
f, args, headers, m = tup[0], tup[1], [tup[2]], fname2def(tup[0])
else:
f, args, headers, m = tup[0], tup[1], [tup[2]], fname2def(tup[3])
if config.check_func(f, decl=False, call=True, call_args=args,
headers=headers):
moredefs.append((m, 1))
for dec, fn in OPTIONAL_FUNCTION_ATTRIBUTES:
if config.check_gcc_function_attribute(dec, fn):
moredefs.append((fname2def(fn), 1))
for fn in OPTIONAL_VARIABLE_ATTRIBUTES:
if config.check_gcc_variable_attribute(fn):
m = fn.replace("(", "_").replace(")", "_")
moredefs.append((fname2def(m), 1))
# C99 functions: float and long double versions
check_funcs(C99_FUNCS_SINGLE)
check_funcs(C99_FUNCS_EXTENDED)
def check_complex(config, mathlibs):
priv = []
pub = []
try:
if os.uname()[0] == "Interix":
warnings.warn("Disabling broken complex support. See #1365", stacklevel=2)
return priv, pub
except:
# os.uname not available on all platforms. blanket except ugly but safe
pass
# Check for complex support
st = config.check_header('complex.h')
if st:
priv.append(('HAVE_COMPLEX_H', 1))
pub.append(('NPY_USE_C99_COMPLEX', 1))
for t in C99_COMPLEX_TYPES:
st = config.check_type(t, headers=["complex.h"])
if st:
pub.append(('NPY_HAVE_%s' % type2def(t), 1))
def check_prec(prec):
flist = [f + prec for f in C99_COMPLEX_FUNCS]
decl = dict([(f, True) for f in flist])
if not config.check_funcs_once(flist, call=decl, decl=decl,
libraries=mathlibs):
for f in flist:
if config.check_func(f, call=True, decl=True,
libraries=mathlibs):
priv.append((fname2def(f), 1))
else:
priv.extend([(fname2def(f), 1) for f in flist])
check_prec('')
check_prec('f')
check_prec('l')
return priv, pub
def check_ieee_macros(config):
priv = []
pub = []
macros = []
def _add_decl(f):
priv.append(fname2def("decl_%s" % f))
pub.append('NPY_%s' % fname2def("decl_%s" % f))
# |
icereval/osf.io | api/scopes/views.py | Python | apache-2.0 | 2,283 | 0.000876 | from rest_framework import generics, permissions as drf_permissions
from rest_framework.exceptions import NotFound
from framework.auth.oauth_scopes import CoreScopes, public_scopes
from api.base.filters import ListFilterMixin
from api.base import permissions as base_permissions
from api.scopes.serializers import ScopeSerializer, Scope
from api.scopes.permissions import IsPublicScope
from api.base.views import JSONAPIBaseView
from api.base.pagination import MaxSizePagination
class ScopeDetail(JSONAPIBaseView, generics.RetrieveAPIView):
"""Private endpoint for gathering scope informat | ion. Do not expect this to be stable.
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
IsPublicScope,
)
required_read_scopes = [CoreScopes.SCOPES_READ]
required_write_scopes = [CoreScopes.NULL]
serializer_class = ScopeSerializer
view_category = 'scopes'
view_name = 'scope-detail'
lookup_url_kwarg = 'scope_id'
# overrides | RetrieveAPIView
def get_object(self):
id = self.kwargs[self.lookup_url_kwarg]
scope_item = public_scopes.get(id, None)
if scope_item:
scope = Scope(id=id, scope=scope_item)
self.check_object_permissions(self.request, scope)
return scope
else:
raise NotFound
class ScopeList(JSONAPIBaseView, generics.ListAPIView, ListFilterMixin):
"""Private endpoint for gathering scope information. Do not expect this to be stable.
"""
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
IsPublicScope,
)
required_read_scopes = [CoreScopes.SCOPES_READ]
required_write_scopes = [CoreScopes.NULL]
pagination_class = MaxSizePagination
serializer_class = ScopeSerializer
view_category = 'scopes'
view_name = 'scope-list'
ordering = ('id', ) # default ordering
def get_default_queryset(self):
scopes = []
for key, value in public_scopes.iteritems():
if value.is_public:
scopes.append(Scope(id=key, scope=value))
return scopes
def get_queryset(self):
return self.get_queryset_from_request()
|
apache/incubator-systemml | src/main/python/systemds/operator/algorithm/builtin/xgboostPredictClassification.py | Python | apache-2.0 | 1,910 | 0.002618 | # --------------------------------------- | ----------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except i | n compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
# Autogenerated By : src/main/python/generator/generator.py
# Autogenerated From : scripts/builtin/xgboostPredictClassification.dml
from typing import Dict, Iterable
from systemds.operator import OperationNode, Matrix, Frame, List, MultiReturn, Scalar
from systemds.script_building.dag import OutputType
from systemds.utils.consts import VALID_INPUT_TYPES
def xgboostPredictClassification(X: Matrix,
M: Matrix,
learning_rate: float):
"""
:param X: Matrix of feature vectors we want to predict (X_test)
:param M: The model created at xgboost
:param learning_rate: the learning rate used in the model
:return: 'OperationNode' containing the predictions of the samples using the given xgboost model. (y_prediction)
"""
params_dict = {'X': X, 'M': M, 'learning_rate': learning_rate}
return Matrix(X.sds_context,
'xgboostPredictClassification',
named_input_nodes=params_dict)
|
Ezhil-Language-Foundation/open-tamil | solthiruthi/data_parser.py | Python | mit | 3,224 | 0.00062 | #!/usr/bin/python
# (C) 2015-2106 - Muthiah Annamalai
# parse data files for Tamil proper nouns
from __future__ import print_function
import codecs
import json
import pprint
import re
import sys
import tamil
class WordList:
# data structure for a WordList containing only one category
def __init__(self, cat):
self.category = cat
self.words = []
def add(self, word):
self.words.append(word.strip())
class DataParser:
def __init__(self, files):
self.categories = []
self.files = files
def process(self):
for filename in self.files:
self.parse_data(filename)
for cat in self.categories:
cat.words = set(cat.words) # unique elements only
return
@staticmethod
def run(args):
# print(u">> starting data processing for files <<")
# print(u"|".join(args))
obj = DataParser(args)
obj.process()
return obj
def parse_data(self, filename):
cat = None
# print(u">> file %s"%filename)
with codecs.open(filename, "r", "utf-8") as fp:
for line in fp.readlines():
if line.startswith(">>"):
if cat:
self.categories.append(cat)
cat = None
newcat = line.replace(">>", "").strip()
cat = WordList(cat=newcat)
elif line.startswith("#"):
continue
elif cat:
word = u"".join(re.split("\s+", line)[1:])
if len(word) > 0:
cat.add(word)
else:
# odd looking line - we'll keep it anyway
cat.add(line.strip())
if cat:
self.categories.append(cat)
return
def analysis(self):
r = {"catlen": 0, "total": 0, "dict": {}}
r["catlen"] = len(self.categories)
word_count = []
for ca | t in self.categories:
cat_wlen = len(cat.words)
r["dict"][cat.category] = list(cat.words)
word_count.append(cat_wlen)
r["total"] = sum(word_count)
return r
def __unicode__(self):
"print the | statistics of the wordlist etc"
rep = u"# categories = %d" % len(self.categories)
word_count = []
for cat in self.categories:
cat_wlen = len(cat.words)
rep += u" %s -> %d\n" % (cat.category, cat_wlen)
word_count.append(cat_wlen)
rep += "Total words -> %d \n" % sum(word_count)
return rep
if __name__ == u"__main__":
if len(sys.argv) < 2:
print(u"usage: python data_parser.py <filename1> ... <filenamen>")
print(
u" this command shows categories of words and their frequencies in document(s);"
)
sys.exit(-1)
obj = DataParser.run(sys.argv[1:])
r = obj.analysis()
# if you wanted to save the results to JSON
with codecs.open("ref.json", "w", "utf-8") as fp:
# pprint.pprint( json.dumps(r), fp )
fp.write(json.dumps(r))
print(u"cat %d / total words %d" % (r["catlen"], r["total"]))
|
locatw/Autonek | TweLiteGateway/twe_lite_gateway/gateway.py | Python | mit | 4,912 | 0.003054 | import twe_lite
import json
import queue
import sys
import time
import traceback
import yaml
from threading import Thread
from datetime import datetime
from pytz import timezone
from iothub_client import IoTHubClient, IoTHubClientError, IoTHubTransportProvider, IoTHubClientResult, IoTHubClientStatus
from iothub_client import IoTHubMessage, IoTHubMessageDispositionResult, IoTHubError, DeviceMethodReturnValue
SECRETS_FILE_NAME = "secrets.yml"
DEFAULT_PORT_NAME = '/dev/ttyUSB0'
TIME_ZONE = timezone('Asia/Tokyo')
def print_usage():
print('Usage: python gateway.py DEVICE_ID [SERIAL_PORT_NAME]')
print("if SERIAL_PORT_NAME is omitted, port name is '/dev/ttyUSB0' by default")
secrets = None
with open(SECRETS_FILE_NAME, 'r') as f:
secrets = yaml.load(f)
device_id = None
port_name | = DEFAULT_PORT_NAME
if len(sys.argv) == 2:
device_id = sys.argv[1]
elif len(sys.argv) == 3:
device_id = sys.argv[1]
port_name = sys.argv[2]
else:
print_usage()
sys.exit(1)
print("Device ID: " + device_id)
print("Port name: " + port_name)
continues = True
class MonoStickTread(Thread): |
def __init__(self, mono_stick, parser, queue):
super().__init__()
self.__mono_stick = mono_stick
self.__parser = parser
self.__queue = queue
def run(self):
print("Start reading data from monostick.")
while continues:
try:
data = mono_stick.read_line()
if len(data) == 0:
continue
print('Data: {0}'.format(data))
received_message = self.__parser.parse(data)
self.__queue.put(received_message, timeout=0.1)
except queue.Full as _:
print('Message queue is full')
except twe_lite.InvalidMessageFormatError as e:
print(traceback.format_exc())
class SendMessageThread(Thread):
def __init__(self, iothub_client, queue):
super().__init__()
self.__iothub_client = iothub_client
self.__queue = queue
def run(self):
print("Start sending data to Azure IoT Hub.")
while continues:
try:
try:
status_notification_message = self.__queue.get(timeout=0.1)
except queue.Empty as _:
continue
self.__queue.task_done()
print(str(status_notification_message))
if not status_notification_message.di1.changed:
continue
if status_notification_message.di1.state == twe_lite.DigitalPinState.HIGH:
continue
self.__send_message()
except IoTHubError as iothub_error:
print("Unexpected error %s from IoTHub" % iothub_error)
time.sleep(1)
except Exception as e:
print(traceback.format_exc())
def __send_message(self):
detected_json = self.__make_detected_json()
sending_message = IoTHubMessage(bytearray(detected_json, 'utf8'))
self.__iothub_client.send_event_async(sending_message, self.__event_confirmation_callback, None)
while True:
status = self.__iothub_client.get_send_status()
if status == IoTHubClientStatus.IDLE:
break
def __event_confirmation_callback(self, message, result, _):
print("Confirmation received for message with result = %s" % (result))
def __make_detected_json(self):
now = datetime.now(TIME_ZONE)
return json.dumps({
'MessageType': 'DeviceEvent',
'DeviceId': device_id,
'EventType': 'HumanDetected',
'EventTime': now.isoformat()
})
def iothub_client_init():
connection_string = secrets['iothub']['connection_string']
client = IoTHubClient(connection_string, IoTHubTransportProvider.MQTT)
client.set_option("messageTimeout", 10000)
client.set_option("logtrace", 0)
client.set_option("product_info", "TweLiteGateway")
return client
with twe_lite.MonoStick(port_name, 0.1, 0.1) as mono_stick:
client = iothub_client_init()
parser = twe_lite.Parser()
message_queue = queue.Queue()
threads = []
try:
threads.append(MonoStickTread(mono_stick, parser, message_queue))
threads.append(SendMessageThread(client, message_queue))
for thread in threads:
thread.start()
while continues:
print("Quit if 'q is entered.")
c = input()
if c == 'q':
continues = False
break
finally:
for thread in threads:
thread.join()
sys.exit(0) |
oluwex/Akede | Articles/admin.py | Python | gpl-3.0 | 424 | 0.007075 | fr | om django.contrib import admin
from .models import Article
# Register your models here.
class ArticleModelAdmin(admin.ModelAdmin):
class Meta:
model = Article
list_display = ['name', 'timestamp', 'updated_time','cate | gory']
list_display_links = ['name']
search_fields = ['name', 'article']
list_filter = ['updated_time', 'author', 'category']
admin.site.register(Article, ArticleModelAdmin) |
kinetifex/maya-kinetifex | scripts/kinetifex/poses.py | Python | gpl-2.0 | 28,451 | 0.024358 | """
Module for building and working with Poses.
"""
import thread, random, re, pprint, sets
import pickle
import maya.cmds as mc
import pymel.core as pm
from pymel.core import mel, Path
from pymel.core.uitypes import OptionMenu
#from pymelX import register_runtime
from impress import register
from config import RUNTIME_SUITE, PREF_POSES
from pickle import TRUE
global selectedPose
def __getValueStr(self):
value = self.getValue()
return str(value)
OptionMenu.getValueStr = __getValueStr
class StoredAttr( object ):
def __init__(self, attr, value):
self.__name__ = self._attr = attr
self._value = value
def __repr__(self):
return "%s('%s', %s, %s)" % ( self.__class__.__name__, self.__name__, self._value )
@property
def attr(self):
return pm.Attribute( self._attr )
@property
def value(self):
return self._value
@property
def node(self):
return pm.PyNode( self._attr.split('.')[0] )
class Pose( object ):
"""
Class for storing, manipulating and apply poses.
Use 'capture' or 'captureSwap' methods for storing a pose on selected objects.
"""
_transList = ( 'translateX', 'translateY', 'translateZ',
'rotateX', 'rotateY', 'rotateZ',
'scaleX', 'scaleY', 'scaleZ',
'footRoll'
)
_mirrAttrs = { u'XY': (u'translateZ', u'rotateX', u'rotateY'),
u'YZ': (u'translateX', u'rotateY', u'rotateZ'),
u'XZ': (u'translateY', u'rotateX', u'rotateZ')
}
leftSuffix = '_l'
rightSuffix = '_r'
leftPrefix = 'L'
rightPrefix = 'R'
def __init__( self, name=None, stored = [] ):
self._stored = stored
if name:
self.__name__ = name
else:
self.__name__ = 'pose'+str( int( random.uniform( 100, 999 ) ) )
def __repr__(self):
return "%s('%s', %s)" % ( self.__class__.__name__, self.__name__, str(self._stored) )
def _getAttr(self, obj):
l = []
for attr in map( pm.Attribute, obj.listAttr( keyable=True ) ):
l = attr.get()
return l
def capture(self):
"""
stores attributes of selected controls into list _stored resulting dict looks like:
{ object:{attribute1: value, attribute2: value}, object2...
[ ( attribute1, value, node ), ( attribute1, value, node ) ]
"""
pm.select(pm.ls(selection=True), replace=True)
self._stored = []
for obj in pm.ls(selection=True):
for attr in map( pm.Attribute, obj.listAttr(keyable=True, unlocked=True) ):
self._stored.append( ( str(attr), attr.get() ) )
# -- push all non-transform attributes to the top --
for t in self._stored:
attr, v = t
if attr.split('.')[-1] not in self._transList:
self._stored.insert( 0, self._stored.pop( self._stored.index(t) ) )
def mirror( self, defaultAxis='YZ' ):
prefix = [self.leftPrefix, self.rightPrefix]
suffix = [self.leftSuffix, self.rightSuffix]
pm.select(pm.ls(selection=True), replace=True)
stored = {}
for obj in pm.ls(selection=True, type="transform"):
if obj.hasAttr("mirrorAxis"):
axis = ('XY','YZ','XZ')[ obj.mirrorAxis.get() ]
else:
axis = defaultAxis
for attr in map( pm.Attribute, obj.listAttr( keyable=True, unlocked=True ) ):
if attr.split('.')[-1] in self._transList:
split = attr.split(':')
if len( split ) > 1:
a = split[1]
else:
a = attr
has_side = False
if a.startswith( prefix[0] ) or a.startswith( prefix[1] ):
if a.startswith( prefix[1] ):
prefix.reverse()
v = pm.Attribute( attr.replace( prefix[0], prefix[1] ) ).get()
has_side = TRUE
elif obj.endswith( suffix[0] ) or obj.endswith( suffix[1] ):
if obj.endswith( suffix[1] ):
suffix.reverse()
v = pm.Attribute( attr.replace( suffix[0], suffix[1] ) ).get()
has_side = TRUE
else:
v = attr.get()
if obj.type() == 'joint':
if has_side:
stored[attr] = v
elif attr.split('.')[-1] in self._mirrAttrs['XY']:
stored[attr] = -v
else:
stored[attr] = v
elif attr.split('.')[-1] in self._mirrAttrs[axis]:
stored[attr] = -v
else:
stored[attr] = v
# -- apply --
| for (attr, v) in stored.iteritems():
pm.setAttr( attr, v )
def apply( self, alterNamespace=False, namespace='' ):
"apply the stored pose to the current scene"
for t in self._stored:
attr, v = t
if alterNamespace:
split = attr.split(':')
if len( split ) > 1:
attr = split[1]
attr = '%s:%s' % (namespace, attr)
try:
pm.Attribute(attr).set(v)
except pm.MayaAttributeError, msg:
mel.warning( str(msg) )
def applyToSelected( self ):
"apply the stored pose to the current scene"
sel_list = pm.ls(sl=1, type="transform")
stored_list = [ StoredAttr(*s) for s in self._stored]
stored_nodes = []
for stored in stored_list:
if stored.node not in stored_nodes:
stored_nodes.append(stored.node)
mapped_nodes = dict( zip( stored_nodes, sel_list ) )
for stored in stored_list:
if stored.node in mapped_nodes.keys():
attr = '.'.join( [ mapped_nodes[stored.node].nodeName(), stored.attr.split('.')[1] ] )
try:
pm.Attribute(attr).set(stored.value)
except pm.MayaAttributeError, msg:
mel.warning( str(msg) )
class PoseManagerWindow( object ):
"""Instanciate PoseManagerWindow objects with name as the instance name."""
def __init__(self, name, title='Pose Rack' ):
self.__name__ = name
self._title = title
self.__instance = __name__ + '.' + self.__name__
# -- Default Pose Group. Adding whitespace to distinguish and widen dropdown.
self._default = 'Default' + ''.join([' ' for x in range(90)])
self._poseGroups = { self._default : {} }
self._loadPrefs()
self.namespace = 'Default'
def _threadSavePrefs( self, pretty=False ):
pose_file = open( PREF_POSES, 'wb')
pickle.dump( self._poseGroups, pose_file )
pose_file.close()
def _savePrefs( self ):
thread.start_new_thread(self._threadSavePrefs, (False,) )
#self._threadSavePrefs()
def _loadPrefs( self ):
if Path(PREF_POSES).isfile():
pose_file = open( PREF_POSES, 'rb')
self._poseGroups = pickle.load( pose_file )
pose_file.close()
if not self._poseGroups.has_key( self._default ):
self._poseGroups.update( { self._default : {} } )
def _sortedGroupList( self ):
"""simple reodering of list to set 'Default' to the top"""
li = self._poseGroups.keys()
li.sort()
li.insert( 0, li.pop( li.index( self._default ) ) )
return li
def _updateGroupList( self ):
try:
pm.deleteUI( self.groupOM )
pm.deleteUI( self.groupMenu, menuItem=True )
except:
pass
pm.setParent( self.optionMenuCol )
self.groupOM = OptionMenu( changeCommand=lambda *args: self._updatePoseList() )
for i in self._sortedGroupList():
| |
conan-io/conan | conans/test/unittests/model/manifest_test.py | Python | mit | 2,125 | 0.002824 | import os
from conans.model.manifest import FileTreeManifest
from conans.test.utils.test_files import temp_folder
from conans.util.files import load, md5, save
class TestManifest:
def test_tree_manifest(self):
tmp_dir = temp_folder()
files = {"one.ext": "aalakjshdlkjahsdlkjahsdljkhsadljkhasljkdhlkjashd",
"path/to/two.txt": "asdas13123",
"two.txt": "asdasdasdasdasdasd",
"folder/damn.pyc": "binarythings",
"folder/damn.pyo": "binarythings2",
"pythonfile.pyc": "binarythings3"}
for filename, content in files.items():
save(os.path.join(tmp_dir, filename), content)
manifest = FileTreeManifest.create(tmp_dir)
manifest.save(tmp_dir)
readed_manifest = FileTreeManifest.load(tmp_dir)
assert readed_manifest.time == manifest.time
assert readed_manifest == manifest
# Not included the pycs or pyo
assert set(manifest.file_sums.keys()) == {"one.ext", "path/to/two.txt", "two.txt"}
for filepath, md5readed in manifest.file_sums.items():
content = files[filepath]
assert md5(content) == md5readed
def test_already_pyc_in_manifest(self):
tmp_dir = temp_folder()
save(os.path.join(tmp_dir, "man.txt"), "1478122267\nconanfile.pyc: "
"2bcac725a0e6843ef351f4d18cf867ec\n"
"conanfile.py: 2bcac725a0e6843ef351f4d18cf867ec\n"
"conanfile.pyo: 2bcac725a0e6843ef351f4d18cf867ec\n")
read_manifest = FileTreeManifest.loads(load(os.path.join(tmp_dir, "man.txt")))
# Not included the pycs or pyo
assert set(read_manifest.file_sums.keys()) == {"conanfile.py"}
def test_special_chars(self):
tmp_dir = temp_folder()
| save(os.path.join(tmp_dir, "conanmanifest.txt"), "1478122267\nsome: file.py: 123\n")
read_manifest = FileTreeManifest.load(tmp_dir)
assert read_manifest.file_sums["some: file.py"] == "123" | |
huaili-cid/metagen_cli | metagen/api/upload.py | Python | mit | 4,807 | 0.001872 |
import os
import sys
import threading
import boto3
import requests
import json
import logging
from boto3.s3.transfer import TransferConfig, ProgressCallbackInvoker
from s3transfer.utils import OSUtils
from botocore.exceptions import ClientError
from boto3.exceptions import RetriesExceededError, S3UploadFailedError
from metagen import utils
from metagen.helpers.upload import CosmosIdTransferManager
from metagen.helpers.exceptions import UploadException, AuthenticationFailed
logger = logging.getLogger(__name__)
class CosmosIdS3Client(object):
_bfresource = '/api/metagenid/v1/files/upload_bfile'
_sfresource = '/api/metagenid/v1/files/upload_sfile'
def __init__(self, **kwargs):
self.base_url = kwargs['base_url']
self.api_key = kwargs['api_key']
self.header = {'X-Api-Key': self.api_key}
self.burl = self.base_url + self._bfresource
self.surl = self.base_url + self._sfresource
def create_multipart_upload(self, *args, **kwargs):
da | ta = dict(kwargs)
mp_up = requests.put(self.burl, json=data, headers=self.header)
resp = dict()
if mp_up.status_code == requests.codes.ok:
resp = mp_up.json()
return resp
def abort_multipart_upload(self, *args, **kwargs):
data = dict(kwargs)
ab_mp = requests.delete(self.burl, json=data, headers=self.header)
return ab_mp.json()
def upload_part(self, * | args, **kwargs):
data = dict(kwargs)
resp = None
upload_body = data.pop('Body')
url_ = requests.get(self.burl, json=data, headers=self.header)
if url_.status_code == requests.codes.ok:
resp = requests.put(url_.json(), upload_body)
return dict(resp.headers)
def put_object(self, *args, **kwargs):
data = dict(kwargs)
upload_body = data.pop('Body')
resp = None
url_ = requests.get(self.surl, json=data, headers=self.header)
if url_.status_code == requests.codes.ok:
resp = requests.put(url_.json(), upload_body)
return dict(resp.headers)
def complete_multipart_upload(self, *args, **kwargs):
data = dict(kwargs)
cmp_up = requests.post(self.burl, json=data, headers=self.header)
return cmp_up.json()
def upload_file(**kwargs):
"""Upload manager."""
filename = kwargs.pop('file')
client = CosmosIdS3Client(**kwargs)
config = TransferConfig()
osutil = OSUtils()
transfer_manager = CosmosIdTransferManager(client, config=config, osutil=osutil)
subscribers = None
_, file_name = os.path.split(filename)
try:
response = requests.get(client.base_url + '/api/metagenid/v1/files/upload_init',
json=dict(file_name=file_name),
headers=client.header
)
if response.status_code == 403:
raise AuthenticationFailed('Authentication Failed. Wrong API Key.')
if response.status_code == requests.codes.ok:
sources = response.json()
future = transfer_manager.upload(
filename, sources['upload_source'], sources['upload_key'], None, subscribers)
s3path, _ = os.path.split(sources['upload_key'])
data = dict(path=s3path, size=str(os.stat(filename)[6]), name=file_name, parent='')
else:
logger.error("File upload inititalisation Failed. Response code: {}".format(response.status_code))
raise UploadException("File upload inititalisation Failed. Response code: {}".format(response.status_code))
future.result()
create_response = requests.post(client.base_url + '/api/metagenid/v1/files',
json=data,
headers=client.header
)
if create_response.status_code == 201:
return create_response.json()
else:
raise UploadException('Failed to upload file: {}'.format(file_name))
'''
If a client error was raised, add the backwards compatibility layer
that raises a S3UploadFailedError. These specific errors were only
ever thrown for upload_parts but now can be thrown for any related
client error.
'''
except ClientError as e:
raise S3UploadFailedError(
"Failed to upload %s to %s: %s" % (
filename, '/'.join([sources['upload_source'], sources['upload_key']]), e))
return False
except AuthenticationFailed as ae:
logger.error('{}'.format(ae))
return False
except UploadException as ue:
logger.error("File Upload Failed. Error: {}".format(ue))
return False
|
adusca/treeherder | tests/log_parser/test_step_parser.py | Python | mpl-2.0 | 570 | 0 | from datetime import datetime
from treeherder.log_parser.parsers import StepParser
def test_date_with_milliseconds():
"""Handle buildbot dates that have a decimal on the seconds."""
parser = StepParser()
dt = parser.parsetime('2015-01-20 16:42:33.352 | 621')
assert dt == datetime(2015, 1, 20, 16, 42, 33, 352621)
def test_date_without_milliseconds():
"""Handle buildbot dates that DON'T have a decimal on the seconds."""
parser = StepParser()
dt = parser | .parsetime('2015-01-20 16:42:33')
assert dt == datetime(2015, 1, 20, 16, 42, 33)
|
praekelt/casepro | casepro/contacts/management/commands/pullcontacts.py | Python | bsd-3-clause | 1,909 | 0.003667 | from dash.orgs.models import Org
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Pulls all contacts, groups and fields from the backend for the specified org"
def add_arguments(self, parser):
parser.add_argument("org_id", type=int, metavar="ORG", help="The org to pull contacts for")
def handle(self, *args, **options):
org_id = int(options["org_id"])
try:
org = Org.objects.get(pk=org_id)
except Org.DoesNotExist:
raise CommandError("No such org with id %d" % org_id)
prompt = """You have requested to pull all contacts, groups and fields for org '%s' (#%d). Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """ % (
org.name,
org.pk,
)
if input(prompt).lower() != "yes":
self.stdout.write("Operation cancelled")
return
def progress_callback(num_synced):
self.stdout.write(" > Synced %d contacts..." | % num_synced)
backend = org.get_backend()
created, updated, deleted, ignored = backend.pull_fields(org)
self.stdout.write(
"Finished field pull (%d created, %d updated, %d deleted, %d ignored)"
% (created, updated, deleted, ignored)
)
created, updated, deleted, | ignored = backend.pull_groups(org)
self.stdout.write(
"Finished group pull (%d created, %d updated, %d deleted, %d ignored)"
% (created, updated, deleted, ignored)
)
created, updated, deleted, ignored = backend.pull_contacts(org, None, timezone.now(), progress_callback)
self.stdout.write(
"Finished contact pull (%d created, %d updated, %d deleted, %d ignored)"
% (created, updated, deleted, ignored)
)
|
rossnomann/playlog | tests/src/tests/common/test_json.py | Python | mit | 167 | 0 | impo | rt pytest
from playlog.lib.json import Encoder
def test_encoder():
encoder = Encoder()
with pytest.raises(TypeError):
encoder | .default(object())
|
Andrwe/py3status | py3status/modules/xrandr_rotate.py | Python | bsd-3-clause | 5,237 | 0 | # -*- coding: utf-8 -*-
"""
Control screen rotation.
Configuration parameters:
cache_timeout: how often to refresh this module.
(default 10)
format: a string that formats the output, can include placeholders.
(default '{icon}')
hide_if_disconnected: a boolean flag to hide icon when `screen` is
disconnected.
It has no effect unless `screen` option is also configured.
(default False)
horizontal_icon: a character to represent horizontal rotation.
(default 'H')
horizontal_rotation: a horizontal rotation for xrandr to use.
Available options: 'normal' or 'inverted'.
(default 'normal')
screen: display output name to rotate, as detected by xrandr.
If not provided, all enabled screens will be rotated.
(default None)
vertical_icon: a character to represent vertical rotation.
(default 'V')
vertical_rotation: a vertical rotation for xrandr to use.
Available options: 'left' or 'right'.
(default 'left')
Format placeholders:
{icon} a rotation icon, specified by `horizontal_icon` or `ver | tical_icon`.
{screen} a screen name, specified by `screen` option or detected
automatically if only one screen is connected, otherwise 'ALL'.
Color options:
c | olor_degraded: Screen is disconnected
color_good: Displayed rotation is active
@author Maxim Baz (https://github.com/maximbaz)
@license BSD
SAMPLE OUTPUT
{'color': '#00FF00', 'full_text': u'H'}
vertical
{'full_text': u'V'}
"""
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 10
format = "{icon}"
hide_if_disconnected = False
horizontal_icon = "H"
horizontal_rotation = "normal"
screen = None
vertical_icon = "V"
vertical_rotation = "left"
def post_config_hook(self):
self.displayed = ""
self.scrolling = False
def _get_active_outputs(self):
data = self.py3.command_output(["xrandr"]).splitlines()
connected_outputs = [x.split() for x in data if " connected" in x]
active_outputs = []
for output in connected_outputs:
for x in output[2:]:
if "x" in x and "+" in x:
active_outputs.append(output[0])
break
elif "(" in x:
break
return active_outputs
def _get_current_rotation_icon(self, all_outputs):
data = self.py3.command_output(["xrandr"]).splitlines()
output = self.screen or all_outputs[0]
output_line = "".join([x for x in data if x.startswith(output)])
for x in output_line.split():
if "normal" in x or "inverted" in x:
return self.horizontal_icon
elif "left" in x or "right" in x:
return self.vertical_icon
def _apply(self):
if self.displayed == self.horizontal_icon:
rotation = self.horizontal_rotation
else:
rotation = self.vertical_rotation
cmd = "xrandr"
outputs = [self.screen] if self.screen else self._get_active_outputs()
for output in outputs:
cmd += " --output %s --rotate %s" % (output, rotation)
self.py3.command_run(cmd)
def _switch_selection(self):
if self.displayed == self.horizontal_icon:
self.displayed = self.vertical_icon
else:
self.displayed = self.horizontal_icon
def xrandr_rotate(self):
all_outputs = self._get_active_outputs()
selected_screen_disconnected = (
self.screen is not None and self.screen not in all_outputs
)
if selected_screen_disconnected and self.hide_if_disconnected:
self.displayed = ""
full_text = ""
else:
if not self.scrolling:
self.displayed = self._get_current_rotation_icon(all_outputs)
if self.screen or len(all_outputs) == 1:
screen = self.screen or all_outputs[0]
else:
screen = "ALL"
full_text = self.py3.safe_format(
self.format, dict(icon=self.displayed or "?", screen=screen)
)
response = {
"cached_until": self.py3.time_in(self.cache_timeout),
"full_text": full_text,
}
# coloration
if selected_screen_disconnected and not self.hide_if_disconnected:
response["color"] = self.py3.COLOR_DEGRADED
elif self.displayed == self._get_current_rotation_icon(all_outputs):
response["color"] = self.py3.COLOR_GOOD
self.scrolling = False
return response
def on_click(self, event):
"""
Click events
- left click & scroll up/down: switch between rotations
- right click: apply selected rotation
"""
button = event["button"]
if button in [1, 4, 5]:
self.scrolling = True
self._switch_selection()
elif button == 3:
self._apply()
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
jat255/hyperspy | hyperspy/tests/io/test_hdf5.py | Python | gpl-3.0 | 31,740 | 0.001323 | # -*- coding: utf-8 -*-
# Copyright 2007-2022 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import logging
from pathlib import Path
import sys
import time
import dask.array as da
import h5py
import numpy as np
import pytest
from hyperspy.io import load
from hyperspy.axes import DataAxis, UniformDataAxis, FunctionalDataAxis, AxesManager
from hyperspy.signal import BaseSignal
from hyperspy._signals.signal1d import Signal1D
from hyperspy._signals.signal2d import Signal2D
from hyperspy.datasets.example_signals import EDS_TEM_Spectrum
from hyperspy.decorators import lazifyTestClass
from hyperspy.exceptions import VisibleDeprecationWarning
from hyperspy.misc.test_utils import assert_deep_almost_equal
from hyperspy.misc.test_utils import sanitize_dict as san_dict
from hyperspy.roi import Point2DROI
from hyperspy.utils import markers
from hyperspy.io_plugins.hspy import get_signal_chunks
my_path = Path(__file__).parent
try:
# zarr (because of numcodecs) is only supported on x86_64 machines
import zarr
zspy_marker = pytest.mark.parametrize("file", ["test.hspy", "test.zspy"])
except ImportError:
zspy_marker = pytest.mark.parametrize("file", ["test.hspy"])
data = np.array([4066., 3996., 3932., 3923., 5602., 5288., 7234., 7809.,
4710., 5015., 4366., 4524., 4832., 5474., 5718., 5034.,
4651., 4613., 4637., 4429., 4217.])
example1_original_metadata = {
'BEAMDIAM -nm': 100.0,
'BEAMKV -kV': 120.0,
'CHOFFSET': -168.0,
'COLLANGLE-mR': 3.4,
'CONVANGLE-mR': 1.5,
'DATATYPE': 'XY',
'DATE': '01-OCT-1991',
'DWELLTIME-ms': 100.0,
'ELSDET': 'SERIAL',
'EMISSION -uA': 5.5,
'FORMAT': 'EMSA/MAS Spectral Data File',
'MAGCAM': 100.0,
'NCOLUMNS': 1.0,
'NPOINTS': 20.0,
'OFFSET': 520.13,
'OPERMODE': 'IMAG',
'OWNER': 'EMSA/MAS TASK FORCE',
'PROBECUR -nA': 12.345,
'SIGNALTYPE': 'ELS',
'THICKNESS-nm': 50.0,
'TIME': '12:00',
'TITLE': 'NIO EELS OK SHELL',
'VERSION': '1.0',
'XLABEL': 'Energy',
'XPERCHAN': 3.1,
'XUNITS': 'eV',
'YLABEL': 'Counts',
'YUNITS': 'Intensity'}
class Example1:
"Used as a base class for the TestExample classes below"
def test_data(self):
assert (
[4066.0,
3996.0,
3932.0,
3923.0,
5602.0,
5288.0,
7234.0,
7809.0,
4710.0,
5015.0,
4366.0,
4524.0,
4832.0,
5474.0,
5718.0,
5034.0,
4651.0,
4613.0,
4637.0,
4429.0,
4217.0] == self.s.data.tolist())
def test_original_metadata(self):
assert (
example1_original_metadata ==
self.s.original_metadata.as_dictionary())
class TestExample1_12(Example1):
def setup_method(self, method):
self.s = load(my_path / "hdf5_files" / "example1_v1.2.hdf5")
def test_date(self):
assert (
self.s.metadata.General.date == "1991-10-01")
def test_time(self):
assert self.s.metadata.General.time == "12:00:00"
class TestExample1_10(Example1):
def setup_method(self, method):
self.s = load(my_path / "hdf5_files" / "example1_v1.0.hdf5")
class TestExample1_11(Example1):
def setup_method(self, method):
self.s = load(my_path / "hdf5_files" / "example1_v1.1.hdf5")
class TestLoadingNewSavedMetadata:
def setup_method(self, method):
with pytest.warns(VisibleDeprecationWarning):
self.s = load(my_path / "hdf5_files" / "with_lists_etc.hdf5")
def test_signal_inside(self):
np.testing.assert_array_almost_equal(
self.s.data,
self.s.metadata.Signal.Noise_properties.variance.data
)
def test_empty_things(self):
assert self.s.metadata.test.empty_list == []
assert self.s.metadata.test.empty_tuple == ()
def test_simple_things(self):
assert self.s.metadata.test.list == [42]
assert self.s.metadata.test.tuple == (1, 2)
def test_inside_things(self):
assert (
self.s.metadata.test.list_inside_list == [
42, 137, [
0, 1]])
assert self.s.metadata.test.list_inside_tuple == (137, [42, 0])
assert (
self.s.metadata.test.tuple_inside_tuple == (137, (123, 44)))
assert (
self.s.metadata.test.tuple_inside_list == [
137, (123, 44)])
@pytest.mark.xfail(
reason="dill is not guaranteed to load across Python versions")
def test_binary_string(self):
import dill
# apparently pickle is not "full" and marshal is not
# backwards-compatible
f = dill.loads(self.s.metadata.test.binary_string)
assert f(3.5) == 4.5
class TestSavingMetadataContainers:
def setup_method(self, method):
self.s = BaseSignal([0.1])
@zspy_marker
def test_save_unicode(self, tmp_path, file):
s = self.s
s.metadata.set_item('test', ['a', 'b', '\u6f22\u5b57'])
fname = tmp_path / file
s.save(fname)
l = load(fname)
assert isinstance(l.metadata.test[0], str)
assert isinstance(l.metadata.test[1], str)
assert isinstance(l.metadata.test[2], str)
assert l.metadata.test[2] == '\u6f22\u5b57'
@zspy_marker
def test_save_long_list(self, tmp_path, file):
s = self.s
s.metadata.set_item('long_list', list(range(10000)))
start = time.ti | me()
fname = tmp_path / file
s.save(fname)
end = time.time()
assert end - start < 1.0 # It should finish in less that 1 s.
@zspy_marker
def test_numpy_only_inner_lists(self, tmp_path, file):
s = self.s
s.metadata.set_item('test', [[1., 2], ('3', 4)])
fname = tmp_path / file
s.save(fname)
l = load(fname)
assert isinsta | nce(l.metadata.test, list)
assert isinstance(l.metadata.test[0], list)
assert isinstance(l.metadata.test[1], tuple)
@pytest.mark.xfail(sys.platform == 'win32',
reason="randomly fails in win32")
@zspy_marker
def test_numpy_general_type(self, tmp_path, file):
s = self.s
s.metadata.set_item('test', np.array([[1., 2], ['3', 4]]))
fname = tmp_path / file
s.save(fname)
l = load(fname)
np.testing.assert_array_equal(l.metadata.test, s.metadata.test)
@pytest.mark.xfail(sys.platform == 'win32',
reason="randomly fails in win32")
@zspy_marker
def test_list_general_type(self, tmp_path, file):
s = self.s
s.metadata.set_item('test', [[1., 2], ['3', 4]])
fname = tmp_path / file
s.save(fname)
l = load(fname)
assert isinstance(l.metadata.test[0][0], float)
assert isinstance(l.metadata.test[0][1], float)
assert isinstance(l.metadata.test[1][0], str)
assert isinstance(l.metadata.test[1][1], str)
@pytest.mark.xfail(sys.platform == 'win32',
reason="randomly fails in win32")
@zspy_marker
def test_general_type_not_working(self, tmp_path, file):
s = self.s
s.metadata.set_item('test', (BaseSignal([1]), 0.1, 'test_string'))
fname = tmp_path / file
s.save(fname)
l = load(fname)
assert isinstance(l.metadata.test, tuple)
asser |
tomstokes/redeem | software/Adafruit_I2C.py | Python | gpl-3.0 | 4,135 | 0.014994 | #!/usr/bin/python
# NOTE!! This code is from the Adafruit Learning System articles on the Raspberry Pi (http://learn.adafruit.com/)
# The original version of the code can be found in the Adafruit Raspberry Pi Python Library on Github at https://github.com/adafruit/Adafruit-Raspberry-Pi-Python-Code
import smbus
# ===========================================================================
# Adafruit_I2C Base Class
# ===========================================================================
class Adafruit_I2C :
def __init__(self, address, busnum = 3, debug=False):
self.address = address
self.bus = smbus.SMBus(busnum)
self.debug = debug
def reverseByteOrder(self, data):
"Reverses the byte order of an int (16-bit) or long (32-bit) value"
# Courtesy Vishal Sapre
dstr = hex(data)[2:].replace('L','')
byteCount = len(dstr[::2])
val = 0
for i, n in enumerate(range(byteCount)):
d = data & 0xFF
val |= (d << (8 * (byteCount - i - 1)))
data >>= 8
return val
def write8(self, reg, value):
"Writes an 8-bit value to the specified register/address"
try:
self.bus.write_byte_data(self.address, reg, value)
if (self.debug):
print "I2C: Wrote 0x%02X to register 0x%02X" % (value, reg)
except IOError, err:
print "Error accessing 0x%02X: Check your I2C address" % self.address
return -1
| def writeList(self, reg, list):
"Writes an array of bytes using I2C format"
try:
if (self.debug):
print "I2C: Writing list to register 0x%02X:" % reg
| print list
self.bus.write_i2c_block_data(self.address, reg, list)
except IOError, err:
print "Error accessing 0x%02X: Check your I2C address" % self.address
return -1
def readList(self, reg, length):
"Read a list of bytes from the I2C device"
results = []
try:
results = self.bus.read_i2c_block_data(self.address, reg, length)
if (self.debug):
print "I2C: Device 0x%02X returned the following from reg 0x%02X" % (self.address, reg)
print results
return results
except IOError, err:
print "Error accessing 09x%02X: Check your I2C address" % self.address
return -1
def readU8(self, reg):
"Read an unsigned byte from the I2C device"
try:
result = self.bus.read_byte_data(self.address, reg)
if (self.debug):
print "I2C: Device 0x%02X returned 0x%02X from reg 0x%02X" % (self.address, result & 0xFF, reg)
return result
except IOError, err:
print "Error accessing 0x%02X: Check your I2C address" % self.address
return -1
def readS8(self, reg):
"Reads a signed byte from the I2C device"
try:
result = self.bus.read_byte_data(self.address, reg)
if (self.debug):
print "I2C: Device 0x%02X returned 0x%02X from reg 0x%02X" % (self.address, result & 0xFF, reg)
if (result > 127):
return result - 256
else:
return result
except IOError, err:
print "Error accessing 0x%02X: Check your I2C address" % self.address
return -1
def readU16(self, reg):
"Reads an unsigned 16-bit value from the I2C device"
try:
hibyte = self.bus.read_byte_data(self.address, reg)
result = (hibyte << 8) + self.bus.read_byte_data(self.address, reg+1)
if (self.debug):
print "I2C: Device 0x%02X returned 0x%04X from reg 0x%02X" % (self.address, result & 0xFFFF, reg)
return result
except IOError, err:
print "Error accessing 0x%02X: Check your I2C address" % self.address
return -1
def readS16(self, reg):
"Reads a signed 16-bit value from the I2C device"
try:
hibyte = self.bus.read_byte_data(self.address, reg)
if (hibyte > 127):
hibyte -= 256
result = (hibyte << 8) + self.bus.read_byte_data(self.address, reg+1)
if (self.debug):
print "I2C: Device 0x%02X returned 0x%04X from reg 0x%02X" % (self.address, result & 0xFFFF, reg)
return result
except IOError, err:
print "Error accessing 0x%02X: Check your I2C address" % self.address
return -1
|
N-Parsons/exercism-python | exercises/bank-account/bank_account_test.py | Python | mit | 4,295 | 0 | import sys
import threading
import time
import unittest
from bank_account import BankAccount
class BankAccountTest(unittest.TestCase):
def test_newly_opened_account_has_zero_balance(self):
account = BankAccount()
account.open()
self.assertEqual(account.get_balance(), 0)
def test_can_deposit_money(self):
account = BankAccount()
account.open()
account.deposit(100)
self.assertEqual(account.get_balance(), 100)
def test_can_deposit_money_sequentially(self):
account = BankAccount()
account.open()
account.deposit(100)
account.deposit(50)
self.assertEqual(account.get_balance(), 150)
def test_can_withdraw_money(self):
account = BankAccount()
account.open()
account.deposit(100)
account.withdraw(50)
self.assertEqual(account.get_balance(), 50)
def test_can_withdraw_money_sequentially(self):
account = BankAccount()
account.open()
account.deposit(100)
account.withdraw(20)
account.wi | thdraw(80)
self.assertEqual(account.get_balance(), 0)
def test_checking_balance_of_closed_account_throws_error(self):
account = BankAccount()
account.open()
account.close()
with self.assertRaisesWi | thMessage(ValueError):
account.get_balance()
def test_deposit_into_closed_account(self):
account = BankAccount()
account.open()
account.close()
with self.assertRaisesWithMessage(ValueError):
account.deposit(50)
def test_withdraw_from_closed_account(self):
account = BankAccount()
account.open()
account.close()
with self.assertRaisesWithMessage(ValueError):
account.withdraw(50)
def test_close_already_closed_account(self):
account = BankAccount()
with self.assertRaisesWithMessage(ValueError):
account.close()
def test_open_already_opened_account(self):
account = BankAccount()
account.open()
with self.assertRaisesWithMessage(ValueError):
account.open()
def test_reopened_account_does_not_retain_balance(self):
account = BankAccount()
account.open()
account.deposit(50)
account.close()
account.open()
self.assertEqual(account.get_balance(), 0)
def test_cannot_withdraw_more_than_deposited(self):
account = BankAccount()
account.open()
account.deposit(25)
with self.assertRaises(ValueError):
account.withdraw(50)
def test_cannot_withdraw_negative(self):
account = BankAccount()
account.open()
account.deposit(100)
with self.assertRaisesWithMessage(ValueError):
account.withdraw(-50)
def test_cannot_deposit_negative(self):
account = BankAccount()
account.open()
with self.assertRaisesWithMessage(ValueError):
account.deposit(-50)
def test_can_handle_concurrent_transactions(self):
account = BankAccount()
account.open()
account.deposit(1000)
self.adjust_balance_concurrently(account)
self.assertEqual(account.get_balance(), 1000)
def adjust_balance_concurrently(self, account):
def transact():
account.deposit(5)
time.sleep(0.001)
account.withdraw(5)
# Greatly improve the chance of an operation being interrupted
# by thread switch, thus testing synchronization effectively
try:
sys.setswitchinterval(1e-12)
except AttributeError:
# For Python 2 compatibility
sys.setcheckinterval(1)
threads = [threading.Thread(target=transact) for _ in range(1000)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
# Utility functions
def setUp(self):
try:
self.assertRaisesRegex
except AttributeError:
self.assertRaisesRegex = self.assertRaisesRegexp
def assertRaisesWithMessage(self, exception):
return self.assertRaisesRegex(exception, r".+")
if __name__ == '__main__':
unittest.main()
|
mysteryjeans/doorsale-demo | doorstep/urls.py | Python | gpl-2.0 | 355 | 0 | from django.conf.urls | import include, url
# Doorstep apps urls
urlpatterns = [
url(r'^', include('doorstep.catalog.urls')),
url(r'^accounts/', include('doorstep.accounts.urls')),
url(r'^sales/', includ | e('doorstep.sales.urls')),
url(r'^payments/', include('doorstep.payments.urls')),
url(r'^pages/', include('doorstep.pages.urls'))
]
|
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_10_01/operations/express_route_circuit_authorizations_operations.py | Python | mit | 18,188 | 0.003079 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class ExpressRouteCircuitAuthorizationsOperations(object):
"""ExpressRouteCircuitAuthorizationsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2017-10-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-10-01"
self.config = config
def _delete_initial(
self, resource_group_name, circuit_name, authorization_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, circuit_name, authorization_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Deletes the specified authorization from the specified express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circui | t_name: The name of the express route circuit.
:type circuit_name: str
:param authorization_name: The name of the authorization.
:type authorization_name: str
:param dict custom_headers: headers that will be added to the request
| :param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
authorization_name=authorization_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}'}
def get(
self, resource_group_name, circuit_name, authorization_name, custom_headers=None, raw=False, **operation_config):
"""Gets the specified authorization from the specified express route
circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param authorization_name: The name of the authorization.
:type authorization_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: ExpressRouteCircuitAuthorization or ClientRawResponse if
raw=true
:rtype:
~azure.mgmt.network.v2017_10_01.models.ExpressRouteCircuitAuthorization
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'authorizationName': self._serialize.url("authorization_name", authorization_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
|
guangjian/openstack-api-client | test.py | Python | apache-2.0 | 2,512 | 0.01035 | #!/usr/bin/python |
import httplib
import json
# arguments
## make sure that url is set to the actual hostname/IP address,
## port number |
#url = "135.251.0.109:8080"
#url = "135.251.218.126:8774"
#url = "135.251.208.74:5000"
# Keystone v2.0 endpoint (host:port); 35357 is the admin API port.
url = "135.251.208.74:35357"
username = "test"
## make sure that osuser is set to your actual username, "admin"
## works for test installs on virtual machines, but it's a hack
osuser = "admin"
## use something else than "shhh" for you password
ospassword = "h0r1z0n"
#params = '{"passwordCredentials":{"username":osuser, "password":ospassword}}'
# NOTE(review): the credentials are hard-coded inside the JSON body below;
# the osuser/ospassword variables above are never interpolated into it.
params = '{"auth":{"passwordCredentials":{"username":"admin", "password":"h0r1z0n"}}}'
headers = {"Content-Type": "application/json"}
# HTTP connection
conn = httplib.HTTPConnection(url)
conn.request("POST", "/v2.0/tokens", params, headers)
# HTTP response
response = conn.getresponse()
data = response.read()
print "Response is: %s \n" % data
# Parse the token-issue response (JSON) into a dict.
dd = json.loads(data)
print "json is: %s \n" % dd
conn.close()
#apitoken = dd['auth']['token']['id']
# apitoken = dd['access']['token']['id']
# print "Your token is: %s" % apitoken
# Extract the authenticated user's name from the "access" section.
user_id = dd['access']['user']['username']
print "user name is: %s" % user_id
|
garnertb/firecares | firecares/firestation/urls.py | Python | mit | 2,033 | 0.004919 | from .views import DepartmentDetailView, Stats, FireDepartmentListView, SimilarDepartmentsListView, DepartmentUpdateGovernmentUnits
from django.contrib.auth.decorators import permission_required
from django.views.generic import TemplateView
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
# Route table for fire-department pages: list/detail/similar-department views,
# a permission-protected settings view, two day-cached static model pages,
# and a fire-station stats endpoint.
urlpatterns = patterns('',
    url(r'departments/(?P<pk>\d+)/(?P<slug>[-\w]+)/similar-departments | /?$', SimilarDepartmentsListView.as_view(template_name='firestation/firedepartment_list.html'), name='similar_departments_slug'),
    url(r'departments/(?P<pk>\d+)/similar-departments/?$', SimilarDepartmentsListView.as_view(template_name='firestation/firedepartment_list.html'), name='similar_departments'),
    url(r'departments/(?P<pk>\d+)/settings/government-u | nits/?$', permission_required('firestation.change_firedepartment')(DepartmentUpdateGovernmentUnits.as_view()), name='firedepartment_update_government_units'),
    url(r'departments/(?P<pk>\d+)/?$', DepartmentDetailView.as_view(template_name='firestation/department_detail.html'), name='firedepartment_detail'),
    url(r'departments/(?P<pk>\d+)/(?P<slug>[-\w]+)/?$', DepartmentDetailView.as_view(template_name='firestation/department_detail.html'), name='firedepartment_detail_slug'),
    url(r'departments/?$', FireDepartmentListView.as_view(template_name='firestation/firedepartment_list.html'), name='firedepartment_list'),
    url(r'community-risk$', cache_page(60 * 60 * 24)(TemplateView.as_view(template_name='firestation/community_risk_model.html')), name='models_community_risk'),
    url(r'performance-score$', cache_page(60 * 60 * 24)(TemplateView.as_view(template_name='firestation/performance_score_model.html')), name='models_performance_score'),
    url(r'stats/fire-stations/?$', Stats.as_view(), name='firestation_stats'),
)
|
dsm054/pandas | pandas/tests/groupby/conftest.py | Python | bsd-3-clause | 3,673 | 0.000545 | import numpy as np
import pytest
from pandas import (
DataFrame,
MultiIndex,
)
import pandas._testing as tm
from pandas.core.groupby.base import (
reduction_kernels,
transformation_kernels,
)
@pytest.fixture(params=[True, False])
def as_index(request):
    # Parametrized value for GroupBy's ``as_index`` keyword.
    return request.param
@pytest.fixture
def mframe():
    """Random 10x3 frame indexed by a two-level ('first', 'second') MultiIndex."""
    outer_level = ["foo", "bar", "baz", "qux"]
    inner_level = ["one", "two", "three"]
    outer_codes = [0, 0, 0, 1, 1, 2, 2, 3, 3, 3]
    inner_codes = [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]
    mi = MultiIndex(
        levels=[outer_level, inner_level],
        codes=[outer_codes, inner_codes],
        names=["first", "second"],
    )
    return DataFrame(np.random.randn(10, 3), index=mi, columns=["A", "B", "C"])
@pytest.fixture
def df():
    """Small frame: two string key columns (A, B) and two random float columns."""
    key_a = ["foo", "bar", "foo", "bar", "foo", "bar", "foo", "foo"]
    key_b = ["one", "one", "two", "three", "two", "two", "one", "three"]
    data = {"A": key_a, "B": key_b, "C": np.random.randn(8), "D": np.random.randn(8)}
    return DataFrame(data)
@pytest.fixture
def ts():
    # Datetime-indexed Series of random floats (pandas test helper).
    return tm.makeTimeSeries()
@pytest.fixture
def tsd():
    # Dict of column name -> time Series; the raw data behind ``tsframe``.
    return tm.getTimeSeriesData()
@pytest.fixture
def tsframe(tsd):
    # DataFrame built from the ``tsd`` fixture's column dict.
    return DataFrame(tsd)
@pytest.fixture
def df_mixed_floats():
    """Like ``df`` but with column D downcast to float32 to mix float dtypes."""
    data = {
        "A": ["foo", "bar", "foo", "bar", "foo", "bar", "foo", "foo"],
        "B": ["one", "one", "two", "three", "two", "two", "one", "three"],
        "C": np.random.randn(8),
        "D": np.array(np.random.randn(8), dtype="float32"),
    }
    return DataFrame(data)
@pytest.fixture
def three_group():
    # 11-row frame with three string grouping columns (A, B, C) and three
    # random float value columns (D, E, F).
    return DataFrame(
        {
            "A": [
                "foo",
                "foo",
                "foo",
                "foo",
                "bar",
                "bar",
                "bar",
                "bar",
                "foo",
                "foo",
                "foo",
            ],
            "B": [
                "one",
                "one",
                "one",
                "two",
                "one",
                "one",
                "one",
                "two",
                "two",
                "two",
                "one",
            ],
            "C" | : [
                "dull",
                "dull",
                "shiny",
                "dull",
                "dull",
                "shiny",
                "shiny",
                "dull", |
                "shiny",
                "shiny",
                "shiny",
            ],
            "D": np.random.randn(11),
            "E": np.random.randn(11),
            "F": np.random.randn(11),
        }
    )
# Kernel-name fixtures: the names come from pandas.core.groupby.base and are
# sorted so the parametrization order is deterministic across runs.
@pytest.fixture(params=sorted(reduction_kernels))
def reduction_func(request):
    """
    yields the string names of all groupby reduction functions, one at a time.
    """
    return request.param
@pytest.fixture(params=sorted(transformation_kernels))
def transformation_func(request):
    """yields the string names of all groupby transformation functions."""
    return request.param
@pytest.fixture(params=sorted(reduction_kernels) + sorted(transformation_kernels))
def groupby_func(request):
    """yields both aggregation and transformation functions."""
    return request.param
@pytest.fixture(params=[True, False])
def parallel(request):
    """parallel keyword argument for numba.jit"""
    return request.param
# Can parameterize nogil & nopython over True | False, but limiting per
# https://github.com/pandas-dev/pandas/pull/41971#issuecomment-860607472
@pytest.fixture(params=[False])
def nogil(request):
    """nogil keyword argument for numba.jit"""
    return request.param
@pytest.fixture(params=[True])
def nopython(request):
    """nopython keyword argument for numba.jit"""
    return request.param
|
OCA/contract | agreement_account/__manifest__.py | Python | agpl-3.0 | 776 | 0 | # Copyright 2017-2020 Akretion France (http://www.akretion.com/)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
# Odoo addon manifest: extends the base ``agreement`` module so agreements can
# be referenced from customer invoices (``account`` module views).
{
    "name": "Agreement Account",
    "summary": "Agreement on invoices",
    "version": "14.0.1.0.0",
    "category": "Contract",
 | "author": "Akretion, Odoo Community Association (OCA)",
    "website": "https://github.com/OCA/contract",
    "license": "AGPL-3",
    "depends": [
        "agreement",
        "account",
    ],
    "data": [
        " | security/ir.model.access.csv",
        "views/agreement.xml",
        "views/account_move.xml",
    ],
    "development_status": "Beta",
    "maintainers": [
        "alexis-via",
        "bealdav",
    ],
    "installable": True,
}
|
cccaballero/services-manager | services-manager.py | Python | gpl-3.0 | 5,938 | 0.006231 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Carlos Cesar Caballero Diaz <ccesar@linuxmail.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gtk
import os
import sys
import imp
import spia.internationalizator as internationalizator
from spia.internationalizator import _
#import gettext #gettext internationalization
APP_NAME = "services-manager"
LOCALE_DIR = os.path.join(sys.path[0], "locale")
#gettext.install(APP_NAME, LOCALE_DIR) #gettext internationalization
internationalizator.load_locale_chains(LOCALE_DIR)
ABAUT_COMMENTS = _("Services Manager, Take control of your services from the desktop.\nInpired in the Apache-Switch tool:\nhttp://apache-switch.webuda.com")
class manager:
    """System-tray application that loads service-control plugins and builds
    a popup menu from them (PyGTK / Python 2)."""
    def __init__(self):
        self.set_proc_name(APP_NAME)
        #folders
        self.config_folder = os.getenv('HOME')+"/.services-manager"
        self.system_plugin_folder = sys.path[0]+"/plugins"
        self.user_plugin_folder = self.config_folder+"/plugins"
        self.plugin_folder = [self.system_plugin_folder,self.user_plugin_folder]
        #plugins main module name
        self.main_module = "__init__"
        # create folders if dont exsist
        if not os.path.exists(self.config_folder):
            os.makedirs(self.config_folder)
        if not os.path.exists(self.user_plugin_folder):
            os.makedirs(self.user_plugin_folder)
        #image files
        self.image_green = sys.path[0]+"/media/green.png"
        self.image_red = sys.path[0]+"/media/red.png"
        #tray declaration
        self.tray_icon = gtk.status_icon_new_from_stock(gtk.STOCK_INFO)
        self.tray_menu = gtk.Menu()
        self.update_menu()
        self.tray_icon.connect('popup-menu', self.show_menu, self.tray_menu)
        self.tray_icon.set_tooltip("Services Manager")
    def get_spia(self):
        # Locate and load the bundled "spia" package for plugins that need it.
        (file, filename, data) = imp.find_module("spia")
        spia_module = imp.load_module("spia", file, filename, data)
        return spia_module
    def set_proc_name(self, newname):
        """Set a system name to the python process"""
        from ctypes import cdll, byref, create_string_buffer
        # prctl(PR_SET_NAME=15, ...) renames the process; Linux/glibc only.
        libc = cdll.LoadLibrary('libc.so.6')
        buff = create_string_buffer(len(newname)+1)
        buff.value = newname
        libc.prctl(15, byref(buff), 0, 0, 0)
    def getPlugins(self):
        """obtain plugins from folders"""
        # A plugin is any sub-directory of a plugin folder containing an
        # ``__init__.py``; returns dicts of {"name", "info"} for loadPlugin().
        plugins = []
        for folder in self.plugin_folder:
            possibleplugins = os.listdir(folder)
            for i in possibleplugins:
                location = os.path.join(folder, i)
                if not os.path.isdir(location) or not self.main_module + ".py" in os.listdir(location):
                    continue
                info = imp.find_module(self.main_module, [location])
                plugins.append({"name": i, "info": info})
        return plugins
    def loadPlugin(self, plugin):
        # Import the plugin's __init__ module from the (file, path, desc)
        # tuple produced by getPlugins().
        return imp.load_module(self.main_module, *plugin["info"])
    def update_menu (self, *args):
        """Rebuild the tray menu: plugin entries first, then the fixed
        Refresh / About / Close items."""
        #remove menu entrys
        for i in self.tray_menu.get_children():
            self.tray_menu.remove(i)
        #load plugins
        for i in self.getPlugins():
            #print("Loading plugin " + i["name"])
            plugin = self.loadPlugin(i)
            plugin.run(self)
        self.tray_menu.append(gtk.SeparatorMenuItem())
        self.menu_refresh = gtk.ImageMenuItem(gtk.STOCK_REFRESH)
        self.menu_refresh.connect("activate", self.update_menu)
        self.tray_menu.append(self.menu_refresh)
        self.menu_about = gtk.ImageMenuItem(gtk.STOCK_ABOUT)
        self.menu_about.connect("activate", self.abaut_dialog)
        self.tray_menu.append(self.menu_about)
        self.tray_menu.append(gtk.SeparatorMenuItem())
        self.menu_quit = gtk.ImageMenuItem(gtk.STOCK_CLOSE)
        self.menu_quit.connect("activate", lambda w: gtk.main_quit())
        self.tray_menu.append(self.menu_quit)
        self.tray_menu.show_all()
    def show_menu(self, status_icon, button, activate_time, menu):
        # Pop the menu up anchored at the status icon's position.
        menu.popup(None, None, gtk.status_icon_position_menu, button, activate_time, status_icon)
    def show_icon(self, *args ):
        self.tray_icon.set_visible(T | rue)
        return False
    def do_response(self, dialog, response):
        # Close a dialog when the user cancels it.
        if response == gtk.RESPONSE_CANCEL:
            dialog.destroy()
    def notify(self, notification):
        """Show a desktop notification; silently does nothing when pynotify
        is unavailable."""
        # NOTE(review): the bare ``except`` also hides genuine pynotify
        # errors, not just a missing module — consider narrowing it.
        try:
            import pynotify
            if pynotify.init("Services Manager"):
                n = pynotify.Notification(notification)
                #n.set_timeout(10000)
 | n.show()
        except:
            pass
    def abaut_dialog(self, *args):
        """Show the Abaut dialog"""
        about = gtk.AboutDialog()
        about.set_name("services-manager")
        about.set_version("0.2")
        about.set_comments(ABAUT_COMMENTS)
        about.set_license("GPL v3")
        about.set_website("https://github.com/cccaballero/services-manager")
        about.set_authors(["Carlos Cesar Caballero Diaz <ccesar@linuxmail.org>"])
        #about.set_logo(self.pixbu_logo)
        about.run()
        about.hide()
        pass
if __name__ == '__main__':
    # Instantiate the tray application and enter the GTK main loop.
    manager()
    gtk.main()
|
cuauv/software | cave/mainForCommandline.py | Python | bsd-3-clause | 75 | 0.013333 | #!/usr/bin/env python2
import os, sygnal, sys
impor | t argparse
import | math
|
ahmedaljazzar/edx-platform | openedx/core/djangoapps/zendesk_proxy/tests/test_utils.py | Python | agpl-3.0 | 2,179 | 0.000918 | import ddt
from django.test.utils import override_settings
from mock import MagicMock, patch
from openedx.core.djangoapps.zendesk_proxy.utils import create_zendesk_ticket
from openedx.core.lib.api.test_utils import ApiTestCase
@ddt.ddt
@override_settings(
    ZENDESK_URL="https://www.superrealurlsthataredefinitelynotfake.com",
    ZENDESK_OAUTH_ACCESS_TOKEN="abcdefghijklmnopqrstuvwxyz1234567890"
)
class TestUtils(ApiTestCase):
    """Tests for create_zendesk_ticket: missing configuration, pass-through of
    Zendesk HTTP status codes, and unexpected transport errors."""
    def setUp(self):
        # Canned ticket fields reused by every test case.
        self.request_data = {
            'email': 'JohnQStudent@example.com',
            'name': 'John Q. Student',
            'subject': 'Python Unit Test Help Request',
            'body': "Help! I'm trapped in a unit test | factory and I can't get out!",
        }
        return super(TestUtils, self).setUp()
    @override_settings(
        ZENDESK_URL=None,
        ZENDESK_OAUTH_ACCESS_TOKEN=None
    )
    def test_missing_settings(self):
        """Unconfigured Zendesk settings must yield 503 (service unavailable)."""
        status_code = create_zendes | k_ticket(
            requester_name=self.request_data['name'],
            requester_email=self.request_data['email'],
            subject=self.request_data['subject'],
            body=self.request_data['body'],
        )
        self.assertEqual(status_code, 503)
    @ddt.data(201, 400, 401, 403, 404, 500)
    def test_zendesk_status_codes(self, mock_code):
        """Whatever status Zendesk returns is passed straight through."""
        with patch('requests.post', return_value=MagicMock(status_code=mock_code)):
            status_code = create_zendesk_ticket(
                requester_name=self.request_data['name'],
                requester_email=self.request_data['email'],
                subject=self.request_data['subject'],
                body=self.request_data['body'],
            )
            self.assertEqual(status_code, mock_code)
    def test_unexpected_error_pinging_zendesk(self):
        """A raised exception during the POST is reported as 500."""
        with patch('requests.post', side_effect=Exception("WHAMMY")):
            status_code = create_zendesk_ticket(
                requester_name=self.request_data['name'],
                requester_email=self.request_data['email'],
                subject=self.request_data['subject'],
                body=self.request_data['body'],
            )
            self.assertEqual(status_code, 500)
|
theodotos/arena | rewrite_from.py | Python | gpl-3.0 | 1,572 | 0.001908 | #!/usr/bin/python
'''
## Description
A script to get mail from info@example.com and re-write the 'From"' Header
as "From: John Doe via info at example.com <noreply@example.com>'"
## Installation Instuctions
* Place the script under */usr/local/sbin/rewrite_from.py*.
* Edit */etc/aliases*:
```
info: "|/usr/local/sbin/rewrite_from.py"
```
* Run the `newaliases` command.
'''
import sys
import email
import smtplib
import re
DEBUG = 1
MAIL_SERVER = 'localhost'
TO = ["ceo@example.com", "ceoexternalmail@gmail.com"]
def main():
| '''
Main Function
'''
raw_msg = ''
for line in sys.stdin:
raw_msg = raw_msg + line
msg = email.message_from_string(raw_msg)
reply_to_re = re.search(
r'([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-.]+\.[a-zA-Z0-9-.]+)',
msg['From'])
if reply_to_re:
reply_to = reply_to_re.group(1)
from_re = re.search(
r'^ *(.+) +<?([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-.]+\.[a-zA-Z0-9-.]+)>?',
msg['From'])
if from_re:
fr | om_h = from_re.group(1)
if msg['Reply-To']:
msg['Reply-To'] = reply_to
msg.replace_header('Reply-To', reply_to)
else:
msg.add_header('Reply-To', reply_to)
msg.replace_header('From', from_h + ' via info at example.com <noreply@example.com>')
send_mail = smtplib.SMTP('localhost')
send_mail.sendmail(msg['From'], TO, msg.as_string())
send_mail.quit()
if DEBUG:
filename = "/tmp/msg.out"
out = open(filename, 'w')
out.write(msg.as_string())
if __name__ == "__main__":
main()
|
bamos/parsec-benchmark | pkgs/libs/uptcpip/src/scripts/gen-wrap-funclist.py | Python | bsd-3-clause | 1,350 | 0.033333 | import time
import os, fnmatch
import sys
#########################################################
#
# 1. execute ld to generate log
# 2. collect all undefined reference symbols
# 3. find out the files defining the symbols
# 4. sort all the files
# 5. let user decide whether copy the file or just make fake file
#
if __name__ == '__main__':
count = 0
argc = len(sys.argv)
if argc != 2:
print " Usage: python gen-wrap-funclist.py output\n\n"
sys.exit(-1)
output_file = sys.argv[1]
print "Output result file is [%s]\n" % (output_file)
# collect all the extern-dep functions
func_def_list = []
os.system("ld *.o >& tmpfile.1")
fp = open( | "tmpfile.1", "r")
line = fp.readline().strip()
while line != '':
iterms = line.split()
if len(iterms) != 0:
if "undefined" in iterms and "reference" in iterms:
if not (iterms[-1] in func_def_list):
func_def_list.append(iterms[-1])
count = count + 1
line = fp.readline().strip()
fp.close()
#output the extern fu | nction list
fpw = open(output_file, "w")
for i in range(count):
fpw.write("NULL \t")
fpw.write(func_def_list[i][1:-1])
fpw.write(" \t 0\t NULL\n")
fpw.close()
print("Find %d Wrap-Function Definitaions\n", count)
os.remove("tmpfile.1")
|
pyaiot/pyaiot | pyaiot/gateway/mqtt/gateway.py | Python | bsd-3-clause | 7,515 | 0 | # Copyright 2017 IoT-Lab Team
# Contributor(s) : see AUTHORS file
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""MQTT ga | teway module."""
import logging
import time
import uuid
import json
import asyncio
from tornado.ioloop import PeriodicCallback
from gmqtt import Client as MQTTClient
from pyaiot.gateway.common import Node, GatewayBase
logger = logging.getLogger("pyaiot.gw.mqtt")
MQTT_HOST = 'localhost'
MQTT_PORT = 1886
MAX_TIME | = 120
class MQTTGateway(GatewayBase):
    """Gateway application for MQTT nodes on a network."""
    PROTOCOL = "MQTT"
    def __init__(self, keys, options):
        """Connect to the MQTT broker and start the periodic alive-check and
        dead-node-cleanup tasks.

        :param keys: credential keys forwarded to GatewayBase
        :param options: parsed options; mqtt_host/mqtt_port/max_time are used
        """
        self.host = options.mqtt_host
        self.port = options.mqtt_port
        self.max_time = options.max_time
        self.options = options
        self.node_mapping = {} # map node id to its uuid (TODO: FIXME)
        super().__init__(keys, options)
        # Connect to the MQTT broker
        self.mqtt_client = MQTTClient("client-id")
        self.mqtt_client.on_connect = self.on_connect
        self.mqtt_client.on_message = self.on_message
        self.mqtt_client.on_disconnect = self.on_disconnect
        self.mqtt_client.on_subscribe = self.on_subscribe
        asyncio.get_event_loop().create_task(self.start())
        # Start the node cleanup task
        PeriodicCallback(self.check_dead_nodes, 1000).start()
        PeriodicCallback(self.request_alive, 30000).start()
        logger.info('MQTT gateway application started')
    async def start(self):
        # Open the broker connection ("host:port").
        await self.mqtt_client.connect('{}:{}'.format(self.host, self.port))
    def on_connect(self, client, flags, rc, properties):
        # Once connected, listen for node check-in messages (QoS 1).
        self.mqtt_client.subscribe('node/check', 1)
    def on_message(self, client, topic, payload, qos, properties):
        """Dispatch an incoming publication by topic suffix:
        .../check -> node check-in, .../resources -> resource list,
        anything else -> a resource value update."""
        try:
            data = json.loads(payload)
        except Exception:
            # Skip data if not valid
            return
        logger.debug("Received message from node: {} => {}"
                     .format(topic, data))
        if topic.endswith("/check"):
            asyncio.get_event_loop().create_task(
                self.handle_node_check(data))
        elif topic.endswith("/resources"):
            asyncio.get_event_loop().create_task(
                self.handle_node_resources(topic, data))
        else:
            self.handle_node_update(topic, data)
    def on_disconnect(self, client, packet, exc=None):
        # NOTE(review): uses print() while the rest of the class logs via
        # ``logger`` — consider logger.info for consistency.
        print('Disconnected')
    def on_subscribe(self, client, mid, qos, properties):
        print('SUBSCRIBED')
    def close(self):
        # Synchronous shutdown entry point: tear down node subscriptions and
        # the broker connection.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._disconnect())
    async def _disconnect(self):
        for node in self.nodes:
            await self._disconnect_from_node(node)
        await self.mqtt_client.disconnect()
    async def discover_node(self, node):
        # Ask the node (via its gateway topic) to publish its resource list.
        discover_topic = 'gateway/{}/discover'.format(node.resources['id'])
        await self.mqtt_client.publish(discover_topic, "resources", qos=1)
        logger.debug("Published '{}' to topic: {}"
                     .format("resources", discover_topic))
    def update_node_resource(self, node, endpoint, payload):
        # Push a new value for one resource endpoint down to the node.
        node_id = node.resources['id']
        asyncio.get_event_loop().create_task(self.mqtt_client.publish(
            'gateway/{}/{}/set'.format(node_id, endpoint), payload, qos=1))
    async def handle_node_check(self, data):
        """Handle alive message received from coap node."""
        node_id = data['id']
        if node_id not in self.node_mapping:
            # First check-in: register the node and subscribe to its
            # resource-list topic.
            node = Node(str(uuid.uuid4()), id=node_id)
            self.node_mapping.update({node_id: node.uid})
            resources_topic = 'node/{}/resources'.format(node_id)
            await self.mqtt_client.subscribe(resources_topic, 1)
            logger.debug("Subscribed to topic: {}".format(resources_topic))
            self.add_node(node)
        else:
            # The node simply sent a check message to notify that it's still
            # online.
            node = self.get_node(self.node_mapping[node_id])
            node.update_last_seen()
    async def handle_node_resources(self, topic, data):
        """Process resources published by a node."""
        # Topic shape is 'node/<id>/resources'.
        node_id = topic.split("/")[1]
        if node_id not in self.node_mapping:
            return
        for resource in data:
            await self.mqtt_client.subscribe(
                'node/{}/{}'.format(node_id, resource), 1
            )
        await self.mqtt_client.publish('gateway/{}/discover'
                                       .format(node_id), "values", qos=1)
    def handle_node_update(self, topic_name, data):
        """Handle CoAP post message sent from coap node."""
        # Topic shape is 'node/<id>/<resource>'.
        _, node_id, resource = topic_name.split("/")
        value = data['value']
        if self.node_mapping[node_id] not in self.nodes:
            return
        node = self.get_node(self.node_mapping[node_id])
        self.forward_data_from_node(node, resource, value)
    def request_alive(self):
        """Publish a request to trigger a check publish from nodes."""
        logger.debug("Request check message from all MQTT nodes")
        asyncio.get_event_loop().create_task(
            self.mqtt_client.publish('gateway/check', '', qos=1))
    def check_dead_nodes(self):
        """Check and remove nodes that are not alive anymore."""
        # A node is dead once it has been silent for more than max_time s.
        to_remove = [node for node in self.nodes.values()
                     if int(time.time()) > node.last_seen + self.max_time]
        for node in to_remove:
            logger.info("Removing inactive node {}".format(node.uid))
            asyncio.get_event_loop().create_task(
                self._disconnect_from_node(node))
            self.node_mapping.pop(node.resources['id'])
            self.remove_node(node)
    async def _disconnect_from_node(self, node):
        # NOTE(review): this unsubscribes 'node/<id>/resource' (singular) but
        # handle_node_check subscribed to 'node/<id>/resources' — looks like a
        # typo that leaves the resources topic subscribed; verify intent.
        node_id = node.resources['id']
        await self.mqtt_client.unsubscribe(
            ['node/{}/resource'.format(node_id)])
        for resource in node.resources:
            await self.mqtt_client.unsubscribe(
                ['node/{}/{}'.format(node_id, resource)])
|
smartczm/python-learn | Old-day01-10/s13-day5/s3.py | Python | gpl-2.0 | 8,551 | 0.002026 | #!/usr/bin/env python3.5
# -*- coding: utf-8 -*-
# Author: ChenLiang
# Python 叫模块,其他叫 类库
"""
模块,用一砣代码实现了某个功能的代码集合。
类似于函数式编程和面向过程编程,函数式编程则完成一个功能,其他代码用来调用即可,提供了代码的重用性和代码间的耦合。而对于一个复杂的功能来,可能需要多个函数才能完成(函数又可以在不同的.py文件中),n个 .py 文件组成的代码集合就称为模块。
如:os 是系统相关的模块;file是文件操作相关的模块
模块分为三种:
自定义模块
第三方模块
内置模块
"""
"""
# 自定义模块
文件
1. 新建src.py文件
2. 在index.py主文件中import src 导入src模块
现在即可调用src中的函数
文件夹
1. 新建lib文件夹,同时在下面创建comm.py文件
2. 在index.py主文件中import lib.comm 导入lib文件夹下面的comm文件
现在即可调用comm中的函数
Python之所以应用越来越广泛,在一定程度上也依赖于其为程序员提供了大量的模块以供使用,如果想要使用模块,则需要导入。导入模块有一下几种方法:
import module # 单模块, 同一级目录下推荐导入方式
from module.xx.xx import xx # 嵌套文件夹, 推荐导入方式
from module.xx.xx import xx as rename # 对于多文件夹重复的模块导入, 推荐导入方式
from module.xx.xx import * # 导入module.xx.xx下面所有的模块, 不推荐这样用
导入模块其实就是告诉Python解释器去解释那个py文件
导入一个py文件,解释器解释该py文件
导入一个包,解释器解释该包下的 __init__.py 文件 【py2.7】
那么问题来了,导入模块时是根据那个路径作为基准来进行的呢?即:sys.path
import sys
for key in sys.path:
print(key)
如果sys.path路径列表没有你想要的路径,可以通过 sys.path.append('路径') 添加。
sys.path.append('E:\\')
模块名称的重要性: 不能和系统内置模块重名
"""
# import sys
#
# print(sys.argv) # 执行脚本传参
# for key in sys.path:
# print(key)
# 内置模块
# 内置模块是Python自带的功能,在使用内置模块相应的功能时,需要【先导入】再【使用】
"""
1. 序列化和反序列化
Python中用于序列化的两个模块
json 用于【字符串】和 【python基本数据类型】 间进行转换,更加适合跨语言
pickle 用于【python特有的类型】 和 【python基本数据类型】间进行转换,更加适合python所有类型的序列化,只适用于python
Json模块提供了四个功能:dumps、dump、loads、load
pickle模块提供了四个功能:dumps、dump、loads、load # 通过特殊方式处理只能python语言识别的字符串
游戏存档就是利用pickle来操作的
import json
dic = {'k1': 'v1'}
print(dic, type(dic))
# 将python基本数据类型转化成字符串形式
result = json.dumps(dic)
print(result, type(result))
string = '{"Year": 2016}' # 通过loads去做反序列化的时候,一定要记住使用双引号
print(string, type(string))
# 将字符串形式转化成基本数据类型
str_inp = json.loads(string)
print(str_inp, type(str_inp))
例子:
# 获取天气信息
import requests
import json
response = requests.get('http://wthrcdn.etouch.cn/weather_mini?city=北京')
response.encoding = 'utf-8'
dic = json.loads(response.text) # .text返回的内容
for key in dic:
print(key, dic[key])
import json
# dump , load 读文件写文件操作
li = [11, 22, 33]
# 将数据通过特殊的形式转换只有python语言认识的字符串,并写入文件
json.dump(li, open('db', 'w'))
print(type(li), li)
# 将输入通过特殊的形式转换为所有程序语言都认识的字符串,并读出文件
li = json.load(open('db', 'r'))
print(type(li), li)
# pickle
import pickle
li = [11, 22, 33]
result = pickle.dumps(li)
print(result, type(result))
li = [11, 22, 33]
result = pickle.loads(result)
print(result, type(result))
ali = [11, 22, 33]
pickle.dump(ali, open('db', 'ab'))
result = pickle.load(open('db', 'r'))
print(result)
2. time模块
import time
import datetime
print(time.time()) # 返回当前系统时间戳
print(time.ctime()) # 输出Tue Jan 26 18:23:48 2016 ,当前系统时间
print(time.ctime(time.time() - 86640)) # 将时间戳转为字符串格式
print(time.gmtime(time.time() - 86640)) # 将时间戳转换成struct_time格式
print(time.localtime(time.time() - 86640)) # 将时间戳转换成struct_time格式,但返回 的本地时间
print(time.mktime(time.localtime())) # 与time.localtime()功能相反,将struct_time格式转回成时间戳格式
time.sleep(4) # sleep等待
print(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())) # 将struct_time格式转成指定的字符串格式
print(time.strptime("2016-06-05", "%Y-%m-%d")) # 将字符串格式转换成struct_time格式
# datetime module
print(datetime.date.today()) # 输出格式2016-01-26
print(datetime.date.fromtimestamp(time.time() - 864400)) # 2016-01-16 将时间戳转成日期格式
current_time = datetime.datetime.now() #
print(current_time) # 输出2016-01-26 19:04:30.335935
print(current_time.timetuple()) # 返回struct_time格式
# datetime.replace([year[, month[, day[, hour[, minute[, second[, microsecond[, tzinfo]]]]]]]])
print(current_time.replace(2014, 9, 12)) # 输出2014-09-12 19:06:24.074900,返回当前时间,但指定的值将被替换
str_to_date = datetime.datetime.strptime("21/11/06 16:30", "%d/%m/%y %H:%M") # 将字符串转换成日期格式
print(str_to_date)
new_date = datetime.datetime.now() + datetime.timedelta(days=10) # 比现在加10天
print(new_date)
new_date = datetime.datetime.now() + datetime.timedelta(days=-10) # 比现在减10天
print(new_date)
new_date = datetime.datetime.now() + datetime.timedelta(hours=-10) # 比现在减10小时
print(new_date)
new_date = datetime.datetime.now() + datetime.timedelta(seconds=120) # 比现在+120s
print(new_date)
time_obj = current_time.replace(2015, 5)
print(current_time > time_obj) # 时间比较
print(current_time == time_obj)
3. logging模块
很多程序都有记录日志的需求,并且日志中包含的信息即有正常的程序访问日志,还可能有错误、警告等信息输出,python的logging模块提供了标准的日志接口,你可以通过它存储各种格式的日志,logging的日志可以分为 debug(), info(), warning( | ), error() and critical() 5个级别,下面我们看一下怎么用。
用于便捷记录日志且线程安全的模块
import logging
logging.warning("user [admin] attempted wrong password more than 3 times")
logging.critical("ser | ver is down")
logging.basicConfig(filename='log.log',
format='%(asctime)s - %(name)s - %(levelname)s -%(module)s: %(message)s',
datefmt='%Y/%m/%d %H:%M:%S %p',
level=10)
logging.debug('debug')
logging.info('info')
logging.warning('warning')
logging.error('error')
logging.critical('critical')
logging.log(10,'log')
对应等级:
CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
INFO = 20
DEBUG = 10
NOTSET = 0
"""
"""
* sys
用于提供对Python解释器相关的操作:
sys.argv 命令行参数List,第一个元素是程序本身路径
sys.exit(n) 退出程序,正常退出时exit(0)
sys.version 获取Python解释程序的版本信息
sys.maxint 最大的Int值
sys.path 返回模块的搜索路径,初始化时使用PYTHONPATH环境变量的值
sys.platform 返回操作系统平台名称
sys.stdin 输入相关
sys.stdout 输出相关
sys.stderror 错误相关
"""
# 例子*进度百分比*
# import sys
# import time
#
#
# def view_bar(num, total):
# rate = float(num) / float(total)
# rate_num = int(rate * 100)
# r = '\r%d%%' % (rate_num,)
# sys.stdout.write(r)
# sys.stdout.flush()
#
#
# if __name__ == '__main__':
# for i in range(0, 101):
# time.sleep(0.1)
# view_bar(i, 100)
# 第三方模块
"""
1. requests 模块
"""
# Requests 是使用 Apache2 Licensed 许可证的 基于Python开发的HTTP 库,其在Python内置模块的基础上进行了高度的封装,从而使得Pythoner进行网络请求时,变得美好了许多,使用Requests可以轻而易举的完成浏览器可有的任何操作。
"""
# pip3 install requests
# source install
# python3 setup.py install
"""
|
DBuildService/atomic-reactor | atomic_reactor/plugins/exit_store_metadata_in_osv3.py | Python | bsd-3-clause | 12,020 | 0.001581 | """
Copyright (c) 2015, 2019 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
import json
import os
from osbs.exceptions import OsbsResponseException
from atomic_reactor.plugins.pre_reactor_config import get_openshift_session, get_koji
from atomic_reactor.plugins.pre_fetch_sources import PLUGIN_FETCH_SOURCES_KEY
from atomic_reactor.constants import (PLUGIN_KOJI_UPLOAD_PLUGIN_KEY,
PLUGIN_VERIFY_MEDIA_KEY,
PLUGIN_RESOLVE_REMOTE_SOURCE,
SCRATCH_FROM)
from atomic_reactor.plugin import ExitPlugin
from atomic_reactor.util import get_build_json
class StoreMetadataInOSv3Plugin(ExitPlugin):
key = "store_metadata_in_osv3"
is_allowed_to_fail = False
    def __init__(self, tasker, workflow, url=None, verify_ssl=True, use_auth=True):
        """
        constructor
        :param tasker: ContainerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param url: str, URL to OSv3 instance
        :param verify_ssl: bool, verify the TLS certificate of the OSv3 server?
        :param use_auth: bool, initiate authentication with openshift?
        """
        # call parent constructor
        super(StoreMetadataInOSv3Plugin, self).__init__(tasker, workflow)
        # Fallback openshift connection settings, used when the reactor
        # config map does not provide them.
        self.openshift_fallback = {
            'url': url,
            'insecure': not verify_ssl,
            'auth': {'enable': use_auth}
        }
        # True when this workflow built a source container (fetch_sources ran).
        self.source_build = PLUGIN_FETCH_SOURCES_KEY in self.workflow.prebuild_results
    def get_result(self, result):
        # Plugin results recorded as exceptions are flattened to an empty
        # string so they serialize cleanly into build annotations.
        if isinstance(result, Exception):
            result = ''
        return result
    def get_pre_result(self, key):
        # Result of a prebuild plugin, or '' when missing or failed.
        return self.get_result(self.workflow.prebuild_results.get(key, ''))
    def get_post_result(self, key):
        # Result of a postbuild plugin, or '' when missing or failed.
        return self.get_result(self.workflow.postbuild_results.get(key, ''))
    def get_exit_result(self, key):
        # Result of an exit plugin, or '' when missing or failed.
        return self.get_result(self.workflow.exit_results.get(key, ''))
def get_config_map(self):
annotations = self.get_post_result(PLUGIN_KOJI_UPLOAD_PLUGIN_KEY)
if not annotations:
return {}
return annotations
def get_digests(self):
"""
Returns a map of repositories to digests
"""
digests = {} # repository -> digest
for registry in self.workflow.push_conf.docker_registries:
for image in self.workflow.tag_conf.images:
image_str = image.to_str()
if image_str in registry.digests:
digest = registry.digests[image_str]
digests[image.to_str(registry=False)] = digest
return digests
    def _get_registries(self):
        """
        Return a list of registries that this build updated
        """
        return self.workflow.push_conf.all_registries
def get_repositories(self):
# usually repositories formed from NVR labels
# these should be used for pulling and layering
primary_repositories = []
for registry in self._get_registries():
for image in self.workflow.tag_conf.primary_images:
registry_image = image.copy()
registry_image.registry = registry.uri
primary_repositories.append(registry_image.to_str())
# unique unpredictable repositories
unique_repositories = []
for registry in self._get_registries():
for image in self.workflow.tag_conf.unique_images:
registry_image = image.copy()
registry_image.registry = registry.uri
unique_repositories.append(registry_image.to_str())
# floating repositories
# these should be used for pulling and layering
floating_repositories = []
for registry in self._get_registries():
for image in self.workflow.tag_conf.floating_images:
registry_image = image.copy()
registry_image.registry = registry.uri
floating_repositories.append(registry_image.to_str())
return {
"primary": primary_repositories,
"unique": unique_repositories,
"floating": floating_repositories,
}
    def get_pullspecs(self, digests):
        """Build v2 pullspec dicts for every tagged image that has a digest.

        :param digests: map of tagged image string -> digest object
            (as returned by get_digests)
        :return: list of dicts with registry/repository/tag/digest/version keys
        """
        # v2 registry digests
        pullspecs = []
        for registry in self._get_registries():
            for image in self.workflow.tag_conf.images:
                image_str = image.to_str()
                if image_str in digests:
                    digest = digests[image_str]
                    # digest.content_type appears to list the candidate
                    # media-type versions; only versions actually present on
                    # the digest object are emitted -- TODO confirm semantics.
                    for digest_version in digest.content_type:
                        if digest_version not in digest:
                            continue
                        pullspecs.append({
                            "registry": registry.uri,
                            "repository": image.to_str(registry=False, tag=False),
                            "tag": image.tag,
                            "digest": digest[digest_version],
                            "version": digest_version
                        })
        return pullspecs
def get_plugin_metadata(self):
return {
"errors": self.workflow.plugins_errors,
"timestamps": self.workflow.plugins_timestamps,
"durations": self.workflow.plugins_durations,
}
def get_filesystem_metadata(self):
data = {}
try:
data = self.workflow.fs_watc | her.get_usage_data()
self.log.debug("filesystem metadata: %s", data)
except Exception:
self.log.exception("Error getting filesystem stats")
return data
def _update_labels(self, labels, updates):
| if updates:
updates = {key: str(value) for key, value in updates.items()}
labels.update(updates)
def make_labels(self):
labels = {}
self._update_labels(labels, self.workflow.labels)
self._update_labels(labels, self.workflow.build_result.labels)
if 'sources_for_koji_build_id' in labels:
labels['sources_for_koji_build_id'] = str(labels['sources_for_koji_build_id'])
return labels
    def set_koji_task_annotations_whitelist(self, annotations):
        """Whitelist annotations to be included in koji task output
        Allow annotations whose names are listed in task_annotations_whitelist
        koji's configuration to be included in the build_annotations.json file,
        which will be attached in the koji task output.
        """
        koji_config = get_koji(self.workflow)
        whitelist = koji_config.get('task_annotations_whitelist')
        if whitelist:
            # Stored JSON-encoded so the annotation value is a plain string.
            annotations['koji_task_annotations_whitelist'] = json.dumps(whitelist)
def _update_annotations(self, annotations, updates):
if updates:
updates = {key: json.dumps(value) for key, value in updates.items()}
annotations.update(updates)
    def apply_build_result_annotations(self, annotations):
        """Merge the build result's annotations (JSON-encoded) into *annotations*."""
        self._update_annotations(annotations, self.workflow.build_result.annotations)
    def apply_plugin_annotations(self, annotations):
        """Merge workflow-level plugin annotations (JSON-encoded) into *annotations*."""
        self._update_annotations(annotations, self.workflow.annotations)
    def apply_remote_source_annotations(self, annotations):
        """Merge annotations produced by the resolve_remote_source plugin.

        Silently does nothing when the plugin did not run or produced no
        'annotations' key (TypeError covers a non-subscriptable result).
        """
        try:
            rs_annotations = self.get_pre_result(PLUGIN_RESOLVE_REMOTE_SOURCE)['annotations']
        except (TypeError, KeyError):
            return
        annotations.update(rs_annotations)
def run(self):
metadata = get_build_json().get("metadata", {})
try:
build_id = metadata["name"]
except KeyError:
self.log.error("malformed build json")
return
self.log.info("build id = %s", build_id)
osbs = get_openshift_session(self.workflow, self.openshift_fallback)
if not self.source_build:
try:
commit_id = self.workflow.source.commit_id
except AttributeError:
commit_id = ""
base_image = self.workflow.builder.dockerfile_images.original_base_image
if (base_image i |
isislovecruft/txrecaptcha | test/test_resources.py | Python | bsd-3-clause | 9,079 | 0.000661 | # -*- coding: utf-8 -*-
#_____________________________________________________________________________
#
# This file is part of txrecaptcha, a Twisted reCAPTCHA client.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# Matthew Finkel 0x017DD169EA793BE2 <sysrqb@torproject.org>
# :copyright: (c) 2013-2015, Isis Lovecruft
# (c) 2013-2015, Matthew Finkel
# (c) 2013-2015, The Tor Project, Inc.
# :license: see LICENSE file for licensing information
#_____________________________________________________________________________
"""Unittests for the txrecaptcha.resources module."""
from __future__ import print_function
import logging
import ipaddr
import random
from BeautifulSoup import BeautifulSoup
from twisted.internet import reactor
from twisted.internet import task
from twisted.internet.error import AlreadyCalled
from twisted.internet.error import AlreadyCancelled
from twisted.trial import unittest
from twisted.web.resource import Resource
from twisted.web.test import requesthelper
from txrecaptcha import resources
# For additional logger output for debugging, comment out the following:
logging.disable(50)
# and then uncomment the following line:
#resources.logging.getLogger().setLevel(10)
class MockWebResource(Resource):
    """A web resource for protecting."""
    def render_GET(self, request):
        """Handles requests for the mock resource.
        :type request: :api:`twisted.web.server.Request`
        :param request: An incoming request.
        """
        # NOTE(review): ``strings``, ``rtl`` and ``langs`` are not defined in
        # this module, so template.render() raises NameError and the except
        # branch always returns the error page -- presumably intentional for
        # these tests; confirm.
        try:
            template = resources.lookup.get_template('index.html')
            rendered = template.render(strings,
                                       rtl=rtl,
                                       lang=langs[0])
        except Exception as err:
            rendered = resources.replaceErrorPage(err)
        return rendered
class DummyRequest(requesthelper.DummyRequest):
    """Wrapper for :api:`twisted.test.requesthelper.DummyRequest` to add
    redirect support.
    """
    def __init__(self, *args, **kwargs):
        requesthelper.DummyRequest.__init__(self, *args, **kwargs)
        # Install a stand-in for the real Request.redirect() method.
        self.redirect = self._redirect(self)
    def URLPath(self):
        """Fake the missing Request.URLPath too."""
        return self.uri
    def _redirect(self, request):
        """Stub method to add a redirect() method to DummyResponse."""
        # NOTE(review): this returns the request's *class* (with ``uri`` set
        # as a class attribute), making ``self.redirect`` a callable class
        # rather than a bound method -- looks like a deliberate test stub,
        # but verify against how the tests invoke redirect().
        newRequest = type(request)
        newRequest.uri = request.uri
        return newRequest
class ReCaptchaProtectedResourceTests(unittest.TestCase):
"""Tests for :mod:`txrecaptcha.resources.ReCaptchaProtectedResource`."""
def setUp(self):
"""Create a :class:`MockWebResource` and protect it with a
:class:`ReCaptchaProtectedResource`.
"""
self.timeout = 10.0 # Can't take longer than that, right?
# Set up our resources to fake a minimal HTTP(S) server:
self.pagename = b'captcha.html'
self.root = Resource()
# (None, None) is the (distributor, scheduleInterval):
self.protectedResource = MockWebResource()
self.captchaResource = resources.ReCaptchaProtectedResource(
publicKey='23',
secretKey='42',
remoteIP='111.111.111.111',
useForwardedHeader=True,
protectedResource=self.protectedResource)
self.root.putChild(self.pagename, self.captchaResource)
# Set up the basic parts of our faked request:
self.request = DummyRequest([self.pagename])
def tearDown(self):
"""Cleanup method for removing timed out connections on the reactor.
This seems to be the solution for the dirty reactor due to
``DelayedCall``s which is mentioned at the beginning of this
file. There doesn't seem to be any documentation anywhere which
proposes this solution, although this seems to solve the problem.
"""
for delay in reactor.getDelayedCalls():
try:
delay.cancel()
except (AlreadyCalled, AlreadyCancelled):
pass
def test_renderDeferred_invalid(self):
""":meth:`_renderDeferred` should redirect a ``Request`` (after the
CAPTCHA was NOT xsuccessfully solved) which results from a
``Deferred``'s callback.
"""
self.request.method = b'POST'
def testCB(request):
"""Check the ``Request`` returned from ``_renderDeferred``."""
self.assertIsInstance(request, DummyRequest)
soup = BeautifulSoup(b''.join(request.written)).find('meta')['http-equiv']
self.assertEqual(soup, 'refresh')
d = task.deferLater(reactor, 0, lambda x: x, (False, self.request))
d.addCallback(self.captchaResource._renderDeferred)
d.addCallback(testCB)
return d
def test_renderDeferred_valid(self):
""":meth:`_renderDeferred` should correctly render a ``Request`` (after
the CAPTCHA has been successfully solved) which results from a
``Deferred``'s callback.
"""
| self.request.method = b'POST'
def testCB(request):
"""Check the ``Request`` returned from ``_renderDeferred``."""
self.assertIsInstance(request, DummyRequest)
html = b''.join(request.written)
self.assertSubstring('Sorry! Something went wrong with your request.',
html)
d = task.deferLate | r(reactor, 0, lambda x: x, (True, self.request))
d.addCallback(self.captchaResource._renderDeferred)
d.addCallback(testCB)
return d
def test_renderDeferred_nontuple(self):
""":meth:`_renderDeferred` should correctly render a ``Request`` (after
the CAPTCHA has been successfully solved) which results from a
``Deferred``'s callback.
"""
self.request.method = b'POST'
def testCB(request):
"""Check the ``Request`` returned from ``_renderDeferred``."""
self.assertIs(request, None)
d = task.deferLater(reactor, 0, lambda x: x, (self.request))
d.addCallback(self.captchaResource._renderDeferred)
d.addCallback(testCB)
return d
def test_checkSolution_blankFields(self):
""":meth:`txrecaptcha.resources.ReCaptchaProtectedResource.checkSolution`
should return a redirect if is the solution field is blank.
"""
self.request.method = b'POST'
self.request.addArg('captcha_challenge_field', '')
self.request.addArg('captcha_response_field', '')
self.assertEqual((False, self.request),
self.successResultOf(
self.captchaResource.checkSolution(self.request)))
def test_getRemoteIP_useRandomIP(self):
"""Check that removing our remoteip setting produces a random IP."""
self.captchaResource.remoteIP = None
ip = self.captchaResource.getRemoteIP()
realishIP = ipaddr.IPv4Address(ip).compressed
self.assertTrue(realishIP)
self.assertNotEquals(realishIP, '111.111.111.111')
def test_getRemoteIP_useConfiguredIP(self):
"""Check that our remoteip setting is used if configured."""
ip = self.captchaResource.getRemoteIP()
realishIP = ipaddr.IPv4Address(ip).compressed
self.assertTrue(realishIP)
self.assertEquals(realishIP, '111.111.111.111')
def test_render_GET_missingTemplate(self):
"""render_GET() with a missing template should raise an error and
return the result of replaceErrorPage().
"""
oldLookup = resources.lookup
try:
resources.lookup = None
self.request.method = b'GET'
page = self.captchaResource.render_GET(self.request)
errorPage = resources.replaceErrorPage(Exception('kablam'))
self.assertEqual(page, errorPage)
finally:
resources.lookup = oldLookup
def test_render_POST_blankFields(self):
"""render_POST() with a blank 'captcha_response_field' should return
a redirect to the CaptchaProtectedResource pa |
squirrelo/qiita | qiita_pet/handlers/api_proxy/tests/test_artifact.py | Python | bsd-3-clause | 19,979 | 0.00005 | # -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from unittest import TestCase, main
from os.path import join, exists, basename
from os import remove, close
from datetime import datetime
from tempfile import mkstemp
from json import loads
from time | import sleep
import pandas as pd
import numpy.testing as npt
from moi import r_client
from qiita_core.util import qiita_t | est_checker
from qiita_db.artifact import Artifact
from qiita_db.metadata_template.prep_template import PrepTemplate
from qiita_db.study import Study
from qiita_db.util import get_count, get_mountpoint
from qiita_db.processing_job import ProcessingJob
from qiita_db.user import User
from qiita_db.software import Command, Parameters
from qiita_db.exceptions import QiitaDBWarning
from qiita_pet.handlers.api_proxy.artifact import (
artifact_get_req, artifact_status_put_req, artifact_graph_get_req,
artifact_delete_req, artifact_types_get_req, artifact_post_req,
artifact_summary_get_request, artifact_summary_post_request,
artifact_patch_request)
class TestArtifactAPIReadOnly(TestCase):
    """Read-only tests for the artifact API proxy (no DB mutation needed)."""
    def test_artifact_get_req_no_access(self):
        # A demo user has no access to study 1.
        obs = artifact_get_req('demo@microbio.me', 1)
        exp = {'status': 'error',
               'message': 'User does not have access to study'}
        self.assertEqual(obs, exp)
    def test_artifact_get_req(self):
        obs = artifact_get_req('test@foo.bar', 1)
        exp = {'id': 1,
               'type': 'FASTQ',
               'study': 1,
               'data_type': '18S',
               'timestamp': datetime(2012, 10, 1, 9, 30, 27),
               'visibility': 'private',
               'can_submit_vamps': False,
               'can_submit_ebi': False,
               'processing_parameters': None,
               'ebi_run_accessions': None,
               'is_submitted_vamps': False,
               'parents': [],
               'filepaths': [
                   (1, join(get_mountpoint('raw_data')[0][1],
                    '1_s_G1_L001_sequences.fastq.gz'), 'raw_forward_seqs'),
                   (2, join(get_mountpoint('raw_data')[0][1],
                    '1_s_G1_L001_sequences_barcodes.fastq.gz'),
                    'raw_barcodes')]
               }
        self.assertEqual(obs, exp)
    def test_artifact_graph_get_req_ancestors(self):
        # Artifact 1 is a root, so it has no ancestors besides itself.
        obs = artifact_graph_get_req(1, 'ancestors', 'test@foo.bar')
        exp = {'status': 'success',
               'message': '',
               'edge_list': [],
               'node_labels': [(1, 'Raw data 1 - FASTQ')]}
        self.assertEqual(obs, exp)
    def test_artifact_graph_get_req_descendants(self):
        obs = artifact_graph_get_req(1, 'descendants', 'test@foo.bar')
        exp = {'status': 'success',
               'message': '',
               'node_labels': [(1, 'Raw data 1 - FASTQ'),
                               (3, 'Demultiplexed 2 - Demultiplexed'),
                               (2, 'Demultiplexed 1 - Demultiplexed'),
                               (4, 'BIOM - BIOM'),
                               (5, 'BIOM - BIOM'),
                               (6, 'BIOM - BIOM')],
               'edge_list': [(1, 3), (1, 2), (2, 5), (2, 4), (2, 6)]}
        self.assertEqual(obs['message'], exp['message'])
        self.assertEqual(obs['status'], exp['status'])
        # Node/edge ordering is not guaranteed, so compare as multisets.
        self.assertItemsEqual(obs['node_labels'], exp['node_labels'])
        self.assertItemsEqual(obs['edge_list'], exp['edge_list'])
    def test_artifact_graph_get_req_no_access(self):
        obs = artifact_graph_get_req(1, 'ancestors', 'demo@microbio.me')
        exp = {'status': 'error',
               'message': 'User does not have access to study'}
        self.assertEqual(obs, exp)
    def test_artifact_graph_get_req_bad_direction(self):
        obs = artifact_graph_get_req(1, 'WRONG', 'test@foo.bar')
        exp = {'status': 'error', 'message': 'Unknown directon WRONG'}
        self.assertEqual(obs, exp)
    def test_artifact_types_get_req(self):
        obs = artifact_types_get_req()
        exp = {'message': '',
               'status': 'success',
               'types': [['BIOM', 'BIOM table'],
                         ['Demultiplexed', 'Demultiplexed and QC sequeneces'],
                         ['FASTA', None],
                         ['FASTA_Sanger', None],
                         ['FASTQ', None],
                         ['SFF', None],
                         ['per_sample_FASTQ', None]]}
        self.assertEqual(obs['message'], exp['message'])
        self.assertEqual(obs['status'], exp['status'])
        self.assertItemsEqual(obs['types'], exp['types'])
@qiita_test_checker()
class TestArtifactAPI(TestCase):
def setUp(self):
uploads_path = get_mountpoint('uploads')[0][1]
# Create prep test file to point at
self.update_fp = join(uploads_path, '1', 'update.txt')
with open(self.update_fp, 'w') as f:
f.write("""sample_name\tnew_col\n1.SKD6.640190\tnew_value\n""")
self._files_to_remove = [self.update_fp]
def tearDown(self):
for fp in self._files_to_remove:
if exists(fp):
remove(fp)
# Replace file if removed as part of function testing
uploads_path = get_mountpoint('uploads')[0][1]
fp = join(uploads_path, '1', 'uploaded_file.txt')
if not exists(fp):
with open(fp, 'w') as f:
f.write('')
r_client.flushdb()
def test_artifact_summary_get_request(self):
# Artifact w/o summary
obs = artifact_summary_get_request('test@foo.bar', 1)
exp_p_jobs = [
['063e553b-327c-4818-ab4a-adfe58e49860', 'Split libraries FASTQ',
'queued', None, None],
['bcc7ebcd-39c1-43e4-af2d-822e3589f14d', 'Split libraries',
'running', 'demultiplexing', None]]
exp_files = [
(1L, '1_s_G1_L001_sequences.fastq.gz (raw forward seqs)'),
(2L, '1_s_G1_L001_sequences_barcodes.fastq.gz (raw barcodes)')]
exp = {'status': 'success',
'message': '',
'name': 'Raw data 1',
'summary': None,
'job': None,
'processing_jobs': exp_p_jobs,
'errored_jobs': [],
'visibility': 'private',
'buttons': '<button onclick="if (confirm(\'Are you sure you '
'want to make public artifact id: 1?\')) { '
'set_artifact_visibility(\'public\', 1) }" '
'class="btn btn-primary btn-sm">Make public</button>'
' <button onclick="if (confirm(\'Are you sure you '
'want to revert to sandbox artifact id: 1?\')) '
'{ set_artifact_visibility(\'sandbox\', 1) }" '
'class="btn btn-primary btn-sm">Revert to '
'sandbox</button>',
'files': exp_files,
'editable': True,
'prep_id': 1,
'study_id': 1}
self.assertEqual(obs, exp)
# Artifact with summary being generated
job = ProcessingJob.create(
User('test@foo.bar'),
Parameters.load(Command(7), values_dict={'input_data': 1})
)
job._set_status('queued')
obs = artifact_summary_get_request('test@foo.bar', 1)
exp = {'status': 'success',
'message': '',
'name': 'Raw data 1',
'summary': None,
'job': [job.id, 'queued', None],
'processing_jobs': exp_p_jobs,
'errored_jobs': [],
'visibility': 'private',
'buttons': '<button onclick="if (confirm(\'Are you sure you '
'want to make public artifact id: 1?\')) { '
'set_artifact_visibility(\ |
zamattiac/ROSIEBot | deleter.py | Python | mit | 2,875 | 0.002087 | import shutil
import os
import codecs
import json
"""
Using the list of active pages generated by compile_active from the API,
deleter removes whatever isn't accounted for online.
Warning: anything in a category folder that isn't in the compile_active list will be deleted.
"""
MIRROR = 'archive'
class Deleter:
    """Remove archived folders for pages that have been deleted on the OSF.

    Basic workflow (driven by a CLI):
    init -> compare archive to mirror -> delete stale folders in the archive.

    (Reconstructed: two lines of the original were corrupted by stray
    separators splitting the ``osf_type`` and ``directory_path`` identifiers.)
    """
    def __init__(self, json_filename):
        """Load the active-GUID lists produced by ``compile_active``.

        Populates three attributes:
        1. A list of all active projects (nodes)
        2. A list of all active registrations
        3. A list of all active users

        :param json_filename: Name of the json file generated by compile_active
        """
        with codecs.open(json_filename, mode='r', encoding='utf-8') as file:
            active_lists = json.load(file)
        self.active_node_guids = active_lists["list_of_active_nodes"]
        self.active_registration_guids = active_lists["list_of_active_registrations"]
        self.active_user_guids = active_lists["list_of_active_users"]
    def compare_to_mirror(self, osf_type, active_list):
        """Delete every folder under MIRROR/<osf_type> whose GUID is not active.

        :param osf_type: category folder name ('project', 'registration', 'profile')
        :param active_list: GUIDs that are still active on the OSF
        """
        mirror_list = os.listdir(MIRROR + '/' + osf_type)
        print("OSF type: " + osf_type)
        for subdir in mirror_list:
            print("Checking", subdir)
            if subdir not in active_list:
                print(subdir, "inactive. Deleting")
                subdir_path = '/'.join([MIRROR, osf_type, subdir])
                self.delete_directory(subdir_path)
    def delete_directory(self, directory_path):
        """Recursively delete *directory_path* if it exists and is a directory.

        :param directory_path: The path to the directory to be deleted
        """
        print(directory_path)
        if os.path.isdir(directory_path):
            shutil.rmtree(directory_path)
            print("Deleted", directory_path)
    def run(self):
        """CLI entry point: prune projects, registrations, and user profiles."""
        self.compare_to_mirror('project', self.active_node_guids)
        self.compare_to_mirror('registration', self.active_registration_guids)
        self.compare_to_mirror('profile', self.active_user_guids)
|
django-de/django-de-v3 | django_de/versions/utils.py | Python | bsd-3-clause | 1,906 | 0.002623 | import requests
import logging
from django.core.cache import cache
from django.conf import settings
LOG = logging.getLogger(__name__)
def is_prerelease(version_str):
    """
    Checks if the given version_str represents a prerelease version.

    Prerelease tags (e.g. '1.7a1', '2.0rc1') contain alphabetic characters,
    while final releases ('1.7', '1.7.1') are purely digits and dots.
    """
    # Generator expression instead of building an intermediate list.
    return any(c.isalpha() for c in version_str)
def get_version_data(force=False):
    """
    Fetches the latest version info from the Github API url specified
    with VERSIONS_API_URL and returns a tuple of latest_stable and
    latest_prerelease, or None when the fetch fails.

    Successful results are cached for VERSIONS_CACHE_TIMEOUT seconds;
    rate-limited responses are cached (as ('-', '-')) for
    VERSIONS_CACHE_THROTTLE_TIMEOUT seconds. Pass force=True to bypass
    the cache.
    """
    latest_stable = None
    latest_pre = None
    cache_key = 'versioninfo'
    api_url = getattr(settings, 'VERSIONS_API_URL',
            'https://api.github.com/repos/django/django/git/refs/tags')
    cache_duration = getattr(settings, 'VERSIONS_CACHE_TIMEOUT', 3600)
    cache_throttle_duration = getattr(settings,
            'VERSIONS_CACHE_THROTTLE_TIMEOUT', 600)
    if not force:
        info = cache.get(cache_key)
        if info is not None:
            LOG.debug("Found versioninfo in cache")
            return info
    LOG.debug("Couldn't find versioninfo in cache. Refetching...")
    try:
        data = requests.get(api_url).json()
    except Exception:
        # Network failure or invalid JSON. A bare "except:" here would also
        # swallow KeyboardInterrupt/SystemExit, so catch Exception instead.
        LOG.exception("Failed to fetch versioninfo data")
        return None
    if not isinstance(data, list):
        # We have probably run into a rate limit and will therefore store
        # a dummy value to prevent re-fetching for the throttle duration.
        info = ('-', '-')
        cache.set(cache_key, info, cache_throttle_duration)
        return info
    for tag in data:
        tag_name = tag['ref'].split('/')[-1]
        if is_prerelease(tag_name):
            latest_pre = tag_name
        else:
            latest_stable = tag_name
    info = (latest_stable, latest_pre)
    # Cache successful fetches for the full configured duration; the original
    # mistakenly reused the throttle timeout here, leaving cache_duration unused.
    cache.set(cache_key, info, cache_duration)
    return info
|
adblockplus/abpbot | abpbot.py | Python | gpl-2.0 | 4,512 | 0.000887 | #!/usr/bin/env python
#
# Simple IRC Bot to announce messages
#
# Code originally based on example bot and irc-bot class from
# Joel Rosdahl <joel@rosdahl.net>, author of included python-irclib.
#
"""
An IRC bot stub, it will join a particular channel on a server. All
further functionality is implemented by additio | nal handler classes.
"""
import sys
import ConfigParser
from ircbot import SingleServerIRCBot
import irclib
from botcommon import OutputManager
import logbot
import beanbot
import decisionbot
# The message returned w | hen someone messages the bot
HELP_MESSAGE = "I am the Adblock Plus logging bot."
def parse_host_port(hostport, default_port=None):
  """Split a "host[:port]" string into a (host, port) tuple.

  Exits the process with an error message when the port is not an integer,
  or when no port is given and default_port is None.
  """
  lis = hostport.split(":", 1)
  host = lis[0]
  if len(lis) == 2:
    try:
      port = int(lis[1])
    except ValueError:
      print "Error: Erroneous port."
      sys.exit(1)
  else:
    if default_port is None:
      print "Error: Port required in %s." % hostport
      sys.exit(1)
    port = default_port
  return host, port
class Bot(SingleServerIRCBot):
  """IRC bot that joins one channel and dispatches events to handler objects.

  Handler classes (Logbot, Beanbot, Decisionbot) contribute on_* methods;
  __init__ wires every such method into this bot's event dispatch so that
  several handlers can react to the same IRC event.
  """
  def __init__(self, config):
    ircaddr = parse_host_port(config.get('main', 'host'), 6667)
    self.channel = config.get('main', 'channel')
    self.nickname = config.get('main', 'nickname')
    try:
      self.nickpass = config.get('main', 'nickpass')
    except ConfigParser.NoOptionError:
      self.nickpass = None
    try:
      self.needinvite = (config.get('main', 'needinvite') == 'yes')
    except ConfigParser.NoOptionError:
      self.needinvite = False
    SingleServerIRCBot.__init__(self, [ircaddr], self.nickname, self.nickname, 5)
    # Rate-limited output queue (roughly one message per 0.9s) shared by handlers.
    self.queue = OutputManager(self.connection, .9)
    self.queue.start()
    self.handlers = {}
    # Factory is needed so each dispatcher closes over its own key
    # (avoids the late-binding-closure pitfall inside the loops below).
    def handler_for_key(self, key):
      return lambda c, e: self.execute_handlers(key, c, e)
    for handler in (logbot.Logbot(config, self.queue),
                    beanbot.Beanbot(config, self.queue),
                    decisionbot.Decisionbot(config, self.queue)):
      # Scan both instance and class dicts for on_* callables to register.
      for props in handler.__dict__, handler.__class__.__dict__:
        for key in props.iterkeys():
          if not key.startswith('on_'):
            continue
          value = getattr(handler, key)
          if not hasattr(value, '__call__'):
            continue
          if not key in self.handlers:
            # Set up handling for this message; preserve any method this
            # Bot class itself defines for the event, then replace it with
            # the dispatcher so all registered handlers run.
            self.handlers[key] = []
            if hasattr(self, key):
              self.handlers[key].append(getattr(self, key))
            setattr(self, key, handler_for_key(self, key))
          # Add new handler for this message
          self.handlers[key].append(value)
    # NOTE(review): __init__ blocks here running the bot's main loop --
    # constructing a Bot starts it immediately.
    try:
      self.start()
    except KeyboardInterrupt:
      self.connection.quit("Ctrl-C at console")
    except Exception, e:
      self.connection.quit("%s: %s" % (e.__class__.__name__, e.args))
      raise
  def execute_handlers(self, key, c, e):
    """Run every registered handler for event *key* in registration order."""
    for handler in self.handlers[key]:
      handler(c, e)
  def do_join(self, c):
    """Join the configured channel, requesting a chanserv invite if needed."""
    if self.needinvite:
      c.privmsg("chanserv", "invite %s" % self.channel)
    c.join(self.channel)
  def on_nicknameinuse(self, c, e):
    """Fall back to a suffixed nick when the desired one is taken."""
    c.nick(c.get_nickname() + "_")
  def on_quit(self, c, e):
    """Reclaim our desired nick when its current holder quits."""
    source = irclib.nm_to_n(e.source())
    if source == self.nickname:
      # Our desired nick just quit - take the nick back
      c.nick(self.nickname)
      self.do_join(c)
  def on_welcome(self, c, e):
    """After connecting: recover/identify the nick, then join the channel."""
    if self.nickpass and c.get_nickname() != self.nickname:
      # Reclaim our desired nickname
      c.privmsg('nickserv', 'ghost %s %s' % (self.nickname, self.nickpass))
    else:
      # Identify ourselves before joining the channel
      c.privmsg("nickserv", "identify %s" % self.nickpass)
      self.do_join(c)
  def on_privmsg(self, c, e):
    """Reply to any private message with the static help text."""
    c.privmsg(irclib.nm_to_n(e.source()), HELP_MESSAGE)
def usage(exitcode=1):
  """Print command-line usage and exit the process with *exitcode*."""
  print "Usage: %s <config-file>" % sys.argv[0]
  sys.exit(exitcode)
def main():
  """Entry point: read the config file named on the command line and run the bot."""
  if len(sys.argv) < 2:
    usage()
  configfile = sys.argv[1]
  config = ConfigParser.ConfigParser()
  config.read(configfile)
  # Constructing Bot starts the IRC main loop (see Bot.__init__).
  Bot(config)
if __name__ == "__main__":
main()
|
siosio/intellij-community | python/testData/intentions/PyAnnotateTypesIntentionTest/resolveAmbiguity.py | Python | apache-2.0 | 94 | 0.031915 | if condition:
def func():
pass
else:
| def func():
| pass
fu<caret>nc() |
pkesist/buildpal | Python/test/test_client.py | Python | gpl-3.0 | 4,561 | 0.004385 | from buildpal_client import compile as buildpal_compile
import os
import subprocess |
import asyncio
import sys
import struct
import threading
import pytest
from buildpal.common import MessageProtocol
class ProtocolTester(MessageProtocol):
@classmethod
def check_exit_code(cls, code):
if hasattr(cls, 'expected_exit_code'):
assert code == cls.expected_exit | _code
def __init__(self, loop):
self.initial = True
self.loop = loop
super().__init__()
def process_msg(self, msg):
if self.initial:
assert len(msg) > 5
self.compiler_name = msg[0].decode()
assert self.compiler_name == 'msvc'
self.executable = msg[1].decode()
assert os.path.exists(self.executable)
assert os.path.isfile(self.executable)
assert os.path.basename(self.executable) == 'cl.exe'
self.sysinclude_dirs = msg[2].decode().rstrip(';').split(';')
for path in self.sysinclude_dirs:
assert os.path.exists(path)
assert os.path.isdir(path)
self.cwd = msg[3].decode()
assert os.path.exists(self.cwd)
assert os.path.isdir(self.cwd)
self.command = [x.decode() for x in msg[4:]]
self.send_request()
self.initial = False
else:
self.process_response(msg)
def send_request(self):
raise NotImplementedError()
def process_response(self, msg):
raise NotImplementedError()
def connection_lost(self, exc):
self.loop.stop()
class RunLocallyTester(ProtocolTester):
expected_exit_code = 0
def send_request(self):
self.send_msg([b'RUN_LOCALLY'])
class ExecuteAndExitTester(ProtocolTester):
@classmethod
def check_exit_code(cls, code):
assert code != 0
def send_request(self):
self.send_msg([b'EXECUTE_AND_EXIT', b'/nologo'])
class ExecuteGetOutputTester(ProtocolTester):
expected_exit_code = 6132
def send_request(self):
self.send_msg([b'EXECUTE_GET_OUTPUT', b'/nologo'])
def process_response(self, msg):
retcode, stdout, stderr = msg
retcode = int(retcode.memory())
assert retcode != 0
assert not stdout.memory()
assert b'missing source filename' in stderr.tobytes()
self.send_msg([b'EXIT', struct.pack('!I', self.expected_exit_code & 0xFFFFFFFF), b'',
b''])
class ExitTester(ProtocolTester):
expected_exit_code = 666
def send_request(self):
self.send_msg([b'EXIT', struct.pack('!I', self.expected_exit_code & 0xFFFFFFFF), b'',
b''])
class LocateFiles(ProtocolTester):
expected_exit_code = 3124
files = [b'cl.exe', b'c1xx.dll']
def send_request(self):
self.send_msg([b'LOCATE_FILES'] + self.files)
def process_response(self, msg):
assert len(msg) == len(self.files)
for file, full in zip(self.files, msg):
assert os.path.basename(full.tobytes()) == file
assert os.path.isfile(full.tobytes())
self.send_msg([b'EXIT', struct.pack('!I', self.expected_exit_code & 0xFFFFFFFF), b'',
b''])
@pytest.fixture(scope='function')
def buildpal_compile_args(tmpdir, vcenv_and_cl):
port = 'test_protocol_{}'.format(os.getpid())
file = os.path.join(str(tmpdir), 'aaa.cpp')
with open(file, 'wt'):
pass
args = ['compile', '/c', file]
env, cl = vcenv_and_cl
return ("msvc", cl, env, subprocess.list2cmdline(args), port)
@pytest.mark.parametrize("protocol_tester", [RunLocallyTester,
ExecuteGetOutputTester, ExecuteAndExitTester, ExitTester, LocateFiles])
def test_protocol(buildpal_compile_args, protocol_tester):
loop = asyncio.ProactorEventLoop()
[server] = loop.run_until_complete(loop.start_serving_pipe(
lambda : protocol_tester(loop), "\\\\.\\pipe\\BuildPal_{}".format(buildpal_compile_args[-1])))
class ExitCode:
pass
def run_thread():
ExitCode.exit_code = buildpal_compile(*buildpal_compile_args)
thread = threading.Thread(target=run_thread)
thread.start()
loop.run_forever()
thread.join()
@asyncio.coroutine
def close_server():
server.close()
loop.run_until_complete(close_server())
assert ExitCode.exit_code != None
protocol_tester.check_exit_code(ExitCode.exit_code)
|
cwoebker/paxo | paxo/core.py | Python | bsd-3-clause | 3,371 | 0.000593 | """
paxo.core - the guts of it all
"""
import os
from clint.textui import colored, puts
from clint import resources
from paxo import __author__
from paxo.command import define_command, Collection
from paxo.util import args, show_error, ExitStatus, XDG_DATA_HOME
from paxo.storage import storage
class Paxo(object):
    """Small command-line application framework built on clint.

    Ties together command registration (paxo.command.Collection), optional
    persistent storage, and argument dispatch.  Instantiate with the app's
    name/description/version and call go() to run.

    (Reconstructed: one line of display_info was corrupted by a stray
    separator in the middle of the format() call.)
    """
    def __init__(self, name, description, command_info, version,
                 default_action=None, dynamic_action=None, store=None):
        self.name = name
        self.description = description
        self.command_info = command_info
        self.version = version
        self.store = store
        resources.init(__author__, self.name)
        if self.store:
            # path = os.path.expanduser('~/.'+self.name)
            # if XDG_DATA_HOME:
            path = os.path.join(XDG_DATA_HOME, self.name, self.name)
            # A user-supplied path.ini overrides the XDG default.
            path = resources.user.read('path.ini') or path
            path_dir = os.path.dirname(path)
            if not os.path.exists(path_dir):
                os.makedirs(path_dir)
            # TODO: make this more general
            self.store.setPath(path)
            self.store.bootstrap()
        # Fall back to showing the info screen for unknown/absent commands.
        self.default_action = default_action or self.display_info
        self.dynamic_action = dynamic_action or self.display_info
        define_command(name='help', short='h', fn=self.cmd_help,
                       usage='help <command>',
                       help='Display help for a command.')
    def go(self):
        """Parse global flags, then dispatch to a command or fallback action.

        :return: an ExitStatus code
        """
        if args.contains(('-h', '--help')):
            self.display_info(args)
            return ExitStatus.HELP
        elif args.contains(('-v', '--version')):
            puts('{0} v{1}'.format(
                colored.yellow(self.name),
                self.version
            ))
            return ExitStatus.VERSION
        with storage(self.store):
            arg = args.get(0)
            if arg:
                command = Collection.lookup_command(arg)
                if command:
                    self.execute(command)
                else:
                    self.dynamic_action(args)
            else:
                self.default_action(args)
        return ExitStatus.OK  # ExitStatus is defined centrally and has to be adjusted at any point
    @staticmethod
    def execute(command):
        """Pop the command name off the argument list and invoke the command."""
        arg = args.get(0)
        args.remove(arg)
        command.__call__(args)
    def display_info(self, args):
        """Print the app banner, usage line, and a list of all commands."""
        puts('{0} - {1}'.format(colored.yellow(self.name), self.description))
        header_info = 'Usage: {0} {1}'.format(
            colored.yellow(self.name),
            colored.green(self.command_info)
        )
        puts(header_info)
        puts('-' * len(header_info))
        for command in Collection.list_commands():
            usage = command.usage or command.name
            text = command.help or ''
            puts('{0} {1}'.format(colored.green(usage), text))
    def cmd_help(self, args):
        """Show help for one command, or the full info screen when none given."""
        command = args.get(0)
        if command is None:
            self.display_info(args)
            return
        elif not Collection.lookup_command(command):
            command = 'help'
            show_error(colored.red('Unknown command: {0}'.format(args.get(0))))
        cmd = Collection.lookup_command(command)
        usage = cmd.usage or ''
        help = cmd.help or ''
        help_text = '%s - %s' % (usage, help)
        puts(help_text)
|
satyammittal/sample-platform | mod_auth/models.py | Python | isc | 3,955 | 0.000506 | """
mod_auth Models
===================
In this module, we are trying to maintain all database models used
for authentication.
List of models corresponding to mysql tables: ['User' => 'user']
"""
import string
from passlib.apps import custom_app_context as pwd_context
from sqlalchemy import Column, Integer, String, Text
from database import Base, DeclEnum
class Role(DeclEnum):
    # Database-backed enumeration of user roles; each entry is
    # (db value, human-readable description) as expected by DeclEnum.
    admin = "admin", "Admin"
    user = "user", "User"
    contributor = "contributor", "Contributor"
    tester = "tester", "Tester"
class User(Base):
    """ORM model for an application user (mysql table 'user').

    (Reconstructed: two lines around is_password_valid/update_password were
    corrupted by stray separators splitting identifiers.)
    """
    __tablename__ = 'user'
    __table_args__ = {'mysql_engine': 'InnoDB'}
    id = Column(Integer, primary_key=True)
    name = Column(String(50), unique=True)
    email = Column(String(255), unique=True, nullable=True)
    github_token = Column(Text(), nullable=True)
    password = Column(String(255), unique=False, nullable=False)
    role = Column(Role.db_type())
    def __init__(self, name, role=Role.user, email=None, password='', github_token=None):
        """
        Parametrized constructor for the User model

        :param name: The value of the 'name' field of User model
        :type name: str
        :param role: The value of the 'role' field of User model
        :type role: Role
        :param email: The value of the 'email' field of User model (None by
        default)
        :type email: str
        :param password: The value of the 'password' field of User model (
        empty by default)
        :type password: str
        """
        self.name = name
        self.email = email
        self.password = password
        self.role = role
        self.github_token = github_token
    def __repr__(self):
        """
        Represent a User model by its 'name' field.

        :return: '<User {name}>' for debugging output
        :rtype: str
        """
        return '<User {name}>'.format(name=self.name)
    @staticmethod
    def generate_hash(password):
        """
        Generates a hash value for a password

        :param password: The password to be hashed
        :type password: str
        :return: The hashed password
        :rtype: str
        """
        # Go for increased strength no matter what
        return pwd_context.encrypt(password, category='admin')
    @staticmethod
    def create_random_password(length=16):
        """
        Creates a random password of default length 16

        Uses a urandom-backed SystemRandom choice, which draws uniformly
        from the alphabet; the previous ``ord(os.urandom(1)) % len(chars)``
        approach was biased toward the first characters.

        :param length: desired password length (16 by default)
        :type length: int
        :return: randomly generated password
        :rtype: str
        """
        chars = string.ascii_letters + string.digits + '!@#$%^&*()'
        import random
        rng = random.SystemRandom()
        return ''.join(rng.choice(chars) for _ in range(length))
    def is_password_valid(self, password):
        """
        Checks the validity of the password

        :param password: The password to be validated
        :type password: str
        :return: whether the password matches the stored hash
        :rtype: bool
        """
        return pwd_context.verify(password, self.password)
    def update_password(self, new_password):
        """
        Updates the password to a new one

        :param new_password: The new password to be updated
        :type new_password: str
        """
        self.password = self.generate_hash(new_password)
    @property
    def is_admin(self):
        """
        Verifies if a User is the admin

        :return: True when the user's role is Role.admin
        :rtype: bool
        """
        return self.role == Role.admin
    def has_role(self, name):
        """
        Checks whether the User has a particular role

        Admins implicitly have every role.

        :param name: Role of the user
        :type name: str
        :return: whether the User has the 'name' role
        :rtype: bool
        """
        return self.role.value == name or self.is_admin
|
hroncok/devassistant | test/dapi/test_dap.py | Python | gpl-2.0 | 21,493 | 0.00219 | # -*- coding: utf-8 -*-
import pytest
import sys
import os
import logging
import itertools
import glob
import subprocess
from flexmock import flexmock
try:
from cStringIO import StringIO
except:
try:
from StringIO import StringIO
except:
from io import StringIO
from devassistant.dapi import *
from test import fixtures_dir
from devassistant import utils
def dap_path(fixture):
    """Build the path of a fixture archive under the dap fixtures tree."""
    base = os.path.join(fixtures_dir, 'dapi', 'daps')
    return os.path.join(base, fixture)
def l(level=logging.WARNING, output=sys.stderr):
    """Return the shared 'daptest' logger with a stream handler on *output*."""
    test_logger = logging.getLogger('daptest')
    test_logger.addHandler(logging.StreamHandler(output))
    test_logger.setLevel(level)
    return test_logger
class TestDap(object):
'''Tests for the Dap class'''
def test_no_gz(self):
'''Not-gzip archive should raise DapFileError'''
with pytest.raises(DapFileError):
Dap(dap_path('bz2.dap'))
def test_no_exist(self):
'''Nonexisting file should raise DapFileError'''
with pytest.raises(DapFileError):
Dap('foo')
def test_no_meta(self):
'''Dap without meta.yaml should raise DapMetaError'''
with pytest.raises(DapMetaError):
Dap(dap_path('no_meta.dap'))
def test_dap_data(self):
'''Dap should have correct content in meta, basename and files'''
dap = Dap(dap_path('meta_only/foo-1.0.0.dap'))
assert dap.meta['package_name'] == 'foo'
assert dap.meta['version'] == '1.0.0'
assert u'Hrončok' in dap.meta['authors'][0]
assert dap.basename == 'foo-1.0.0.dap'
assert dap.files == ['foo-1.0.0', 'foo-1.0.0/meta.yaml']
def test_no_toplevel(self):
'''Dap with no top-level directory is invalid'''
out = StringIO()
assert not Dap(dap_path('no_toplevel/foo-1.0.0.dap')).check(logger=l(output=out, level=logging.ERROR))
assert len(out.getvalue().rstrip().split('\n')) == 1
assert 'not in top-level directory' in out.getvalue()
def test_valid_names(self):
'''Test if valid names are valid'''
d = Dap('', fake=True)
for name in 'foo f bar v8 foo-bar-foo ffffff8ff f-_--s '.split():
d.meta['package_name'] = name
assert d._isvalid('package_name')
def test_invalid_names(self):
'''Test if invalid names are invalid'''
d = Dap('', fake=True)
for name in '9 8f -a - a_ _ ř H aaHa ? aa!a () * ff+a f8-- .'.split():
d.meta['package_name'] = name
assert not d._isvalid('package_name')
def test_valid_versions(self):
'''Test if valid versions are valid'''
d = Dap('', fake=True)
for version in '0 1 888 0.1 0.1a 0.0.0b 666dev 0.0.0.0.0 8.11'.split():
d.meta['version'] = version
assert d._isvalid('version')
def test_invalid_versions(self):
'''Test if invalid versions are invalid'''
d = Dap('', fake=True)
for version in '00 01 0.00.0 01.0 1c .1 1-2 h č . 1..0 1.0.'.split():
d.meta['version'] = version
assert not d._isvalid('version')
def test_loading_float_version(self):
'''Test that loading doesn't fail if version is loaded from YAML as float'''
out = StringIO()
assert Dap(dap_path('meta_only/bad_version-0.1.dap')).check(logger=l(output=out, level=logging.ERROR))
def test_valid_urls(self):
'''Test if valid URLs are valid'''
d = Dap('', fake=True)
urls = ['http://g.com/aa?ff=g&g#f',
'ftp://g.aa/',
'http://user:password@fee.com',
'https://f.f.f.f.f.sk/cgi-bin/?f=Program%20Files']
for url in urls:
d.meta['homepage'] = url
assert d._isvalid('homepage')
def test_invalid_urls(self):
'''Test if invalid URLs are invalid'''
d = Dap('', fake=True)
urls = ['g.com/a',
'mailto:foo@bar.com',
'ftp://192.168.1.1/?a',
'https://localhost/']
for url in urls:
d.meta['homepage'] = url
assert not d._isvalid('homepage')
def test_valid_bugreports(self):
'''Test if valid URLs or e-mails are valid'''
d = Dap('', fake=True)
bugs = ['http://g.com/',
'miro@hroncok.cz',
'?ouch@devassiatnt.org',
'par_at_no.id',
'par_at_n@o.id']
for bug in bugs:
d.meta['bugreports'] = bug
assert d._isvalid('bugreports')
def test_invalid_bugreports(self):
'''Test if invalid URLs or e-mails are invalid'''
d = Dap('', fake=True)
bugs = ['httpr://g.com/',
'miro@h@roncok.cz',
'?ouchdevassiatnt.org',
'par_at_no.iduss',
'@o.id']
for bug in bugs:
d.meta['bugreports'] = bug
assert not d._isvalid('bugreports')
def test_valid_summary(self):
'''Test if valid summary is valid'''
d = Dap('', fake=True)
d.meta['summary'] = 'foo'
assert d._isvalid('summary')
def test_invalid_summary(self):
'''Test if invalid summary is invalid'''
d = Dap('', fake=True)
d.meta['summary'] = 'foo\nbar'
assert not d._isvalid('summary')
def test_empty_required(self):
'''Required metadata should fail when undefined'''
d = Dap('', fake=True)
for item in 'package_name version license authors summary'.split():
assert not d._isvalid(item)
def test_valid_licenses(self):
'''Test if valid licenses are valid'''
d = Dap('', fake=True)
licenses = ['AGPLv3 with exceptions',
'GPL+ or Artistic',
'LGPLv2+ and LGPLv2 and LGPLv3+ and (GPLv3 or LGPLv2) and (GPLv3+ or LGPLv2) and (CC-BY-SA or LGPLv2+) and (CC-BY-SA or LGPLv2) and CC-BY and BSD and MIT and Public Domain']
for license in licenses:
d.meta['license'] = license
assert d._isvalid('license')
def test_invalid_licenses(self):
'''Test if invalid licenses are invalid'''
d = Dap('', fake=True)
licenses = ['Redistributable',
'GPLv4',
'LGPLv2+ and (LGPLv2',
'GNU GPL']
for license in licenses:
d.meta['license'] = license
assert not d._isvalid('license')
def test_valid_authors(self):
'''Test if valid authors are valid'''
d = Dap('', fake=True)
pool = [u'Miro Hrončok <miro@hroncok.cz>',
u'Miro Hrončok <miro_at_hroncok.cz>',
u'Miro Hrončok',
u'Dr. Voštěp',
u'Никола I Петровић-Његош']
for r in range(1, len(pool) + 1):
for authors in itertools.combinations(pool, r):
d.meta['authors'] = list(authors)
ok, bads = d._arevalid('authors')
assert ok
assert not bads
def test_invalid_authors(self):
'''Test if invalid authors are invalid'''
d = Dap('', fake=True)
pool = [u'Mir | o Hrončok ',
' ',
u' Miro Hrončok',
u'Miro Hrončok miro@hroncok.cz',
u'Miro Hrončok <miro@hr@oncok.cz>',
'']
for r in range(1, len(pool) + 1):
for authors in itertools.combinations(pool, r):
d.meta['authors'] = list(authors)
ok, bads = d._arevalid( | 'authors')
assert not ok
assert bads == list(authors)
d.meta['authors'] = ['OK2 <ok@ok.ok>'] + pool + ['OK <ok@ok.ok>']
ok, bads = d._arevalid('authors')
assert bads == pool
def test_duplicate_authors(self):
'''Test if duplicate valid authors are invalid'''
d = Dap('', fake=True)
d.meta['authors'] = ['A', 'B', 'A']
ok, bads = d._arevalid('authors')
assert not ok
assert bads == ['A']
def test_empty_authors(self):
'''Test i |
rwightman/pytorch-image-models | timm/loss/asymmetric_loss.py | Python | apache-2.0 | 3,322 | 0.001505 | import torch
import torch.nn as nn
class AsymmetricLossMultiLabel(nn.Module):
    """Asymmetric loss (ASL) for multi-label classification.

    Applies separate focusing exponents to positive (``gamma_pos``) and
    negative (``gamma_neg``) targets and optionally shifts (clips) the
    negative probabilities before computing the binary cross-entropy terms.
    Returns the negated sum over all elements.
    """

    def __init__(self, gamma_neg=4, gamma_pos=1, clip=0.05, eps=1e-8, disable_torch_grad_focal_loss=False):
        super(AsymmetricLossMultiLabel, self).__init__()
        self.gamma_neg = gamma_neg  # focusing exponent for negative targets
        self.gamma_pos = gamma_pos  # focusing exponent for positive targets
        self.clip = clip            # probability margin added to negatives
        self.disable_torch_grad_focal_loss = disable_torch_grad_focal_loss
        self.eps = eps              # numerical floor inside log()

    def forward(self, x, y):
        """"
        Parameters
        ----------
        x: input logits
        y: targets (multi-label binarized vector)
        """
        # Calculating Probabilities
        x_sigmoid = torch.sigmoid(x)
        xs_pos = x_sigmoid
        xs_neg = 1 - x_sigmoid

        # Asymmetric Clipping (probability shifting for negatives)
        if self.clip is not None and self.clip > 0:
            xs_neg = (xs_neg + self.clip).clamp(max=1)

        # Basic CE calculation
        los_pos = y * torch.log(xs_pos.clamp(min=self.eps))
        los_neg = (1 - y) * torch.log(xs_neg.clamp(min=self.eps))
        loss = los_pos + los_neg

        # Asymmetric Focusing
        if self.gamma_neg > 0 or self.gamma_pos > 0:
            if self.disable_torch_grad_focal_loss:
                # Compute the focal weight without tracking gradients.
                # Public API instead of the private torch._C entry point.
                torch.set_grad_enabled(False)
            pt0 = xs_pos * y
            pt1 = xs_neg * (1 - y)  # pt = p if t > 0 else 1-p
            pt = pt0 + pt1
            one_sided_gamma = self.gamma_pos * y + self.gamma_neg * (1 - y)
            one_sided_w = torch.pow(1 - pt, one_sided_gamma)
            if self.disable_torch_grad_focal_loss:
                torch.set_grad_enabled(True)
            loss *= one_sided_w

        return -loss.sum()
class AsymmetricLossSingleLabel(nn.Module):
    """Asymmetric loss (ASL) for single-label (softmax) classification.

    Combines asymmetric focusing with optional label smoothing (``eps``).
    """

    def __init__(self, gamma_pos=1, gamma_neg=4, eps: float = 0.1, reduction='mean'):
        super(AsymmetricLossSingleLabel, self).__init__()
        self.eps = eps  # label-smoothing factor
        self.logsoftmax = nn.LogSoftmax(dim=-1)
        self.targets_classes = []  # prevent gpu repeated memory allocation
        self.gamma_pos = gamma_pos  # focusing exponent for the target class
        self.gamma_neg = gamma_neg  # focusing exponent for the other classes
        self.reduction = reduction  # 'mean' or anything else for no reduction

    def forward(self, inputs, target, reduction=None):
        """"
        Parameters
        ----------
        inputs: input logits
        target: targets (1-hot vector)
        reduction: optional per-call override of the reduction configured
            at construction time; previously this argument was silently
            ignored.
        """
        num_classes = inputs.size()[-1]
        log_preds = self.logsoftmax(inputs)
        self.targets_classes = torch.zeros_like(inputs).scatter_(1, target.long().unsqueeze(1), 1)

        # ASL weights
        targets = self.targets_classes
        anti_targets = 1 - targets
        xs_pos = torch.exp(log_preds)
        xs_neg = 1 - xs_pos
        xs_pos = xs_pos * targets
        xs_neg = xs_neg * anti_targets
        asymmetric_w = torch.pow(1 - xs_pos - xs_neg,
                                 self.gamma_pos * targets + self.gamma_neg * anti_targets)
        log_preds = log_preds * asymmetric_w

        if self.eps > 0:  # label smoothing
            self.targets_classes.mul_(1 - self.eps).add_(self.eps / num_classes)

        # loss calculation
        loss = - self.targets_classes.mul(log_preds)
        loss = loss.sum(dim=-1)

        # Honour a per-call reduction override; fall back to the configured one.
        if reduction is None:
            reduction = self.reduction
        if reduction == 'mean':
            loss = loss.mean()
        return loss
|
rimbalinux/LMD3 | transaction/urls.py | Python | bsd-3-clause | 171 | 0.005848 | from django.conf.urls.defaults import *
|
urlpatterns = patterns('transaction.views',
(r'^create/(?P<pid>.*)$', 'create'),
(r'^edit/( | ?P<tid>.*)$', 'edit'),
)
|
box/box-python-sdk | demo/example.py | Python | apache-2.0 | 9,973 | 0.002005 | import os
from boxsdk import Client
from boxsdk.exception import BoxAPIException
from boxsdk.object.collaboration import CollaborationRole
from demo.auth import authenticate
def run_user_example(client):
    """Fetch and print the login of the currently authenticated user."""
    # 'me' is a handy value to get info on the current authenticated user.
    current_user = client.user(user_id='me').get(fields=['login'])
    print(f'The email of the user is: {current_user["login"]}')
def run_folder_examples(client):
root_folder = client.folder(folder_id='0').get()
print(f'The root folder is owned by: {root_folder.owned_by["login"]}')
items = root_folder.get_items(limit=100, offset=0)
print('This is the first 100 items in the root folder:')
for item in items:
print(" " + item.name)
def run_collab_examples(client):
root_folder = client.folder(folder_id='0')
collab_folder = root_folder.create_subfolder('collab folder')
try:
print(f'Folder {collab_folder.get()["name"]} created')
collaboration = collab_folder.add_collaborator('someone@example.com', CollaborationRole.VIEWER)
print('Created a collaboration')
try:
modified_collaboration = collaboration.update_info(role=CollaborationRole.EDITOR)
print(f'Modified a collaboration: {modified_collaboration.role}')
finally:
collaboration.delete()
print('Deleted a collaboration')
finally:
# Clean up
print(f'Delete folder collab folder succeeded: {collab_folder.delete()}')
def rename_folder(client):
root_folder = client.folder(folder_id='0')
foo = root_folder.create_subfolder('foo')
try:
print(f'Folder {foo.get()["name"]} created')
bar = foo.rename('bar')
print(f'Renamed to {bar.get()["name"]}')
finally:
print(f'Delete folder bar succeeded: {foo.delete()}')
def get_folder_shared_link(client):
root_folder = client.folder(folder_id='0')
collab_folder = root_folder.create_subfolder('shared link folder')
try:
print(f'Folder {collab_folder.get().name} created')
shared_link = collab_folder.get_shared_link()
print('Got shared link:' + shared_link)
finally:
print(f'Delete folder collab folder succeeded: {collab_folder.delete()}')
def upload_file(client):
    """Upload a local text file into the root folder, then clean it up."""
    here = os.path.dirname(os.path.realpath(__file__))
    root_folder = client.folder(folder_id='0')
    a_file = root_folder.upload(os.path.join(here, 'file.txt'), file_name='i-am-a-file.txt')
    try:
        print(f'{a_file.get()["name"]} uploaded: ')
    finally:
        # Always remove the uploaded file, even if printing its info failed.
        print(f'Delete i-am-a-file.txt succeeded: {a_file.delete()}')
def upload_accelerator(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='i-am-a-file.txt', upload_using_accelerator=True)
try:
print(f'{a_file.get()["name"]} uploaded via Accelerator: ')
file_v2_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file_v2.txt')
a_file = a_file.update_contents(file_v2_path, upload_using_accelerator=True)
print(f'{a_file.get()["name"]} updated via Accelerator: ')
finally:
print(f'Delete i-am-a-file.txt succeeded: {a_file.delete()}')
def rename_file(client):
    """Upload a file as foo.txt, rename it to bar.txt, then delete it."""
    here = os.path.dirname(os.path.realpath(__file__))
    foo = client.folder(folder_id='0').upload(os.path.join(here, 'file.txt'), file_name='foo.txt')
    try:
        print(f'{foo.get()["name"]} uploaded ')
        bar = foo.rename('bar.txt')
        print(f'Rename succeeded: {bool(bar)}')
    finally:
        # Always remove the uploaded file, whatever happened above.
        foo.delete()
def update_file(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
file_v1 = root_folder.upload(file_path, file_name='file_v1.txt')
try:
# print f'File content after upload: {file_v1.content()}'
file_v2_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file_v2.txt')
file_v2 = file_v1.update_contents(file_v2_path)
# print f'File content after update: {file_v2.content()}'
finally:
file_v1.delete()
def search_files(client):
search_results = client.search().query(
'i-am-a-file.txt',
limit=2,
offset=0,
ancestor_folders=[client.folder(folder_id='0')],
file_extensions=['txt'],
)
for item in search_results:
item_with_name = item.get(fields=['name'])
print('matching item: ' + item_with_name.id)
else:
print('no matching items')
def copy_item(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='a file.txt')
try:
subfolder1 = root_folder.create_subfolder('copy_sub')
try:
a_file.copy(subfolder1)
print(subfolder1.get_items(limit=10, offset=0))
subfolder2 = root_folder.create_subfolder('copy_sub2')
try:
subfolder1.copy(subfolder2)
print(subfolder2.get_items(limit=10, offset=0))
finally:
subfolder2.delete()
finally:
subfolder1.delete()
finally:
a_file.delete()
def move_item(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='a file.txt')
try:
subfolder1 = root_folder.create_subfolder('move_sub')
try:
a_file.move(subfolder1)
print(subfolder1.get_items(limit=10, offset=0))
subfolder2 = root_f | older.create_subfolder('move_sub2')
try:
| subfolder1.move(subfolder2)
print(subfolder2.get_items(limit=10, offset=0))
finally:
subfolder2.delete()
finally:
try:
subfolder1.delete()
except BoxAPIException:
pass
finally:
try:
a_file.delete()
except BoxAPIException:
pass
def get_events(client):
    """Print up to 100 events starting from the current stream position."""
    print(client.events().get_events(limit=100, stream_position='now'))
def get_latest_stream_position(client):
    """Print the latest position of the account's event stream."""
    print(client.events().get_latest_stream_position())
def long_poll(client):
    """Long-poll the events endpoint and print the result (blocks until an event)."""
    print(client.events().long_poll())
def _delete_leftover_group(existing_groups, group_name):
    """Delete the first group in *existing_groups* named *group_name*, if any."""
    match = None
    for group in existing_groups:
        if group.name == group_name:
            match = group
            break
    if match:
        match.delete()
def run_groups_example(client):
"""
Shows how to interact with 'Groups' in the Box API. How to:
- Get info about all the Groups to which the current user belongs
- Create a Group
- Rename a Group
- Add a member to the group
- Remove a member from a group
- Delete a Group
"""
try:
# First delete group if it already exists
original_groups = client.groups()
_delete_leftover_group(original_groups, 'box_sdk_demo_group')
_delete_leftover_group(original_groups, 'renamed_box_sdk_demo_group')
new_group = client.create_group('box_sdk_demo_group')
except BoxAPIException as ex:
if ex.status != 403:
raise
print('The authenticated user does not have permissions to manage groups. Skipping the test of this demo.')
return
print('New group:', new_group.name, new_group.id)
new_group = new_group.update_info({'name': 'renamed_box_sdk_demo_group'})
print("Group's new name:", new_group.name)
me_dict = client.user().get(fields=['login'])
me = client.user(user_id=me_dict['id'])
group_membership = new_group.add_member(me, 'member')
members = list(new_group.membership())
print('The group has a membership of: ', len(members))
print('The id of that membership: ', group_membership.object_id)
group_membership.d |
lyynocs/magento-connector-v8 | sale_automatic_workflow_exception/__openerp__.py | Python | agpl-3.0 | 1,693 | 0 | # -*- coding: utf-8 -*-
###############################################################################
#
# Module for OpenERP
# Copyright (C) 2014 Akretion (http://www.akretion.com).
# @author Sébastien BEAU <sebastien.beau@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{'name': 'Sale Automatic Workflow Exception',
'version': '0.0.1',
'author': "Akretion,Odoo Community Association (OCA)",
'website': 'www.akretion.com',
'license': 'AGPL-3',
'category': 'Generic Modules',
'description': """Auto installable Module for the compatibility
with the Sale Automatic Workflow and Sale Exception.
Order in exception are ignored | by the cron to avoid useless testing
every minutes. Please take care to active the cron "test all draft"
in order to retest some time the order with an ex | ception.
""",
'depends': [
'sale_exceptions',
'sale_automatic_workflow',
],
'data': [
],
'installable': True,
'application': False,
'auto_install': True,
}
|
mzuther/lalikan | src/Lalikan.py | Python | gpl-3.0 | 7,043 | 0 | #!/usr/bin/env py | thon3
"""Lalikan
=======
Backup s | cheduler for Disk ARchive (DAR)
Copyright (c) 2010-2019 Dr. Martin Zuther (http://www.mzuther.de/)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Thank you for using free software!
"""
import argparse
import os
import sys
import lalikan.settings
import lalikan.runner
def assert_requirements():
    """
    Check application requirements. Exits with an error if the requirements
    are not fulfilled.

    :rtype:
        None
    """
    # Lalikan relies on Python 3; bail out on any other major version.
    major = sys.version_info.major
    if major == 3:
        return
    print()
    print('Lalikan does not run on Python {}.'.format(major))
    print()
    exit(1)
def print_header():
    """
    Print application name and version on command line.

    :rtype:
        None
    """
    title = settings.get_name_and_version()
    underline = '=' * len(title)
    print()
    print(title)
    print(underline)
def list_sections(message):
    """
    Print a message followed by all defined backup sections.

    :param message:
        message to be displayed on command line
    :type message:
        String

    :rtype:
        None
    """
    print()
    print(message)
    print()
    # one bullet line per configured backup section
    for name in settings.sections():
        print(' * ' + name)
    print()
def parse_command_line(settings):
    """
    Parse command line.

    May terminate the process: ``--licence`` and ``--list`` exit with
    code 0 after printing; an unknown ``--section`` exits with code 1.

    :param settings:
        backup settings and application information
    :type settings:
        lalikan.settings

    :returns:
        a list of selected backup sections and whether the backup
        should be forced
    :rtype:
        tuple(list of str, bool)
    """
    # initialise command line parser
    parser = argparse.ArgumentParser(
        description=settings.get_description(),
        formatter_class=argparse.RawDescriptionHelpFormatter)

    # argument: show version information
    parser.add_argument(
        '--version',
        action='version',
        version=settings.get_name_and_version())

    # argument: show copyright and licence information
    parser.add_argument(
        '--licence',
        action='store_true',
        dest='licence',
        default=False,
        help='show copyright and licence information and exit')

    # argument: list all backup sections
    parser.add_argument(
        '-l', '--list',
        action='store_true',
        dest='list_sections',
        default=False,
        help='list all defined backup sections and exit')

    # --section and --force are mutually exclusive
    group = parser.add_mutually_exclusive_group()

    # argument: create backup for section only
    group.add_argument(
        '-s', '--section',
        action='store',
        dest='section',
        metavar='SECTION',
        default=None,
        help='create backup for SECTION (otherwise, backups for all '
        'sections will be created)')

    # argument: force backup for section
    group.add_argument(
        '--force',
        action='store',
        dest='force_backup',
        metavar='SECTION',
        default=None,
        help='force backup for SECTION')

    # parse command line
    args = parser.parse_args()

    # show copyright and licence information
    if args.licence:
        # get application name and version
        name_and_version = settings.get_name_and_version()

        print()
        print(name_and_version)
        print('=' * len(name_and_version))
        print(settings.get_description())
        print()
        print(settings.get_copyrights())
        print()
        print(settings.get_license(True))
        print()
        exit(0)

    # list defined sections and exit
    if args.list_sections:
        list_sections('Backup sections:')
        exit(0)

    # user asked to create a specific backup which is not defined
    if args.section and args.section not in settings.sections():
        # print error message and exit
        message = 'Could not find section "{}". '.format(args.section)
        message += 'Please use one of these sections:'
        list_sections(message)
        exit(1)

    # force backup for specified section
    if args.force_backup:
        sections = [args.force_backup]
        force_backup = True
    # create backup for specified section
    elif args.section:
        sections = [args.section]
        force_backup = False
    # create backup for all sections
    else:
        sections = settings.sections()
        force_backup = False

    return (sections, force_backup)
if __name__ == '__main__':
    # check application requirements; exits if a requirement is not met
    assert_requirements()

    # load Lalikan settings
    config_filename = '/etc/lalikan'
    settings = lalikan.settings.Settings(config_filename)

    # parse command line (may exit for --licence / --list / unknown section)
    sections, force_backup = parse_command_line(settings)

    # print application name and version
    print_header()
    print()

    # on Linux, check whether the script runs with superuser rights;
    # this is only a warning -- the backup is still attempted
    if sys.platform == 'linux' and os.getuid() != 0:
        box_width = 24
        print(' ╔' + '═' * box_width + '╗')
        print(' ║' + ' ' * box_width + '║')
        print(' ║ YOU LACK SUPER POWER ║')
        print(' ║' + ' ' * box_width + '║')
        print(' ║ Your backup may be ║')
        print(' ║ incomplete. Maybe ║')
        print(' ║ there\'s no backup. ║')
        print(' ║' + ' ' * box_width + '║')
        print(' ║ YOU\'VE BEEN WARNED ║')
        print(' ║' + ' ' * box_width + '║')
        print(' ╚' + '═' * box_width + '╝')
        print()

    # keep track of backup errors
    errors_occurred = False

    # loop over specified backup sections
    for n, section in enumerate(sections):
        try:
            # create backup for section
            lalikan.runner.BackupRunner(settings, section, force_backup)
        except OSError as err:
            # print error message
            print(err)

            # remember that an error occurred
            errors_occurred = True
        finally:
            # print a separator between sections
            if n < (len(sections) - 1):
                print()
                print('---')
                print()

    # print summary and exit with error code 1
    if errors_occurred:
        print('---')
        print()
        print('At least one error has occurred!')
        print()
        exit(1)
|
MehdiSfr/tensor-flow | tensorflow/g3doc/how_tos/reading_data/fully_connected_preloaded.py | Python | apache-2.0 | 5,244 | 0.005721 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Trains the MNIST network using preloaded data in a constant.
Command to run this py_binary target:
bazel run -c opt \
<...>/tensorflow/g3doc/how_tos/reading_data:fully_connected_preloaded
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import tim | e
import tensorflow.python.platform
import numpy
import tensorflow as tf
from tensorflow.g3doc.tutorials.mnist import input_data
from tensorflow.g3doc.tutorials.mnist import mnist
# Basic model par | ameters as external flags.
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
flags.DEFINE_integer('num_epochs', 2, 'Number of epochs to run trainer.')
flags.DEFINE_integer('hidden1', 128, 'Number of units in hidden layer 1.')
flags.DEFINE_integer('hidden2', 32, 'Number of units in hidden layer 2.')
flags.DEFINE_integer('batch_size', 100, 'Batch size. '
'Must divide evenly into the dataset sizes.')
flags.DEFINE_string('train_dir', 'data', 'Directory to put the training data.')
flags.DEFINE_boolean('fake_data', False, 'If true, uses fake data '
'for unit testing.')
def run_training():
"""Train MNIST for a number of epochs."""
# Get the sets of images and labels for training, validation, and
# test on MNIST.
data_sets = input_data.read_data_sets(FLAGS.train_dir, FLAGS.fake_data)
# Tell TensorFlow that the model will be built into the default Graph.
with tf.Graph().as_default():
with tf.name_scope('input'):
# Input data
input_images = tf.constant(data_sets.train.images)
input_labels = tf.constant(data_sets.train.labels)
image, label = tf.train.slice_input_producer(
[input_images, input_labels], num_epochs=FLAGS.num_epochs)
label = tf.cast(label, tf.int32)
images, labels = tf.train.batch(
[image, label], batch_size=FLAGS.batch_size)
# Build a Graph that computes predictions from the inference model.
logits = mnist.inference(images, FLAGS.hidden1, FLAGS.hidden2)
# Add to the Graph the Ops for loss calculation.
loss = mnist.loss(logits, labels)
# Add to the Graph the Ops that calculate and apply gradients.
train_op = mnist.training(loss, FLAGS.learning_rate)
# Add the Op to compare the logits to the labels during evaluation.
eval_correct = mnist.evaluation(logits, labels)
# Build the summary operation based on the TF collection of Summaries.
summary_op = tf.merge_all_summaries()
# Create a saver for writing training checkpoints.
saver = tf.train.Saver()
# Create the op for initializing variables.
init_op = tf.initialize_all_variables()
# Create a session for running Ops on the Graph.
sess = tf.Session()
# Run the Op to initialize the variables.
sess.run(init_op)
# Instantiate a SummaryWriter to output summaries and the Graph.
summary_writer = tf.train.SummaryWriter(FLAGS.train_dir,
graph_def=sess.graph_def)
# Start input enqueue threads.
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
# And then after everything is built, start the training loop.
try:
step = 0
while not coord.should_stop():
start_time = time.time()
# Run one step of the model.
_, loss_value = sess.run([train_op, loss])
duration = time.time() - start_time
# Write the summaries and print an overview fairly often.
if step % 100 == 0:
# Print status to stdout.
print('Step %d: loss = %.2f (%.3f sec)' % (step, loss_value,
duration))
# Update the events file.
summary_str = sess.run(summary_op)
summary_writer.add_summary(summary_str, step)
step += 1
# Save a checkpoint periodically.
if (step + 1) % 1000 == 0:
print('Saving')
saver.save(sess, FLAGS.train_dir, global_step=step)
step += 1
except tf.errors.OutOfRangeError:
print('Saving')
saver.save(sess, FLAGS.train_dir, global_step=step)
print('Done training for %d epochs, %d steps.' % (FLAGS.num_epochs, step))
finally:
# When done, ask the threads to stop.
coord.request_stop()
# Wait for threads to finish.
coord.join(threads)
sess.close()
def main(_):
run_training()
if __name__ == '__main__':
tf.app.run()
|
mcgee/ns-3 | src/click/bindings/modulegen__gcc_LP64.py | Python | gpl-2.0 | 362,122 | 0.015152 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
root_module = Module('ns.click', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## log.h (module 'core'): ns3::LogLevel [enumeration]
module.add_enum('LogLevel', ['LOG_NONE', 'LOG_ERROR', 'LOG_LEVEL_ERROR', 'LOG_WARN', 'LOG_LEVEL_WARN', 'LOG_DEBUG', 'LOG_LEVEL_DEBUG', 'LOG_INFO', 'LOG_LEVEL_INFO', 'LOG_FUNCTION', 'LOG_LEVEL_FUNCTION', 'LOG_LOGIC', 'LOG_LEVEL_LOGIC', 'LOG_ALL', 'LOG_LEVEL_ALL', 'LOG_PREFIX_FUNC', 'LOG_PREFIX_TIME', 'LOG_PREFIX_NODE', 'LOG_PREFIX_LEVEL', 'LOG_PREFIX_ALL'], import_from_module='ns.core')
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from | _module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iter | ator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
module.add_class('Inet6SocketAddress', import_from_module='ns.network')
## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
module.add_class('InetSocketAddress', import_from_module='ns.network')
## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
## int-to-type.h (module 'core'): ns3::IntToType<0> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['0'])
## int-to-type.h (module 'core'): ns3::IntToType<0>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<1> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['1'])
## int-to-type.h (module 'core'): ns3::IntToType<1>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<2> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['2'])
## int-to-type.h (module 'core'): ns3::IntToType<2>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<3> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['3'])
## int-to-type.h (module 'core'): ns3::IntToType<3>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<4> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['4'])
## int-to-type.h (module 'core'): ns3::IntToType<4>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<5> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['5'])
## int-to-type.h (module 'core'): ns3::IntToType<5>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >'], import_from_module='ns.core')
## int-to-type.h (module 'core'): ns3::IntToType<6> [struct]
module.add_class('IntToType', import_from_module='ns.core', template_parameters=['6'])
## int-to-type.h (module 'core'): ns3::IntToType<6>::v_e [enumeration]
module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 6 >'], import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress [class]
module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e [enumeration]
module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress [class]
module.add_class('Ipv6InterfaceAddress', import_from_module='ns.internet')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e [enumeration]
module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e [enumeration]
module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6Interf |
bongtrop/cilab-python | ann/bp.py | Python | bsd-3-clause | 1,933 | 0.017072 | import math
import net
SIGMOID = 0
TANH = 1
class bp:
def __init__(self, net, learning_rate, momentum):
self.type = net.getType()
self.net = net
self.lr = learning_rate
self.m = momentum
self.layer = net.getLayer()
self.lc = [[[0]*max(self.layer)]*max(self.layer)]*len(self.layer)
def _dfunc(self, y):
if self.type==SIGMOID:
return y * (1.0 - y)
else:
return 1.0 - y**2
def setLearningRate(self,x):
self.lr = x
def setMomentum(self, x):
self.m = x
def backPropagate(self, input, target):
if len(target)!=self.layer[-1]:
print len(target)
print self.layer[-1]
raise ValueError('Wrong number of target values')
self.net.process(input)
nlayer = len(self.layer)
delta = []
for i in range(0, nlayer):
delta.append([0.0] * self.layer[i])
for i in range(0,self.layer[nlayer-1]):
node = self.net.getNode(nlayer-1, i)
error = target[i] - node
delta[nlayer-1][i] = self._dfunc(node) * error
for l in range(nlayer-2, 0, -1):
for i in range(0, self.layer[l]):
error = 0.0
for j in range(0, self.layer[l+1]):
error = error + delta[l+1][j] * self.net.getWeight(l+1, i, j)
delta[l][i] = self._dfunc(self.net.getNode(l,i)) * error
for l in range(nlayer-2, -1, -1):
for i in range(0, self.layer[l]):
for j in range(0, self.layer[l+1]):
change = delta[l+1][j] * | self.net.getNode(l, i)
w = self.net.getWeight(l+1, i, j) + self.lr * change + self.m * self.lc[l+1][i][j]
self.net.setWeight(l+1, i, j, w)
self.lc[l+1][i][j] = change
for i in range(0, self.layer[l+1]):
b = self.net.getBias(l+1, i) + delta[l+1][i]
self.net.setBia | s(l+1, i, b)
error = 0.0
for i in range(0, len(target)):
error = error + 0.5 * (target[i] - self.net.getNode(nlayer-1, i))**2
return error
|
roxma/nvim-completion-manager | pythonx/cm_scopers/html_scoper.py | Python | mit | 4,724 | 0.012066 | # -*- coding: utf-8 -*-
import re
import logging
import copy
from cm import Base, getLogger
logger = getLogger(__name__)
class Scoper(Base):
scopes = ['html','xhtml','php','blade','jinja','jinja2','vue.html.javascript.css','vue']
def sub_context(self,ctx,src):
lnum = ctx['lnum']
col = ctx['col']
from html.parser import HTMLParser
scoper = self
class MyHTMLParser(HTMLParser):
last_data_start = None
last_data = None
scope_info = None
skip = False
def handle_starttag(self,tag,attrs):
self.skip = False
if tag in ['style','script']:
for attr in attrs:
try:
# avoid css completion for lang="stylus"
if tag=='style' and attr[0]=='lang' and attr[1] and attr[1] not in ['css','scss']:
self.skip = True
return
if tag=='style' and attr[0]=='type' and attr[1] and attr[1] not in ['text/css']:
self.skip = True
return
if tag=='script' and attr[0]=='type' and attr[1] and attr[1] not in ['text/javascript']:
self.skip = True
return
except:
pass
def handle_endtag(self, tag):
if self.skip:
return
if tag in ['style','script']:
startpos = self.last_data_start
endpos = self.getpos()
if ((startpos[0]<lnum
or (startpos[0]==lnum
and startpos[1]+1<=col))
and
(endpos[0]>lnum
or (endpos[0]==lnum
and endpos[1]>=col))
):
self.scope_info = {}
self.scope_info['lnum'] = lnum-startpos[0]+1
if lnum==startpos[0]:
self.scope_info['col'] = col-(startpos[1]+1)+1
else:
self.scope_info['col']=col
if tag=='script':
self.scope_info['scope']='javascript'
else:
# style
self.scope_info['scope']='css'
self.scope_info['scope_offset']= scoper.get_pos(startpos[0],startpos[1]+1,src)
self.scope_info['scope_len']=len(self.last_data)
# offset as lnum, col format
self.scope_info['scope_lnum']= startpos[0]
# startpos[1] is zero based
self.scope_info['scope_col']= startpos[1]+1
def handle_data(self, data):
self.last_data = data
self.last_data_start = self.getpos()
parser = MyHTMLParser | ()
parser.feed(src)
if parser.scope_info:
new_ctx = copy.deepcopy(ctx)
new_ctx['scope'] = parser.scope_info['scope']
new_ctx['lnum'] = parser.scope_info['lnum']
new_ctx['col'] = parser.scope_info['col']
new_ctx['scope_offset'] = parser.scope_info | ['scope_offset']
new_ctx['scope_len'] = parser.scope_info['scope_len']
new_ctx['scope_lnum'] = parser.scope_info['scope_lnum']
new_ctx['scope_col'] = parser.scope_info['scope_col']
return new_ctx
pos = self.get_pos(lnum,col,src)
# css completions for style='|'
for match in re.finditer(r'style\s*=\s*("|\')(.*?)\1',src):
if match.start(2)>pos:
return
if match.end(2)<pos:
continue
# start < pos and and>=pos
new_src = match.group(2)
new_ctx = copy.deepcopy(ctx)
new_ctx['scope'] = 'css'
new_ctx['scope_offset'] = match.start(2)
new_ctx['scope_len'] = len(new_src)
scope_lnum_col = self.get_lnum_col(match.start(2),src)
new_ctx['scope_lnum'] = scope_lnum_col[0]
new_ctx['scope_col'] = scope_lnum_col[1]
sub_pos = pos - match.start(2)
sub_lnum_col = self.get_lnum_col(sub_pos,new_src)
new_ctx['lnum'] = sub_lnum_col[0]
new_ctx['col'] = sub_lnum_col[1]
return new_ctx
return None
|
NaturalGIS/QGIS | python/plugins/processing/gui/menus.py | Python | gpl-2.0 | 13,513 | 0.001406 | # -*- coding: utf-8 -*-
"""
***************************************************************************
menus.py
---------------------
Date : February 2016
Copyright : (C) 2016 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Victor Olaya'
import os
from qgis.PyQt.QtCore import QCoreApplication
from qgis.PyQt.QtWidgets import QAction, QMenu
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtWidgets import QApplication
from processing.core.ProcessingConfig import ProcessingConfig, Setting
from processing.gui.MessageDialog import MessageDialog
from processing.gui.AlgorithmDialog import AlgorithmDialog
from qgis.utils import iface
from qgis.core import QgsApplication, QgsMessageLog, QgsStringUtils, QgsProcessingAlgorithm
from qgis.gui import QgsGui
from processing.gui.MessageBarProgress import MessageBarProgress
from processing.gui.AlgorithmExecutor import execute
from processing.gui.Postprocessing import handleAlgorithmResults
from processing.core.Processing import Processing
from processing.tools import dataobjects
algorithmsToolbar = None
menusSettingsGroup = 'Menus'
defaultMenuEntries = {}
vectorMenu = QApplication.translate('MainWindow', 'Vect&or')
analysisToolsMenu = vectorMenu + "/" + Processing.tr('&Analysis Tools')
defaultMenuEntries.update({'qgis:distancematrix': analysisToolsMenu,
'native:sumlinelengths': analysisToolsMenu,
'native:countpointsinpolygon': analysisToolsMenu,
'qgis:listuniquevalues': analysisToolsMenu,
'qgis:basicstatisticsforfields': analysisToolsMenu,
'native:nearestneighbouranalysis': analysisToolsMenu,
'native:meancoordinates': analysisToolsMenu,
'native:lineintersections': analysisToolsMenu})
researchToolsMenu = vectorMenu + "/" + Processing.tr('&Research Tools')
defaultMenuEntries.update({'native:creategrid': researchToolsMenu,
| 'qgis:randomselection': researchToolsMenu,
'qgis:randomselectionwithinsubsets': researchToolsMenu,
'native:ra | ndompointsinextent': researchToolsMenu,
'qgis:randompointsinlayerbounds': researchToolsMenu,
'qgis:randompointsinsidepolygons': researchToolsMenu,
'qgis:regularpoints': researchToolsMenu,
'native:selectbylocation': researchToolsMenu,
'native:polygonfromlayerextent': researchToolsMenu})
geoprocessingToolsMenu = vectorMenu + "/" + Processing.tr('&Geoprocessing Tools')
defaultMenuEntries.update({'native:buffer': geoprocessingToolsMenu,
'native:convexhull': geoprocessingToolsMenu,
'native:intersection': geoprocessingToolsMenu,
'native:union': geoprocessingToolsMenu,
'native:symmetricaldifference': geoprocessingToolsMenu,
'native:clip': geoprocessingToolsMenu,
'native:difference': geoprocessingToolsMenu,
'native:dissolve': geoprocessingToolsMenu,
'qgis:eliminateselectedpolygons': geoprocessingToolsMenu})
geometryToolsMenu = vectorMenu + "/" + Processing.tr('G&eometry Tools')
defaultMenuEntries.update({'qgis:checkvalidity': geometryToolsMenu,
'qgis:exportaddgeometrycolumns': geometryToolsMenu,
'native:centroids': geometryToolsMenu,
'qgis:delaunaytriangulation': geometryToolsMenu,
'qgis:voronoipolygons': geometryToolsMenu,
'native:simplifygeometries': geometryToolsMenu,
'native:densifygeometries': geometryToolsMenu,
'native:multiparttosingleparts': geometryToolsMenu,
'native:collect': geometryToolsMenu,
'native:polygonstolines': geometryToolsMenu,
'qgis:linestopolygons': geometryToolsMenu,
'native:extractvertices': geometryToolsMenu})
managementToolsMenu = vectorMenu + "/" + Processing.tr('&Data Management Tools')
defaultMenuEntries.update({'native:reprojectlayer': managementToolsMenu,
'qgis:joinattributesbylocation': managementToolsMenu,
'qgis:splitvectorlayer': managementToolsMenu,
'native:mergevectorlayers': managementToolsMenu,
'native:createspatialindex': managementToolsMenu})
rasterMenu = QApplication.translate('MainWindow', '&Raster')
projectionsMenu = rasterMenu + "/" + Processing.tr('Projections')
defaultMenuEntries.update({'gdal:warpreproject': projectionsMenu,
'gdal:extractprojection': projectionsMenu,
'gdal:assignprojection': projectionsMenu})
conversionMenu = rasterMenu + "/" + Processing.tr('Conversion')
defaultMenuEntries.update({'gdal:rasterize': conversionMenu,
'gdal:polygonize': conversionMenu,
'gdal:translate': conversionMenu,
'gdal:rgbtopct': conversionMenu,
'gdal:pcttorgb': conversionMenu})
extractionMenu = rasterMenu + "/" + Processing.tr('Extraction')
defaultMenuEntries.update({'gdal:contour': extractionMenu,
'gdal:cliprasterbyextent': extractionMenu,
'gdal:cliprasterbymasklayer': extractionMenu})
analysisMenu = rasterMenu + "/" + Processing.tr('Analysis')
defaultMenuEntries.update({'gdal:sieve': analysisMenu,
'gdal:nearblack': analysisMenu,
'gdal:fillnodata': analysisMenu,
'gdal:proximity': analysisMenu,
'gdal:griddatametrics': analysisMenu,
'gdal:gridaverage': analysisMenu,
'gdal:gridinversedistance': analysisMenu,
'gdal:gridnearestneighbor': analysisMenu,
'gdal:aspect': analysisMenu,
'gdal:hillshade': analysisMenu,
'gdal:roughness': analysisMenu,
'gdal:slope': analysisMenu,
'gdal:tpitopographicpositionindex': analysisMenu,
'gdal:triterrainruggednessindex': analysisMenu})
miscMenu = rasterMenu + "/" + Processing.tr('Miscellaneous')
defaultMenuEntries.update({'gdal:buildvirtualraster': miscMenu,
'gdal:merge': miscMenu,
'gdal:gdalinfo': miscMenu,
'gdal:overviews': miscMenu,
'gdal:tileindex': miscMenu})
def initializeMenus():
for m in defaultMenuEntries.keys():
alg = QgsApplication.processingRegistry().algorithmById(m)
if alg is None or alg.id() != m:
QgsMessageLog.logMessage(Processing.tr('Invalid algorithm ID for menu: {}').format(m), Processing.tr('Processing'))
for provider in QgsApplication.processingRegistry().providers():
for alg in provider.algorithms():
d = defaultMenuE |
trousev/watchme | backend/models.py | Python | bsd-2-clause | 800 | 0.00625 | from django.db.models import *
from django.contrib.auth.models import User
from uuid import uuid4
class Device(Model):
user = ForeignKey(User, on_delete=CASCADE, related_name='devices')
api_token = CharField(max_length=512, default=uuid4)
name = CharField(max_length=512)
def len(self):
return len( self.recor | ds.all() )
class Record(Model):
# This is a DEVICE which user was operating with
device = ForeignKey(Device, on_delete=CASCADE, related_name='records')
# When this was reported. It should be CLIENT time, not SERVER
reported_at = DateTimeField()
# Application which was under user's eyes (a.k.a. "Active Window")
application = CharField(max_leng | th=512)
# Context. Most propably window title.
context = CharField(max_length=4096)
|
NeoRazorX/ubuntufaq | enlaces.py | Python | agpl-3.0 | 11,757 | 0.007487 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of ubuntufaq
# Copyright (C) 2011 Carlos Garcia Gomez neorazorx@gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import cgi, os, logging, random
from google.appengine.ext import db, webapp
from google.appengine.ext.webapp import template
from google.appengine.api import users
from recaptcha.client import captcha
from datetime import datetime
from base import *
class Actualidad(Pagina):
# muestra los ultimos enlaces
def get(self, p=0):
Pagina.get(self)
enlaces_query = db.GqlQuery(" | SELECT * FROM Enlace ORDER BY fecha DESC")
# paginamos
enlaces, paginas, p_actual = self.paginar(enlaces_query, 20, p)
datos_paginacion = [paginas, p_actual, '/actualidad/']
template_values = {
'titulo': 'Act | ualidad de Ubuntu FAQ',
'descripcion': u'Noticias, blogs, vídeos, imágenes y en definitiva toda la actualidad en torno a Ubuntu y Linux en general. Comparte con nosotros!',
'tags': 'ubufaq, ubuntu faq, noticias ubuntu, actualidad ubuntu, linux, lucid, maverick, natty',
'url': self.url,
'url_linktext': self.url_linktext,
'mi_perfil': self.mi_perfil,
'formulario': self.formulario,
'vista': 'actualidad',
'enlaces': enlaces,
'datos_paginacion': datos_paginacion,
'usuario': users.get_current_user(),
'notis': self.get_notificaciones(),
'error_dominio': self.error_dominio,
'stats': self.sc.get_stats(),
'foco': 'enlace'
}
path = os.path.join(os.path.dirname(__file__), 'templates/actualidad.html')
self.response.out.write(template.render(path, template_values))
# crea un nuevo enlace
def post(self):
if self.request.get('descripcion'):
redirigir = False
# comprobamos que no se haya introducido anteriormente el enlace
url = cgi.escape(self.request.get('url'), True)
if url != '':
enlaces = db.GqlQuery("SELECT * FROM Enlace WHERE url = :1", url).fetch(1)
if enlaces:
redirigir = enlaces[0].get_link()
if redirigir:
self.redirect( redirigir )
else:
enl = Enlace()
enl.descripcion = cgi.escape(self.request.get('descripcion')[:450].replace("\n", ' '), True)
if url != '':
enl.url = url
enl.os = self.request.environ['HTTP_USER_AGENT']
if users.get_current_user():
if self.request.get('anonimo') != 'on':
enl.autor = users.get_current_user()
try:
enl.nuevo( self.sc.get_alltags() )
self.redirect( enl.get_link() )
except:
logging.warning('Imposible guardar enlace a: ' + url)
self.redirect('/error/503')
else:
challenge = self.request.get('recaptcha_challenge_field')
response = self.request.get('recaptcha_response_field')
remoteip = self.request.remote_addr
cResponse = captcha.submit(
challenge,
response,
RECAPTCHA_PRIVATE_KEY,
remoteip)
if cResponse.is_valid:
try:
enl.nuevo()
self.redirect( enl.get_link() )
except:
logging.warning('Imposible guardar enlace a: ' + url)
self.redirect('/error/503')
else:
self.redirect('/error/403c')
else:
self.redirect('/error/403')
class Redir_enlace(webapp.RequestHandler):
    """Forwards legacy link URLs to the /story/ detail page."""

    def get(self, id_enlace=None):
        # Without a link id there is nothing to forward to.
        if not id_enlace:
            self.redirect('/error/404')
        else:
            self.redirect('/story/' + id_enlace)
class Detalle_enlace(Pagina):
    """Link (enlace) detail page: renders the discussion view and handles edits."""

    def get(self, id_enlace=None):
        """Render the detail template for the link, or redirect to 404."""
        Pagina.get(self)
        e = self.sc.get_enlace(id_enlace, self.request.remote_addr)
        if e:
            # Only the author or an admin may edit; "modificar" switches the
            # template into edit mode when explicitly requested.
            editar = False
            modificar = False
            if (users.get_current_user() and users.get_current_user() == e.autor) or users.is_current_user_admin():
                editar = True
            if self.request.get('modificar') and editar:
                modificar = True
            # The captcha: only shown to anonymous visitors.
            if users.get_current_user():
                chtml = ''
            else:
                chtml = captcha.displayhtml(
                    public_key=RECAPTCHA_PUBLIC_KEY,
                    use_ssl=False,
                    error=None)
            template_values = {
                'titulo': e.descripcion + ' - Ubuntu FAQ',
                'descripcion': u'Discusión sobre: ' + e.descripcion,
                'tags': e.tags,
                'url': self.url,
                'url_linktext': self.url_linktext,
                'mi_perfil': self.mi_perfil,
                'formulario': self.formulario,
                'enlace': e,
                'comentarios': self.sc.get_comentarios_de(id_enlace),
                'captcha': chtml,
                'relacionadas': self.sc.paginas_relacionadas(e.tags),
                'administrador': users.is_current_user_admin(),
                'editar': editar,
                'modificar': modificar,
                'usuario': users.get_current_user(),
                'notis': self.get_notificaciones(),
                'error_dominio': self.error_dominio,
                'foco': 'enlace'
            }
            path = os.path.join(os.path.dirname(__file__), 'templates/enlace.html')
            self.response.out.write(template.render(path, template_values))
        else:
            self.redirect('/error/404')

    # Modifies the link (author or admin only).
    def post(self):
        e = self.sc.get_enlace(self.request.get('id'))
        if e and ((users.get_current_user() and users.get_current_user() == e.autor) or users.is_current_user_admin()):
            try:
                e.url = cgi.escape(self.request.get('url'), True)
                e.descripcion = cgi.escape(self.request.get('descripcion').replace("\n", ' '), True)
                e.tags = cgi.escape(self.request.get('tags'), True)
                # Whitelist of recognised link types; anything else is cleared.
                if self.request.get('tipo_enlace') in ['youtube', 'vimeo', 'vhtml5', 'imagen', 'deb', 'package', 'texto']:
                    e.tipo_enlace = self.request.get('tipo_enlace')
                else:
                    e.tipo_enlace = None
                e.put()
                logging.info('Se ha modificado el enlace: ' + e.get_link())
                # Invalidate the cached copy so readers see the edit.
                self.sc.borrar_cache_enlace(self.request.get('id'))
                self.redirect(e.get_link())
            except Exception:
                # Was a bare `except:`, which would also swallow SystemExit
                # and KeyboardInterrupt; datastore/escape failures land here.
                self.redirect('/error/503')
        else:
            self.redirect('/error/403')
class Acceder_enlace(Pagina):
    """Redirects the visitor to the external URL stored in a link (enlace)."""

    def get(self, id_enlace=None):
        try:
            e = self.sc.get_enlace(id_enlace, self.request.remote_addr)
            self.redirect(e.url)
        except Exception:
            # Unknown id or link without a URL: fall back to the 404 page.
            # Was a bare `except:`, which would also swallow SystemExit and
            # KeyboardInterrupt.
            self.redirect('/error/404')
class Hundir_enlace(Pagina):
def get(self):
if users.is_current_user_admin() and self.request.get('id'):
e = self.sc.get_enlace( self.request.get('id') )
i |
h3llrais3r/Auto-Subliminal | autosubliminal/core/websocket.py | Python | gpl-3.0 | 4,134 | 0.001935 | # coding=utf-8
import codecs
import logging
import cherrypy
import tailer
from schema import And, Schema, SchemaError, Use
from ws4py.messaging import TextMessage
from ws4py.websocket import WebSocket
import autosubliminal
from autosubliminal import system
from autosubliminal.core.runner import Runner
from autosubliminal.util.encoding import b2u
from autosublimina | l.util.json | import from_json, to_json
log = logging.getLogger(__name__)
RUN_SCHEDULER = 'RUN_SCHEDULER'
RUN_SYSTEM_PROCESS = 'RUN_SYSTEM_PROCESS'
SUPPORTED_EVENT_TYPES = [RUN_SCHEDULER, RUN_SYSTEM_PROCESS]
MESSAGE_SCHEMA = Schema({
'type': 'EVENT',
'event': {
'type': And(Use(str), lambda t: t in SUPPORTED_EVENT_TYPES),
'data': And(Use(dict))
}
})
class WebSocketHandler(WebSocket):
    """
    WebSocket handler class for receiving messages on the server through the websocket system.
    For now we only support event messages that trigger something on the server.
    """

    def received_message(self, message):
        if not isinstance(message, TextMessage):
            log.warning('Unsupported message received on websocket server: %r', message)
            return
        # Websocket payloads arrive as bytes; decode to unicode before parsing
        self.handle_message(from_json(b2u(message.data)))

    def handle_message(self, message):
        """Dispatch a validated event message; return True when it was handled."""
        handled = False
        if self.check_message_structure(message):
            event = message['event']
            name = event['data']['name']
            if event['type'] == RUN_SCHEDULER:
                # Trigger the named scheduler, if it exists
                scheduler = autosubliminal.SCHEDULERS.get(name)
                if scheduler is not None:
                    scheduler.run()
                    handled = True
            elif event['type'] == RUN_SYSTEM_PROCESS:
                # Map process names to the system actions they trigger
                actions = {
                    'restart': system.restart,
                    'shutdown': system.shutdown,
                    'update': system.update,
                    'flushCache': system.flush_cache,
                    'flushWantedItems': system.flush_wanted_items,
                    'flushLastDownloads': system.flush_last_downloads,
                    'flushLibrary': system.flush_library,
                }
                action = actions.get(name)
                if action is not None:
                    action()
                    handled = True
        if not handled:
            log.warning('Unsupported message received on websocket server: %r', message)
        return handled

    def check_message_structure(self, message):
        """Return True when the message matches the supported event schema."""
        try:
            MESSAGE_SCHEMA.validate(message)
        except SchemaError:
            return False
        return True
class WebSocketBroadCaster(Runner):
    """
    WebSocket broadcaster class for broadcasting data from the server through the websocket system.
    """

    def run(self):
        # Pop the oldest queued message (if any) and push it to all clients
        if autosubliminal.WEBSOCKETMESSAGEQUEUE:
            message = autosubliminal.WEBSOCKETMESSAGEQUEUE.pop(0)
            log.debug('Broadcasting websocket message: %r', message)
            # Queue entries are dicts; the websocket layer expects a json string
            cherrypy.engine.publish('websocket-broadcast', to_json(message))
class WebSocketLogHandler(WebSocket):
    """
    Websocket handler for log file tailing.
    """

    def opened(self):
        # Stream every new log line to the client for as long as it stays connected
        cherrypy.log("WebSocketLogHandler opened, starting log file tailing...")
        log_path = autosubliminal.LOGFILE
        for log_line in tailer.follow(codecs.open(log_path, 'r', 'utf-8')):
            self.send(TextMessage(log_line), False)
|
pranavj1001/LearnLanguages | python/DataStructures/Heaps/App.py | Python | mit | 159 | 0 | from Heap import Heap
newHeap = Heap()
n | ewHeap.insert(12)
newHeap.insert(-3)
newHeap | .insert(23)
newHeap.insert(4)
newHeap.insert(100)
newHeap.displayHeap()
|
vigojug/reto | 201705/peque/biggest_set.py | Python | bsd-3-clause | 1,385 | 0.000722 | #!/usr/bin/env python3
from contextlib import contextmanager
import time
from main import has_subset_sum_zero
class Duration:
    """Mutable holder for an elapsed-time measurement, in seconds."""

    def __init__(self, elapsed=None):
        # None until a measurement is recorded
        self.elapsed = elapsed
@contextmanager
def tim | eit():
duration = Duration()
tic = time.time()
yield duration
elapsed = time.time() - tic
duration.elapsed | = elapsed
def nosolution_case(N):
    """Return the integers 1..N; all positive, so no subset sums to zero."""
    return range(1, N + 1)
def negative_worst_case(N):
    """N-1 negative numbers plus the one positive value that balances them.

    Only the full set sums to zero, so a solver must dig through every subset.
    """
    negatives = list(range(1 - N, 0))
    return negatives + [-sum(negatives)]
def positive_worst_case(N):
    """N-1 positive numbers preceded by the one negative value that balances them."""
    positives = list(range(1, N))
    return [-sum(positives)] + positives
def do():
    """Benchmark has_subset_sum_zero against each worst-case generator."""
    max_seconds = 300
    for build_case in (nosolution_case, negative_worst_case, positive_worst_case):
        print(f'## Using {build_case.__name__}')
        print()
        for size in (10000, 100000, 1000000, 10000000):
            n = size
            source = build_case(n)
            print(f'Length: {n} items')
            with timeit() as duration:
                result = has_subset_sum_zero(source)
            print(f'Result: {result}')
            print(f'Duration: {duration.elapsed} seconds')
            # Stop growing the input once a single run takes too long
            if duration.elapsed >= max_seconds:
                print('Limit reached. Stopping.')
                break
            print('Continue searching...')
            print()
if __name__ == '__main__':
do()
|
SF-Zhou/prett | prett/__init__.py | Python | mit | 1,611 | 0.004345 | from .sender import SignalSender, connect_with
from .base import ValueModel, AttachAbility
from .base import ValueInterface, ChangedInterface
from .base import AbstractItem, Item, AbstractProperty
from .multi_types import StringValueModel, StringProperty, StringItemInterface, StringItem
from .multi_types import IntValueModel, IntProperty, IntItemInterface, IntItem
from .multi_types import FloatValueModel, FloatProperty, FloatItemInterface, FloatItem
from .multi_types import DictValueModel, DictProperty, DictItemInterface, DictItem
from .multi_types import ListValueModel, ListProperty, ListItemInterface, ListItem
from .multi_types import StringListValueModel, StringListProperty, StringListItemInterface, StringListItem
from .multi_types import DictListValueModel, DictListProperty, DictListItemInterface, DictListItem
from .multi_types import StringIntProperty, StringIntItemInterface
from .multi_types import StringFloatProperty, StringFloatItemInterface
from .project import AbstractProject, AbstractProjectItem
from .project import StringProjectItem, IntProjectItem, FloatProjectItem
from .project import TimePo | intItem
from .setting import AbstractSetting, AbstractSettingItem
from .set | ting import StringSettingItem, StringListSettingItem, IntSettingItem, FloatSettingItem
from .widget_interface import WidgetStringItem, WidgetStringInterface
from .widget_interface import IndexItem, WidgetIndexInterface
from .widget_interface import StringsItem, WidgetStringListInterface
from .widget_interface import WidgetDictItem, WidgetDictInterface
from .widget_interface import WidgetDictListInterface
|
andymckay/zamboni | mkt/commonplace/views.py | Python | bsd-3-clause | 4,032 | 0.000744 | import datetime
import importlib
import os
from urlparse import urlparse
from django.conf import settings
from django.core.files.storage import default_storage as storage
from django.http import HttpResponse, Http404
from django.shortcuts import render
import jingo
import jinja2
import newrelic.agent
import waffle
from cache_nuggets.lib import memoize
def get_build_id(repo):
try:
# This is where the `build_{repo}.py` files get written to after
# compiling and minifying our assets.
# Get the `BUILD_ID` from `build_{repo}.py` and use that to
# cache-bust the assets for this repo's CSS/JS minified bundles.
module = 'build_%s' % repo
return importlib.import_module(module).BUILD_ID
except (ImportError, AttributeError):
try:
build_id_fn = os.path.join(settings.MEDIA_ROOT, repo, 'build_id.txt')
with storage.open(build_id_fn) as fh:
return fh.read()
except:
# Either `build_{repo}.py` does not exist or `build_{repo}.py`
# exists but does not contain `BUILD_ID`. Fall back to
# `BUILD_ID_JS` which is written to `build.py` by jingo-minify.
try:
from build import BUILD_ID_CSS
return BUILD_ID_CSS
except ImportError:
return 'dev'
def get_imgurls(repo):
imgurls_fn = os.path.join(settings.MEDIA_ROOT, repo, 'imgurls.txt')
with storage.open(imgurls_fn) as fh:
return list(set(fh.readlines()))
def commonplace(request, repo, **kwargs):
if repo not in settings.COMMONPLACE_REPOS:
raise Http404
BUILD_ID = get_build_id(repo)
ua = request.META.get('HTTP_USER_AGENT', '').lower()
include_persona = True
include_splash = False
if repo == 'fireplace':
include_splash = True
if (request.GET.get('nativepersona') or
'mccs' in request.GET or
('mcc' in request.GET and 'mnc' in request.GET)):
include_persona = False
elif repo == 'discoplace':
include_persona = False
include_splash = True
if waffle.switch_is_active('firefox-accounts'):
# We never want to include persona shim if firefox accounts is enabled:
# native fxa already provides navigator.id, and fallback fxa doesn't
# need it.
include_persona = False
site_settings = {}
else:
site_settings = {
'persona_unverified_issuer': settings.BROWSERID_DOMAIN,
}
ctx = {
'BUILD_ID': BUILD_ID,
'appcache': repo in settings.COMMONPLACE_REPOS_APPCACHED,
'include_persona': include_persona,
'include_splash': include_splash,
'repo': repo,
'robots': 'googlebot' in ua,
'site_settings': site_settings,
'newrelic_header': newrelic.agent.get_browser_timing_header,
'newrelic_footer': newrelic.agent.get_browser_timing_footer,
}
media_url = urlparse(settings.MEDIA_URL)
if media_url.netloc:
ctx['media_origin'] = media_url.scheme + '://' + media_url.netloc
return render(request, 'commonplace/index.html', ctx)
def appcache_manifest(request):
"""Serves the appcache manifest."""
repo = request.GET.get('repo')
if not repo or repo not in settings.COMMONPLACE_REPOS_APPCACHED:
raise Http404
template = appcache_manifest_template(repo)
return Http | Response(template, content_type='text/cache-manifest')
@memoize('appcache-manifest-template')
def appcache_manifest_template(repo):
ctx = {
'BUILD_ID': get_build_id(repo),
'imgurls': get_imgurls(repo),
'repo': repo,
'timestamp': datetime.datetime.now(),
}
t = jingo.env.get_template('commonplace/manifest.appcache').render(ctx)
return unicode(ji | nja2.Markup(t))
def iframe_install(request):
return render(request, 'commonplace/iframe-install.html')
def potatolytics(request):
return render(request, 'commonplace/potatolytics.html')
|
lamestation/packthing | packthing/packagers/inno.py | Python | gpl-3.0 | 4,477 | 0.000893 | # -*- coding: utf-8 -*-
import os
import subprocess
import uuid
from .. import icons, util
from . import _base
try:
from PIL import Image
except:
util.error("Failed to import Python Imaging Library; is it installed?")
REQUIRE = [
"iscc",
"windeployqt",
]
KEYS = ["banner"]
class Packager(_base.Packager):
def __init__(self, config, files):
super(Packager, self).__init__(config, files)
self.EXT_BIN = "exe"
self.EXT_LIB = "dll"
self.LIB_PREFIX = ""
self.DIR_OUT = os.path.join(self.DIR_STAGING, "win")
self.OUT["bin"] = ""
self.OUT["lib"] = ""
self.OUT["share"] = ""
def get_path(self):
return self.DIR_OUT
# Taken from innosetup module (https://pypi.python.org/pypi/innosetup/0.6.6)
def AppID(self):
src = self.config["url"].encode("ascii")
appid = uuid.uuid5(uuid.NAMESPACE_URL, src).urn.rsplit(":", 1)[1]
return "{{%s}" % appid
def iss_setup(self):
bannerpath = os.path.join(
self.DIR, self.config["master"], self.config["banner"].replace("/", "\\")
)
d = {
"APPID": self.AppID(),
"ORGANIZATION": self.config["org"],
"NAME": self.config["name"],
"PACKAGENAME": self.packagename(),
"WEBSITE": self.config["url"],
"VERSION": self.config["version"],
"BANNER": bannerpath,
"SOURCEDIR": self.DIR_OUT,
"OUTDIR": self.DIR_STAGING,
}
return util.get_template("inno/setup.iss").substitute(d)
def iss_file(self, target):
d = {
"NAME": self.config["files"][target]["name"],
"FILENAME": target,
}
return util.get_template("inno/file.iss").substitute(d)
def iss_run(self, target):
d = {
"NAME": self.config["files"][target]["name"],
"FILENAME": target,
}
return util.get_template("inno/run.iss").substitute(d)
def iss_mime(self, mimetype, executable, reponame):
d = {
"TYPE": mimetype["extension"]
+ "FiletypeAssociation-"
+ self.config["package"],
"EXTENSION": mimetype["extension"],
"DESCRIPTION": mimetype["description"],
"EXECUTABLE": executable,
"ICON": os.path.basename(mimetype["icon"]),
}
return util.get_template("inno/mime.iss").substitute(d)
def pillow(self, icon, target, size, fmt):
if os.path.exists(icon):
print(
"Generate icon:",
icon,
target,
"(" + str(size) + "x" + str(size),
fmt + ")",
)
img = Image.open(icon)
img.thumbnail((size, size), Image.ANTIALIAS)
img.save(target)
else:
util.error("Icon does not exist:", icon)
def icon(self, icon, executable):
self.pillow(
os.path.join(executable, icon),
os.path.join(self.DIR_OUT, executable + ".ico"),
256,
"png",
)
self.iss += self.iss_file(executable)
if "run" in self.config and executable in self.config["run"]:
self.iss += self.iss_run(executable)
def mimetypes(self, mi | metypes, executable, reponame):
for mimetype in mimetypes:
self.iss += self.iss_mime(mimetype, executable, reponame)
self.pillow(
os.path.join(reponame, mimetype["icon"]),
os.path.join(self.DIR_OUT, os.path.basename(mimetype["icon"])),
256,
"png",
)
def make | (self):
super(Packager, self).make()
self.iss = self.iss_setup() + "\n"
def finish(self):
super(Packager, self).finish()
with util.pushd(self.DIR_STAGING):
util.write(self.iss, "installer.iss")
try:
subprocess.check_call(["iscc", "installer.iss"])
except (OSError, WindowsError):
util.error("iscc not found; exiting.")
except subprocess.CalledProcessError as e:
util.error("iscc returned:", e.returncode)
def install(self):
with util.pushd(self.DIR_STAGING):
try:
util.command([".\\" + self.packagename() + ".exe"])
except:
util.error("Installation failed! Oh, well.")
|
JustinTulloss/harmonize.fm | masterapp/masterapp/model/manage/versions/22/22.py | Python | mit | 650 | 0.015385 | from sqlalchemy import *
from sqlalchemy.exceptions import | OperationalError
from sqlalchemy.schema import DDL
import migrate.changeset
from migrate.changeset.exceptions import NotSupportedError
from migrate import *
metadata = MetaData(migrate_engine)
users_table = Table('users', metadata,
Column('id', Integer, primary_key=True))
premiumcol = Column('p | remium', Boolean)
def upgrade():
users_table.append_column(premiumcol)
migrate.changeset.create_column(premiumcol, users_table)
def downgrade():
try:
migrate.changeset.drop_column('premium', users_table)
except NotSupportedError:
print 'Unable to downgrade!'
|
osantana/correios | tests/vcr.py | Python | apache-2.0 | 1,315 | 0 | # Copyright 2016 Osvaldo Santana Neto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except i | n compliance with | the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
from vcr import VCR
USER_REGEX = re.compile(r'<usuario>\w+</usuario>')
PASS_REGEX = re.compile(r'<senha>.*</senha>')
def replace_auth(request):
if not request.body:
return request
body = request.body.decode()
body = USER_REGEX.sub(r'<usuario>teste</usuario>', body)
body = PASS_REGEX.sub(r'<senha>****</senha>', body)
request.body = body.encode()
return request
FIXTURES_DIR = os.path.join(os.path.dirname(__file__), "fixtures")
vcr = VCR(
record_mode='once',
serializer='yaml',
cassette_library_dir=os.path.join(FIXTURES_DIR, 'cassettes'),
path_transformer=VCR.ensure_suffix('.yaml'),
match_on=['method'],
before_record_request=replace_auth,
)
|
urbn/kombu | t/unit/utils/test_encoding.py | Python | bsd-3-clause | 3,499 | 0 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import sys
from contextlib import contextmanager
from case import patch, skip
from kombu.five import bytes_t, string_t, string
from kombu.utils.encoding import (
get_default_encoding_file, safe_str,
set_default_encoding_file, default_encoding,
)
@contextmanager
def clean_encoding():
old_encoding = sys.modules.pop('kombu.utils.encoding', None)
import kombu.utils.encoding
try:
yield kombu.utils.encoding
finally:
if old_encoding:
sys.modules['kombu.utils.encoding'] = old_encoding
class test_default_encoding:
def test_set_default_ | file(self):
prev = get_d | efault_encoding_file()
try:
set_default_encoding_file('/foo.txt')
assert get_default_encoding_file() == '/foo.txt'
finally:
set_default_encoding_file(prev)
@patch('sys.getfilesystemencoding')
def test_default(self, getdefaultencoding):
getdefaultencoding.return_value = 'ascii'
with clean_encoding() as encoding:
enc = encoding.default_encoding()
if sys.platform.startswith('java'):
assert enc == 'utf-8'
else:
assert enc == 'ascii'
getdefaultencoding.assert_called_with()
@skip.if_python3()
def test_str_to_bytes():
with clean_encoding() as e:
assert isinstance(e.str_to_bytes('foobar'), bytes_t)
@skip.if_python3()
def test_from_utf8():
with clean_encoding() as e:
assert isinstance(e.from_utf8('foobar'), bytes_t)
@skip.if_python3()
def test_default_encode():
with clean_encoding() as e:
assert e.default_encode(b'foo')
class newbytes(bytes):
"""Mock class to simulate python-future newbytes class"""
def __repr__(self):
return 'b' + super(newbytes, self).__repr__()
def __str__(self):
return 'b' + "'{0}'".format(super(newbytes, self).__str__())
class newstr(string):
"""Mock class to simulate python-future newstr class"""
def encode(self, encoding=None, errors=None):
return newbytes(super(newstr, self).encode(encoding, errors))
class test_safe_str:
def setup(self):
self._encoding = self.patching('sys.getfilesystemencoding')
self._encoding.return_value = 'ascii'
def test_when_bytes(self):
assert safe_str('foo') == 'foo'
def test_when_newstr(self):
"""Simulates using python-future package under 2.7"""
assert str(safe_str(newstr('foo'))) == 'foo'
def test_when_unicode(self):
assert isinstance(safe_str('foo'), string_t)
def test_when_encoding_utf8(self):
self._encoding.return_value = 'utf-8'
assert default_encoding() == 'utf-8'
s = 'The quiæk fåx jømps øver the lazy dåg'
res = safe_str(s)
assert isinstance(res, string_t)
def test_when_containing_high_chars(self):
self._encoding.return_value = 'ascii'
s = 'The quiæk fåx jømps øver the lazy dåg'
res = safe_str(s)
assert isinstance(res, string_t)
assert len(s) == len(res)
def test_when_not_string(self):
o = object()
assert safe_str(o) == repr(o)
def test_when_unrepresentable(self):
class UnrepresentableObject(object):
def __repr__(self):
raise KeyError('foo')
assert '<Unrepresentable' in safe_str(UnrepresentableObject())
|
bndl/bndl | bndl/compute/tests/test_many_nodes.py | Python | apache-2.0 | 745 | 0 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BAS | IS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from bndl.compute.tests import ComputeTest
class ManyN | odesTest(ComputeTest):
executor_count = 8
def test_many_nodes(self):
self.assertAlmostEqual(self.ctx.range(1, 100).mean(), 50)
|
patrickwestphal/owlapy | tests/util/hashcode_tests.py | Python | gpl-3.0 | 55,391 | 0.000036 | import unittest
from rdflib import URIRef
from owlapy import model
from owlapy.util.hashcode import HashCode
from owlapy.vocab.owlfacet import OWLFacet
class TestHashCode(unittest.TestCase):
def test_hash_ontology(self):
ont_id = model.OWLOntologyID()
data_factory = model.OWLDataFactory()
man = model.OWLOntologyManager(data_factory)
ont = model.OWLOntology(man, ont_id)
ont_id_hash = hash(ont_id)
self.assertEqual(ont_id_hash, HashCode.hash_code(ont))
def test_asym_obj_prop_axiom(self):
prop = model.OWLObjectProperty(model.IRI('http://ex.org/prop'))
ann_prop1 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp'))
ann_val1 = model.OWLLiteral('annotation 1')
ann1 = model.OWLAnnotation(ann_prop1, ann_val1, [])
ann_prop2 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp2'))
ann_val2 = model.OWLLiteral('annotation 2')
ann2 = model.OWLAnnotation(ann_prop2, ann_val2, [])
anns = {ann1, ann2}
asym_obj_prop = model.OWLAsymmetricObjectPropertyAxiom(prop, anns)
asym_obj_prop_hash = (((3 * HashCode.MULT) + hash(prop)) *
HashCode.MULT) + HashCode._hash_list(anns)
self.assertEqual(asym_obj_prop_hash, HashCode.hash_code(asym_obj_prop))
def test_cls_assertion_axiom(self):
indiv = model.OWLNamedIndividual(model.IRI('http://ex.org/indivXYZ'))
ce = model.OWLClass(model.IRI('http://ex.org/SomeCls'))
ann_prop1 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp'))
ann_val1 = model.OWLLiteral('annotation 1')
ann1 = model.OWLAnnotation(ann_prop1, ann_val1, [])
ann_prop2 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp2'))
ann_val2 = model.OWLLiteral('annotation 2')
ann2 = model.OWLAnnotation(ann_prop2, ann_val2, [])
anns = {ann1, ann2}
cls_ass_ax = model.OWLClassAssertionAxiom(indiv, ce, anns)
cls_ass_ax_hash = (((((7 * HashCode.MULT) + hash(indiv)) *
HashCode.MULT) + hash(ce)) * HashCode.MULT) + \
HashCode._hash_list(anns)
self.assertEqual(cls_ass_ax_hash, HashCode.hash_code(cls_ass_ax))
def test_data_prop_assertion_axiom(self):
subj = model.OWLNamedIndividual(model.IRI('http://ex.org/indivABC'))
prop = model.OWLDataProperty(model.IRI('http://ex.org/prop'))
val = model.OWLLiteral('abcd')
ann_prop1 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp'))
ann_val1 = model.OWLLiteral('annotation 1')
ann1 = model.OWLAnnotation(ann_prop1, ann_val1, [])
ann_prop2 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp2'))
ann_val2 = model.OWLLiteral('annotation 2')
ann2 = model.OWLAnnotation(ann_prop2, ann_val2, [])
anns = {ann1, ann2}
axiom = model.OWLDataPropertyAssertionAxiom(subj, prop, val, anns)
axiom_hash = (((((((11 * HashCode.MULT) + hash(subj)) *
HashCode.MULT) + hash(prop)) * HashCode.MULT) +
hash(val)) * HashCode.MULT) + HashCode._hash_list(anns)
self.assertEqual(axiom_hash, HashCode.hash_code(axiom))
def test_data_prop_dom_axiom(self):
prop = model.OWLDataProperty(model.IRI('http://ex.org/prop'))
dom = model.OWLClass(model.IRI('http://ex.org/SomeClass'))
ann_prop1 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp'))
ann_val1 = model.OWLLiteral('annotation 1')
ann1 = model.OWLAnnotation(ann_prop1, ann_val1, [])
ann_prop2 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp2'))
ann_val2 = model.OWLLiteral('annotation 2')
ann2 = model.OWLAnnotation(ann_prop2, ann_val2, [])
anns = {ann1, ann2}
axiom = model.OWLDataPropertyDomainAxiom(prop, dom, anns)
axiom_hash = (((((13 * HashCode.MULT) + hash(prop)) *
HashCode.MULT) + hash(dom)) * HashCode.MULT) + \
HashCode._hash_list(anns)
self.assertEqual(axiom_hash, HashCode.hash_code(axiom))
def test_data_prop_range_axiom(self):
prop = model.OWLDataProperty(model.IRI('http://ex.org/prop'))
rnge = model.OWLDatatype(model.IRI('http://ex.org/dtype/int'))
ann_prop1 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp'))
ann_val1 = model.OWLLiteral('annotation 1')
ann1 = model.OWLAnnotation(ann_prop1, ann_val1, [])
ann_prop2 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp2'))
ann_val2 = model.OWLLiteral('annotation 2')
ann2 = model.OWLAnnotation(ann_prop2, ann_val2, [])
anns = {ann1, ann2}
axiom = model.OWLDataPropertyRangeAxiom(prop, rnge, anns)
axiom_hash = (((((17 * HashCode.MULT) + hash(prop)) *
HashCode.MULT) + hash(rnge)) * HashCode.MULT) + \
HashCode._hash_list(anns)
self.assertEqual(axiom_hash, HashCode.hash_code(axiom))
def test_sub_data_prop_of_axiom(self):
sub_prop = model.OWLDataProperty('http://ex.org/subProp')
super_prop = model.OWLDataProperty('http://ex.org/superProp')
ann_prop1 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp'))
ann_val1 = model.OWLLiteral('annotation 1')
ann1 = model.OWLAnnotation(ann_prop1, ann_val1, [])
ann_prop2 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp2'))
ann_val2 = model.OWLLiteral('annotation 2')
ann2 = model.OWLAnnotation(ann_prop2, ann_val2, [])
anns = {ann1, ann2}
axiom = model.OWLSubDataPropertyOfAxiom(sub_prop, super_prop, anns)
axiom_hash = (((((19 * HashCode.MULT) + hash(sub_prop)) *
HashCode.MULT) + hash(super_prop)) * HashCode.MULT) + \
HashCode._hash_list(anns)
self.assertEqual(axiom_hash, HashCode.hash_code(axiom))
def test_declaration_axiom(self):
entity = model.OWLClass(model.IRI('http://ex.org/SomeClass'))
ann_prop1 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp'))
ann_val1 = model.OWLLiteral('annotation 1')
ann1 = model.OWLAnnotation(ann_prop1, ann_val1, [])
ann_prop2 = model.OWLAnnotationProperty(
| model.IRI('http://ex.org/anProp2'))
ann_val2 = model.OWLLiteral('annotation 2')
ann2 = model.OWLAnnotation(ann_prop2, ann_val2, [])
anns = {ann1, ann2}
axiom = model.OWLDeclarationAxiom(entity, anns)
axiom_hash = (((23 * HashCode.MULT) + hash(entity)) *
HashCode.MULT) + HashCode._hash_list(anns)
| self.assertEqual(axiom_hash, HashCode.hash_code(axiom))
def test_diff_indivs_axiom(self):
indiv1 = model.OWLNamedIndividual(model.IRI('http://ex.org/indivABC'))
indiv2 = model.OWLAnonymousIndividual(model.NodeID('_:23'))
indivs = {indiv1, indiv2}
ann_prop1 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp'))
ann_val1 = model.OWLLiteral('annotation 1')
ann1 = model.OWLAnnotation(ann_prop1, ann_val1, [])
ann_prop2 = model.OWLAnnotationProperty(
model.IRI('http://ex.org/anProp2'))
ann_val2 = model.OWLLiteral('annotation 2')
ann2 = model.OWLAnnotation(ann_prop2, ann_val2, [])
anns = {ann1, ann2}
axiom = model.OWLDifferentIndividualsAxiom(indivs, anns)
axiom_hash = (((29 * HashCode.MULT) + HashCode._hash_list(indivs)) *
HashCode.MULT) + HashCode._hash_list(anns)
self.assertEqual(axiom_hash, HashCode.hash_code(axiom))
def test_disjoint_classes_axiom(self):
ce1 = model.OWLClass(model.IRI('http://ex.org/SomeClass'))
prop = model.OWLObjectProperty(model.IRI('http://ex.org/prop'))
filler = model.OWLClass(model.IRI('http://ex.org/AnotherClass'))
|
avsm/perscon-imap | offlineimap/folder/UIDMaps.py | Python | gpl-2.0 | 8,119 | 0.002833 | # Base folder support
# Copyright (C) 2002 John Goerzen
# <jgoerzen@complete.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from threading import *
from offlineimap import threadutil
from offlineimap.threadutil import InstanceLimitedThread
from offlineimap.ui import UIBase
from IMAP import IMAPFolder
import os.path, re
class MappingFolderMixIn:
def _initmapping(self):
self.maplock = Lock()
(self.diskr2l, self.diskl2r) = self._loadmaps()
self._mb = self.__class__.__bases__[1]
def _getmapfilename(self):
return os.path.join(self.repository.getmapdir(),
self.getfolderbasename())
def _loadmaps(self):
self.maplock.acquire()
try:
mapfilename = self._getmapfilename()
if not os.path.exists(mapfilename):
return ({}, {})
file = open(mapfilename, 'rt')
r2l = {}
l2r = {}
| while 1:
line = file.readline()
if not len(line):
break
line = line.strip()
(str1, str2) = line.split(':')
loc = long(str1)
rem = long(str2)
r2l[rem] = loc
l2r[loc] = rem
return (r2l, l2r)
finally:
self.maplock.release()
def _savemaps(self, dolock = 1):
mapfilename = self._getmapfilename()
| if dolock: self.maplock.acquire()
try:
file = open(mapfilename + ".tmp", 'wt')
for (key, value) in self.diskl2r.iteritems():
file.write("%d:%d\n" % (key, value))
file.close()
os.rename(mapfilename + '.tmp', mapfilename)
finally:
if dolock: self.maplock.release()
def _uidlist(self, mapping, items):
return [mapping[x] for x in items]
def cachemessagelist(self):
self._mb.cachemessagelist(self)
reallist = self._mb.getmessagelist(self)
self.maplock.acquire()
try:
# OK. Now we've got a nice list. First, delete things from the
# summary that have been deleted from the folder.
for luid in self.diskl2r.keys():
if not reallist.has_key(luid):
ruid = self.diskl2r[luid]
del self.diskr2l[ruid]
del self.diskl2r[luid]
# Now, assign negative UIDs to local items.
self._savemaps(dolock = 0)
nextneg = -1
self.r2l = self.diskr2l.copy()
self.l2r = self.diskl2r.copy()
for luid in reallist.keys():
if not self.l2r.has_key(luid):
ruid = nextneg
nextneg -= 1
self.l2r[luid] = ruid
self.r2l[ruid] = luid
finally:
self.maplock.release()
def getmessagelist(self):
"""Gets the current message list.
You must call cachemessagelist() before calling this function!"""
retval = {}
localhash = self._mb.getmessagelist(self)
self.maplock.acquire()
try:
for key, value in localhash.items():
try:
key = self.l2r[key]
except KeyError:
# Sometimes, the IMAP backend may put in a new message,
# then this function acquires the lock before the system
# has the chance to note it in the mapping. In that case,
# just ignore it.
continue
value = value.copy()
value['uid'] = self.l2r[value['uid']]
retval[key] = value
return retval
finally:
self.maplock.release()
def getmessage(self, uid):
"""Returns the content of the specified message."""
return self._mb.getmessage(self, self.r2l[uid])
def savemessage(self, uid, content, flags, rtime):
"""Writes a new message, with the specified uid.
If the uid is < 0, the backend should assign a new uid and return it.
If the backend cannot assign a new uid, it returns the uid passed in
WITHOUT saving the message.
If the backend CAN assign a new uid, but cannot find out what this UID
is (as is the case with many IMAP servers), it returns 0 but DOES save
the message.
IMAP backend should be the only one that can assign a new uid.
If the uid is > 0, the backend should set the uid to this, if it can.
If it cannot set the uid to that, it will save it anyway.
It will return the uid assigned in any case.
"""
if uid < 0:
# We cannot assign a new uid.
return uid
if uid in self.r2l:
self.savemessageflags(uid, flags)
return uid
newluid = self._mb.savemessage(self, -1, content, flags, rtime)
if newluid < 1:
raise ValueError, "Backend could not find uid for message"
self.maplock.acquire()
try:
self.diskl2r[newluid] = uid
self.diskr2l[uid] = newluid
self.l2r[newluid] = uid
self.r2l[uid] = newluid
self._savemaps(dolock = 0)
finally:
self.maplock.release()
def getmessageflags(self, uid):
return self._mb.getmessageflags(self, self.r2l[uid])
def getmessagetime(self, uid):
return None
def savemessageflags(self, uid, flags):
self._mb.savemessageflags(self, self.r2l[uid], flags)
def addmessageflags(self, uid, flags):
self._mb.addmessageflags(self, self.r2l[uid], flags)
def addmessagesflags(self, uidlist, flags):
self._mb.addmessagesflags(self, self._uidlist(self.r2l, uidlist),
flags)
def _mapped_delete(self, uidlist):
self.maplock.acquire()
try:
needssave = 0
for ruid in uidlist:
luid = self.r2l[ruid]
del self.r2l[ruid]
del self.l2r[luid]
if ruid > 0:
del self.diskr2l[ruid]
del self.diskl2r[luid]
needssave = 1
if needssave:
self._savemaps(dolock = 0)
finally:
self.maplock.release()
def deletemessageflags(self, uid, flags):
self._mb.deletemessageflags(self, self.r2l[uid], flags)
def deletemessagesflags(self, uidlist, flags):
self._mb.deletemessagesflags(self, self._uidlist(self.r2l, uidlist),
flags)
def deletemessage(self, uid):
self._mb.deletemessage(self, self.r2l[uid])
self._mapped_delete([uid])
def deletemessages(self, uidlist):
self._mb.deletemessages(self, self._uidlist(self.r2l, uidlist))
self._mapped_delete(uidlist)
#def syncmessagesto_neguid_msg(self, uid, dest, applyto, register = 1):
# does not need changes because it calls functions that make the changes
# same goes for all other sync messages types.
# Define a class for local part of IMAP.
class MappedIMAPFolder(MappingFolderMixIn, IMAPFolder):
def __init__(self, *args, **kwargs):
apply(IMAPFolder.__init__, (self,) + args, kwargs)
self._initmapping()
|
dotsdl/PyTables | tables/group.py | Python | bsd-3-clause | 47,685 | 0.000713 | # -*- coding: utf-8 -*-
########################################################################
#
# License: BSD
# Created: September 4, 2002
# Author: Francesc Alted - faltet@pytables.com
#
# $Id$
#
########################################################################
"""Here is defined the Group class."""
import os
import weakref
import warnings
import tables.misc.proxydict
from tables import hdf5extension
from tables import utilsextension
from tables.registry import class_id_dict
from tables.exceptions import (
NodeError, NoSuchNodeError, NaturalNameWarning, PerformanceWarning)
from tables.filters import Filters
from tables.registry import get_class_by_name
from tables.path import check_name_validity, join_path, isvisiblename
from tables.node import Node, NotLoggedMixin
from tables.leaf import Leaf
from tables.unimplemented import UnImplemented, Unknown
from tables.link import Link, SoftLink, ExternalLink
obversion = "1.0"
class _ChildrenDict(tables.misc.proxydict.ProxyDict):
def _get_value_from_container(self, container, key):
return container._f_get_child(key)
class Group(hdf5extension.Group, Node):
"""Basic PyTables grouping structure.
Instances of this class are grouping structures containing *child*
instances of zero or more groups or leaves, together with
supporting metadata. Each group has exactly one *parent* group.
Working with groups and leaves is similar in many ways to working
with directories and files, respectively, in a Unix filesystem.
As with Unix directories and files, objects in the object tree are
often described by giving their full (or absolute) path names.
This full path can be specified either as a string (like in
'/group1/group2') or as a complete object path written in *natural
naming* schema (like in file.root.group1.group2).
A collateral effect of the *natural naming* schema is that the
names of members in the Group class and its instances must be
carefully chosen to avoid colliding with existing children node
names. For this reason and to avoid polluting the children
namespace all members in a Group start with some reserved prefix,
like _f_ (for public methods), _g_ (for private ones), _v_ (for
instance variables) or _c_ (for class variables). Any attempt to
create a new child node whose name starts with one of these
prefixes will raise a ValueError exception.
Another effect of natural naming is that children named after
Python keywords or having names not valid as Python identifiers
(e.g. class, $a or 44) can not be accessed using the node.child
syntax. You will be forced to use node._f_get_child(child) to
access them (which is recommended for programmatic accesses).
You will also need to use _f_get_child() to access an existing
child node if you set a Python attribute in the Group with the
same name as that node (you will get a NaturalNameWarning when
doing this).
Parameters
----------
parentnode
The parent :class:`Group` object.
.. versionchanged:: 3.0
Renamed from *parentNode* to *parentnode*
name : str
The name of this node in its parent group.
title
The title for this group
new
If this group is new or has to be read from disk
filters : Filters
A Filters instance
Notes
-----
The following documentation includes methods that are automatically
called when a Group instance is accessed in a special way.
For instance, this class defines the __setattr__, __getattr__, and
__delattr__ methods, and they set, get and delete *ordinary Python
attributes* as normally intended. In addition to that, __getattr__
allows getting *child nodes* by their name for the sake of easy
interaction on the command line, as long as there is no Python
attribute with the same name. Groups also allow the interactive
completion (when using readline) of the names of child nodes.
For instance::
# get a Python attribute
nchild = group._v_ | nchildren
# Add a Table child called 'table' under 'group'.
h5file.create_table(group, 'table', myDescription)
table = group.table # get the table child instance
group.table = 'foo' # set a Python attribute
# (PyTables warns you here about using the name of a child node.)
foo = group.table # get a | Python attribute
del group.table # delete a Python attribute
table = group.table # get the table child instance again
.. rubric:: Group attributes
The following instance variables are provided in addition to those
in Node (see :ref:`NodeClassDescr`):
.. attribute:: _v_children
Dictionary with all nodes hanging from this group.
.. attribute:: _v_groups
Dictionary with all groups hanging from this group.
.. attribute:: _v_hidden
Dictionary with all hidden nodes hanging from this group.
.. attribute:: _v_leaves
Dictionary with all leaves hanging from this group.
.. attribute:: _v_links
Dictionary with all links hanging from this group.
.. attribute:: _v_unknown
Dictionary with all unknown nodes hanging from this group.
"""
# Class identifier.
_c_classid = 'GROUP'
# Children containers that should be loaded only in a lazy way.
# These are documented in the ``Group._g_add_children_names`` method.
_c_lazy_children_attrs = (
'__members__', '_v_children', '_v_groups', '_v_leaves',
'_v_links', '_v_unknown', '_v_hidden')
# `_v_nchildren` is a direct read-only shorthand
# for the number of *visible* children in a group.
def _g_getnchildren(self):
return len(self._v_children)
_v_nchildren = property(_g_getnchildren, None, None,
"The number of children hanging from this group.")
# `_v_filters` is a direct read-write shorthand for the ``FILTERS``
# attribute with the default `Filters` instance as a default value.
def _g_getfilters(self):
filters = getattr(self._v_attrs, 'FILTERS', None)
if filters is None:
filters = Filters()
return filters
def _g_setfilters(self, value):
if not isinstance(value, Filters):
raise TypeError(
"value is not an instance of `Filters`: %r" % (value,))
self._v_attrs.FILTERS = value
def _g_delfilters(self):
del self._v_attrs.FILTERS
_v_filters = property(
_g_getfilters, _g_setfilters, _g_delfilters,
"""Default filter properties for child nodes.
You can (and are encouraged to) use this property to get, set and
delete the FILTERS HDF5 attribute of the group, which stores a Filters
instance (see :ref:`FiltersClassDescr`). When the group has no such
attribute, a default Filters instance is used.
""")
def __init__(self, parentnode, name,
title="", new=False, filters=None,
_log=True):
# Remember to assign these values in the root group constructor
# if it does not use this one!
# First, set attributes belonging to group objects.
self._v_version = obversion
"""The object version of this group."""
self._v_new = new
"""Is this the first time the node has been created?"""
self._v_new_title = title
"""New title for this node."""
self._v_new_filters = filters
"""New default filter properties for child nodes."""
self._v_max_group_width = parentnode._v_file.params['MAX_GROUP_WIDTH']
"""Maximum number of children on each group before warning the user.
.. versionchanged:: 3.0
The *_v_maxGroupWidth* attribute has been renamed into
*_v_max_group_width*.
"""
# Finally, set up this object as a node.
super(Group, self).__init__(parentnode, name, _log)
def _g_post_init_hook(self):
if self._v_new:
|
arnavd96/Cinemiezer | myvenv/lib/python3.4/site-packages/music21/alpha/webapps/server/urlqueryapp.py | Python | mit | 3,408 | 0.013791 | # -*- coding: utf-8 -*-
'''
An interface for music21 using mod_wsgi
To use, first install mod_wsgi and include it in the HTTPD.conf file.
Add this file to the server, ideally not in the document root,
on mac this could be /Library/WebServer/wsgi-scripts/music21wsgiapp.py
Then edit the HTTPD.conf file to redirect any requests to WEBSERVER:/music21interface to call this file:
Note: unlike with mod_python, the end of the URL does not determine which function is called,
WSGIScriptAlias always calls application.
WSGIScriptAlias /music21interface /Library/WebServer/wsgi-scripts/music21wsgiapp.py |
Further down the conf file, give the webserver access to this directory:
<Directory "/Library/WebServer/wsgi-scripts">
Order allow,deny
Allow from all
</Directory>
The mod_wsgi handler will call the application function below with the request
content in the environ variable.
To use the application, send a POST request to WEBSERVER:/music21interface
where the contents of the POST is a JSON s | tring.
See docs for music21.webapps for specifications about the JSON string structure
'''
#sys.path.insert(0, '/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/')
#sys.path.insert(0, '/Library/Python/2.7/site-packages/')
#import cgi
#from music21 import webapps
from music21 import note
def music21ModWSGIVisualApplication(environ, start_response):
'''
Application function in proper format for a MOD-WSGI Application:
Used to test returning visual images (plots, lily.png) to the user
'''
status = '200 OK'
#pathInfo = environ['PATH_INFO'] # Contents of path after mount point of wsgi app but before question mark
queryString = environ['QUERY_STRING'] # Contents of URL after question mark
documentRoot = environ['DOCUMENT_ROOT']
#outputStr = ""
noteName = queryString
n = note.Note(noteName)
tempPath = n.write('png')
print(tempPath)
writePath = documentRoot + "/music21/OutputFiles/"
fin = open(tempPath,'r')
fout = open(writePath+"out.jpg","w")
fout.write(fin.read())
fout.close()
#plt.plot([1,2,3,4])
#plt.ylabel(workName)
#p = graph.PlotHorizontalBarPitchClassOffset(sc,doneAction=None)
#p.write('/Library/WebServer/Documents/OutputFiles/graph.jpg')
#templateStr = tempPath
templateStr = imageEmbedTemplate(tempPath,'/music21/OutputFiles/out.jpg')
response_headers = [('Content-type', 'text/html'),('Content-Length', str(len(templateStr)))]
start_response(status, response_headers)
return [templateStr]
application = music21ModWSGIVisualApplication
def reduction(sc):
reductionStream = sc.chordify()
for c in reductionStream.flat.getElementsByClass('Chord'):
c.closedPosition(forceOctave=4, inPlace=True)
c.removeRedundantPitches(inPlace=True)
c.annotateIntervals()
return reductionStream
def imageEmbedTemplate(title, imgSrc):
htmlData = """
<html>
<head>
<title>Music21 URL App Response</title>
</head>
<body onload="setup()">
<h1>"""
htmlData += title
htmlData += "</h1>"
htmlData += "<img src='"+imgSrc+"'/>"
htmlData += """
</body>
</html>
"""
return htmlData
|
ESOedX/edx-platform | common/lib/capa/capa/tests/test_input_templates.py | Python | agpl-3.0 | 46,422 | 0.001896 | """
Tests for the logic in input type mako templates.
"""
from __future__ import absolute_import
import json
import unittest
from collections import OrderedDict
from lxml import etree
from mako import exceptions
from six.moves import range
from capa.inputtypes import Status
from capa.tests.helpers import capa_render_template
from openedx.core.djangolib.markup import HTML
from xmodule.stringify import stringify_children
class TemplateError(Exception):
"""
Error occurred while rendering a Mako template.
"""
pass
class TemplateTestCase(unittest.TestCase):
"""
Utilities for testing templates.
"""
# Subclasses override this to specify the file name of the template
# to be loaded from capa/templates.
# The template name should include the .html extension:
# for example: choicegroup.html
TEMPLATE_NAME = None
DESCRIBEDBY = 'aria-describedby="desc-1 desc-2"'
DESCRIPTIONS = OrderedDict(
[
('desc-1', 'description text 1'),
('desc-2', '<em>description</em> <mark>text</mark> 2')
]
)
DESCRIPTION_IDS = ' '.join(list(DESCRIPTIONS.keys()))
RESPONSE_DATA = {
'label': 'question text 101',
'descriptions': DESCRIPTIONS
}
def setUp(self):
"""
Initialize the context.
"""
super(TemplateTestCase, self).setUp()
self.context = {}
def render_to_xml(self, context_dict):
"""
Render the template using the `context_dict` dict.
Returns an `etree` XML element.
"""
# add dummy STATIC_URL to template context
context_dict.setdefault("STATIC_URL", "/dummy-static/")
try:
xml_str = capa_render_template(self.TEMPLATE_NAME, context_dict)
except:
raise TemplateError(exceptions.text_error_template().render())
# Attempt to construct an XML tree from the template
# This makes it easy to use XPath to make assertions, rather
# than dealing with a string.
# We modify the string slightly by wrapping it in <test>
# tags, to ensure it has one root element.
try:
xml = etree.fromstring("<test>" + xml_str + "</test>")
except Exception as exc:
raise TemplateError("Could not parse XML from '{0}': {1}".format(
xml_str, str(exc)))
else:
return xml
def assert_has_xpath(self, xml_root, xpath, context_dict, exact_num=1):
"""
Asserts that the xml tree has an element satisfying `xpath`.
`xml_root` is an etree XML element
`xpath` is an XPath string, such as `'/foo/bar'`
`context` is used to print a debugging message
`exact_num` is the exact number of matches to expect.
"""
message = ("XML does not have %d match(es) for xpath '%s'\nXML: %s\nContext: %s"
% (exact_num, str(xpath), etree.tostring(xml_root), str(context_dict)))
self.assertEqual(len(xml_root.xpath(xpath)), exact_num, msg=message)
def assert_no_xpath(self, xml_root, xpath, context_dict):
"""
Asserts that the xml tree does NOT have an element
satisfying `xpath`.
`xml_root` is an etree XML element
`xpath` is an XPath string, such as `'/foo/bar'`
`context` is used to print a debugging message
"""
self.assert_has_xpath(xml_root, xpath, context_dict, exact_num=0)
def assert_has_text(self, xml_root, xpath, text, exact=True):
"""
Find the element at `xpath` in `xml_root` and assert
that its text is `text`.
`xml_root` is an etree XML element
`xpath` is an XPath string, such as `'/fo | o/bar'`
`text` is t | he expected text that the element should contain
If multiple elements are found, checks the first one.
If no elements are found, the assertion fails.
"""
element_list = xml_root.xpath(xpath)
self.assertGreater(len(element_list), 0, "Could not find element at '%s'\n%s" %
(str(xpath), etree.tostring(xml_root)))
if exact:
self.assertEqual(text, element_list[0].text.strip())
else:
self.assertIn(text, element_list[0].text.strip())
def assert_description(self, describedby_xpaths):
"""
Verify that descriptions information is correct.
Arguments:
describedby_xpaths (list): list of xpaths to check aria-describedby attribute
"""
xml = self.render_to_xml(self.context)
# Verify that each description <p> tag has correct id, text and order
descriptions = OrderedDict(
(tag.get('id'), stringify_children(tag)) for tag in xml.xpath('//p[@class="question-description"]')
)
self.assertEqual(self.DESCRIPTIONS, descriptions)
# for each xpath verify that description_ids are set correctly
for describedby_xpath in describedby_xpaths:
describedbys = xml.xpath(describedby_xpath)
# aria-describedby attributes must have ids
self.assertTrue(describedbys)
for describedby in describedbys:
self.assertEqual(describedby, self.DESCRIPTION_IDS)
def assert_describedby_attribute(self, describedby_xpaths):
"""
Verify that an element has no aria-describedby attribute if there are no descriptions.
Arguments:
describedby_xpaths (list): list of xpaths to check aria-describedby attribute
"""
self.context['describedby_html'] = ''
xml = self.render_to_xml(self.context)
# for each xpath verify that description_ids are set correctly
for describedby_xpath in describedby_xpaths:
describedbys = xml.xpath(describedby_xpath)
self.assertFalse(describedbys)
def assert_status(self, status_div=False, status_class=False):
"""
Verify status information.
Arguments:
status_div (bool): check presence of status div
status_class (bool): check presence of status class
"""
cases = [
('correct', 'correct'),
('unsubmitted', 'unanswered'),
('submitted', 'submitted'),
('incorrect', 'incorrect'),
('incomplete', 'incorrect')
]
for context_status, div_class in cases:
self.context['status'] = Status(context_status)
xml = self.render_to_xml(self.context)
# Expect that we get a <div> with correct class
if status_div:
xpath = "//div[normalize-space(@class)='%s']" % div_class
self.assert_has_xpath(xml, xpath, self.context)
# Expect that we get a <span> with class="status"
# (used to by CSS to draw the green check / red x)
self.assert_has_text(
xml,
"//span[@class='status {}']/span[@class='sr']".format(
div_class if status_class else ''
),
self.context['status'].display_name
)
def assert_label(self, xpath=None, aria_label=False):
"""
Verify label is rendered correctly.
Arguments:
xpath (str): xpath expression for label element
aria_label (bool): check aria-label attribute value
"""
labels = [
{
'actual': "You see, but you do not observe. The distinction is clear.",
'expected': "You see, but you do not observe. The distinction is clear.",
},
{
'actual': "I choose to have <mark>faith</mark> because without that, I have <em>nothing</em>.",
'expected': "I choose to have faith because without that, I have nothing.",
}
]
response_data = {
'response_data': {
'descriptions': {},
'label': ''
}
}
self.context.update(response_data)
for label in labels:
self.context['response_data' |
jimberlage/servo | tests/wpt/web-platform-tests/webdriver/tests/take_element_screenshot/__init__.py | Python | mpl-2.0 | 516 | 0 | def el | ement_rect(session, element):
return session.execute_script("""
let {devicePixelRatio} = window;
let rect = arguments[0].getBoundingClientRect();
return {
x: Math.floor((rect.left + window.pageXOffset) * devicePixelRatio),
y: Math.floor((rect.top + window.pageYOffset) * devicePixelRatio),
width: Math.floor(rect.width * devicePixelRatio),
height: Math.floor(rect.height * devicePixelRatio),
};
""", args=(ele | ment,))
|
pli3/enigma2-git | lib/python/Screens/MessageBox.py | Python | gpl-2.0 | 3,710 | 0.040162 | from Screen import Screen
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from enigma import eTimer
class MessageBox(Screen):
TYPE_YESNO = 0
TYPE_INFO = 1
TYPE_WARNING = 2
TYPE_ERROR = 3
def __init__(self, session, text, type = TYPE_YESNO, timeout = -1, close_on_any_key = False, default = True, enable_input = True, msgBoxID = None, picon = None, simple = False):
self.type = type
Screen.__init__(self, session)
if simple:
self.skinName="MessageBoxSimple"
self.msgBoxID = msgBoxID
self["text"] = Label(text)
self["Text"] = StaticText(text)
self["selectedChoice"] = StaticText()
self.text = text
self.close_on_any_key = close_on_any_key
self["ErrorPixmap"] = Pixmap()
self["QuestionPixmap"] = Pixmap()
self["InfoPixmap"] = Pixmap()
self.timerRunning = False
self.initTimeout(timeout)
self.list = []
picon = picon or type
if picon != self.TYPE_ERROR:
self["ErrorPixmap"].hide()
if picon != self.TYPE_YESNO:
self["QuestionPixmap"].hide()
if picon != self.TYPE_INFO:
self["InfoPixmap"].hide()
if type == self.TYPE_YESNO:
if default == True:
self.list = [ (_("yes"), 0), (_("no"), 1) ]
else:
self.list = [ (_("no"), 1), (_("yes"), 0) ]
self["list"] = MenuList(self.list)
if self.list:
self["selectedChoice"].setText(self.list[0][0])
else:
self["list"].hide()
if enable_input:
self["actions"] = ActionMap(["MsgBoxActions", "DirectionActions"],
{
"cancel": self.cancel,
"ok": self.ok,
"alwaysOK": self.alwaysOK,
"up": self.up,
"down": self.down,
"left": self.left,
"right": self.right,
"upRepeated": self.up,
"downRepeated": self.down,
"leftRepeated": self.left,
"rightRepeated": self.right
}, -1)
def initTimeout(self, timeout):
self.timeout = timeout
if timeout > 0:
self.timer = eTimer()
self.timer.callback.append(self.timerTick)
self.onExecBegin.append(self.startTimer)
self.origTitle = None
if self.execing:
self.timerTick()
else:
self.onShown.append(self.__onShown)
self.timerRunning = True
else:
self.timerRunning = False
def __onShown(self):
self.onShown.remove(self.__onShown)
self.timerTick | ()
def startTimer(self):
self.timer.start(1000)
def stopTimer(self):
if self.timerRunning:
del self.timer
self.onExecBegin.remove(self.startTimer)
self.setTitle(self.origTitle)
self.timerRunning = | False
def timerTick(self):
if self.execing:
self.timeout -= 1
if self.origTitle is None:
self.origTitle = self.instance.getTitle()
self.setTitle(self.origTitle + " (" + str(self.timeout) + ")")
if self.timeout == 0:
self.timer.stop()
self.timerRunning = False
self.timeoutCallback()
def timeoutCallback(self):
print "Timeout!"
self.ok()
def cancel(self):
self.close(False)
def ok(self):
if self.type == self.TYPE_YESNO:
self.close(self["list"].getCurrent()[1] == 0)
else:
self.close(True)
def alwaysOK(self):
self.close(True)
def up(self):
self.move(self["list"].instance.moveUp)
def down(self):
self.move(self["list"].instance.moveDown)
def left(self):
self.move(self["list"].instance.pageUp)
def right(self):
self.move(self["list"].instance.pageDown)
def move(self, direction):
if self.close_on_any_key:
self.close(True)
self["list"].instance.moveSelection(direction)
if self.list:
self["selectedChoice"].setText(self["list"].getCurrent()[0])
self.stopTimer()
def __repr__(self):
return str(type(self)) + "(" + self.text + ")"
|
mfem/PyMFEM | mfem/common/generate_lininteg_ext.py | Python | bsd-3-clause | 1,212 | 0.00165 | file = "../_ser/lininteg.py"
out = ["namespace mfem {"]
fid = open(file, 'r')
for line in fid.readlines():
if line.startswith("class"):
cname = (line.split(' ')[1]).split('(')[0]
if line.startswith(" def __init__"):
pp = ""
if line.find("*args") != -1:
pp = " self._coeff = args"
elif line.find(", QG") != -1:
pp = " self._coeff = QG"
elif line.find(", QF)") != -1:
pp = " self._coeff = QF"
elif line.find(", F)") != -1:
| pp = " self._coeff = F"
elif line.find(", f, s=1.0, ir=None)") != -1:
pp = " self._coeff = (f, ir)"
elif | line.find(", uD_, lambda_, mu_, alpha_, kappa_)") != -1:
pp = " self._coeff = uD_"
elif line.find("(self)") != -1:
pass
else:
print(cname)
print(line)
assert False, "No recipt for this pattern "
if pp != "":
out.append("%pythonappend " + cname + "::" + cname + " %{")
out.append(pp)
out.append("%}")
fid.close()
out.append("}")
fid = open("lininteg_ext.i", "w")
fid.write("\n".join(out))
fid.close()
|
euccas/CodingPuzzles-Python | leet/source/twopointers/sort_colors.py | Python | mit | 3,224 | 0.004963 | class Solution():
def sortColors(nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
store = [[],[],[]]
for i in nums:
store[i].append(i)
return store[0]+store[1]+store[2]
class Solution1():
def sortColors(nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
from collections import Counter
n = Counter(nums)
nums = [0 for k in range(n[0])] + [1 for k in range(n[1])] + [2 for k in range(n[2])]
print(nums)
class Solution2():
def sortColors(nums):
cnt = {}
for i in nums:
cnt[i] = cnt.get(i, 0) + 1
nums = [0 for k in range(cnt[0])] + [1 for k in range(cnt[1])] + [2 for k in range(cnt[2])]
class Solution3():
def sortColors3(nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
if nums is None or len(nums) <= 1:
pass
def swap(nums, p_s, p_d):
if nums is None or len(nums) <= 1:
return nums
if p_s >= 0 and p_s <= len(nums) - 1 and p_d >= 0 and p_d <= len(nums) - 1:
nums[p_s], nums[p_d] = nums[p_d], nums[p_s]
return nums
p_red = 0
p_blue = len(nums) - 1
while nums[p_red] == 0 and p_red < len(nums) - 1:
p_red += 1
while nums[p_blue] == 2 and p_blue > 0:
p_blue -= 1
i = p_red
while (i <= p_blue):
if nums[i] == 0:
nums = swap(nums, i, p_red)
p_red += 1
| if nums[i] == 2:
nums = swap(nums, i, p_blue)
p_blue -= 1
else:
i += 1
print(nums)
class Solution4:
def sortColors(self, nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
if nums is None or len(nums) <= 1:
return
pl = 0
pr = len(nums) - 1
i = 0
while i <= pr:
if | nums[i] == 0:
nums[i], nums[pl] = nums[pl], nums[i]
pl += 1
i += 1
elif nums[i] == 1:
i += 1
else:
nums[i], nums[pr] = nums[pr], nums[i]
pr -= 1
return nums
class Solution5:
def sortColors(self, nums):
"""
:type nums: List[int]
:rtype: void Do not return anything, modify nums in-place instead.
"""
pivot = 1
s, e, l = 0, 0 , len(nums) - 1
while e <= l:
if nums[e] < pivot:
nums[s], nums[e] = nums[e], nums[s]
s += 1
e += 1
elif nums[e] == pivot:
e += 1
else:
nums[l], nums[e] = nums[e], nums[l]
l -= 1
if __name__ == "__main__":
sln = Solution4()
#test_nums = [1,0]
test_nums = [1,2,0]
res = sln.sortColors(test_nums)
print(res)
|
rsmuc/health_monitoring_plugins | health_monitoring_plugins/ilo.py | Python | gpl-2.0 | 17,237 | 0.003017 | """
Module for check_snmp_ilo4
"""
# Copyright (C) 2016-2019 rsmuc <rsmuc@sec-dev.de>
# This file is part of "Health Monitoring Plugins".
# "Health Monitoring Plugins" is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# "Health Monitoring Plugins" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with "Health Monitoring Plugins". If not, see <https://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function
from pynag.Plugins import unknown, warning, critical, ok
DEVICE_INFORMATION_OIDS = {
"oid_product_name": '.1.3.6.1.4.1.232.2.2.4.2.0',
"oid_serial_number": '.1.3.6.1.4.1.232.2.2.2.1.0'
}
DEVICE_GLOBAL_OIDS = {
"oid_global_storage": '.1.3.6.1.4.1.232.3.1.3.0',
"oid_global_mem": '.1.3.6.1.4.1.232.6.2.14.4.0',
"oid_global_system": '.1.3.6.1.4.1.232.6.1.3.0',
"oid_global_power_supply": '.1.3.6.1.4.1.232.6.2.9.1.0',
"oid_global_power_state": '.1.3.6.1.4.1.232.9.2.2.32.0',
"oid_global_thermal_system": '.1.3.6.1.4.1.232.6.2.6.1.0',
"oid_global_temp_sensors": '.1.3.6.1.4.1.232.6.2.6.3.0',
"oid_global_fans": '.1.3.6.1.4.1.232.6.2.6.4.0',
}
DEVICE_STATES_OIDS = {
"oid_controllers": '.1.3.6.1.4.1.232.3.2.2.1.1.6',
"oid_power_supplies": '.1.3.6.1.4.1.232.6.2.9.3.1.4',
"oid_ps_redundant": '.1.3.6.1.4.1.232.6.2.9.3.1.9',
"oid_fan": '.1.3.6.1.4.1.232.6.2.6.7.1.9'
}
DRIVE_OIDS = {
"oid_status": '.1.3.6.1.4.1.232.3.2.5.1.1.6',
"oid_smart": '.1.3.6.1.4.1.232.3.2.5.1.1.57',
"oid_temperature": '.1.3.6.1.4.1.232.3.2.5.1.1.70',
"oid_temperature_threshold": '.1.3.6.1.4.1.232.3.2.5.1.1.71',
"oid_logical_drive_status": '.1.3.6.1.4.1.232.3.2.3.1.1.4'
}
TEMPE | RATURE_OIDS = {
"oid_env_temp": '.1.3.6.1.4.1.232.6.2.6.8.1.4',
"oid_env_temp_ | thres": '.1.3.6.1.4.1.232.6.2.6.8.1.5'
}
# State definitions
NORMAL_STATE = {
1: 'other',
2: 'ok',
3: 'degraded',
4: 'failed'
}
SERVER_POWER_STATE = {
1: 'unknown',
2: 'poweredOff',
3: 'poweredOn',
4: 'insufficientPowerOrPowerOnDenied'
}
LOG_DRV_STATE = {
1: 'other',
2: 'ok',
3: 'failed',
4: 'unconfigured',
5: 'recovering',
6: 'readyForRebuild',
7: 'rebuilding',
8: 'wrongDrive',
9: 'badConnect',
10: 'overheating',
11: 'shutdown',
12: 'expanding',
13: 'notAvailable',
14: 'queuedForExpansion',
15: 'multipathAccessDegraded',
16: 'erasing',
17: 'predictiveSpareRebuildReady',
18: 'rapidParityInitInProgress',
19: 'rapidParityInitPending',
20: 'noAccessEncryptedNoCntlrKey',
21: 'unencryptedToEncryptedInProgress',
22: 'newLogDrvKeyRekeyInProgress',
23: 'noAccessEncryptedCntlrEncryptnNotEnbld',
24: 'unencryptedToEncryptedNotStarted',
25: 'newLogDrvKeyRekeyRequestReceived'
}
PHY_DRV_STATES = {
1: 'other',
2: 'ok',
3: 'failed',
4: 'predictiveFailure',
5: 'erasing',
6: 'eraseDone',
7: 'eraseQueued',
8: 'ssdWearOut',
9: 'notAuthenticated'
}
PHY_DRV_SMART_STATES = {
1: 'other',
2: 'ok',
3: 'replaceDrive',
4: 'replaceDriveSSDWearOut'
}
PS_REDUNDANT_STATE = {
1: 'other',
2: 'notRedundant',
3: 'redundant'
}
def normal_check(name, status, device_type):
"""if the status is "ok" in the NORMAL_STATE dict, return ok + string
if the status is not "ok", return critical + string"""
status_string = NORMAL_STATE.get(int(status), "unknown")
if status_string == "ok":
return ok, "{} '{}': {}".format(device_type, name, status_string)
elif status_string == "unknown":
return warning, "{} '{}': {}".format(device_type, name, status_string)
return critical, "{} '{}': {}".format(device_type, name, status_string)
def power_check(name, status, device_type):
"""if the status is "ok" in the NORMAL_STATE dict, return ok + string
if the status is not "ok", return critical + string"""
status_string = SERVER_POWER_STATE.get(int(status), "unknown")
if status_string == "poweredOn":
return ok, "{} '{}': {}".format(device_type, name, status_string)
elif status_string == "unknown":
return warning, "{} '{}': {}".format(device_type, name, status_string)
return critical, "{} '{}': {}".format(device_type, name, status_string)
class ILo(object):
"""Class for check_meinberg_ntp"""
def __init__(self, session):
self.sess = session
@staticmethod
def add_device_information(helper, session):
""" add general device information to summary """
product_name = helper.get_snmp_value_or_exit(session, helper,
DEVICE_INFORMATION_OIDS['oid_product_name'])
serial_number = helper.get_snmp_value_or_exit(session, helper,
DEVICE_INFORMATION_OIDS['oid_serial_number'])
helper.add_summary('{} - Serial number: {}'.format(product_name, serial_number))
# TODO: remove the if else shit
def process_status(self, helper, session, check):
""""process a single status"""
snmp_result_status = helper.get_snmp_value_or_exit(session, helper,
DEVICE_GLOBAL_OIDS['oid_' + check])
if check == "global_storage":
helper.update_status(helper,
normal_check("global", snmp_result_status, "Global storage"))
elif check == "global_system":
helper.update_status(helper,
normal_check("global", snmp_result_status, "Global system"))
elif check == "global_power_supply":
helper.update_status(helper,
normal_check("global", snmp_result_status, "Global power supply"))
elif check == "global_power_state":
helper.update_status(helper,
power_check("global", snmp_result_status, "Global power state"))
elif check == "global_thermal_system":
helper.update_status(helper,
normal_check("global", snmp_result_status,
"Overall thermal environment"))
elif check == "global_temp_sensors":
helper.update_status(helper,
normal_check("global", snmp_result_status, "Temperature sensors"))
elif check == "global_fans":
helper.update_status(helper,
normal_check("global", snmp_result_status, "Fan(s)"))
elif check == "global_mem":
helper.update_status(helper,
normal_check("global", snmp_result_status, "Memory"))
@staticmethod
def process_storage_controllers(helper, session):
""" process the controller states """
snmp_result_status = helper.walk_snmp_values_or_exit(session, helper,
DEVICE_STATES_OIDS["oid_controllers"],
"controllers")
for i, _result in enumerate(snmp_result_status):
helper.update_status(
helper,
normal_check(i, snmp_result_status[i], "Storage Controller"))
@staticmethod
def process_physical_drives(helper, session):
drv_states = helper.walk_snmp_values_or_exit(session, helper,
DRIVE_OIDS["oid_status"],
"drive state")
drv_smart_states = helper.walk_snmp_values_or_exit(session, helper,
|
c0s/juju-apache-bigtop-namenode | tests/01-basic-deployment.py | Python | apache-2.0 | 578 | 0 | #!/usr/bin/env python3
import unittest
import amulet
class TestDeploy(unittest.TestCase):
"""
Trivial deployment test for Apache Hadoop NameNode.
This charm cannot do anything useful by | itself, so integrati | on testing
is done in the bundle.
"""
def test_deploy(self):
self.d = amulet.Deployment(series='trusty')
self.d.add('namenode', 'apache-hadoop-namenode')
self.d.setup(timeout=900)
self.d.sentry.wait(timeout=1800)
self.unit = self.d.sentry['namenode'][0]
if __name__ == '__main__':
unittest.main()
|
spin13/python_maclib | fsevent_dog.py | Python | mit | 1,485 | 0.005394 | # -*- coding: utf-8 -*-
import os
import time
import util
import urllib, urllib2
import base64
from datetime import datetime
import env
from glob import glob
END_POINT = env.END_POINT
BASE_DIR = env.BASE_DIR
STORAGE_URL = env.STORAGE_URL
def get_latest_file(dirname):
dest = o | s.path.join(dirname, '*.png')
files = [(f, os.path.getmtime(f)) for f in glob(dest)]
if len(files) <= 0:
return
latest_path = sorted(files, key=lambda files: files[1])[-1]
return latest_path[0]
def get_file_timestamp(filename):
if filename == None:
return
return os.stat(filename)
|
def getext(filename):
return os.path.splitext(filename)[-1].lower()
def post_data(filename):
now = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
params = {
'base64_data': base64.b64encode(open(filename, 'rt').read()),
'name': now
}
params = urllib.urlencode(params)
req = urllib2.Request(END_POINT)
req.add_data(params)
res = urllib2.urlopen(req)
file_url = STORAGE_URL + now + '.png'
util.clipboard_set_data(file_url)
print(file_url)
return 0
if __name__ in '__main__':
latest_file = get_latest_file(BASE_DIR)
latest_back_file = latest_file
while 1:
time.sleep(1)
latest_file = get_latest_file(BASE_DIR)
if latest_back_file != latest_file:
post_data(latest_file)
os.remove(latest_file)
|
Eric89GXL/scipy | scipy/linalg/basic.py | Python | bsd-3-clause | 55,293 | 0 | #
# Author: Pearu Peterson, March 2002
#
# w/ additions by Travis Oliphant, March 2002
# and Jake Vanderplas, August 2012
from __future__ import division, print_function, absolute_import
from warnings import warn
import numpy as np
from numpy import atleast_1d, atleast_2d
from .flinalg import get_flinalg_funcs
from .lapack import get_lapack_funcs, _compute_lwork
from .misc import LinAlgError, _datacopied, LinAlgWarning
from .decomp import _asarray_validated
from . import decomp, decomp_svd
from ._solve_toeplitz import levinson
__all__ = ['solve', 'solve_triangular', 'solveh_banded', 'solve_banded',
'solve_toeplitz', 'solve_circulant', 'inv', 'det', 'lstsq',
'pinv', 'pinv2', 'pinvh', 'matrix_balance']
# Linear equations
def _solve_check(n, info, lamch=None, rcond=None):
""" Check arguments during the different steps of the solution phase """
if info < 0:
raise ValueError('LAPACK reported an illegal value in {}-th argument'
'. | '.format(-info))
elif 0 < info:
raise LinAlgError('Matrix is singular.')
if lamch is None:
return
E = lamch('E')
if rcond < E:
warn('Ill-conditioned matrix (rcond={:.6g}): '
'result may not be accur | ate.'.format(rcond),
LinAlgWarning, stacklevel=3)
def solve(a, b, sym_pos=False, lower=False, overwrite_a=False,
overwrite_b=False, debug=None, check_finite=True, assume_a='gen',
transposed=False):
"""
Solves the linear equation set ``a * x = b`` for the unknown ``x``
for square ``a`` matrix.
If the data matrix is known to be a particular type then supplying the
corresponding string to ``assume_a`` key chooses the dedicated solver.
The available options are
=================== ========
generic matrix 'gen'
symmetric 'sym'
hermitian 'her'
positive definite 'pos'
=================== ========
If omitted, ``'gen'`` is the default structure.
The datatype of the arrays define which solver is called regardless
of the values. In other words, even when the complex array entries have
precisely zero imaginary parts, the complex solver will be called based
on the data type of the array.
Parameters
----------
a : (N, N) array_like
Square input data
b : (N, NRHS) array_like
Input data for the right hand side.
sym_pos : bool, optional
Assume `a` is symmetric and positive definite. This key is deprecated
and assume_a = 'pos' keyword is recommended instead. The functionality
is the same. It will be removed in the future.
lower : bool, optional
If True, only the data contained in the lower triangle of `a`. Default
is to use upper triangle. (ignored for ``'gen'``)
overwrite_a : bool, optional
Allow overwriting data in `a` (may enhance performance).
Default is False.
overwrite_b : bool, optional
Allow overwriting data in `b` (may enhance performance).
Default is False.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
assume_a : str, optional
Valid entries are explained above.
transposed: bool, optional
If True, ``a^T x = b`` for real matrices, raises `NotImplementedError`
for complex matrices (only for True).
Returns
-------
x : (N, NRHS) ndarray
The solution array.
Raises
------
ValueError
If size mismatches detected or input a is not square.
LinAlgError
If the matrix is singular.
LinAlgWarning
If an ill-conditioned input a is detected.
NotImplementedError
If transposed is True and input a is a complex matrix.
Examples
--------
Given `a` and `b`, solve for `x`:
>>> a = np.array([[3, 2, 0], [1, -1, 0], [0, 5, 1]])
>>> b = np.array([2, 4, -1])
>>> from scipy import linalg
>>> x = linalg.solve(a, b)
>>> x
array([ 2., -2., 9.])
>>> np.dot(a, x) == b
array([ True, True, True], dtype=bool)
Notes
-----
If the input b matrix is a 1D array with N elements, when supplied
together with an NxN input a, it is assumed as a valid column vector
despite the apparent size mismatch. This is compatible with the
numpy.dot() behavior and the returned result is still 1D array.
The generic, symmetric, hermitian and positive definite solutions are
obtained via calling ?GESV, ?SYSV, ?HESV, and ?POSV routines of
LAPACK respectively.
"""
# Flags for 1D or nD right hand side
b_is_1D = False
a1 = atleast_2d(_asarray_validated(a, check_finite=check_finite))
b1 = atleast_1d(_asarray_validated(b, check_finite=check_finite))
n = a1.shape[0]
overwrite_a = overwrite_a or _datacopied(a1, a)
overwrite_b = overwrite_b or _datacopied(b1, b)
if a1.shape[0] != a1.shape[1]:
raise ValueError('Input a needs to be a square matrix.')
if n != b1.shape[0]:
# Last chance to catch 1x1 scalar a and 1D b arrays
if not (n == 1 and b1.size != 0):
raise ValueError('Input b has to have same number of rows as '
'input a')
# accommodate empty arrays
if b1.size == 0:
return np.asfortranarray(b1.copy())
# regularize 1D b arrays to 2D
if b1.ndim == 1:
if n == 1:
b1 = b1[None, :]
else:
b1 = b1[:, None]
b_is_1D = True
# Backwards compatibility - old keyword.
if sym_pos:
assume_a = 'pos'
if assume_a not in ('gen', 'sym', 'her', 'pos'):
raise ValueError('{} is not a recognized matrix structure'
''.format(assume_a))
# Deprecate keyword "debug"
if debug is not None:
warn('Use of the "debug" keyword is deprecated '
'and this keyword will be removed in future '
'versions of SciPy.', DeprecationWarning, stacklevel=2)
# Get the correct lamch function.
# The LAMCH functions only exists for S and D
# So for complex values we have to convert to real/double.
if a1.dtype.char in 'fF': # single precision
lamch = get_lapack_funcs('lamch', dtype='f')
else:
lamch = get_lapack_funcs('lamch', dtype='d')
# Currently we do not have the other forms of the norm calculators
# lansy, lanpo, lanhe.
# However, in any case they only reduce computations slightly...
lange = get_lapack_funcs('lange', (a1,))
# Since the I-norm and 1-norm are the same for symmetric matrices
# we can collect them all in this one call
# Note however, that when issuing 'gen' and form!='none', then
# the I-norm should be used
if transposed:
trans = 1
norm = 'I'
if np.iscomplexobj(a1):
raise NotImplementedError('scipy.linalg.solve can currently '
'not solve a^T x = b or a^H x = b '
'for complex matrices.')
else:
trans = 0
norm = '1'
anorm = lange(norm, a1)
# Generalized case 'gesv'
if assume_a == 'gen':
gecon, getrf, getrs = get_lapack_funcs(('gecon', 'getrf', 'getrs'),
(a1, b1))
lu, ipvt, info = getrf(a1, overwrite_a=overwrite_a)
_solve_check(n, info)
x, info = getrs(lu, ipvt, b1,
trans=trans, overwrite_b=overwrite_b)
_solve_check(n, info)
rcond, info = gecon(lu, anorm, norm=norm)
# Hermitian case 'hesv'
elif assume_a == 'her':
hecon, hesv, hesv_lw = get_lapack_funcs(('hecon', 'hesv',
'hesv_lwork'), (a1, b1))
lwork = _compute_lwork(hesv_lw, n, lower)
lu, ipvt, x, info = hesv(a1, b1, lwork=lwork,
lower=lower,
|
twister/twister.github.io | binaries/JenkinsPlugin/Jenkins/Jenkins_Post_Script.py | Python | apache-2.0 | 810 | 0.006173 |
import xmlrpclib
# version: 2.001
from jenkinsapi import api
j | enkins = api.Jenkins('http://localhost:8080')
twister_job = jenkins.get_job('twister') # The Jenkins job is called `twister` !
twister_build = twister_job.get_ | last_build()
twister_status = twister_build.get_status()
print 'Status:', twister_status
if twister_build.is_good():
print 'The build passed successfully'
else:
print 'The build is not successful!'
exit(1)
# Central Engine is considered to run on localhost:8000
server = xmlrpclib.ServerProxy('http://127.0.0.1:8000/')
to_send = 'Jenkins: Job `{0}`, Build `{1}`, Status `{2}`!'.format(twister_job, twister_build, twister_status)
server.echo(to_send)
# The Twister user is called `jenkins`
server.runPlugin('jenkins', 'Jenkins', {"command":True, "build":twister_build})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.