| repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, nullable) |
|---|---|---|---|---|
Beauhurst/django | refs/heads/master | django/core/management/commands/sendtestemail.py | 48 |
import socket
from django.core.mail import mail_admins, mail_managers, send_mail
from django.core.management.base import BaseCommand
from django.utils import timezone
class Command(BaseCommand):
help = "Sends a test email to the email addresses specified as arguments."
missing_args_message = "You must specify some email recipients, or pass the --managers or --admins options."
def add_arguments(self, parser):
parser.add_argument(
'email', nargs='*',
help='One or more email addresses to send a test email to.',
)
parser.add_argument(
'--managers', action='store_true', dest='managers',
help='Send a test email to the addresses specified in settings.MANAGERS.',
)
parser.add_argument(
'--admins', action='store_true', dest='admins',
help='Send a test email to the addresses specified in settings.ADMINS.',
)
def handle(self, *args, **kwargs):
subject = 'Test email from %s on %s' % (socket.gethostname(), timezone.now())
send_mail(
subject=subject,
message="If you\'re reading this, it was successful.",
from_email=None,
recipient_list=kwargs['email'],
)
if kwargs['managers']:
mail_managers(subject, "This email was sent to the site managers.")
if kwargs['admins']:
mail_admins(subject, "This email was sent to the site admins.")
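# A usage sketch (not part of the original command; the address is an
# assumption and Django's email settings must already be configured):
#
#     from django.core.management import call_command
#     call_command('sendtestemail', 'admin@example.com', managers=True)
#
# which matches `./manage.py sendtestemail admin@example.com --managers`.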
|
fnouama/intellij-community | refs/heads/master | python/testData/intentions/afterGoogleReturnSectionAfterKeywords.py | 53 |
def f(**kwargs):
"""
Keyword arguments:
foo: bar
Returns:
object:
"""
|
Johnzero/erp | refs/heads/fga | openerp/addons/marketing/__init__.py | 10 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
2ndQuadrant/ansible | refs/heads/master | lib/ansible/modules/remote_management/manageiq/manageiq_tenant.py | 3 |
#!/usr/bin/python
#
# (c) 2018, Evert Mulder (based on manageiq_user.py by Daniel Korn <korndaniel1@gmail.com>)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: manageiq_tenant
short_description: Management of tenants in ManageIQ.
extends_documentation_fragment: manageiq
version_added: '2.8'
author: Evert Mulder (@evertmulder)
description:
- The manageiq_tenant module supports adding, updating and deleting tenants in ManageIQ.
options:
state:
description:
- absent - the tenant should not exist; present - the tenant should exist.
choices: ['absent', 'present']
default: 'present'
name:
description:
- The tenant name.
required: true
default: null
description:
description:
- The tenant description.
required: true
default: null
parent_id:
description:
- The id of the parent tenant. If not supplied the root tenant is used.
- The C(parent_id) takes precedence over C(parent) when supplied.
required: false
default: null
parent:
description:
- The name of the parent tenant. If not supplied and no C(parent_id) is supplied the root tenant is used.
required: false
default: null
quotas:
description:
- The tenant quotas.
- All parameters are case sensitive.
- 'Valid attributes are:'
- ' - C(cpu_allocated) (int): use null to remove the quota.'
- ' - C(mem_allocated) (GB): use null to remove the quota.'
- ' - C(storage_allocated) (GB): use null to remove the quota.'
- ' - C(vms_allocated) (int): use null to remove the quota.'
- ' - C(templates_allocated) (int): use null to remove the quota.'
required: false
default: null
'''
EXAMPLES = '''
- name: Update the root tenant in ManageIQ
manageiq_tenant:
name: 'My Company'
description: 'My company name'
manageiq_connection:
url: 'http://127.0.0.1:3000'
username: 'admin'
password: 'smartvm'
validate_certs: False
- name: Create a tenant in ManageIQ
manageiq_tenant:
name: 'Dep1'
description: 'Manufacturing department'
parent_id: 1
manageiq_connection:
url: 'http://127.0.0.1:3000'
username: 'admin'
password: 'smartvm'
validate_certs: False
- name: Delete a tenant in ManageIQ
manageiq_tenant:
state: 'absent'
name: 'Dep1'
parent_id: 1
manageiq_connection:
url: 'http://127.0.0.1:3000'
username: 'admin'
password: 'smartvm'
validate_certs: False
- name: Set tenant quota for cpu_allocated, mem_allocated, remove quota for vms_allocated
manageiq_tenant:
name: 'Dep1'
parent_id: 1
quotas:
- cpu_allocated: 100
- mem_allocated: 50
- vms_allocated: null
manageiq_connection:
url: 'http://127.0.0.1:3000'
username: 'admin'
password: 'smartvm'
validate_certs: False
- name: Delete a tenant in ManageIQ using a token
manageiq_tenant:
state: 'absent'
name: 'Dep1'
parent_id: 1
manageiq_connection:
url: 'http://127.0.0.1:3000'
token: 'sometoken'
validate_certs: False
'''
RETURN = '''
tenant:
description: The tenant.
returned: success
type: complex
contains:
id:
description: The tenant id
returned: success
type: int
name:
description: The tenant name
returned: success
type: str
description:
description: The tenant description
returned: success
type: str
parent_id:
description: The id of the parent tenant
returned: success
type: int
quotas:
description: List of tenant quotas
returned: success
type: list
sample:
cpu_allocated: 100
mem_allocated: 50
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.manageiq import ManageIQ, manageiq_argument_spec
class ManageIQTenant(object):
"""
Object to execute tenant management operations in manageiq.
"""
def __init__(self, manageiq):
self.manageiq = manageiq
self.module = self.manageiq.module
self.api_url = self.manageiq.api_url
self.client = self.manageiq.client
def tenant(self, name, parent_id, parent):
""" Search for tenant object by name and parent_id or parent
or the root tenant if no parent or parent_id is supplied.
Returns:
the parent tenant, None for the root tenant
the tenant or None if tenant was not found.
"""
if parent_id:
parent_tenant_res = self.client.collections.tenants.find_by(id=parent_id)
if not parent_tenant_res:
self.module.fail_json(msg="Parent tenant with id '%s' not found in manageiq" % str(parent_id))
parent_tenant = parent_tenant_res[0]
tenants = self.client.collections.tenants.find_by(name=name)
for tenant in tenants:
try:
ancestry = tenant['ancestry']
except AttributeError:
ancestry = None
if ancestry:
tenant_parent_id = int(ancestry.split("/")[-1])
if int(tenant_parent_id) == parent_id:
return parent_tenant, tenant
return parent_tenant, None
else:
if parent:
parent_tenant_res = self.client.collections.tenants.find_by(name=parent)
if not parent_tenant_res:
self.module.fail_json(msg="Parent tenant '%s' not found in manageiq" % parent)
if len(parent_tenant_res) > 1:
self.module.fail_json(msg="Multiple parent tenants not found in manageiq with name '%s" % parent)
parent_tenant = parent_tenant_res[0]
parent_id = parent_tenant['id']
tenants = self.client.collections.tenants.find_by(name=name)
for tenant in tenants:
try:
ancestry = tenant['ancestry']
except AttributeError:
ancestry = None
if ancestry:
tenant_parent_id = int(ancestry.split("/")[-1])
if tenant_parent_id == parent_id:
return parent_tenant, tenant
return parent_tenant, None
else:
# No parent or parent_id supplied; select the root tenant
return None, self.client.collections.tenants.find_by(ancestry=None)[0]
def compare_tenant(self, tenant, name, description):
""" Compare tenant fields with new field values.
Returns:
False if tenant fields differ from the new field values, True otherwise.
"""
found_difference = (
(name and tenant['name'] != name) or
(description and tenant['description'] != description)
)
return not found_difference
def delete_tenant(self, tenant):
""" Deletes a tenant from manageiq.
Returns:
dict with `msg` and `changed`
"""
try:
url = '%s/tenants/%s' % (self.api_url, tenant['id'])
result = self.client.post(url, action='delete')
except Exception as e:
self.module.fail_json(msg="failed to delete tenant %s: %s" % (tenant['name'], str(e)))
return dict(changed=True, msg=result['message'])
def edit_tenant(self, tenant, name, description):
""" Edit a manageiq tenant.
Returns:
dict with `msg` and `changed`
"""
resource = dict(name=name, description=description, use_config_for_attributes=False)
# check if we need to update (compare_tenant returns True if no difference is found)
if self.compare_tenant(tenant, name, description):
return dict(
changed=False,
msg="tenant %s is not changed." % tenant['name'],
tenant=tenant['_data'])
# try to update tenant
try:
result = self.client.post(tenant['href'], action='edit', resource=resource)
except Exception as e:
self.module.fail_json(msg="failed to update tenant %s: %s" % (tenant['name'], str(e)))
return dict(
changed=True,
msg="successfully updated the tenant with id %s" % (tenant['id']))
def create_tenant(self, name, description, parent_tenant):
""" Creates the tenant in manageiq.
Returns:
dict with `msg`, `changed` and `tenant_id`
"""
parent_id = parent_tenant['id']
# check for required arguments
for key, value in dict(name=name, description=description, parent_id=parent_id).items():
if value in (None, ''):
self.module.fail_json(msg="missing required argument: %s" % key)
url = '%s/tenants' % self.api_url
resource = {'name': name, 'description': description, 'parent': {'id': parent_id}}
try:
result = self.client.post(url, action='create', resource=resource)
tenant_id = result['results'][0]['id']
except Exception as e:
self.module.fail_json(msg="failed to create tenant %s: %s" % (name, str(e)))
return dict(
changed=True,
msg="successfully created tenant '%s' with id '%s'" % (name, tenant_id),
tenant_id=tenant_id)
def tenant_quota(self, tenant, quota_key):
""" Search for tenant quota object by tenant and quota_key.
Returns:
the quota for the tenant, or None if the tenant quota was not found.
"""
tenant_quotas = self.client.get("%s/quotas?expand=resources&filter[]=name=%s" % (tenant['href'], quota_key))
return tenant_quotas['resources']
def tenant_quotas(self, tenant):
""" Search for tenant quotas object by tenant.
Returns:
the quotas for the tenant, or None if no tenant quotas were found.
"""
tenant_quotas = self.client.get("%s/quotas?expand=resources" % (tenant['href']))
return tenant_quotas['resources']
def update_tenant_quotas(self, tenant, quotas):
""" Creates the tenant quotas in manageiq.
Returns:
dict with `msg` and `changed`
"""
changed = False
messages = []
for quota_key, quota_value in quotas.items():
current_quota_filtered = self.tenant_quota(tenant, quota_key)
if current_quota_filtered:
current_quota = current_quota_filtered[0]
else:
current_quota = None
if quota_value:
# Change the byte values to GB
if quota_key in ['storage_allocated', 'mem_allocated']:
quota_value_int = int(quota_value) * 1024 * 1024 * 1024
else:
quota_value_int = int(quota_value)
if current_quota:
res = self.edit_tenant_quota(tenant, current_quota, quota_key, quota_value_int)
else:
res = self.create_tenant_quota(tenant, quota_key, quota_value_int)
else:
if current_quota:
res = self.delete_tenant_quota(tenant, current_quota)
else:
res = dict(changed=False, msg="tenant quota '%s' does not exist" % quota_key)
if res['changed']:
changed = True
messages.append(res['msg'])
return dict(
changed=changed,
msg=', '.join(messages))
def edit_tenant_quota(self, tenant, current_quota, quota_key, quota_value):
""" Update the tenant quotas in manageiq.
Returns:
result
"""
if current_quota['value'] == quota_value:
return dict(
changed=False,
msg="tenant quota %s already has value %s" % (quota_key, quota_value))
else:
url = '%s/quotas/%s' % (tenant['href'], current_quota['id'])
resource = {'value': quota_value}
try:
self.client.post(url, action='edit', resource=resource)
except Exception as e:
self.module.fail_json(msg="failed to update tenant quota %s: %s" % (quota_key, str(e)))
return dict(
changed=True,
msg="successfully updated tenant quota %s" % quota_key)
def create_tenant_quota(self, tenant, quota_key, quota_value):
""" Creates the tenant quotas in manageiq.
Returns:
result
"""
url = '%s/quotas' % (tenant['href'])
resource = {'name': quota_key, 'value': quota_value}
try:
self.client.post(url, action='create', resource=resource)
except Exception as e:
self.module.fail_json(msg="failed to create tenant quota %s: %s" % (quota_key, str(e)))
return dict(
changed=True,
msg="successfully created tenant quota %s" % quota_key)
def delete_tenant_quota(self, tenant, quota):
""" deletes the tenant quotas in manageiq.
Returns:
result
"""
try:
result = self.client.post(quota['href'], action='delete')
except Exception as e:
self.module.fail_json(msg="failed to delete tenant quota '%s': %s" % (quota['name'], str(e)))
return dict(changed=True, msg=result['message'])
def create_tenant_response(self, tenant, parent_tenant):
""" Creates the ansible result object from a manageiq tenant entity
Returns:
a dict with the tenant id, name, description, parent id,
and quotas
"""
tenant_quotas = self.create_tenant_quotas_response(tenant['tenant_quotas'])
try:
ancestry = tenant['ancestry']
tenant_parent_id = int(ancestry.split("/")[-1])
except AttributeError:
# The root tenant does not return the ancestry attribute
tenant_parent_id = None
return dict(
id=tenant['id'],
name=tenant['name'],
description=tenant['description'],
parent_id=tenant_parent_id,
quotas=tenant_quotas
)
@staticmethod
def create_tenant_quotas_response(tenant_quotas):
""" Creates the ansible result object from a manageiq tenant_quotas entity
Returns:
a dict with the applied quotas, name and value
"""
if not tenant_quotas:
return {}
result = {}
for quota in tenant_quotas:
if quota['unit'] == 'bytes':
value = float(quota['value']) / (1024 * 1024 * 1024)
else:
value = quota['value']
result[quota['name']] = value
return result
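# Note: quota values make a unit round trip. update_tenant_quotas() multiplies
# mem_allocated/storage_allocated by 1024**3 before posting (GB to bytes), and
# create_tenant_quotas_response() divides 'bytes' quotas by 1024**3 on the way
# back out, so the module's inputs and outputs are both expressed in GB.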
def main():
argument_spec = dict(
name=dict(required=True, type='str'),
description=dict(required=True, type='str'),
parent_id=dict(required=False, type='int'),
parent=dict(required=False, type='str'),
state=dict(choices=['absent', 'present'], default='present'),
quotas=dict(type='dict', default={})
)
# add the manageiq connection arguments to the arguments
argument_spec.update(manageiq_argument_spec())
module = AnsibleModule(
argument_spec=argument_spec
)
name = module.params['name']
description = module.params['description']
parent_id = module.params['parent_id']
parent = module.params['parent']
state = module.params['state']
quotas = module.params['quotas']
manageiq = ManageIQ(module)
manageiq_tenant = ManageIQTenant(manageiq)
parent_tenant, tenant = manageiq_tenant.tenant(name, parent_id, parent)
# tenant should not exist
if state == "absent":
# if we have a tenant, delete it
if tenant:
res_args = manageiq_tenant.delete_tenant(tenant)
# if we do not have a tenant, nothing to do
else:
res_args = dict(
changed=False,
msg="tenant %s: with parent: %i does not exist in manageiq" % (name, parent_id))
# tenant should exist
if state == "present":
# if we have a tenant, edit it
if tenant:
res_args = manageiq_tenant.edit_tenant(tenant, name, description)
# if we do not have a tenant, create it
else:
res_args = manageiq_tenant.create_tenant(name, description, parent_tenant)
tenant = manageiq.client.get_entity('tenants', res_args['tenant_id'])
# quotas are supplied and we have a tenant
if quotas:
tenant_quotas_res = manageiq_tenant.update_tenant_quotas(tenant, quotas)
if tenant_quotas_res['changed']:
res_args['changed'] = True
res_args['tenant_quotas_msg'] = tenant_quotas_res['msg']
tenant.reload(expand='resources', attributes=['tenant_quotas'])
res_args['tenant'] = manageiq_tenant.create_tenant_response(tenant, parent_tenant)
module.exit_json(**res_args)
if __name__ == "__main__":
main()
|
ARPASMR/IRIS_lombardia | refs/heads/master | html/OpenLayers-2.13.1/build/build.py | 86 |
#!/usr/bin/env python
import sys
import os
sys.path.append("../tools")
import mergejs
import optparse
def build(config_file = None, output_file = None, options = None):
have_compressor = []
try:
import jsmin
have_compressor.append("jsmin")
except ImportError:
print "No jsmin"
try:
# tools/closure_library_jscompiler.py from:
# http://code.google.com/p/closure-library/source/browse/trunk/closure/bin/build/jscompiler.py
import closure_library_jscompiler as closureCompiler
have_compressor.append("closure")
except Exception, E:
print "No closure (%s)" % E
try:
import closure_ws
have_compressor.append("closure_ws")
except ImportError:
print "No closure_ws"
try:
import minimize
have_compressor.append("minimize")
except ImportError:
print "No minimize"
try:
import uglify_js
uglify_js.check_available()
have_compressor.append("uglify-js")
except Exception, E:
print "No uglify-js (%s)" % E
use_compressor = None
if options.compressor and options.compressor in have_compressor:
use_compressor = options.compressor
sourceDirectory = "../lib"
configFilename = "full.cfg"
outputFilename = "OpenLayers.js"
if config_file:
configFilename = config_file
extension = configFilename[-4:]
if extension != ".cfg":
configFilename = config_file + ".cfg"
if output_file:
outputFilename = output_file
print "Merging libraries."
try:
if use_compressor == "closure" or use_compressor == 'uglify-js':
sourceFiles = mergejs.getNames(sourceDirectory, configFilename)
else:
merged = mergejs.run(sourceDirectory, None, configFilename)
except mergejs.MissingImport, E:
print "\nAbnormal termination."
sys.exit("ERROR: %s" % E)
if options.amdname:
options.amdname = "'" + options.amdname + "',"
else:
options.amdname = ""
if options.amd == 'pre':
print "\nAdding AMD function."
merged = "define(%sfunction(){%sreturn OpenLayers;});" % (options.amdname, merged)
print "Compressing using %s" % use_compressor
if use_compressor == "jsmin":
minimized = jsmin.jsmin(merged)
elif use_compressor == "minimize":
minimized = minimize.minimize(merged)
elif use_compressor == "closure_ws":
if len(merged) > 1000000: # The maximum file size for this web service is 1000 KB.
print "\nPre-compressing using jsmin"
merged = jsmin.jsmin(merged)
print "\nIs being compressed using Closure Compiler Service."
try:
minimized = closure_ws.minimize(merged)
except Exception, E:
print "\nAbnormal termination."
sys.exit("ERROR: Closure Compilation using Web service failed!\n%s" % E)
if len(minimized) <= 2:
print "\nAbnormal termination due to compilation errors."
sys.exit("ERROR: Closure Compilation using Web service failed!")
else:
print "Closure Compilation using Web service has completed successfully."
elif use_compressor == "closure":
jscompilerJar = "../tools/closure-compiler.jar"
if not os.path.isfile(jscompilerJar):
print "\nNo closure-compiler.jar; read README.txt!"
sys.exit("ERROR: Closure Compiler \"%s\" does not exist! Read README.txt" % jscompilerJar)
minimized = closureCompiler.Compile(
jscompilerJar,
sourceFiles, [
"--externs", "closure-compiler/Externs.js",
"--jscomp_warning", "checkVars", # To enable "undefinedVars"
"--jscomp_error", "checkRegExp", # Also necessary to enable "undefinedVars"
"--jscomp_error", "undefinedVars"
]
)
if minimized is None:
print "\nAbnormal termination due to compilation errors."
sys.exit("ERROR: Closure Compilation failed! See compilation errors.")
print "Closure Compilation has completed successfully."
elif use_compressor == "uglify-js":
minimized = uglify_js.compile(sourceFiles)
if minimized is None:
print "\nAbnormal termination due to compilation errors."
sys.exit("ERROR: Uglify JS compilation failed! See compilation errors.")
print "Uglify JS compilation has completed successfully."
else: # fallback
minimized = merged
if options.amd == 'post':
print "\nAdding AMD function."
minimized = "define(%sfunction(){%sreturn OpenLayers;});" % (options.amdname, minimized)
if options.status:
print "\nAdding status file."
minimized = "// status: " + file(options.status).read() + minimized
print "\nAdding license file."
minimized = file("license.txt").read() + minimized
print "Writing to %s." % outputFilename
file(outputFilename, "w").write(minimized)
print "Done."
if __name__ == '__main__':
opt = optparse.OptionParser(usage="%s [options] [config_file] [output_file]\n Default config_file is 'full.cfg', Default output_file is 'OpenLayers.js'")
opt.add_option("-c", "--compressor", dest="compressor", help="compression method: one of 'jsmin' (default), 'minimize', 'closure_ws', 'closure', or 'none'", default="jsmin")
opt.add_option("-s", "--status", dest="status", help="name of a file whose contents will be added as a comment at the front of the output file. For example, when building from a git repo, you can save the output of 'git describe --tags' in this file. Default is no file.", default=False)
opt.add_option("--amd", dest="amd", help="output should be AMD module; wrap merged files in define function; can be either 'pre' (before compilation) or 'post' (after compilation). Wrapping the OpenLayers var in a function means the filesize can be reduced by the closure compiler using 'pre', but be aware that a few functions depend on the OpenLayers variable being present. Either option can be used with jsmin or minimize compression. Default false, not AMD.", default=False)
opt.add_option("--amdname", dest="amdname", help="only useful with amd option. Name of AMD module. Default no name, anonymous module.", default=False)
(options, args) = opt.parse_args()
if not len(args):
build(options=options)
elif len(args) == 1:
build(args[0], options=options)
elif len(args) == 2:
build(args[0], args[1], options=options)
else:
print "Wrong number of arguments"
|
indictranstech/fbd_erpnext | refs/heads/develop | erpnext/support/doctype/maintenance_visit/__init__.py | 37694 |
from __future__ import unicode_literals
|
tymoreau/klusta_process_manager | refs/heads/master | klusta_process_manager/fileBrowser/fileBrowser.py | 1 |
import os
#QT
import sip
sip.setapi('QVariant',2)
sip.setapi('QString',2)
from PyQt4 import QtCore,QtGui
from .tableDelegate import TableDelegate
from .folderView import FolderView
#------------------------------------------------------------------------------------------------------------
# Worker: Runs continuously in a separate thread
# Can run different methods / a running method can be interrupted by a new method call
#------------------------------------------------------------------------------------------------------------
class Worker(QtCore.QObject):
valueChanged=QtCore.pyqtSignal(int)
folderDone=QtCore.pyqtSignal(int)
finished=QtCore.pyqtSignal()
def __init__(self):
super(Worker,self).__init__()
self._abort=False
self._interrupt=False
self._method="none"
self.mutex=QtCore.QMutex()
self.condition=QtCore.QWaitCondition()
def mainLoop(self):
while 1:
self.mutex.lock()
if not self._interrupt and not self._abort:
self.condition.wait(self.mutex)
self._interrupt=False
if self._abort:
self.finished.emit()
return
method=self._method
self.mutex.unlock()
if method=="icon_folder":
self.doMethod_icon_folder()
def requestMethod(self,method,arg=None):
locker=QtCore.QMutexLocker(self.mutex)
self._interrupt=True
self._method=method
self._arg=arg
self.condition.wakeOne()
def doMethod_icon_folder(self):
expList=self._arg
i=0
s=len(expList)
for exp in expList:
self.mutex.lock()
abort=self._abort
interrupt=self._interrupt
self.mutex.unlock()
if abort or interrupt:
self.valueChanged.emit(100)
break
exp.reset_folder_icon()
self.folderDone.emit(i)
i+=1
self.valueChanged.emit(i*100.0/s)
def abort(self):
locker=QtCore.QMutexLocker(self.mutex)
self._abort=True
self.condition.wakeOne()
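# Usage sketch: the worker is driven from another thread via
# requestMethod("icon_folder", experiments); mainLoop() wakes on the condition
# variable, dispatches to doMethod_icon_folder(), and abort() ends the loop.
# The Model class below wires a Worker to a QThread in exactly this way.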
#------------------------------------------------------------------------------------------------------------
# Model
#------------------------------------------------------------------------------------------------------------
class Model(QtCore.QAbstractTableModel):
def __init__(self,delegate=None,parent=None):
super(Model,self).__init__(parent)
#thread
self.working=False
self.thread=QtCore.QThread()
self.worker=Worker()
self.worker.moveToThread(self.thread)
self.thread.started.connect(self.worker.mainLoop)
self.thread.finished.connect(self.deleteLater)
self.thread.start()
self.worker.folderDone.connect(self.icon_done)
self.worker.finished.connect(self.thread.quit)
#list of current experiments to display
self.experimentList=[]
#Delegate
self.delegate=delegate
def rowCount(self,QModelIndex):
return len(self.experimentList)
def columnCount(self,QModelIndex):
return 4
def icon_done(self,row):
idx=self.index(row,3)
self.dataChanged.emit(idx,idx)
def reset_list(self,expList):
self.beginResetModel()
expList.sort()
self.experimentList=expList[:]
self.reset_horizontal_lines()
self.worker.requestMethod("icon_folder",self.experimentList)
self.endResetModel()
#To draw horizontal line according to date
def reset_horizontal_lines(self):
listDate=[exp.dateTime for exp in self.experimentList]
self.delegate.reset_horizontal_lines(listDate)
def clear(self):
self.beginResetModel()
self.experimentList=[]
self.endResetModel()
def data(self,index,role):
col=index.column()
row=index.row()
if role==QtCore.Qt.DisplayRole:
if col==0:
return self.experimentList[row].yearMonth
if col==1:
return self.experimentList[row].day
if col==2:
return self.experimentList[row].time
if col==3:
return self.experimentList[row].folderName
if role==QtCore.Qt.DecorationRole:
if col==3:
path=os.path.join(os.path.dirname(os.path.realpath(__file__)), '../icons/')
path=os.path.realpath(path)+"/"
return QtGui.QIcon(path+self.experimentList[row].folder.icon)
def flags(self,index):
if index.column()==3:
return QtCore.Qt.ItemIsEnabled|QtCore.Qt.ItemIsSelectable
return QtCore.Qt.NoItemFlags
def pathLocal_from_index(self,index):
exp=self.experimentList[index.row()]
return exp.pathLocal
def createFiles_onSelection(self,selection,prmModel,prbModel):
for index in selection:
self.experimentList[index.row()].create_files(prmModel=prmModel,prbModel=prbModel)
self.experimentList[index.row()].reset_folder_icon()
self.dataChanged.emit(selection[0],selection[-1])
def update_exp(self,exp):
if exp in self.experimentList:
row=self.experimentList.index(exp)
index=self.index(row,3)
self.dataChanged.emit(index,index)
#--------------------------------------------------------------------------------------------------------------
# FileBrowser Widget
#--------------------------------------------------------------------------------------------------------------
class FileBrowser(QtGui.QWidget):
def __init__(self,ROOT,parent=None):
super(FileBrowser,self).__init__(parent)
#Combo Box
self.animalComboBox=QtGui.QComboBox()
#model/view
self.delegate=TableDelegate(self)
self.model=Model(self.delegate,self)
self.view=FolderView(self.model,self)
self.model.worker.valueChanged.connect(self.display_load)
self.view.table.setItemDelegate(self.delegate)
#button
pathIcon=os.path.join(os.path.dirname(os.path.realpath(__file__)), '../icons/downarrow.png')
pathIcon=os.path.realpath(pathIcon)
self.button_add=QtGui.QPushButton(QtGui.QIcon(pathIcon)," ")
self.button_createFiles=QtGui.QPushButton("Create prm/prb")
self.button_createFiles.clicked.connect(self.createFiles)
self.button_createFiles.setEnabled(False)
self.button_loadModels=QtGui.QPushButton("Load models")
self.button_loadModels.clicked.connect(self.loadModels)
#label
labelPath=ROOT+os.sep
if len(labelPath)>20:
labelPath="..."+labelPath[-17:]
self.label_path=QtGui.QLabel(labelPath)
self.label_load=QtGui.QLabel(' ')
self.label_prmModel=QtGui.QLabel('no prm model')
self.label_prbModel=QtGui.QLabel('no prb model')
self.prmModel=QtCore.QFileInfo()
self.prbModel=QtCore.QFileInfo()
#Layout
hbox1=QtGui.QHBoxLayout()
hbox1.addWidget(self.label_path)
hbox1.addWidget(self.animalComboBox)
hbox1.addStretch()
hbox2=QtGui.QHBoxLayout()
hbox2.addWidget(self.view.label_hide)
hbox2.addWidget(self.view.edit_hide)
grid=QtGui.QHBoxLayout()
grid.addWidget(self.button_add)
grid.addWidget(self.button_loadModels)
grid.addWidget(self.label_prmModel)
grid.addWidget(self.label_prbModel)
grid.addWidget(self.button_createFiles)
grid.addWidget(self.label_load)
layout=QtGui.QGridLayout()
layout.addLayout(hbox1,1,1)
layout.addLayout(hbox2,1,2)
layout.addWidget(self.view,2,1,4,2)
layout.addLayout(grid,6,1,1,2)
self.setLayout(layout)
def set_animalComboBox(self,animalList):
for animalID in animalList:
self.animalComboBox.addItem(animalID)
def get_experiment_selection(self):
return self.view.table.selectedIndexes()
def createFiles(self):
if self.prmModel.exists() and self.prbModel.exists():
selection=self.get_experiment_selection()
self.model.createFiles_onSelection(selection,prmModel=self.prmModel,prbModel=self.prbModel)
self.view.refresh()
def loadModels(self):
filebox=QtGui.QFileDialog(self,"Load model for PRB and PRM files")
filebox.setFileMode(QtGui.QFileDialog.ExistingFiles)
filebox.setNameFilters(["PRB/PRM (*.prm *.prb)"])
filebox.setOptions(QtGui.QFileDialog.DontUseNativeDialog)
if filebox.exec_():
for selectedFile in filebox.selectedFiles():
if selectedFile.endswith(".prm"):
self.prmModel.setFile(selectedFile)
self.label_prmModel.setText(self.prmModel.fileName())
elif selectedFile.endswith(".prb"):
self.prbModel.setFile(selectedFile)
self.label_prbModel.setText(self.prbModel.fileName())
if self.prmModel.exists() and self.prbModel.exists():
self.button_createFiles.setEnabled(True)
def display_load(self,i):
percentage=str(i)+'%'
if i==100:
self.label_load.setText("")
else:
self.label_load.setText("Loading icons: "+percentage)
def reset_experimentList(self,experimentInfoList):
self.model.reset_list(experimentInfoList)
self.view.reset_view()
def on_close(self):
self.model.worker.abort()
#self.model.thread.wait()
|
simone/django-gb | refs/heads/master | django/contrib/admin/exceptions.py | 95 |
from django.core.exceptions import SuspiciousOperation
class DisallowedModelAdminLookup(SuspiciousOperation):
"""Invalid filter was passed to admin view via URL querystring"""
pass
|
anirajk/RAMCloud | refs/heads/master | scripts/repostats.py | 20 |
#!/usr/bin/env python
# Copyright (c) 2011 Stanford University
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR(S) DISCLAIM ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL AUTHORS BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Uploads a report to dumpstr with statistics about the git repo."""
from __future__ import division, print_function
from collections import defaultdict
import random
import re
import os
import subprocess
import sys
from common import captureSh, getDumpstr
from ordereddict import OrderedDict
# Utilities
def first(x): return x[0]
def second(x): return x[1]
def seq_to_freq(seq):
"""Given a list of things, count how many times each one occurs in the
list.
Returns these counts as a dictionary.
"""
freq = defaultdict(int)
for x in seq:
freq[x] += 1
return freq
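# For example, seq_to_freq(['a', 'b', 'a']) == {'a': 2, 'b': 1}.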
class FileType:
"""Methods for classifying files as different types."""
PATTERNS = [
('script', '^scripts/'),
('misc', '^bindings/'),
('misc', '^misc/'),
('script', '^hooks/'),
('script', '\.py$'),
('script', '\.sh$'),
('script', '\.bash$'),
('test', 'Test\.cc$'),
('source', '^src/.*\.h$'),
('source', '^src/.*\.cc$'),
('misc', '.*')
]
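# Patterns are tried top-to-bottom in get() below and the first match wins;
# the trailing ('misc', '.*') entry is a catch-all, so every file gets a type.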
ALL_FILETYPES = sorted(set(map(first, PATTERNS)))
@classmethod
def get(cls, filename):
"""Given a filename, return its type."""
for filetype, pattern in cls.PATTERNS:
if re.search(pattern, filename) is not None:
return filetype
@classmethod
def make_filetype_to_int_map(cls):
"""Return a dict mapping from all possible file types to 0."""
return OrderedDict([(filetype, 0) for filetype in cls.ALL_FILETYPES])
class Author:
"""Methods for dealing with author names."""
REMAPPING = {
'ankitak': 'Ankita Kejriwal',
}
@classmethod
def get(cls, author_name):
"""Given an author name from git, return the author name to use.
The returned value is often the same as the input, but occasionally
people will use multiple aliases that need to be collapsed down.
"""
try:
return cls.REMAPPING[author_name]
except KeyError:
return author_name
def blame(filename):
"""Run git blame on a file and return a mapping from each author to the
number of lines of code each author accounts for."""
try:
output = captureSh('git blame -p %s' % filename,
stderr=subprocess.PIPE)
except subprocess.CalledProcessError:
# may be a submodule
print('Warning: Skipping', filename, '- could not git blame')
return {}
authors = {}
current_commit = None
commit_to_author = {}
commit_to_num_lines = defaultdict(int)
num_lines = None
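# 'git blame -p' (porcelain format) opens each commit group with a header
# line "<40-hex sha> <orig-line> <final-line> <num-lines-in-group>", followed
# by metadata lines such as "author <name>"; the two regexes below match
# exactly those two line shapes.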
for line in output.split('\n'):
m = re.search('^([0-9a-f]{40}) \d+ \d+ (\d+)$', line)
if m is not None:
current_commit = m.group(1)
num_lines = int(m.group(2))
commit_to_num_lines[current_commit] += num_lines
continue
m = re.search('^author (.*)$', line)
if m is not None:
author = Author.get(m.group(1))
commit_to_author[current_commit] = author
continue
author_to_num_lines = defaultdict(int)
for commit, num_lines in commit_to_num_lines.items():
author = commit_to_author[commit]
author_to_num_lines[author] += num_lines
return author_to_num_lines
def print_and_upload_report(report, trends):
"""Dump the report to the screen and upload it."""
dumpstr = getDumpstr()
dumpstr.print_report(report)
s = dumpstr.upload_report('gitrepo', report, trends)
print('You can view your report at %s' % s['url'])
def get_commits_by_author():
"""Return a map from author to the number of commits that author has
made."""
output = captureSh('git log --pretty=format:%an').split('\n')
return seq_to_freq(map(Author.get, output))
if __name__ == '__main__':
# A complete list of files in the repo.
files = captureSh('git ls-files').split('\n')
# Uncomment this during testing to skip about 90% of the files and make the
# script run faster.
#files = filter(lambda x: random.randrange(10) == 0, files)
blame_data = dict([(filename, blame(filename))
for filename in files])
commits_by_author = get_commits_by_author()
num_commits = sum(commits_by_author.values())
report = []
trends = []
summary_lines = []
report.append({'key': 'Summary', 'lines': summary_lines})
# Number of files, broken up by file type
filetype_to_num_files = defaultdict(int)
for filename in files:
filetype = FileType.get(filename)
filetype_to_num_files[filetype] += 1
summary_lines.append({
'key': 'Number of files',
'summary': ['% 6d total' % len(files),
'% 6d source' % filetype_to_num_files['source']],
'points': sorted(filetype_to_num_files.items()),
'unit': ''
})
trends.append(('repo_num_files', len(files)))
trends.append(('repo_num_source_files', filetype_to_num_files['source']))
# Lines of code, broken up by file type
filetype_to_loc = defaultdict(int)
for filename, author_to_num_lines in blame_data.items():
filetype = FileType.get(filename)
loc = sum(author_to_num_lines.values())
filetype_to_loc[filetype] += loc
summary_lines.append({
'key': 'Lines of code',
'summary': ['% 6d total' % sum(filetype_to_loc.values()),
'% 6d source' % filetype_to_loc['source']],
'points': sorted(filetype_to_loc.items()),
'unit': ''
})
trends.append(('repo_loc', sum(filetype_to_loc.values())))
trends.append(('repo_source_loc', filetype_to_loc['source']))
# Number of commits
summary_lines.append({
'key': 'Number of commits',
'summary': '% 6d' % num_commits,
'points': num_commits,
'unit': ''
})
trends.append(('repo_num_commits', num_commits))
# Lines of code by author, broken up by file type
# map from author to (map from type to lines of code)
loc_by_author_type = defaultdict(FileType.make_filetype_to_int_map)
for filename, author_to_num_lines in blame_data.items():
filetype = FileType.get(filename)
for author, num_lines in author_to_num_lines.items():
loc_by_author_type[author][filetype] += num_lines
loc_by_author_lines = []
report.append({'key': 'Lines of code by author',
'lines': loc_by_author_lines})
for author, loc_by_type in sorted(loc_by_author_type.items(),
key=lambda x: sum(x[1].values()),
reverse=True):
loc_by_author_lines.append({
'key': author,
'summary': ['% 6d total' % sum(loc_by_type.values()),
'% 6d source' % loc_by_type['source']],
'points': loc_by_type.items(),
'unit': ''
})
# Number of commits by author
commits_by_author_lines = []
report.append({'key': 'Commits by author',
'lines': commits_by_author_lines})
for author, count in sorted(commits_by_author.items(),
key=second, reverse=True):
commits_by_author_lines.append({'key': author,
'summary': count,
'points': count,
'unit': ''})
print_and_upload_report(report, trends)
|
codeparticle/Tenable.io-SDK-for-Python | refs/heads/master | tests/integration/test_client.py | 1 |
from tenable_io.client import TenableIOClient
from tenable_io.exceptions import TenableIOApiException, TenableIOErrorCode
from tests.base import BaseTest
class TestTenableIOClient(BaseTest):
def test_client_bad_keys(self):
try:
TenableIOClient('bad', 'key').session_api.get()
assert False, u'TenableIOApiException should be raised for bad api and secret keys.'
except TenableIOApiException as e:
assert e.code is TenableIOErrorCode.UNAUTHORIZED, u'Appropriate exception is raised.'
|
redhat-openstack/glance | refs/heads/f22-patches | glance/db/sqlalchemy/models.py | 1 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for glance data
"""
import uuid
from oslo.db.sqlalchemy import models
from sqlalchemy import BigInteger
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy.orm import backref, relationship
from sqlalchemy import String
from sqlalchemy import Text
from sqlalchemy.types import TypeDecorator
from sqlalchemy import UniqueConstraint
from glance.openstack.common import jsonutils
from glance.openstack.common import timeutils
BASE = declarative_base()
@compiles(BigInteger, 'sqlite')
def compile_big_int_sqlite(type_, compiler, **kw):
return 'INTEGER'
class JSONEncodedDict(TypeDecorator):
"""Represents an immutable structure as a json-encoded string"""
impl = Text
def process_bind_param(self, value, dialect):
if value is not None:
value = jsonutils.dumps(value)
return value
def process_result_value(self, value, dialect):
if value is not None:
value = jsonutils.loads(value)
return value
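# Round-trip note: values bound to a JSONEncodedDict column are serialized
# with jsonutils.dumps() going into the DB and rebuilt with jsonutils.loads()
# coming back out, so callers only ever see Python objects, never raw JSON text.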
class GlanceBase(models.ModelBase, models.TimestampMixin):
"""Base class for Glance Models."""
__table_args__ = {'mysql_engine': 'InnoDB'}
__table_initialized__ = False
__protected_attributes__ = set([
"created_at", "updated_at", "deleted_at", "deleted"])
def save(self, session=None):
from glance.db.sqlalchemy import api as db_api
super(GlanceBase, self).save(session or db_api.get_session())
created_at = Column(DateTime, default=lambda: timeutils.utcnow(),
nullable=False)
# TODO(vsergeyev): Column `updated_at` has no default value in
# openstack common code. We should decide whether this value is
# required and make changes in oslo (if required) or
# in glance (if not).
updated_at = Column(DateTime, default=lambda: timeutils.utcnow(),
nullable=False, onupdate=lambda: timeutils.utcnow())
# TODO(boris-42): Use SoftDeleteMixin instead of deleted Column after
# migration that provides UniqueConstraints and change
# type of this column.
deleted_at = Column(DateTime)
deleted = Column(Boolean, nullable=False, default=False)
def delete(self, session=None):
"""Delete this object."""
self.deleted = True
self.deleted_at = timeutils.utcnow()
self.save(session=session)
def keys(self):
return self.__dict__.keys()
def values(self):
return self.__dict__.values()
def items(self):
return self.__dict__.items()
def to_dict(self):
d = self.__dict__.copy()
# NOTE(flaper87): Remove
# private state instance
# It is not serializable
# and causes CircularReference
d.pop("_sa_instance_state")
return d
class Image(BASE, GlanceBase):
"""Represents an image in the datastore."""
__tablename__ = 'images'
__table_args__ = (Index('checksum_image_idx', 'checksum'),
Index('ix_images_is_public', 'is_public'),
Index('ix_images_deleted', 'deleted'),
Index('owner_image_idx', 'owner'),)
id = Column(String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
name = Column(String(255))
disk_format = Column(String(20))
container_format = Column(String(20))
size = Column(BigInteger)
virtual_size = Column(BigInteger)
status = Column(String(30), nullable=False)
is_public = Column(Boolean, nullable=False, default=False)
checksum = Column(String(32))
min_disk = Column(Integer, nullable=False, default=0)
min_ram = Column(Integer, nullable=False, default=0)
owner = Column(String(255))
protected = Column(Boolean, nullable=False, default=False)
class ImageProperty(BASE, GlanceBase):
"""Represents an image properties in the datastore."""
__tablename__ = 'image_properties'
__table_args__ = (Index('ix_image_properties_image_id', 'image_id'),
Index('ix_image_properties_deleted', 'deleted'),
UniqueConstraint('image_id',
'name',
name='ix_image_properties_'
'image_id_name'),)
id = Column(Integer, primary_key=True)
image_id = Column(String(36), ForeignKey('images.id'),
nullable=False)
image = relationship(Image, backref=backref('properties'))
name = Column(String(255), nullable=False)
value = Column(Text)
class ImageTag(BASE, GlanceBase):
"""Represents an image tag in the datastore."""
__tablename__ = 'image_tags'
__table_args__ = (Index('ix_image_tags_image_id', 'image_id'),
Index('ix_image_tags_image_id_tag_value',
'image_id',
'value'),)
id = Column(Integer, primary_key=True, nullable=False)
image_id = Column(String(36), ForeignKey('images.id'), nullable=False)
image = relationship(Image, backref=backref('tags'))
value = Column(String(255), nullable=False)
class ImageLocation(BASE, GlanceBase):
"""Represents an image location in the datastore."""
__tablename__ = 'image_locations'
__table_args__ = (Index('ix_image_locations_image_id', 'image_id'),
Index('ix_image_locations_deleted', 'deleted'),)
id = Column(Integer, primary_key=True, nullable=False)
image_id = Column(String(36), ForeignKey('images.id'), nullable=False)
image = relationship(Image, backref=backref('locations'))
value = Column(Text(), nullable=False)
meta_data = Column(JSONEncodedDict(), default={})
status = Column(String(30), default='active', nullable=False)
class ImageMember(BASE, GlanceBase):
"""Represents an image members in the datastore."""
__tablename__ = 'image_members'
unique_constraint_key_name = 'image_members_image_id_member_deleted_at_key'
__table_args__ = (Index('ix_image_members_deleted', 'deleted'),
Index('ix_image_members_image_id', 'image_id'),
Index('ix_image_members_image_id_member',
'image_id',
'member'),
UniqueConstraint('image_id',
'member',
'deleted_at',
name=unique_constraint_key_name),)
id = Column(Integer, primary_key=True)
image_id = Column(String(36), ForeignKey('images.id'),
nullable=False)
image = relationship(Image, backref=backref('members'))
member = Column(String(255), nullable=False)
can_share = Column(Boolean, nullable=False, default=False)
status = Column(String(20), nullable=False, default="pending")
class Task(BASE, GlanceBase):
"""Represents an task in the datastore"""
__tablename__ = 'tasks'
__table_args__ = (Index('ix_tasks_type', 'type'),
Index('ix_tasks_status', 'status'),
Index('ix_tasks_owner', 'owner'),
Index('ix_tasks_deleted', 'deleted'),
Index('ix_tasks_updated_at', 'updated_at'))
id = Column(String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
type = Column(String(30))
status = Column(String(30))
owner = Column(String(255), nullable=False)
expires_at = Column(DateTime, nullable=True)
class TaskInfo(BASE, models.ModelBase):
"""Represents task info in the datastore"""
__tablename__ = 'task_info'
task_id = Column(String(36),
ForeignKey('tasks.id'),
primary_key=True,
nullable=False)
task = relationship(Task, backref=backref('info', uselist=False))
#NOTE(nikhil): input and result are stored as text in the DB.
# SQLAlchemy marshals the data to/from JSON using custom type
# JSONEncodedDict. It uses simplejson underneath.
input = Column(JSONEncodedDict())
result = Column(JSONEncodedDict())
message = Column(Text)
def register_models(engine):
"""Create database tables for all models with the given engine."""
models = (Image, ImageProperty, ImageMember)
for model in models:
model.metadata.create_all(engine)
def unregister_models(engine):
"""Drop database tables for all models with the given engine."""
models = (Image, ImageProperty)
for model in models:
model.metadata.drop_all(engine)
|
laurenrevere/osf.io | refs/heads/develop | osf_tests/test_guid_auto_include.py | 20 |
from django.utils import timezone
import pytest
from django_bulk_update.helper import bulk_update
from django.db.models import DateTimeField
from osf_tests.factories import UserFactory, PreprintFactory, NodeFactory
@pytest.mark.django_db
class TestGuidAutoInclude:
guid_factories = [
UserFactory,
PreprintFactory,
NodeFactory
]
@pytest.mark.parametrize('Factory', guid_factories)
def test_filter_object(self, Factory):
obj = Factory()
assert '__guids' in str(obj._meta.model.objects.filter(id=obj.id).query), 'Guids were not included in filter query for {}'.format(obj._meta.model.__name__)
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_all(self, Factory, django_assert_num_queries):
for _ in range(0, 5):
UserFactory()
with django_assert_num_queries(1):
wut = Factory._meta.model.objects.all()
for x in wut:
assert x._id is not None, 'Guid was None'
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_filter(self, Factory, django_assert_num_queries):
objects = []
for _ in range(0, 5):
objects.append(Factory())
new_ids = [o.id for o in objects]
with django_assert_num_queries(1):
wut = Factory._meta.model.objects.filter(id__in=new_ids)
for x in wut:
assert x._id is not None, 'Guid was None'
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_filter_order_by(self, Factory, django_assert_num_queries):
objects = []
for _ in range(0, 5):
objects.append(Factory())
new_ids = [o.id for o in objects]
with django_assert_num_queries(1):
wut = Factory._meta.model.objects.filter(id__in=new_ids).order_by('id')
for x in wut:
assert x._id is not None, 'Guid was None'
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_values(self, Factory, django_assert_num_queries):
objects = []
for _ in range(0, 5):
objects.append(Factory())
with django_assert_num_queries(1):
wut = Factory._meta.model.objects.values('id')
for x in wut:
assert len(x) == 1, 'Too many keys in values'
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_exclude(self, Factory, django_assert_num_queries):
objects = []
for _ in range(0, 5):
objects.append(Factory())
try:
dtfield = [x.name for x in objects[0]._meta.get_fields() if isinstance(x, DateTimeField)][0]
except IndexError:
pytest.skip('Thing doesn\'t have a DateTimeField')
with django_assert_num_queries(1):
wut = Factory._meta.model.objects.exclude(**{dtfield: timezone.now()})
for x in wut:
assert x._id is not None, 'Guid was None'
@pytest.mark.parametrize('Factory', guid_factories)
def test_update_objects(self, Factory):
objects = []
for _ in range(0, 5):
objects.append(Factory())
new_ids = [o.id for o in objects]
try:
dtfield = [x.name for x in objects[0]._meta.get_fields() if isinstance(x, DateTimeField)][0]
except IndexError:
pytest.skip('Thing doesn\'t have a DateTimeField')
qs = objects[0]._meta.model.objects.filter(id__in=new_ids)
assert len(qs) > 0, 'No results returned'
try:
qs.update(**{dtfield: timezone.now()})
except Exception as ex:
pytest.fail('Queryset update failed for {} with exception {}'.format(Factory._meta.model.__name__, ex))
@pytest.mark.parametrize('Factory', guid_factories)
def test_update_on_objects_filtered_by_guids(self, Factory):
objects = []
for _ in range(0, 5):
objects.append(Factory())
new__ids = [o._id for o in objects]
try:
dtfield = [x.name for x in objects[0]._meta.get_fields() if isinstance(x, DateTimeField)][0]
except IndexError:
pytest.skip('Thing doesn\'t have a DateTimeField')
qs = objects[0]._meta.model.objects.filter(guids___id__in=new__ids)
assert len(qs) > 0, 'No results returned'
try:
qs.update(**{dtfield: timezone.now()})
except Exception as ex:
pytest.fail('Queryset update failed for {} with exception {}'.format(Factory._meta.model.__name__, ex))
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_related_manager(self, Factory, django_assert_num_queries):
thing_with_contributors = Factory()
if not hasattr(thing_with_contributors, 'contributors'):
pytest.skip('Thing must have contributors')
try:
with django_assert_num_queries(1):
[x._id for x in thing_with_contributors.contributors.all()]
except Exception as ex:
pytest.fail('Related manager failed for {} with exception {}'.format(Factory._meta.model.__name__, ex))
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_count_objects(self, Factory, django_assert_num_queries):
objects = []
for _ in range(0, 5):
objects.append(Factory())
new_ids = [o.id for o in objects]
with django_assert_num_queries(1):
qs = objects[0]._meta.model.objects.filter(id__in=new_ids)
count = qs.count()
assert count == len(objects)
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_bulk_create_objects(self, Factory, django_assert_num_queries):
objects = []
Model = Factory._meta.model
kwargs = {}
if Factory == PreprintFactory:
# Don't try to save preprints on build when neither the subject nor provider have been saved
kwargs['finish'] = False
for _ in range(0, 5):
objects.append(Factory.build(**kwargs))
with django_assert_num_queries(1):
things = Model.objects.bulk_create(objects)
assert len(things) == len(objects)
@pytest.mark.parametrize('Factory', guid_factories)
@pytest.mark.django_assert_num_queries
def test_bulk_update_objects(self, Factory, django_assert_num_queries):
objects = []
ids = range(0, 5)
for id in ids:
objects.append(Factory())
try:
dtfield = [x.name for x in objects[0]._meta.get_fields() if isinstance(x, DateTimeField)][0]
except IndexError:
pytest.skip('Thing doesn\'t have a DateTimeField')
for obj in objects:
setattr(obj, dtfield, timezone.now())
with django_assert_num_queries(1):
bulk_update(objects)
|
kayhayen/Nuitka | refs/heads/develop | tests/programs/package_missing_init/PackageMissingInitMain.py | 1 |
# Copyright 2021, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python tests originally created or extracted from other peoples work. The
# parts were too small to be protected.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import some_package.some_module
import some_package.sub_package.some_sub_module
print(some_package.__package__)
print(some_package.sub_package.__package__)
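# As the test directory name suggests, some_package and sub_package
# presumably ship without __init__.py files, so this program exercises
# importing packages that are missing their __init__ and prints each
# __package__ for comparison against CPython's output.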
|
product-definition-center/product-definition-center | refs/heads/master | pdc/apps/releaseschedule/__init__.py | 2 |
#
# Copyright (c) 2017 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
default_app_config = 'pdc.apps.releaseschedule.apps.ReleaseScheduleConfig'
|
vvv1559/intellij-community | refs/heads/master | python/testData/completion/isInstanceTuple.after.py | 83 |
class Foo:
def test(self): pass
class Foo2:
def test(self): pass
def x(p):
if isinstance(p, (Foo, Foo2)):
p.test()
|
bbci/pyff | refs/heads/master | src/Feedbacks/Tutorial/Lesson04.py | 3 |
# Lesson04 - Reacting to control and interaction events
# Copyright (C) 2007-2009 Bastian Venthur
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from FeedbackBase.Feedback import Feedback
class Lesson04(Feedback):
def on_init(self):
self.myVariable = None
self.eegTuple = None
def on_interaction_event(self, data):
# this one is equivalent to:
# self.myVariable = self._someVariable
self.myVariable = data.get("someVariable")
print self.myVariable
def on_control_event(self, data):
# this one is equivalent to:
# self.eegTuple = self._data
self.eegTuple = data
print self.eegTuple
|
jmhodges/letsencrypt | refs/heads/master | letsencrypt/tests/cli_test.py | 14 |
"""Tests for letsencrypt.cli."""
import itertools
import os
import shutil
import traceback
import tempfile
import unittest
import mock
from letsencrypt import account
from letsencrypt import configuration
from letsencrypt import errors
class CLITest(unittest.TestCase):
"""Tests for different commands."""
def setUp(self):
self.tmp_dir = tempfile.mkdtemp()
self.config_dir = os.path.join(self.tmp_dir, 'config')
self.work_dir = os.path.join(self.tmp_dir, 'work')
self.logs_dir = os.path.join(self.tmp_dir, 'logs')
def tearDown(self):
shutil.rmtree(self.tmp_dir)
def _call(self, args):
from letsencrypt import cli
args = ['--text', '--config-dir', self.config_dir,
'--work-dir', self.work_dir, '--logs-dir', self.logs_dir,
'--agree-eula'] + args
with mock.patch('letsencrypt.cli.sys.stdout') as stdout:
with mock.patch('letsencrypt.cli.sys.stderr') as stderr:
with mock.patch('letsencrypt.cli.client') as client:
ret = cli.main(args)
return ret, stdout, stderr, client
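# The helper above patches stdout/stderr and the client object in one place,
# so each test can assert on client calls (e.g. client.rollback.call_count)
# without console or network I/O.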
def test_no_flags(self):
self.assertRaises(SystemExit, self._call, [])
def test_help(self):
self.assertRaises(SystemExit, self._call, ['--help'])
self.assertRaises(SystemExit, self._call, ['--help all'])
def test_rollback(self):
_, _, _, client = self._call(['rollback'])
self.assertEqual(1, client.rollback.call_count)
_, _, _, client = self._call(['rollback', '--checkpoints', '123'])
client.rollback.assert_called_once_with(
mock.ANY, 123, mock.ANY, mock.ANY)
def test_config_changes(self):
_, _, _, client = self._call(['config_changes'])
self.assertEqual(1, client.view_config_changes.call_count)
def test_plugins(self):
flags = ['--init', '--prepare', '--authenticators', '--installers']
for args in itertools.chain(
*(itertools.combinations(flags, r)
for r in xrange(len(flags)))):
            self._call(['plugins'] + list(args))
@mock.patch("letsencrypt.cli.sys")
def test_handle_exception(self, mock_sys):
# pylint: disable=protected-access
from letsencrypt import cli
mock_open = mock.mock_open()
with mock.patch("letsencrypt.cli.open", mock_open, create=True):
exception = Exception("detail")
cli._handle_exception(
Exception, exc_value=exception, trace=None, args=None)
mock_open().write.assert_called_once_with("".join(
traceback.format_exception_only(Exception, exception)))
error_msg = mock_sys.exit.call_args_list[0][0][0]
self.assertTrue("unexpected error" in error_msg)
with mock.patch("letsencrypt.cli.open", mock_open, create=True):
mock_open.side_effect = [KeyboardInterrupt]
error = errors.Error("detail")
cli._handle_exception(
errors.Error, exc_value=error, trace=None, args=None)
# assert_any_call used because sys.exit doesn't exit in cli.py
mock_sys.exit.assert_any_call("".join(
traceback.format_exception_only(errors.Error, error)))
args = mock.MagicMock(debug=False)
cli._handle_exception(
Exception, exc_value=Exception("detail"), trace=None, args=args)
error_msg = mock_sys.exit.call_args_list[-1][0][0]
self.assertTrue("unexpected error" in error_msg)
interrupt = KeyboardInterrupt("detail")
cli._handle_exception(
KeyboardInterrupt, exc_value=interrupt, trace=None, args=None)
mock_sys.exit.assert_called_with("".join(
traceback.format_exception_only(KeyboardInterrupt, interrupt)))
class DetermineAccountTest(unittest.TestCase):
"""Tests for letsencrypt.cli._determine_account."""
def setUp(self):
self.args = mock.MagicMock(account=None, email=None)
self.config = configuration.NamespaceConfig(self.args)
self.accs = [mock.MagicMock(id="x"), mock.MagicMock(id="y")]
self.account_storage = account.AccountMemoryStorage()
def _call(self):
# pylint: disable=protected-access
from letsencrypt.cli import _determine_account
with mock.patch("letsencrypt.cli.account.AccountFileStorage") as mock_storage:
mock_storage.return_value = self.account_storage
return _determine_account(self.args, self.config)
def test_args_account_set(self):
self.account_storage.save(self.accs[1])
self.args.account = self.accs[1].id
self.assertEqual((self.accs[1], None), self._call())
self.assertEqual(self.accs[1].id, self.args.account)
self.assertTrue(self.args.email is None)
def test_single_account(self):
self.account_storage.save(self.accs[0])
self.assertEqual((self.accs[0], None), self._call())
self.assertEqual(self.accs[0].id, self.args.account)
self.assertTrue(self.args.email is None)
@mock.patch("letsencrypt.client.display_ops.choose_account")
def test_multiple_accounts(self, mock_choose_accounts):
for acc in self.accs:
self.account_storage.save(acc)
mock_choose_accounts.return_value = self.accs[1]
self.assertEqual((self.accs[1], None), self._call())
self.assertEqual(
set(mock_choose_accounts.call_args[0][0]), set(self.accs))
self.assertEqual(self.accs[1].id, self.args.account)
self.assertTrue(self.args.email is None)
@mock.patch("letsencrypt.client.display_ops.get_email")
def test_no_accounts_no_email(self, mock_get_email):
mock_get_email.return_value = "foo@bar.baz"
with mock.patch("letsencrypt.cli.client") as client:
client.register.return_value = (
self.accs[0], mock.sentinel.acme)
self.assertEqual((self.accs[0], mock.sentinel.acme), self._call())
client.register.assert_called_once_with(
self.config, self.account_storage, tos_cb=mock.ANY)
self.assertEqual(self.accs[0].id, self.args.account)
self.assertEqual("foo@bar.baz", self.args.email)
def test_no_accounts_email(self):
self.args.email = "other email"
with mock.patch("letsencrypt.cli.client") as client:
client.register.return_value = (self.accs[1], mock.sentinel.acme)
self._call()
self.assertEqual(self.accs[1].id, self.args.account)
self.assertEqual("other email", self.args.email)
if __name__ == '__main__':
unittest.main() # pragma: no cover
|
fqez/JdeRobot
|
refs/heads/master
|
src/drivers/MAVLinkServer/MAVProxy/modules/lib/wxsettings_ui.py
|
8
|
from wx_loader import wx
class TabbedDialog(wx.Dialog):
def __init__(self, tab_names, title='Title', size=wx.DefaultSize):
wx.Dialog.__init__(self, None, -1, title,
style=wx.DEFAULT_DIALOG_STYLE|wx.RESIZE_BORDER)
self.tab_names = tab_names
self.notebook = wx.Notebook(self, -1, size=size)
self.panels = {}
self.sizers = {}
for t in tab_names:
self.panels[t] = wx.Panel(self.notebook)
self.notebook.AddPage(self.panels[t], t)
self.sizers[t] = wx.BoxSizer(wx.VERTICAL)
self.panels[t].SetSizer(self.sizers[t])
self.dialog_sizer = wx.BoxSizer(wx.VERTICAL)
self.dialog_sizer.Add(self.notebook, 1, wx.EXPAND|wx.ALL, 5)
self.controls = {}
self.browse_option_map = {}
self.control_map = {}
self.setting_map = {}
button_box = wx.BoxSizer(wx.HORIZONTAL)
self.button_apply = wx.Button(self, -1, "Apply")
self.button_cancel = wx.Button(self, -1, "Cancel")
self.button_save = wx.Button(self, -1, "Save")
self.button_load = wx.Button(self, -1, "Load")
button_box.Add(self.button_cancel, 0, wx.ALL)
button_box.Add(self.button_apply, 0, wx.ALL)
button_box.Add(self.button_save, 0, wx.ALL)
button_box.Add(self.button_load, 0, wx.ALL)
self.dialog_sizer.Add(button_box, 0, wx.GROW|wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
wx.EVT_BUTTON(self, self.button_cancel.GetId(), self.on_cancel)
wx.EVT_BUTTON(self, self.button_apply.GetId(), self.on_apply)
wx.EVT_BUTTON(self, self.button_save.GetId(), self.on_save)
wx.EVT_BUTTON(self, self.button_load.GetId(), self.on_load)
self.Centre()
def on_cancel(self, event):
'''called on cancel'''
self.Destroy()
def on_apply(self, event):
'''called on apply'''
for label in self.setting_map.keys():
setting = self.setting_map[label]
ctrl = self.controls[label]
value = ctrl.GetValue()
if str(value) != str(setting.value):
oldvalue = setting.value
if not setting.set(value):
print("Invalid value %s for %s" % (value, setting.name))
continue
if str(oldvalue) != str(setting.value):
self.parent_pipe.send(setting)
def on_save(self, event):
'''called on save button'''
dlg = wx.FileDialog(None, self.settings.get_title(), '', "", '*.*',
wx.FD_SAVE | wx.FD_OVERWRITE_PROMPT)
if dlg.ShowModal() == wx.ID_OK:
self.settings.save(dlg.GetPath())
def on_load(self, event):
'''called on load button'''
dlg = wx.FileDialog(None, self.settings.get_title(), '', "", '*.*', wx.FD_OPEN)
if dlg.ShowModal() == wx.ID_OK:
self.settings.load(dlg.GetPath())
# update the controls with new values
for label in self.setting_map.keys():
setting = self.setting_map[label]
ctrl = self.controls[label]
value = ctrl.GetValue()
if isinstance(value, str) or isinstance(value, unicode):
ctrl.SetValue(str(setting.value))
else:
ctrl.SetValue(setting.value)
def panel(self, tab_name):
'''return the panel for a named tab'''
return self.panels[tab_name]
def sizer(self, tab_name):
'''return the sizer for a named tab'''
return self.sizers[tab_name]
def refit(self):
'''refit after elements are added'''
self.SetSizerAndFit(self.dialog_sizer)
def _add_input(self, setting, ctrl, ctrl2=None, value=None):
tab_name = setting.tab
label = setting.label
tab = self.panel(tab_name)
box = wx.BoxSizer(wx.HORIZONTAL)
labelctrl = wx.StaticText(tab, -1, label )
box.Add(labelctrl, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
box.Add( ctrl, 1, wx.ALIGN_CENTRE|wx.ALL, 5 )
if ctrl2 is not None:
box.Add( ctrl2, 0, wx.ALIGN_CENTRE|wx.ALL, 5 )
self.sizer(tab_name).Add(box, 0, wx.GROW|wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
self.controls[label] = ctrl
if value is not None:
ctrl.Value = value
else:
ctrl.Value = str(setting.value)
self.control_map[ctrl.GetId()] = label
self.setting_map[label] = setting
def add_text(self, setting, width=300, height=100, multiline=False):
'''add a text input line'''
tab = self.panel(setting.tab)
if multiline:
ctrl = wx.TextCtrl(tab, -1, "", size=(width,height), style=wx.TE_MULTILINE|wx.TE_PROCESS_ENTER)
else:
ctrl = wx.TextCtrl(tab, -1, "", size=(width,-1) )
self._add_input(setting, ctrl)
def add_choice(self, setting, choices):
'''add a choice input line'''
tab = self.panel(setting.tab)
default = setting.value
if default is None:
default = choices[0]
ctrl = wx.ComboBox(tab, -1, choices=choices,
value = str(default),
style = wx.CB_DROPDOWN | wx.CB_READONLY | wx.CB_SORT )
self._add_input(setting, ctrl)
def add_intspin(self, setting):
'''add a spin control'''
tab = self.panel(setting.tab)
default = setting.value
(minv, maxv) = setting.range
ctrl = wx.SpinCtrl(tab, -1,
initial = default,
min = minv,
max = maxv)
self._add_input(setting, ctrl, value=default)
def add_floatspin(self, setting):
'''add a floating point spin control'''
from wx.lib.agw.floatspin import FloatSpin
tab = self.panel(setting.tab)
default = setting.value
(minv, maxv) = setting.range
ctrl = FloatSpin(tab, -1,
value = default,
min_val = minv,
max_val = maxv,
increment = setting.increment)
if setting.format is not None:
ctrl.SetFormat(setting.format)
if setting.digits is not None:
ctrl.SetDigits(setting.digits)
self._add_input(setting, ctrl, value=default)
#----------------------------------------------------------------------
class SettingsDlg(TabbedDialog):
def __init__(self, settings):
title = "Resize the dialog and see how controls adapt!"
self.settings = settings
tabs = []
for k in self.settings.list():
setting = self.settings.get_setting(k)
tab = setting.tab
if tab is None:
tab = 'Settings'
if not tab in tabs:
tabs.append(tab)
title = self.settings.get_title()
if title is None:
title = 'Settings'
TabbedDialog.__init__(self, tabs, title)
for name in self.settings.list():
setting = self.settings.get_setting(name)
if setting.type == bool:
self.add_choice(setting, ['True', 'False'])
elif setting.choice is not None:
self.add_choice(setting, setting.choice)
elif setting.type == int and setting.increment is not None and setting.range is not None:
self.add_intspin(setting)
elif setting.type == float and setting.increment is not None and setting.range is not None:
self.add_floatspin(setting)
else:
self.add_text(setting)
self.refit()
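# --- Hedged usage sketch (illustrative, not part of the original module) ---
# SettingsDlg only needs a settings object exposing list(), get_setting()
# and get_title(); the stub below stands in for the real settings class.
# Save/Load call settings.save()/load(), stubbed here as no-ops, and
# pressing Apply would additionally require a parent_pipe attribute.
if __name__ == "__main__":
    class _Setting(object):
        tab = 'Settings'
        type = str
        choice = None
        increment = None
        range = None
        def __init__(self, label, value):
            self.label = label
            self.value = value
    class _Settings(object):
        def __init__(self):
            self._settings = {'callsign': _Setting('callsign', 'N0CALL')}
        def list(self):
            return self._settings.keys()
        def get_setting(self, name):
            return self._settings[name]
        def get_title(self):
            return 'Demo settings'
        def save(self, path):
            pass
        def load(self, path):
            pass
    app = wx.App(False)
    dlg = SettingsDlg(_Settings())
    dlg.ShowModal()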
|
elaske/mufund
|
refs/heads/develop
|
html5lib/treewalkers/lxmletree.py
|
618
|
from __future__ import absolute_import, division, unicode_literals
from six import text_type
from lxml import etree
from ..treebuilders.etree import tag_regexp
from gettext import gettext
_ = gettext
from . import _base
from .. import ihatexml
def ensure_str(s):
if s is None:
return None
elif isinstance(s, text_type):
return s
else:
return s.decode("utf-8", "strict")
class Root(object):
def __init__(self, et):
self.elementtree = et
self.children = []
if et.docinfo.internalDTD:
self.children.append(Doctype(self,
ensure_str(et.docinfo.root_name),
ensure_str(et.docinfo.public_id),
ensure_str(et.docinfo.system_url)))
root = et.getroot()
node = root
while node.getprevious() is not None:
node = node.getprevious()
while node is not None:
self.children.append(node)
node = node.getnext()
self.text = None
self.tail = None
def __getitem__(self, key):
return self.children[key]
def getnext(self):
return None
def __len__(self):
return 1
class Doctype(object):
def __init__(self, root_node, name, public_id, system_id):
self.root_node = root_node
self.name = name
self.public_id = public_id
self.system_id = system_id
self.text = None
self.tail = None
def getnext(self):
return self.root_node.children[1]
class FragmentRoot(Root):
def __init__(self, children):
self.children = [FragmentWrapper(self, child) for child in children]
self.text = self.tail = None
def getnext(self):
return None
class FragmentWrapper(object):
def __init__(self, fragment_root, obj):
self.root_node = fragment_root
self.obj = obj
if hasattr(self.obj, 'text'):
self.text = ensure_str(self.obj.text)
else:
self.text = None
if hasattr(self.obj, 'tail'):
self.tail = ensure_str(self.obj.tail)
else:
self.tail = None
def __getattr__(self, name):
return getattr(self.obj, name)
def getnext(self):
siblings = self.root_node.children
idx = siblings.index(self)
if idx < len(siblings) - 1:
return siblings[idx + 1]
else:
return None
def __getitem__(self, key):
return self.obj[key]
def __bool__(self):
return bool(self.obj)
def getparent(self):
return None
def __str__(self):
return str(self.obj)
def __unicode__(self):
return str(self.obj)
def __len__(self):
return len(self.obj)
class TreeWalker(_base.NonRecursiveTreeWalker):
def __init__(self, tree):
if hasattr(tree, "getroot"):
tree = Root(tree)
elif isinstance(tree, list):
tree = FragmentRoot(tree)
_base.NonRecursiveTreeWalker.__init__(self, tree)
self.filter = ihatexml.InfosetFilter()
def getNodeDetails(self, node):
if isinstance(node, tuple): # Text node
node, key = node
assert key in ("text", "tail"), _("Text nodes are text or tail, found %s") % key
return _base.TEXT, ensure_str(getattr(node, key))
elif isinstance(node, Root):
return (_base.DOCUMENT,)
elif isinstance(node, Doctype):
return _base.DOCTYPE, node.name, node.public_id, node.system_id
elif isinstance(node, FragmentWrapper) and not hasattr(node, "tag"):
return _base.TEXT, node.obj
elif node.tag == etree.Comment:
return _base.COMMENT, ensure_str(node.text)
elif node.tag == etree.Entity:
return _base.ENTITY, ensure_str(node.text)[1:-1] # strip &;
else:
# This is assumed to be an ordinary element
match = tag_regexp.match(ensure_str(node.tag))
if match:
namespace, tag = match.groups()
else:
namespace = None
tag = ensure_str(node.tag)
attrs = {}
for name, value in list(node.attrib.items()):
name = ensure_str(name)
value = ensure_str(value)
match = tag_regexp.match(name)
if match:
attrs[(match.group(1), match.group(2))] = value
else:
attrs[(None, name)] = value
return (_base.ELEMENT, namespace, self.filter.fromXmlName(tag),
attrs, len(node) > 0 or node.text)
def getFirstChild(self, node):
assert not isinstance(node, tuple), _("Text nodes have no children")
assert len(node) or node.text, "Node has no children"
if node.text:
return (node, "text")
else:
return node[0]
def getNextSibling(self, node):
if isinstance(node, tuple): # Text node
node, key = node
assert key in ("text", "tail"), _("Text nodes are text or tail, found %s") % key
if key == "text":
# XXX: we cannot use a "bool(node) and node[0] or None" construct here
# because node[0] might evaluate to False if it has no child element
if len(node):
return node[0]
else:
return None
else: # tail
return node.getnext()
return (node, "tail") if node.tail else node.getnext()
def getParentNode(self, node):
if isinstance(node, tuple): # Text node
node, key = node
assert key in ("text", "tail"), _("Text nodes are text or tail, found %s") % key
if key == "text":
return node
# else: fallback to "normal" processing
return node.getparent()
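# --- Hedged usage sketch (illustrative): walking a small lxml document
# with the TreeWalker above. Assumes lxml is installed and iteration is
# provided by the _base.NonRecursiveTreeWalker machinery it builds on.
if __name__ == "__main__":
    tree = etree.fromstring("<div>hello <b>world</b></div>").getroottree()
    for token in TreeWalker(tree):
        print(token)  # StartTag / Characters / EndTag dicts in document order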
|
calancha/DIRAC
|
refs/heads/rel-v6r12
|
RequestManagementSystem/test/FileTests.py
|
4
|
########################################################################
# $HeadURL$
# File: FileTest.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2012/08/06 13:48:54
########################################################################
""" :mod: FileTest
=======================
.. module: FileTest
:synopsis: test cases for Files
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
test cases for Files
"""
__RCSID__ = "$Id$"
# #
# @file FileTest.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2012/08/06 13:49:05
# @brief Definition of FileTest class.
# # imports
import unittest
# # from DIRAC
from DIRAC.RequestManagementSystem.Client.Operation import Operation
# # SUT
from DIRAC.RequestManagementSystem.Client.File import File
########################################################################
class FileTests( unittest.TestCase ):
"""
  .. class:: FileTests
"""
def setUp( self ):
""" test setup """
self.fromDict = { "Size" : 1, "LFN" : "/test/lfn", "ChecksumType" : "ADLER32", "Checksum" : "123456", "Status" : "Waiting" }
def tearDown( self ):
""" test tear down """
del self.fromDict
def test01ctors( self ):
""" File construction and (de)serialisation """
# # empty default ctor
theFile = File()
self.assertEqual( isinstance( theFile, File ), True )
# # fromDict
try:
theFile = File( self.fromDict )
except AttributeError, error:
print "AttributeError: %s" % str( error )
self.assertEqual( isinstance( theFile, File ), True )
for key, value in self.fromDict.items():
self.assertEqual( getattr( theFile, key ), value )
toJSON = theFile.toJSON()
self.assertEqual( toJSON["OK"], True, "JSON serialization error" )
def test02props( self ):
""" test props and attributes """
theFile = File()
# valid props
theFile.FileID = 1
self.assertEqual( theFile.FileID, 1 )
theFile.Status = "Done"
self.assertEqual( theFile.Status, "Done" )
theFile.LFN = "/some/path/somewhere"
self.assertEqual( theFile.LFN, "/some/path/somewhere" )
theFile.PFN = "/some/path/somewhere"
self.assertEqual( theFile.PFN, "/some/path/somewhere" )
theFile.Attempt = 1
self.assertEqual( theFile.Attempt, 1 )
theFile.Size = 1
self.assertEqual( theFile.Size, 1 )
theFile.GUID = "2bbabe80-e2f1-11e1-9b23-0800200c9a66"
self.assertEqual( theFile.GUID, "2bbabe80-e2f1-11e1-9b23-0800200c9a66" )
theFile.ChecksumType = "adler32"
self.assertEqual( theFile.ChecksumType, "ADLER32" )
theFile.Checksum = "123456"
self.assertEqual( theFile.Checksum, "123456" )
# #
theFile.Checksum = None
theFile.ChecksumType = None
self.assertEqual( theFile.Checksum, "" )
self.assertEqual( theFile.ChecksumType, "" )
# # invalid props
# FileID
try:
theFile.FileID = "foo"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
# parent
parent = Operation( { "OperationID" : 99999 } )
parent += theFile
theFile.FileID = 0
self.assertEqual( parent.OperationID, theFile.OperationID )
try:
theFile.OperationID = 111111
except Exception, error:
self.assertEqual( isinstance( error, AttributeError ), True )
self.assertEqual( str( error ), "can't set attribute" )
# LFN
try:
theFile.LFN = 1
except Exception, error:
self.assertEqual( isinstance( error, TypeError ), True )
self.assertEqual( str( error ), "LFN has to be a string!" )
try:
theFile.LFN = "../some/path"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "LFN should be an absolute path!" )
# PFN
try:
theFile.PFN = 1
except Exception, error:
self.assertEqual( isinstance( error, TypeError ), True )
self.assertEqual( str( error ), "PFN has to be a string!" )
try:
theFile.PFN = "snafu"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "Wrongly formatted PFN!" )
# Size
try:
theFile.Size = "snafu"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
try:
theFile.Size = -1
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "Size should be a positive integer!" )
# GUID
try:
theFile.GUID = "snafuu-uuu-uuu-uuu-uuu-u"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "'snafuu-uuu-uuu-uuu-uuu-u' is not a valid GUID!" )
try:
theFile.GUID = 2233345
except Exception, error:
self.assertEqual( isinstance( error, TypeError ), True )
self.assertEqual( str( error ), "GUID should be a string!" )
# Attempt
try:
theFile.Attempt = "snafu"
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
try:
theFile.Attempt = -1
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "Attempt should be a positive integer!" )
# Status
try:
theFile.Status = None
except Exception, error:
self.assertEqual( isinstance( error, ValueError ), True )
self.assertEqual( str( error ), "Unknown Status: None!" )
# Error
try:
theFile.Error = Exception( "test" )
except Exception, error:
self.assertEqual( isinstance( error, TypeError ), True )
self.assertEqual( str( error ), "Error has to be a string!" )
# # test execution
if __name__ == "__main__":
testLoader = unittest.TestLoader()
fileTests = testLoader.loadTestsFromTestCase( FileTests )
suite = unittest.TestSuite( [ fileTests ] )
unittest.TextTestRunner( verbosity = 3 ).run( suite )
|
Lilykos/invenio
|
refs/heads/master
|
invenio/modules/uploader/errors.py
|
13
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
class UploaderException(Exception):
"""Base exception"""
pass
class UploaderWorkflowException(UploaderException):
"""Raised by the workflows containing useful information to undo"""
def __init__(self, step, msg):
super(UploaderWorkflowException, self).__init__(msg)
self.step = step
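# --- Hedged usage sketch (illustrative): a workflow step raising the
# exception above, and a caller using the carried step index to undo.
if __name__ == "__main__":
    try:
        raise UploaderWorkflowException(2, "record validation failed")
    except UploaderWorkflowException as exc:
        print("undo from step %d: %s" % (exc.step, exc))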
|
kmoocdev2/edx-platform
|
refs/heads/real_2019
|
openedx/core/djangoapps/user_api/course_tag/api.py
|
12
|
"""
A service-like user_info interface. Could be made into an http API later, but for now
just in-process. Exposes global and per-course key-value pairs for users.
Implementation note:
Stores global metadata using the UserPreference model, and per-course metadata using the
UserCourseTag model.
"""
from collections import defaultdict
from openedx.core.djangoapps.request_cache import get_cache
from ..models import UserCourseTag
# Scopes
# (currently only allows per-course tags. Can be expanded to support
# global tags (e.g. using the existing UserPreferences table))
COURSE_SCOPE = 'course'
class BulkCourseTags(object):
CACHE_NAMESPACE = u'user_api.course_tag.api'
@classmethod
def prefetch(cls, course_id, users):
"""
        Prefetches the course tags of the specified users
        for the specified course_id.
Args:
users: iterator of User objects
course_id: course identifier (CourseKey)
Returns:
course_tags: a dict of dicts,
where the primary key is the user's id
and the secondary key is the course tag's key
"""
course_tags = defaultdict(dict)
for tag in UserCourseTag.objects.filter(user__in=users, course_id=course_id).select_related('user'):
course_tags[tag.user.id][tag.key] = tag.value
get_cache(cls.CACHE_NAMESPACE)[cls._cache_key(course_id)] = course_tags
@classmethod
def get_course_tag(cls, user_id, course_id, key):
return get_cache(cls.CACHE_NAMESPACE)[cls._cache_key(course_id)][user_id][key]
@classmethod
def is_prefetched(cls, course_id):
return cls._cache_key(course_id) in get_cache(cls.CACHE_NAMESPACE)
@classmethod
def _cache_key(cls, course_id):
return u'course_tag.{}'.format(course_id)
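# Hedged sketch (illustrative, not part of the API): the intended
# bulk pattern — warm the request cache once, then serve per-user
# lookups without further queries.
def _example_bulk_usage(course_id, users, key):
    """Illustrative only. Note that BulkCourseTags.get_course_tag raises
    KeyError for users that have no value stored for the given key.
    """
    BulkCourseTags.prefetch(course_id, users)
    return dict(
        (user.id, BulkCourseTags.get_course_tag(user.id, course_id, key))
        for user in users
    )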
def get_course_tag(user, course_id, key):
"""
Gets the value of the user's course tag for the specified key in the specified
course_id.
Args:
user: the User object for the course tag
course_id: course identifier (string)
key: arbitrary (<=255 char string)
Returns:
string value, or None if there is no value saved
"""
if BulkCourseTags.is_prefetched(course_id):
try:
return BulkCourseTags.get_course_tag(user.id, course_id, key)
except KeyError:
return None
try:
record = UserCourseTag.objects.get(
user=user,
course_id=course_id,
key=key)
return record.value
except UserCourseTag.DoesNotExist:
return None
def set_course_tag(user, course_id, key, value):
"""
Sets the value of the user's course tag for the specified key in the specified
course_id. Overwrites any previous value.
The intention is that the values are fairly short, as they will be included in all
analytics events about this user.
Args:
user: the User object
course_id: course identifier (string)
key: arbitrary (<=255 char string)
value: arbitrary string
"""
# pylint: disable=fixme
# TODO: There is a risk of IntegrityErrors being thrown here given
# simultaneous calls from many processes. Handle by retrying after
# a short delay?
record, _ = UserCourseTag.objects.get_or_create(
user=user,
course_id=course_id,
key=key)
record.value = value
record.save()
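# Hedged sketch (illustrative): the typical write-then-read pattern for
# the two module-level functions above. Requires a configured Django
# environment; `user` is a User instance, `course_id` a course identifier.
def _example_tag_roundtrip(user, course_id):
    set_course_tag(user, course_id, 'experiment_group', 'B')
    # Returns 'B'; would return None if the tag had never been set.
    return get_course_tag(user, course_id, 'experiment_group')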
|
abramhindle/UnnaturalCodeFork
|
refs/heads/master
|
python/testdata/launchpad/lib/lp/app/browser/tales.py
|
1
|
# Copyright 2009-2013 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Implementation of the lp: htmlform: fmt: namespaces in TALES."""
__metaclass__ = type
from bisect import bisect
from datetime import (
datetime,
timedelta,
)
from email.Utils import formatdate
import math
import os.path
import rfc822
import sys
from textwrap import dedent
import urllib
from lazr.enum import enumerated_type_registry
from lazr.restful.utils import get_current_browser_request
from lazr.uri import URI
import pytz
from z3c.ptcompat import ViewPageTemplateFile
from zope.component import (
adapts,
getMultiAdapter,
getUtility,
queryAdapter,
)
from zope.error.interfaces import IErrorReportingUtility
from zope.interface import (
Attribute,
implements,
Interface,
)
from zope.publisher.browser import BrowserView
from zope.publisher.defaultview import getDefaultViewName
from zope.schema import TextLine
from zope.security.interfaces import Unauthorized
from zope.security.proxy import isinstance as zope_isinstance
from zope.traversing.interfaces import (
IPathAdapter,
ITraversable,
TraversalError,
)
from lp import _
from lp.app.browser.badge import IHasBadges
from lp.app.browser.stringformatter import FormattersAPI
from lp.app.enums import PRIVATE_INFORMATION_TYPES
from lp.app.interfaces.launchpad import (
IHasIcon,
IHasLogo,
IHasMugshot,
IPrivacy,
)
from lp.blueprints.interfaces.specification import ISpecification
from lp.blueprints.interfaces.sprint import ISprint
from lp.bugs.interfaces.bug import IBug
from lp.buildmaster.enums import BuildStatus
from lp.code.interfaces.branch import IBranch
from lp.layers import LaunchpadLayer
from lp.registry.interfaces.distribution import IDistribution
from lp.registry.interfaces.distributionsourcepackage import (
IDistributionSourcePackage,
)
from lp.registry.interfaces.person import IPerson
from lp.registry.interfaces.product import IProduct
from lp.registry.interfaces.projectgroup import IProjectGroup
from lp.services.utils import total_seconds
from lp.services.webapp.authorization import check_permission
from lp.services.webapp.canonicalurl import nearest_adapter
from lp.services.webapp.error import SystemErrorView
from lp.services.webapp.escaping import (
html_escape,
structured,
)
from lp.services.webapp.interfaces import (
IApplicationMenu,
IContextMenu,
IFacetMenu,
ILaunchBag,
INavigationMenu,
IPrimaryContext,
NoCanonicalUrl,
)
from lp.services.webapp.menu import (
get_current_view,
get_facet,
)
from lp.services.webapp.publisher import (
canonical_url,
LaunchpadView,
nearest,
)
from lp.services.webapp.session import get_cookie_domain
from lp.services.webapp.url import urlappend
from lp.soyuz.enums import ArchivePurpose
from lp.soyuz.interfaces.archive import (
IArchive,
IPPA,
)
from lp.soyuz.interfaces.binarypackagename import IBinaryAndSourcePackageName
SEPARATOR = ' : '
def format_link(obj, view_name=None, empty_value='None'):
"""Return the equivalent of obj/fmt:link as a string."""
if obj is None:
return empty_value
adapter = queryAdapter(obj, IPathAdapter, 'fmt')
link = getattr(adapter, 'link', None)
if link is None:
raise NotImplementedError("Missing link function on adapter.")
return link(view_name)
class MenuLinksDict(dict):
"""A dict class to construct menu links when asked for and not before.
We store all the information we need to construct the requested links,
including the menu object and request url.
"""
def __init__(self, menu, request_url, request):
self._request_url = request_url
self._menu = menu
self._all_link_names = []
self._extra_link_names = []
dict.__init__(self)
        # The object has the facet, but does not have a menu; this
        # is probably the overview menu, which is the default facet.
if menu is None or getattr(menu, 'disabled', False):
return
menu.request = request
# We get all the possible link names for the menu.
# The link names are the defined menu links plus any extras.
self._all_link_names = list(menu.links)
extras = menu.extra_attributes
if extras is not None:
self._extra_link_names = list(extras)
self._all_link_names.extend(extras)
def __getitem__(self, link_name):
if not link_name in self._all_link_names:
raise KeyError(link_name)
link = dict.get(self, link_name, None)
if link is None:
if link_name in self._extra_link_names:
link = getattr(self._menu, link_name, None)
else:
link = self._menu.initLink(link_name, self._request_url)
if not link_name in self._extra_link_names:
self._menu.updateLink(link, self._request_url)
self[link_name] = link
return link
def __delitem__(self, key):
self._all_link_names.remove(key)
dict.__delitem__(self, key)
def items(self):
return zip(self._all_link_names, self.values())
def values(self):
return [self[key] for key in self._all_link_names]
def keys(self):
return self._all_link_names
def iterkeys(self):
return iter(self._all_link_names)
__iter__ = iterkeys
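# Hedged sketch (illustrative): MenuLinksDict builds links lazily, on
# first access, through the menu's initLink/updateLink hooks (stubbed
# here; real menus come from the Launchpad menu framework).
def _example_menu_links_dict():
    class _StubMenu(object):
        links = ['edit']
        extra_attributes = None
        disabled = False
        def initLink(self, link_name, request_url):
            return '<%s link>' % link_name
        def updateLink(self, link, request_url):
            pass
    links = MenuLinksDict(_StubMenu(), request_url=None, request=None)
    return links['edit']  # constructed only now -> '<edit link>'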
class MenuAPI:
"""Namespace to give access to the facet menus.
The facet menu can be accessed with an expression like:
tal:define="facetmenu view/menu:facet"
which gives the facet menu of the nearest object along the canonical url
chain that has an IFacetMenu adapter.
"""
def __init__(self, context):
self._tales_context = context
if zope_isinstance(context, (LaunchpadView, BrowserView)):
# The view is a LaunchpadView or a SimpleViewClass from a
# template. The facet is added to the call by the ZCML.
self.view = context
self._context = self.view.context
self._request = self.view.request
self._selectedfacetname = getattr(
self.view, '__launchpad_facetname__', None)
else:
self._context = context
self._request = get_current_browser_request()
self.view = None
self._selectedfacetname = None
def __getattribute__(self, facet):
"""Retrieve the links associated with a facet.
It's used with expressions like context/menu:bugs/subscribe.
:return: A dictionary mapping the link name to the associated Link
object.
:raise AttributeError: when there is no application menu for the
facet.
"""
# Use __getattribute__ instead of __getattr__, since __getattr__
# gets called if any of the other properties raise an AttributeError,
        # which makes troubleshooting confusing. The has_facet check can't
        # easily be placed first, since all the properties it uses would
        # need to be retrieved with object.__getattribute__().
missing = object()
if (getattr(MenuAPI, facet, missing) is not missing
or facet in object.__getattribute__(self, '__dict__')):
return object.__getattribute__(self, facet)
has_facet = object.__getattribute__(self, '_has_facet')
if not has_facet(facet):
raise AttributeError(facet)
menu = queryAdapter(self._context, IApplicationMenu, facet)
if menu is None:
menu = queryAdapter(self._context, INavigationMenu, facet)
links = self._getMenuLinksAndAttributes(menu)
object.__setattr__(self, facet, links)
return links
def _getMenuLinksAndAttributes(self, menu):
"""Return a dict of the links and attributes of the menu."""
return MenuLinksDict(menu, self._request_url(), self._request)
def _has_facet(self, facet):
"""Does the object have the named facet?"""
menu = self._facet_menu()
if menu is None:
return False
return facet in menu.links
def _request_url(self):
request = self._request
if request is None:
return None
request_urlobj = URI(request.getURL())
# If the default view name is being used, we will want the url
# without the default view name.
defaultviewname = getDefaultViewName(self._context, request)
if request_urlobj.path.rstrip('/').endswith(defaultviewname):
request_urlobj = URI(request.getURL(1))
query = request.get('QUERY_STRING')
if query:
request_urlobj = request_urlobj.replace(query=query)
return request_urlobj
def facet(self):
"""Return the IFacetMenu links related to the context."""
menu = self._facet_menu()
if menu is None:
return []
menu.request = self._request
return list(menu.iterlinks(
request_url=self._request_url(),
selectedfacetname=self._selectedfacetname))
def _facet_menu(self):
"""Return the IFacetMenu related to the context."""
try:
try:
context = IPrimaryContext(self._context).context
except TypeError:
                # Failure to adapt raises a TypeError. If there was no
# way to adapt, then just use self._context.
context = self._context
menu = nearest_adapter(context, IFacetMenu)
except NoCanonicalUrl:
menu = None
return menu
def selectedfacetname(self):
if self._selectedfacetname is None:
return 'unknown'
else:
return self._selectedfacetname
@property
def context(self):
menu = IContextMenu(self._context, None)
return self._getMenuLinksAndAttributes(menu)
@property
def navigation(self):
"""Navigation menu links list."""
try:
# NavigationMenus may be associated with a content object or one
# of its views. The context we need is the one from the TAL
# expression.
context = self._tales_context
if self._selectedfacetname is not None:
selectedfacetname = self._selectedfacetname
else:
# XXX sinzui 2008-05-09 bug=226917: We should be retrieving
# the facet name from the layer implemented by the request.
view = get_current_view(self._request)
selectedfacetname = get_facet(view)
try:
menu = nearest_adapter(
context, INavigationMenu, name=selectedfacetname)
except NoCanonicalUrl:
menu = None
return self._getMenuLinksAndAttributes(menu)
except AttributeError as e:
            # If this method gets an AttributeError, we rethrow it as an
            # AssertionError. Otherwise, zope will hide the root cause
            # of the error and just say that "navigation" can't be traversed.
            new_exception = AssertionError(
                'AttributeError in MenuAPI.navigation: %s' % e)
            # We cannot use parens around the arguments to `raise`,
            # since that will cause it to ignore the third argument,
            # which is the original traceback.
            raise new_exception, None, sys.exc_info()[2]
class CountAPI:
"""Namespace to provide counting-related functions, such as length.
This is available for all objects. Individual operations may fail for
objects that do not support them.
"""
def __init__(self, context):
self._context = context
def len(self):
"""somelist/count:len gives you an int that is len(somelist)."""
return len(self._context)
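# Hedged sketch (illustrative): count:len simply defers to Python's
# len() on the TALES context object.
def _example_count_api():
    return CountAPI([1, 2, 3]).len()  # -> 3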
class EnumValueAPI:
"""Namespace to test the value of an EnumeratedType Item.
The value is given in the next path step.
tal:condition="somevalue/enumvalue:BISCUITS"
Registered for canonical.lazr.enum.Item.
"""
implements(ITraversable)
def __init__(self, item):
self.item = item
def traverse(self, name, furtherPath):
if self.item.name == name:
return True
else:
# Check whether this was an allowed value for this
# enumerated type.
enum = self.item.enum
try:
enum.getTermByToken(name)
except LookupError:
raise TraversalError(
'The enumerated type %s does not have a value %s.' %
(enum.name, name))
return False
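# Hedged sketch (illustrative): a name match returns True; unknown names
# are validated against the item's enum (stubbed here) and rejected.
def _example_enum_value_api():
    class _StubEnum(object):
        name = 'Snacks'
        def getTermByToken(self, token):
            if token != 'BISCUITS':
                raise LookupError(token)
    class _StubItem(object):
        name = 'BISCUITS'
        enum = _StubEnum()
    return EnumValueAPI(_StubItem()).traverse('BISCUITS', [])  # -> True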
class HTMLFormAPI:
"""HTML form helper API, available as request/htmlform:.
Use like:
request/htmlform:fieldname/selected/literalvalue
if request.form[fieldname] == literalvalue:
return "selected"
else:
return None
"""
implements(ITraversable)
def __init__(self, request):
self.form = request.form
def traverse(self, name, furtherPath):
if len(furtherPath) == 1:
operation = furtherPath.pop()
return HTMLFormOperation(self.form.get(name), operation)
else:
operation = furtherPath.pop()
value = furtherPath.pop()
if htmlmatch(self.form.get(name), value):
return operation
else:
return None
def htmlmatch(formvalue, value):
value = str(value)
if isinstance(formvalue, list):
return value in formvalue
else:
return formvalue == value
class HTMLFormOperation:
implements(ITraversable)
def __init__(self, formvalue, operation):
self.formvalue = formvalue
self.operation = operation
def traverse(self, name, furtherPath):
if htmlmatch(self.formvalue, name):
return self.operation
else:
return None
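# Hedged sketch (illustrative): the TALES path
# request/htmlform:colour/selected/red with a form value of 'red'.
# Zope hands traverse() the remaining segments in reverse order, so
# furtherPath is ['red', 'selected'] and the operation pops off first.
def _example_htmlform_api():
    class _StubRequest(object):
        form = {'colour': 'red'}
    return HTMLFormAPI(_StubRequest()).traverse(
        'colour', ['red', 'selected'])  # -> 'selected'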
class IRequestAPI(Interface):
"""Launchpad lp:... API available for an IApplicationRequest."""
person = Attribute("The IPerson for the request's principal.")
cookie_scope = Attribute("The scope parameters for cookies.")
class RequestAPI:
"""Adapter from IApplicationRequest to IRequestAPI."""
implements(IRequestAPI)
def __init__(self, request):
self.request = request
@property
def person(self):
return IPerson(self.request.principal, None)
@property
def cookie_scope(self):
params = '; Path=/'
uri = URI(self.request.getURL())
if uri.scheme == 'https':
params += '; Secure'
domain = get_cookie_domain(uri.host)
if domain is not None:
params += '; Domain=%s' % domain
return params
class DBSchemaAPI:
"""Adapter from integers to things that can extract information from
DBSchemas.
"""
implements(ITraversable)
def __init__(self, number):
self._number = number
def traverse(self, name, furtherPath):
if name in enumerated_type_registry:
enum = enumerated_type_registry[name]
return enum.items[self._number].title
else:
raise TraversalError(name)
class NoneFormatter:
"""Adapter from None to various string formats.
In general, these will return an empty string. They are provided for ease
of handling NULL values from the database, which become None values for
attributes in content classes.
"""
implements(ITraversable)
allowed_names = set([
'approximatedate',
'approximateduration',
'break-long-words',
'date',
'datetime',
'displaydate',
'isodate',
'email-to-html',
'exactduration',
'lower',
'nice_pre',
'nl_to_br',
'pagetitle',
'rfc822utcdatetime',
'text-to-html',
'time',
'url',
'link',
])
def __init__(self, context):
self.context = context
def traverse(self, name, furtherPath):
if name == 'shorten':
if not len(furtherPath):
raise TraversalError(
"you need to traverse a number after fmt:shorten")
# Remove the maxlength from the path as it is a parameter
# and not another traversal command.
furtherPath.pop()
return ''
# We need to check to see if the name has been augmented with optional
# evaluation parameters, delimited by ":". These parameters are:
# param1 = rootsite (used with link and url)
# param2 = default value (in case of context being None)
# We are interested in the default value (param2).
result = ''
for nm in self.allowed_names:
if name.startswith(nm + ":"):
name_parts = name.split(":")
name = name_parts[0]
if len(name_parts) > 2:
result = name_parts[2]
break
if name in self.allowed_names:
return result
else:
raise TraversalError(name)
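# Hedged sketch (illustrative): without a default parameter a None
# context formats to '', while with one (e.g. fmt:link::N/A) the
# default value wins.
def _example_none_formatter():
    fmt = NoneFormatter(None)
    assert fmt.traverse('link', []) == ''
    assert fmt.traverse('link::N/A', []) == 'N/A'
    return fmt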
class ObjectFormatterAPI:
"""Adapter for any object to a formatted string."""
implements(ITraversable)
# Although we avoid mutables as class attributes, the two ones below are
# constants, so it's not a problem. We might want to use something like
# frozenset (http://code.activestate.com/recipes/414283/) here, though.
# The names which can be traversed further (e.g context/fmt:url/+edit).
traversable_names = {
'api_url': 'api_url',
'link': 'link',
'url': 'url',
}
# Names which are allowed but can't be traversed further.
final_traversable_names = {
'pagetitle': 'pagetitle',
'global-css': 'global_css',
}
def __init__(self, context):
self._context = context
def url(self, view_name=None, rootsite=None):
"""Return the object's canonical URL.
:param view_name: If not None, return the URL to the page with that
name on this object.
:param rootsite: If not None, return the URL to the page on the
specified rootsite. Note this is available only for subclasses
that allow specifying the rootsite.
"""
try:
url = canonical_url(
self._context, path_only_if_possible=True,
rootsite=rootsite, view_name=view_name)
except Unauthorized:
url = ""
return url
def api_url(self, context):
"""Return the object's (partial) canonical web service URL.
        This method returns everything that goes after the web service
        version number: effectively the canonical URL, but only the
        relative part, with no site.
"""
try:
url = canonical_url(self._context, force_local_path=True)
except Unauthorized:
url = ""
return url
def traverse(self, name, furtherPath):
"""Traverse the specified path, processing any optional parameters.
Up to 2 parameters are currently supported, and the path name will be
of the form:
name:param1:param2
where
param1 = rootsite (only used for link and url paths).
param2 = default (used when self.context is None). The context
is not None here so this parameter is ignored.
"""
if name.startswith('link:') or name.startswith('url:'):
name_parts = name.split(':')
name = name_parts[0]
rootsite = name_parts[1]
if rootsite != '':
extra_path = None
if len(furtherPath) > 0:
extra_path = '/'.join(reversed(furtherPath))
# Remove remaining entries in furtherPath so that traversal
# stops here.
del furtherPath[:]
if name == 'link':
if rootsite is None:
return self.link(extra_path)
else:
return self.link(extra_path, rootsite=rootsite)
else:
if rootsite is None:
                    return self.url(extra_path)
else:
return self.url(extra_path, rootsite=rootsite)
if '::' in name:
name = name.split(':')[0]
if name in self.traversable_names:
if len(furtherPath) >= 1:
extra_path = '/'.join(reversed(furtherPath))
del furtherPath[:]
else:
extra_path = None
method_name = self.traversable_names[name]
return getattr(self, method_name)(extra_path)
elif name in self.final_traversable_names:
method_name = self.final_traversable_names[name]
return getattr(self, method_name)()
else:
raise TraversalError(name)
def link(self, view_name, rootsite=None):
"""Return an HTML link to the object's page.
The link consists of an icon followed by the object's name.
:param view_name: If not None, the link will point to the page with
that name on this object.
:param rootsite: If not None, return the URL to the page on the
specified rootsite. Note this is available only for subclasses
that allow specifying the rootsite.
"""
raise NotImplementedError(
"No link implementation for %r, IPathAdapter implementation "
"for %r." % (self, self._context))
def global_css(self):
css_classes = set([])
view = self._context
# XXX: Bug #1076074
private = getattr(view, 'private', False)
if private:
css_classes.add('private')
else:
css_classes.add('public')
beta = getattr(view, 'beta_features', [])
if beta:
css_classes.add('beta')
return ' '.join(list(css_classes))
def _getSaneBreadcrumbDetail(self, breadcrumb):
text = breadcrumb.detail
if len(text) > 64:
truncated = '%s...' % text[0:64]
            if truncated.count(u'\u201c') > truncated.count(u'\u201d'):
# Close the open smartquote if it was dropped.
truncated += u'\u201d'
return truncated
return text
def pagetitle(self):
"""The page title to be used.
By default, reverse breadcrumbs are always used if they are available.
        If not available, then the view's .page_title attribute is used.
"""
ROOT_TITLE = 'Launchpad'
view = self._context
request = get_current_browser_request()
hierarchy_view = getMultiAdapter(
(view.context, request), name='+hierarchy')
if (isinstance(view, SystemErrorView) or
hierarchy_view is None or
not hierarchy_view.display_breadcrumbs):
# The breadcrumbs are either not available or are overridden. If
# the view has a .page_title attribute use that.
page_title = getattr(view, 'page_title', None)
if page_title is not None:
return page_title
# If there is no template for the view, just use the default
# Launchpad title.
template = getattr(view, 'template', None)
if template is None:
template = getattr(view, 'index', None)
if template is None:
return ROOT_TITLE
# Use the reverse breadcrumbs.
breadcrumbs = list(reversed(hierarchy_view.items))
if len(breadcrumbs) == 0:
            # This implies there are no breadcrumbs, but more often it
            # means an Unauthorized error is being raised.
return ''
detail_breadcrumb = self._getSaneBreadcrumbDetail(breadcrumbs[0])
title_breadcrumbs = [breadcrumb.text for breadcrumb in breadcrumbs[1:]]
title_text = SEPARATOR.join([detail_breadcrumb] + title_breadcrumbs)
return FormattersAPI(title_text).obfuscate_email()
class ObjectImageDisplayAPI:
"""Base class for producing the HTML that presents objects
as an icon, a logo, a mugshot or a set of badges.
"""
def __init__(self, context):
self._context = context
#def default_icon_resource(self, context):
def sprite_css(self):
"""Return the CSS class for the sprite"""
# XXX: mars 2008-08-22 bug=260468
# This should be refactored. We shouldn't have to do type-checking
# using interfaces.
context = self._context
sprite_string = None
if IProduct.providedBy(context):
sprite_string = 'product'
elif IProjectGroup.providedBy(context):
sprite_string = 'project'
elif IPerson.providedBy(context):
if context.is_team:
sprite_string = 'team'
else:
if context.is_valid_person:
sprite_string = 'person'
else:
sprite_string = 'person-inactive'
elif IDistribution.providedBy(context):
sprite_string = 'distribution'
elif IDistributionSourcePackage.providedBy(context):
sprite_string = 'package-source'
elif ISprint.providedBy(context):
sprite_string = 'meeting'
elif IBug.providedBy(context):
sprite_string = 'bug'
elif IPPA.providedBy(context):
if context.enabled:
sprite_string = 'ppa-icon'
else:
sprite_string = 'ppa-icon-inactive'
elif IArchive.providedBy(context):
sprite_string = 'distribution'
elif IBranch.providedBy(context):
sprite_string = 'branch'
elif ISpecification.providedBy(context):
sprite_string = 'blueprint'
elif IBinaryAndSourcePackageName.providedBy(context):
sprite_string = 'package-source'
if sprite_string is None:
return None
else:
if hasattr(context, 'private') and context.private:
sprite_string = sprite_string + ' private'
return "sprite %s" % sprite_string
def default_logo_resource(self, context):
# XXX: mars 2008-08-22 bug=260468
# This should be refactored. We shouldn't have to do type-checking
# using interfaces.
if IProjectGroup.providedBy(context):
return '/@@/project-logo'
elif IPerson.providedBy(context):
if context.is_team:
return '/@@/team-logo'
else:
if context.is_valid_person:
return '/@@/person-logo'
else:
return '/@@/person-inactive-logo'
elif IProduct.providedBy(context):
return '/@@/product-logo'
elif IDistribution.providedBy(context):
return '/@@/distribution-logo'
elif ISprint.providedBy(context):
return '/@@/meeting-logo'
return None
def default_mugshot_resource(self, context):
# XXX: mars 2008-08-22 bug=260468
# This should be refactored. We shouldn't have to do type-checking
# using interfaces.
if IProjectGroup.providedBy(context):
return '/@@/project-mugshot'
elif IPerson.providedBy(context):
if context.is_team:
return '/@@/team-mugshot'
else:
if context.is_valid_person:
return '/@@/person-mugshot'
else:
return '/@@/person-inactive-mugshot'
elif IProduct.providedBy(context):
return '/@@/product-mugshot'
elif IDistribution.providedBy(context):
return '/@@/distribution-mugshot'
elif ISprint.providedBy(context):
return '/@@/meeting-mugshot'
return None
def custom_icon_url(self):
"""Return the URL for this object's icon."""
context = self._context
if IHasIcon.providedBy(context) and context.icon is not None:
icon_url = context.icon.getURL()
return icon_url
elif context is None:
return ''
else:
return None
def icon(self):
        # XXX: this should go away as soon as all image:icon uses are replaced
return None
def logo(self):
"""Return the appropriate <img> tag for this object's logo.
:return: A string, or None if the context object doesn't have
a logo.
"""
context = self._context
if not IHasLogo.providedBy(context):
context = nearest(context, IHasLogo)
if context is None:
# we use the Launchpad logo for anything which is in no way
# related to a Pillar (for example, a buildfarm)
url = '/@@/launchpad-logo'
elif context.logo is not None:
url = context.logo.getURL()
else:
url = self.default_logo_resource(context)
if url is None:
# We want to indicate that there is no logo for this
# object.
return None
logo = '<img alt="" width="64" height="64" src="%s" />'
return logo % url
def mugshot(self):
"""Return the appropriate <img> tag for this object's mugshot.
:return: A string, or None if the context object doesn't have
a mugshot.
"""
context = self._context
assert IHasMugshot.providedBy(context), 'No Mugshot for this item'
if context.mugshot is not None:
url = context.mugshot.getURL()
else:
url = self.default_mugshot_resource(context)
if url is None:
# We want to indicate that there is no mugshot for this
# object.
return None
mugshot = """<img alt="" class="mugshot"
width="192" height="192" src="%s" />"""
return mugshot % url
def badges(self):
raise NotImplementedError(
"Badge display not implemented for this item")
def boolean(self):
"""Return an icon representing the context as a boolean value."""
if bool(self._context):
icon = 'yes'
else:
icon = 'no'
markup = '<span class="sprite %(icon)s action-icon">%(icon)s</span>'
return markup % dict(icon=icon)
class BugTaskImageDisplayAPI(ObjectImageDisplayAPI):
"""Adapter for IBugTask objects to a formatted string. This inherits
from the generic ObjectImageDisplayAPI and overrides the icon
presentation method.
Used for image:icon.
"""
implements(ITraversable)
allowed_names = set([
'icon',
'logo',
'mugshot',
'badges',
'sprite_css',
])
icon_template = (
'<span alt="%s" title="%s" class="%s"></span>')
linked_icon_template = (
'<a href="%s" alt="%s" title="%s" class="%s"></a>')
def traverse(self, name, furtherPath):
"""Special-case traversal for icons with an optional rootsite."""
if name in self.allowed_names:
return getattr(self, name)()
else:
raise TraversalError(name)
def sprite_css(self):
"""Return the CSS class for the sprite"""
if self._context.importance:
importance = self._context.importance.title.lower()
return "sprite bug-%s" % importance
else:
return "sprite bug"
def icon(self):
"""Display the icon dependent on the IBugTask.importance."""
if self._context.importance:
importance = self._context.importance.title.lower()
alt = "(%s)" % importance
title = importance.capitalize()
if importance not in ("undecided", "wishlist"):
                # The other importance names do not make a lot of sense on
# their own, so tack on a noun here.
title += " importance"
css = "sprite bug-%s" % importance
else:
alt = ""
title = ""
css = self.sprite_css()
return self.icon_template % (alt, title, css)
def _hasBugBranch(self):
"""Return whether the bug has a branch linked to it."""
return not self._context.bug.linked_branches.is_empty()
def _hasSpecification(self):
"""Return whether the bug is linked to a specification."""
return not self._context.bug.specifications.is_empty()
def _hasPatch(self):
"""Return whether the bug has a patch."""
return self._context.bug.has_patches
def badges(self):
badges = []
information_type = self._context.bug.information_type
if information_type in PRIVATE_INFORMATION_TYPES:
badges.append(self.icon_template % (
information_type.title, information_type.description,
"sprite private"))
if self._hasBugBranch():
badges.append(self.icon_template % (
"branch", "Branch exists", "sprite branch"))
if self._hasSpecification():
badges.append(self.icon_template % (
"blueprint", "Related to a blueprint", "sprite blueprint"))
if self._context.milestone:
milestone_text = "milestone %s" % self._context.milestone.name
badges.append(self.linked_icon_template % (
canonical_url(self._context.milestone),
milestone_text, "Linked to %s" % milestone_text,
"sprite milestone"))
if self._hasPatch():
badges.append(self.icon_template % (
"haspatch", "Has a patch", "sprite haspatch-icon"))
# Join with spaces to avoid the icons smashing into each other
# when multiple ones are presented.
return " ".join(badges)
class BugTaskListingItemImageDisplayAPI(BugTaskImageDisplayAPI):
"""Formatter for image:badges for BugTaskListingItem.
The BugTaskListingItem has some attributes to decide whether a badge
should be displayed, which don't require a DB query when they are
accessed.
"""
def _hasBugBranch(self):
"""See `BugTaskImageDisplayAPI`"""
return self._context.has_bug_branch
def _hasSpecification(self):
"""See `BugTaskImageDisplayAPI`"""
return self._context.has_specification
def _hasPatch(self):
"""See `BugTaskImageDisplayAPI`"""
return self._context.has_patch
class QuestionImageDisplayAPI(ObjectImageDisplayAPI):
"""Adapter for IQuestion to a formatted string. Used for image:icon."""
def sprite_css(self):
return "sprite question"
class SpecificationImageDisplayAPI(ObjectImageDisplayAPI):
"""Adapter for ISpecification objects to a formatted string. This inherits
from the generic ObjectImageDisplayAPI and overrides the icon
presentation method.
Used for image:icon.
"""
icon_template = (
'<span alt="%s" title="%s" class="%s" />')
def sprite_css(self):
"""Return the CSS class for the sprite"""
sprite_str = "sprite blueprint"
if self._context.priority:
priority = self._context.priority.title.lower()
sprite_str = sprite_str + "-%s" % priority
if self._context.private:
sprite_str = sprite_str + ' private'
return sprite_str
def badges(self):
badges = ''
if len(self._context.linked_branches) > 0:
badges += self.icon_template % (
"branch", "Branch is available", "sprite branch")
if self._context.informational:
badges += self.icon_template % (
"informational", "Blueprint is purely informational",
"sprite info")
return badges
class KarmaCategoryImageDisplayAPI(ObjectImageDisplayAPI):
"""Adapter for IKarmaCategory objects to an image.
Used for image:icon.
"""
icons_for_karma_categories = {
'bugs': '/@@/bug',
'code': '/@@/branch',
'translations': '/@@/translation',
'specs': '/@@/blueprint',
'soyuz': '/@@/package-source',
'answers': '/@@/question'}
def icon(self):
icon = self.icons_for_karma_categories[self._context.name]
return ('<img height="14" width="14" alt="" title="%s" src="%s" />'
% (self._context.title, icon))
class MilestoneImageDisplayAPI(ObjectImageDisplayAPI):
"""Adapter for IMilestone objects to an image.
Used for image:icon.
"""
def icon(self):
"""Return the appropriate <img> tag for the milestone icon."""
return '<img height="14" width="14" alt="" src="/@@/milestone" />'
class BuildImageDisplayAPI(ObjectImageDisplayAPI):
"""Adapter for IBuild objects to an image.
Used for image:icon.
"""
icon_template = (
'<img width="%(width)s" height="14" alt="%(alt)s" '
'title="%(title)s" src="%(src)s" />')
def icon(self):
"""Return the appropriate <img> tag for the build icon."""
icon_map = {
BuildStatus.NEEDSBUILD: {'src': "/@@/build-needed"},
BuildStatus.FULLYBUILT: {'src': "/@@/build-success"},
BuildStatus.FAILEDTOBUILD: {
'src': "/@@/build-failed",
'width': '16',
},
BuildStatus.MANUALDEPWAIT: {'src': "/@@/build-depwait"},
BuildStatus.CHROOTWAIT: {'src': "/@@/build-chrootwait"},
BuildStatus.SUPERSEDED: {'src': "/@@/build-superseded"},
BuildStatus.BUILDING: {'src': "/@@/processing"},
BuildStatus.UPLOADING: {'src': "/@@/processing"},
BuildStatus.FAILEDTOUPLOAD: {'src': "/@@/build-failedtoupload"},
BuildStatus.CANCELLING: {'src': "/@@/processing"},
BuildStatus.CANCELLED: {'src': "/@@/build-failed"},
}
alt = '[%s]' % self._context.status.name
title = self._context.status.title
source = icon_map[self._context.status].get('src')
width = icon_map[self._context.status].get('width', '14')
return self.icon_template % {
'alt': alt,
'title': title,
'src': source,
'width': width,
}
class ArchiveImageDisplayAPI(ObjectImageDisplayAPI):
"""Adapter for IArchive objects to an image.
Used for image:icon.
"""
icon_template = """
<img width="14" height="14" alt="%s" title="%s" src="%s" />
"""
def icon(self):
"""Return the appropriate <img> tag for an archive."""
icon_map = {
ArchivePurpose.PRIMARY: '/@@/distribution',
ArchivePurpose.PARTNER: '/@@/distribution',
ArchivePurpose.PPA: '/@@/ppa-icon',
ArchivePurpose.COPY: '/@@/distribution',
}
alt = '[%s]' % self._context.purpose.title
title = self._context.purpose.title
source = icon_map[self._context.purpose]
return self.icon_template % (alt, title, source)
class BadgeDisplayAPI:
"""Adapter for IHasBadges to the images for the badges.
Used for context/badges:small and context/badges:large.
"""
def __init__(self, context):
# Adapt the context.
self.context = IHasBadges(context)
def small(self):
"""Render the visible badge's icon images."""
badges = self.context.getVisibleBadges()
return ''.join([badge.renderIconImage() for badge in badges])
def large(self):
"""Render the visible badge's heading images."""
badges = self.context.getVisibleBadges()
return ''.join([badge.renderHeadingImage() for badge in badges])
class PersonFormatterAPI(ObjectFormatterAPI):
"""Adapter for `IPerson` objects to a formatted string."""
traversable_names = {'link': 'link', 'url': 'url', 'api_url': 'api_url',
'icon': 'icon',
'displayname': 'displayname',
'unique_displayname': 'unique_displayname',
'link-display-name-id': 'link_display_name_id',
}
final_traversable_names = {'local-time': 'local_time'}
final_traversable_names.update(ObjectFormatterAPI.final_traversable_names)
def local_time(self):
"""Return the local time for this person."""
time_zone = 'UTC'
if self._context.time_zone is not None:
time_zone = self._context.time_zone
return datetime.now(pytz.timezone(time_zone)).strftime('%T %Z')
def url(self, view_name=None, rootsite='mainsite'):
"""See `ObjectFormatterAPI`.
The default URL for a person is to the mainsite.
"""
return super(PersonFormatterAPI, self).url(view_name, rootsite)
def _makeLink(self, view_name, rootsite, text):
person = self._context
url = self.url(view_name, rootsite)
custom_icon = ObjectImageDisplayAPI(person).custom_icon_url()
if custom_icon is None:
css_class = ObjectImageDisplayAPI(person).sprite_css()
return structured(
'<a href="%s" class="%s">%s</a>',
url, css_class, text).escapedtext
else:
return structured(
'<a href="%s" class="bg-image" '
'style="background-image: url(%s)">%s</a>',
url, custom_icon, text).escapedtext
def link(self, view_name, rootsite='mainsite'):
"""See `ObjectFormatterAPI`.
Return an HTML link to the person's page containing an icon
followed by the person's name. The default URL for a person is to
the mainsite.
"""
return self._makeLink(view_name, rootsite, self._context.displayname)
def displayname(self, view_name, rootsite=None):
"""Return the displayname as a string."""
person = self._context
return person.displayname
def unique_displayname(self, view_name):
"""Return the unique_displayname as a string."""
person = self._context
return person.unique_displayname
def icon(self, view_name):
"""Return the URL for the person's icon."""
custom_icon = ObjectImageDisplayAPI(
self._context).custom_icon_url()
if custom_icon is None:
css_class = ObjectImageDisplayAPI(self._context).sprite_css()
return '<span class="' + css_class + '"></span>'
else:
return '<img src="%s" width="14" height="14" />' % custom_icon
def link_display_name_id(self, view_name):
"""Return a link to the user's profile page.
The link text uses both the display name and Launchpad id to clearly
indicate which user profile is linked.
"""
text = self.unique_displayname(None)
return self._makeLink(view_name, 'mainsite', text)
class MixedVisibilityError(Exception):
"""An informational error that visibility is being mixed."""
class TeamFormatterAPI(PersonFormatterAPI):
"""Adapter for `ITeam` objects to a formatted string."""
hidden = u'<hidden>'
def url(self, view_name=None, rootsite='mainsite'):
"""See `ObjectFormatterAPI`.
The default URL for a team is to the mainsite. None is returned
when the user does not have permission to review the team.
"""
if not check_permission('launchpad.LimitedView', self._context):
# This person has no permission to view the team details.
self._report_visibility_leak()
return None
return super(TeamFormatterAPI, self).url(view_name, rootsite)
def api_url(self, context):
"""See `ObjectFormatterAPI`."""
if not check_permission('launchpad.LimitedView', self._context):
# This person has no permission to view the team details.
self._report_visibility_leak()
return None
return super(TeamFormatterAPI, self).api_url(context)
def link(self, view_name, rootsite='mainsite'):
"""See `ObjectFormatterAPI`.
The default URL for a team is to the mainsite. None is returned
when the user does not have permission to review the team.
"""
person = self._context
if not check_permission('launchpad.LimitedView', person):
# This person has no permission to view the team details.
self._report_visibility_leak()
return structured(
'<span class="sprite team">%s</span>', self.hidden).escapedtext
return super(TeamFormatterAPI, self).link(view_name, rootsite)
def icon(self, view_name):
team = self._context
if not check_permission('launchpad.LimitedView', team):
css_class = ObjectImageDisplayAPI(team).sprite_css()
return '<span class="' + css_class + '"></span>'
else:
return super(TeamFormatterAPI, self).icon(view_name)
def displayname(self, view_name, rootsite=None):
"""See `PersonFormatterAPI`."""
person = self._context
if not check_permission('launchpad.LimitedView', person):
# This person has no permission to view the team details.
self._report_visibility_leak()
return self.hidden
return super(TeamFormatterAPI, self).displayname(view_name, rootsite)
def unique_displayname(self, view_name):
"""See `PersonFormatterAPI`."""
person = self._context
if not check_permission('launchpad.LimitedView', person):
# This person has no permission to view the team details.
self._report_visibility_leak()
return self.hidden
return super(TeamFormatterAPI, self).unique_displayname(view_name)
def _report_visibility_leak(self):
request = get_current_browser_request()
try:
raise MixedVisibilityError()
except MixedVisibilityError:
getUtility(IErrorReportingUtility).raising(
sys.exc_info(), request)
class CustomizableFormatter(ObjectFormatterAPI):
"""A ObjectFormatterAPI that is easy to customize.
This provides fmt:url and fmt:link support for the object it
adapts.
For most object types, only the _link_summary_template class
variable and _link_summary_values method need to be overridden.
This assumes that:
1. canonical_url produces appropriate urls for this type,
2. the launchpad.View permission alone is required to view this
object's url, and,
3. if there is an icon for this object type, image:icon is
implemented and appropriate.
    For greater control over the summary, override
_make_link_summary.
If a different permission is required, override _link_permission.
"""
_link_permission = 'launchpad.View'
def _link_summary_values(self):
"""Return a dict of values to use for template substitution.
These values should not be escaped, as this will be performed later.
For this reason, only string values should be supplied.
"""
raise NotImplementedError(self._link_summary_values)
def _make_link_summary(self):
"""Create a summary from _template and _link_summary_values().
This summary is for use in fmt:link, which is meant to be used in
contexts like lists of items.
"""
values = dict(
(k, v if v is not None else '')
for k, v in self._link_summary_values().iteritems())
return structured(self._link_summary_template, **values).escapedtext
def _title_values(self):
"""Return a dict of values to use for template substitution.
These values should not be escaped, as this will be performed later.
For this reason, only string values should be supplied.
"""
return {}
def _make_title(self):
"""Create a title from _title_template and _title_values().
This title is for use in fmt:link, which is meant to be used in
contexts like lists of items.
"""
title_template = getattr(self, '_title_template', None)
if title_template is None:
return None
values = dict(
(k, v if v is not None else '')
for k, v in self._title_values().iteritems())
return structured(title_template, **values).escapedtext
def sprite_css(self):
"""Retrieve the icon for the _context, if any.
:return: The icon css or None if no icon is available.
"""
return queryAdapter(self._context, IPathAdapter, 'image').sprite_css()
def link(self, view_name, rootsite=None):
"""Return html including a link, description and icon.
Icon and link are optional, depending on type and permissions.
Uses self._make_link_summary for the summary, self._get_icon
for the icon, self._should_link to determine whether to link, and
self.url() to generate the url.
"""
sprite = self.sprite_css()
if sprite is None:
css = ''
else:
css = ' class="' + sprite + '"'
summary = self._make_link_summary()
title = self._make_title()
if title is None:
title = ''
else:
title = ' title="%s"' % title
if check_permission(self._link_permission, self._context):
url = self.url(view_name, rootsite)
else:
url = ''
if url:
return '<a href="%s"%s%s>%s</a>' % (url, css, title, summary)
else:
return summary
class PillarFormatterAPI(CustomizableFormatter):
"""Adapter for IProduct, IDistribution and IProjectGroup objects to a
formatted string."""
_link_summary_template = '%(displayname)s'
_link_permission = 'zope.Public'
traversable_names = {
'api_url': 'api_url',
'link': 'link',
'url': 'url',
'link_with_displayname': 'link_with_displayname'
}
def _link_summary_values(self):
displayname = self._context.displayname
return {'displayname': displayname}
def url(self, view_name=None, rootsite=None):
"""See `ObjectFormatterAPI`.
The default URL for a pillar is to the mainsite.
"""
return super(PillarFormatterAPI, self).url(view_name, rootsite)
def _getLinkHTML(self, view_name, rootsite,
template, custom_icon_template):
"""Generates html, mapping a link context to given templates.
The html is generated using given `template` or `custom_icon_template`
based on the presence of a custom icon for Products/ProjectGroups.
Named string substitution is used to render the final html
(see below for a list of allowed keys).
The link context is a dict containing info about current
Products or ProjectGroups.
        Keys are `url`, `name`, `displayname`, `custom_icon` (if present),
        `css_class` (if a custom icon does not exist), and
        `summary` (see CustomizableFormatter._make_link_summary()).
"""
context = self._context
# XXX wgrant: the structured() in this dict is evil; refactor.
mapping = {
'url': self.url(view_name, rootsite),
'name': context.name,
'displayname': context.displayname,
'summary': structured(self._make_link_summary()),
}
custom_icon = ObjectImageDisplayAPI(context).custom_icon_url()
if custom_icon is None:
mapping['css_class'] = ObjectImageDisplayAPI(context).sprite_css()
return structured(template, **mapping).escapedtext
mapping['custom_icon'] = custom_icon
return structured(custom_icon_template, **mapping).escapedtext
def link(self, view_name, rootsite='mainsite'):
"""The html to show a link to a Product, ProjectGroup or distribution.
In the case of Products or ProjectGroups we display the custom
icon, if one exists. The default URL for a pillar is to the mainsite.
"""
super(PillarFormatterAPI, self).link(view_name)
template = u'<a href="%(url)s" class="%(css_class)s">%(summary)s</a>'
custom_icon_template = (
u'<a href="%(url)s" class="bg-image" '
u'style="background-image: url(%(custom_icon)s)">%(summary)s</a>'
)
return self._getLinkHTML(
view_name, rootsite, template, custom_icon_template)
def link_with_displayname(self, view_name, rootsite='mainsite'):
"""The html to show a link to a Product, ProjectGroup or
distribution, including displayname and name.
In the case of Products or ProjectGroups we display the custom
icon, if one exists. The default URL for a pillar is to the mainsite.
"""
super(PillarFormatterAPI, self).link(view_name)
template = (
u'<a href="%(url)s" class="%(css_class)s">%(displayname)s</a>'
u' (<a href="%(url)s">%(name)s</a>)'
)
custom_icon_template = (
u'<a href="%(url)s" class="bg-image" '
u'style="background-image: url(%(custom_icon)s)">'
u'%(displayname)s</a> (<a href="%(url)s">%(name)s</a>)'
)
return self._getLinkHTML(
view_name, rootsite, template, custom_icon_template)
class DistroSeriesFormatterAPI(CustomizableFormatter):
"""Adapter for IDistroSeries objects to a formatted string."""
_link_summary_template = '%(displayname)s'
_link_permission = 'zope.Public'
def _link_summary_values(self):
displayname = self._context.displayname
return {'displayname': displayname}
class SourcePackageReleaseFormatterAPI(CustomizableFormatter):
"""Adapter for ISourcePackageRelease objects to a formatted string."""
_link_summary_template = '%(sourcepackage)s %(version)s'
def _link_summary_values(self):
return {'sourcepackage':
self._context.distrosourcepackage.displayname,
'version': self._context.version}
class ProductReleaseFileFormatterAPI(ObjectFormatterAPI):
"""Adapter for `IProductReleaseFile` objects to a formatted string."""
traversable_names = {'link': 'link', 'url': 'url'}
def link(self, view_name):
"""A hyperlinked ProductReleaseFile.
This consists of a download icon, the link to the ProductReleaseFile
itself (with a tooltip stating its size) and links to that file's
signature and MD5 hash.
"""
file_ = self._context
file_size = NumberFormatterAPI(
file_.libraryfile.content.filesize).bytes()
if file_.description is not None:
description = file_.description
else:
description = file_.libraryfile.filename
link_title = "%s (%s)" % (description, file_size)
download_url = self._getDownloadURL(file_.libraryfile)
md5_url = urlappend(download_url, '+md5')
replacements = dict(
url=download_url, filename=file_.libraryfile.filename,
md5_url=md5_url, link_title=link_title)
html = (
'<img alt="download icon" src="/@@/download" />'
'<strong>'
' <a title="%(link_title)s" href="%(url)s">%(filename)s</a> '
'</strong>'
'(<a href="%(md5_url)s">md5</a>')
if file_.signature is not None:
html += ', <a href="%(signature_url)s">sig</a>)'
replacements['signature_url'] = self._getDownloadURL(
file_.signature)
else:
html += ')'
return structured(html, **replacements).escapedtext
def url(self, view_name=None, rootsite=None):
"""Return the URL to download the file."""
return self._getDownloadURL(self._context.libraryfile)
@property
def _release(self):
return self._context.productrelease
def _getDownloadURL(self, lfa):
"""Return the download URL for the given `LibraryFileAlias`."""
url = urlappend(canonical_url(self._release), '+download')
# Quote the filename to eliminate non-ascii characters which
# are invalid in the url.
return urlappend(url, urllib.quote(lfa.filename.encode('utf-8')))
class BranchFormatterAPI(ObjectFormatterAPI):
"""Adapter for IBranch objects to a formatted string."""
traversable_names = {
'link': 'link', 'url': 'url',
'title-link': 'titleLink', 'bzr-link': 'bzrLink',
'api_url': 'api_url'}
def _args(self, view_name):
"""Generate a dict of attributes for string template expansion."""
branch = self._context
return {
'bzr_identity': branch.bzr_identity,
'display_name': branch.displayname,
'name': branch.name,
'unique_name': branch.unique_name,
'url': self.url(view_name),
}
def link(self, view_name):
"""A hyperlinked branch icon with the displayname."""
return structured(
'<a href="%(url)s" class="sprite branch">'
'%(display_name)s</a>', **self._args(view_name)).escapedtext
def bzrLink(self, view_name):
"""A hyperlinked branch icon with the bazaar identity."""
# Defer to link.
return self.link(view_name)
def titleLink(self, view_name):
"""A hyperlinked branch name with following title."""
return structured(
'<a href="%(url)s" title="%(display_name)s">'
'%(name)s</a>: %(title)s', **self._args(view_name)).escapedtext
class BranchSubscriptionFormatterAPI(CustomizableFormatter):
"""Adapter for IBranchSubscription objects to a formatted string."""
_link_summary_template = _('Subscription of %(person)s to %(branch)s')
def _link_summary_values(self):
"""Provide values for template substitution"""
return {
'person': self._context.person.displayname,
'branch': self._context.branch.displayname,
}
class BranchMergeProposalFormatterAPI(CustomizableFormatter):
_link_summary_template = _('%(title)s')
def _link_summary_values(self):
return {
'title': self._context.title,
}
class BugBranchFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for BugBranch objects"""
def _get_task_formatter(self):
task = self._context.bug.getBugTask(self._context.branch.product)
if task is None:
task = self._context.bug.bugtasks[0]
return BugTaskFormatterAPI(task)
def _make_link_summary(self):
"""Return the summary of the related product's bug task"""
return self._get_task_formatter()._make_link_summary()
def _get_icon(self):
"""Return the icon of the related product's bugtask"""
return self._get_task_formatter()._get_icon()
class BugFormatterAPI(CustomizableFormatter):
"""Adapter for IBug objects to a formatted string."""
_link_summary_template = 'Bug #%(id)s: %(title)s'
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'id': str(self._context.id), 'title': self._context.title}
class BugTaskFormatterAPI(CustomizableFormatter):
"""Adapter for IBugTask objects to a formatted string."""
_title_template = '%(importance)s - %(status)s'
def _title_values(self):
return {'importance': self._context.importance.title,
'status': self._context.status.title}
def _make_link_summary(self):
return BugFormatterAPI(self._context.bug)._make_link_summary()
class CodeImportFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for CodeImport objects"""
_link_summary_template = _('Import of %(target)s: %(branch)s')
_link_permission = 'zope.Public'
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'target': self._context.branch.target.displayname,
'branch': self._context.branch.bzr_identity,
}
def url(self, view_name=None, rootsite=None):
"""See `ObjectFormatterAPI`."""
# The url of a code import is the associated branch.
# This is still here primarily for supporting branch deletion,
# which does a fmt:link of the other entities that will be deleted.
url = canonical_url(
self._context.branch, path_only_if_possible=True,
view_name=view_name)
return url
class PackageBuildFormatterAPI(ObjectFormatterAPI):
"""Adapter providing fmt support for `IPackageBuild` objects."""
def _composeArchiveReference(self, archive):
if archive.is_ppa:
return " [%s/%s]" % (archive.owner.name, archive.name)
else:
return ""
def link(self, view_name, rootsite=None):
build = self._context
if (not check_permission('launchpad.View', build) or
not check_permission('launchpad.View', build.archive.owner)):
return 'private job'
url = self.url(view_name=view_name, rootsite=rootsite)
archive = self._composeArchiveReference(build.archive)
return structured(
'<a href="%s">%s</a>%s', url, build.title, archive).escapedtext
class CodeImportMachineFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for CodeImport objects"""
_link_summary_template = _('%(hostname)s')
_link_permission = 'zope.Public'
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'hostname': self._context.hostname}
class MilestoneFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for Milestone objects."""
_link_summary_template = _('%(title)s')
_link_permission = 'zope.Public'
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'title': self._context.title}
class ProductReleaseFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for Milestone objects."""
_link_summary_template = _('%(displayname)s %(code_name)s')
_link_permission = 'zope.Public'
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
code_name = self._context.milestone.code_name
if code_name is None or code_name.strip() == '':
code_name = ''
else:
code_name = '(%s)' % code_name.strip()
return dict(displayname=self._context.milestone.displayname,
code_name=code_name)
class ProductSeriesFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for ProductSeries objects"""
_link_summary_template = _('%(product)s %(series)s series')
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'series': self._context.name,
'product': self._context.product.displayname}
class QuestionFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for question objects."""
_link_summary_template = _('%(id)s: %(title)s')
_link_permission = 'zope.Public'
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'id': str(self._context.id), 'title': self._context.title}
class SourcePackageRecipeFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for ISourcePackageRecipe objects."""
_link_summary_template = 'Recipe %(name)s for %(owner)s'
def _link_summary_values(self):
return {'name': self._context.name,
'owner': self._context.owner.displayname}
class SpecificationFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for Specification objects"""
_link_summary_template = _('%(title)s')
_link_permission = 'zope.Public'
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'title': self._context.title}
class CodeReviewCommentFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for CodeReviewComment objects"""
_link_summary_template = _('Comment by %(author)s')
_link_permission = 'zope.Public'
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'author': self._context.message.owner.displayname}
class ArchiveFormatterAPI(CustomizableFormatter):
"""Adapter providing fmt support for `IArchive` objects."""
_link_summary_template = '%(display_name)s'
_link_permission = 'launchpad.View'
_reference_permission = 'launchpad.SubscriberView'
_reference_template = "ppa:%(owner_name)s/%(ppa_name)s"
final_traversable_names = {'reference': 'reference'}
final_traversable_names.update(
CustomizableFormatter.final_traversable_names)
def _link_summary_values(self):
"""See CustomizableFormatter._link_summary_values."""
return {'display_name': self._context.displayname}
def link(self, view_name):
"""Return html including a link for the context archive.
Render a link using CSS sprites for users with permission to view
the archive.
Disabled PPAs are listed with sprites but not linkified.
Inaccessible private PPAs are not rendered at all (empty string
is returned).
"""
summary = self._make_link_summary()
css = self.sprite_css()
if check_permission(self._link_permission, self._context):
if self._context.is_main:
url = queryAdapter(
self._context.distribution, IPathAdapter, 'fmt').url(
view_name)
else:
url = self.url(view_name)
return '<a href="%s" class="%s">%s</a>' % (url, css, summary)
else:
if not self._context.private:
return '<span class="%s">%s</span>' % (css, summary)
else:
return ''
def reference(self, view_name=None, rootsite=None):
"""Return the text PPA reference for a PPA."""
if not IPPA.providedBy(self._context):
raise NotImplementedError(
"No reference implementation for non-PPA archive %r." %
self._context)
if not check_permission(self._reference_permission, self._context):
return ''
return self._reference_template % {
'owner_name': self._context.owner.name,
'ppa_name': self._context.name}
class SpecificationBranchFormatterAPI(CustomizableFormatter):
"""Adapter for ISpecificationBranch objects to a formatted string."""
def _make_link_summary(self):
"""Provide the summary of the linked spec"""
formatter = SpecificationFormatterAPI(self._context.specification)
return formatter._make_link_summary()
def _get_icon(self):
"""Provide the icon of the linked spec"""
formatter = SpecificationFormatterAPI(self._context.specification)
return formatter._get_icon()
def sprite_css(self):
return queryAdapter(
self._context.specification, IPathAdapter, 'image').sprite_css()
class BugTrackerFormatterAPI(ObjectFormatterAPI):
"""Adapter for `IBugTracker` objects to a formatted string."""
final_traversable_names = {
'aliases': 'aliases',
'external-link': 'external_link',
'external-title-link': 'external_title_link'}
final_traversable_names.update(ObjectFormatterAPI.final_traversable_names)
def link(self, view_name):
"""Return an HTML link to the bugtracker page.
If the user is not logged-in, the title of the bug tracker is
modified to obfuscate all email addresses.
"""
url = self.url(view_name)
title = self._context.title
if getUtility(ILaunchBag).user is None:
title = FormattersAPI(title).obfuscate_email()
return structured('<a href="%s">%s</a>', url, title).escapedtext
def external_link(self):
"""Return an HTML link to the external bugtracker.
If the user is not logged-in, and the URL of the bugtracker
contains an email address, this returns the obfuscated URL as
text (i.e. no <a/> link).
"""
url = self._context.baseurl
if url.startswith('mailto:') and getUtility(ILaunchBag).user is None:
return html_escape(u'mailto:<email address hidden>')
else:
return structured(
'<a class="link-external" href="%(url)s">%(url)s</a>',
url=url).escapedtext
def external_title_link(self):
"""Return an HTML link to the external bugtracker.
If the user is not logged-in, the title of the bug tracker is
        modified to obfuscate all email addresses. Additionally, if the
URL is a mailto: address then no link is returned, just the
title text.
"""
url = self._context.baseurl
title = self._context.title
if getUtility(ILaunchBag).user is None:
title = FormattersAPI(title).obfuscate_email()
if url.startswith('mailto:'):
return html_escape(title)
return structured(
'<a class="link-external" href="%s">%s</a>',
url, title).escapedtext
def aliases(self):
"""Generate alias URLs, obfuscating where necessary.
If the user is not logged-in, email addresses should be
hidden.
"""
anonymous = getUtility(ILaunchBag).user is None
for alias in self._context.aliases:
if anonymous and alias.startswith('mailto:'):
yield u'mailto:<email address hidden>'
else:
yield alias
class BugWatchFormatterAPI(ObjectFormatterAPI):
"""Adapter for `IBugWatch` objects to a formatted string."""
final_traversable_names = {
'external-link': 'external_link',
'external-link-short': 'external_link_short'}
final_traversable_names.update(ObjectFormatterAPI.final_traversable_names)
def _make_external_link(self, summary=None):
"""Return an external HTML link to the target of the bug watch.
If a summary is not specified or empty, an em-dash is used as
the content of the link.
If the user is not logged in and the URL of the bug watch is
an email address, only the summary is returned (i.e. no link).
"""
if summary is None or len(summary) == 0:
summary = structured(u'—')
url = self._context.url
if url.startswith('mailto:') and getUtility(ILaunchBag).user is None:
return html_escape(summary)
else:
return structured(
'<a class="link-external" href="%s">%s</a>',
url, summary).escapedtext
def external_link(self):
"""Return an HTML link with a detailed link text.
The link text is formed from the bug tracker name and the
remote bug number.
"""
summary = self._context.bugtracker.name
remotebug = self._context.remotebug
if remotebug is not None and len(remotebug) > 0:
summary = u'%s #%s' % (summary, remotebug)
return self._make_external_link(summary)
def external_link_short(self):
"""Return an HTML link with a short link text.
The link text is formed from the bug tracker name and the
remote bug number.
"""
return self._make_external_link(self._context.remotebug)
class NumberFormatterAPI:
"""Adapter for converting numbers to formatted strings."""
implements(ITraversable)
def __init__(self, number):
self._number = number
def traverse(self, name, furtherPath):
if name == 'float':
if len(furtherPath) != 1:
raise TraversalError(
"fmt:float requires a single decimal argument")
# coerce the argument to float to ensure it's safe
format = furtherPath.pop()
return self.float(float(format))
elif name == 'bytes':
return self.bytes()
elif name == 'intcomma':
return self.intcomma()
else:
raise TraversalError(name)
def intcomma(self):
"""Return this number with its thousands separated by comma.
This can only be used for integers.
"""
if not isinstance(self._number, int):
raise AssertionError("This can't be used with non-integers")
L = []
for index, char in enumerate(reversed(str(self._number))):
if index != 0 and (index % 3) == 0:
L.insert(0, ',')
L.insert(0, char)
return ''.join(L)
def bytes(self):
"""Render number as byte contractions according to IEC60027-2."""
# See http://en.wikipedia.org/wiki
# /Binary_prefixes#Specific_units_of_IEC_60027-2_A.2
assert not float(self._number) < 0, "Expected a non-negative number."
n = int(self._number)
if n == 1:
# Handle the singular case.
return "1 byte"
if n == 0:
# To avoid math.log(0, X) blowing up.
return "0 bytes"
suffixes = ["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]
exponent = int(math.log(n, 1024))
exponent = min(len(suffixes), exponent)
if exponent < 1:
# If this is less than 1 KiB, no need for rounding.
return "%s bytes" % n
return "%.1f %s" % (n / 1024.0 ** exponent, suffixes[exponent - 1])
def float(self, format):
"""Use like tal:content="context/foo/fmt:float/.2".
        Returns a string formatted according to the given specification,
        in the same way Python's "%f" formatter works. See
http://docs.python.org/lib/typesseq-strings.html for details and
doc.displaying-numbers for various examples.
"""
value = "%" + str(format) + "f"
return value % float(self._number)
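# A minimal sketch (not part of the original module) of how the formatter
# behaves for a few sample inputs; _demo_number_formatting is a hypothetical
# helper added only to illustrate the fmt:intcomma, fmt:bytes and
# fmt:float/.2 TALES expressions used in templates.
def _demo_number_formatting():
    # Thousands separators are inserted every third digit from the right.
    assert NumberFormatterAPI(1234567).intcomma() == '1,234,567'
    # Zero and sub-KiB values are reported as plain bytes.
    assert NumberFormatterAPI(0).bytes() == '0 bytes'
    # 2048 bytes falls in the KiB bucket and is rounded to one decimal.
    assert NumberFormatterAPI(2048).bytes() == '2.0 KiB'
    # fmt:float/.2 builds a "%.2f" format string.
    assert NumberFormatterAPI(3.14159).float('.2') == '3.14'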
class DateTimeFormatterAPI:
"""Adapter from datetime objects to a formatted string."""
def __init__(self, datetimeobject):
self._datetime = datetimeobject
def time(self):
if self._datetime.tzinfo:
value = self._datetime.astimezone(
getUtility(ILaunchBag).time_zone)
return value.strftime('%T %Z')
else:
return self._datetime.strftime('%T')
def date(self):
value = self._datetime
if value.tzinfo:
value = value.astimezone(
getUtility(ILaunchBag).time_zone)
return value.strftime('%Y-%m-%d')
def _now(self):
# This method exists to be overridden in tests.
if self._datetime.tzinfo:
# datetime is offset-aware
return datetime.now(pytz.timezone('UTC'))
else:
# datetime is offset-naive
return datetime.utcnow()
def displaydate(self):
delta = abs(self._now() - self._datetime)
if delta > timedelta(1, 0, 0):
# far in the past or future, display the date
return 'on ' + self.date()
return self.approximatedate()
def approximatedate(self):
delta = self._now() - self._datetime
if abs(delta) > timedelta(1, 0, 0):
# far in the past or future, display the date
return self.date()
future = delta < timedelta(0, 0, 0)
delta = abs(delta)
days = delta.days
hours = delta.seconds / 3600
minutes = (delta.seconds - (3600 * hours)) / 60
seconds = delta.seconds % 60
result = ''
if future:
result += 'in '
if days != 0:
amount = days
unit = 'day'
elif hours != 0:
amount = hours
unit = 'hour'
elif minutes != 0:
amount = minutes
unit = 'minute'
else:
if seconds <= 10:
result += 'a moment'
if not future:
result += ' ago'
return result
else:
amount = seconds
unit = 'second'
if amount != 1:
unit += 's'
result += '%s %s' % (amount, unit)
if not future:
result += ' ago'
return result
def datetime(self):
return "%s %s" % (self.date(), self.time())
def rfc822utcdatetime(self):
return formatdate(
rfc822.mktime_tz(self._datetime.utctimetuple() + (0, )))
def isodate(self):
return self._datetime.isoformat()
@staticmethod
def _yearDelta(old, new):
"""Return the difference in years between two datetimes.
:param old: The old date
:param new: The new date
"""
year_delta = new.year - old.year
year_timedelta = datetime(new.year, 1, 1) - datetime(old.year, 1, 1)
if new - old < year_timedelta:
year_delta -= 1
return year_delta
def durationsince(self):
"""How long since the datetime, as a string."""
now = self._now()
number = self._yearDelta(self._datetime, now)
unit = 'year'
if number < 1:
delta = now - self._datetime
if delta.days > 0:
number = delta.days
unit = 'day'
else:
number = delta.seconds / 60
if number == 0:
return 'less than a minute'
unit = 'minute'
if number >= 60:
number /= 60
unit = 'hour'
if number != 1:
unit += 's'
return '%d %s' % (number, unit)
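# Illustrative sketch (not part of the original module): given a datetime
# about 90 minutes in the past, the adapter would render along these lines
# (exact output depends on the current time, so this is indicative only).
#
#   dt = datetime.utcnow() - timedelta(minutes=90)
#   DateTimeFormatterAPI(dt).approximatedate()  # -> '1 hour ago'
#   DateTimeFormatterAPI(dt).durationsince()    # -> '1 hour'
#   DateTimeFormatterAPI(dt).displaydate()      # -> '1 hour ago'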
class SeriesSourcePackageBranchFormatter(ObjectFormatterAPI):
"""Formatter for a SourcePackage, Pocket -> Branch link.
Since the link object is never really interesting in and of itself, we
always link to the source package instead.
"""
def url(self, view_name=None, rootsite=None):
return queryAdapter(
self._context.sourcepackage, IPathAdapter, 'fmt').url(
view_name, rootsite)
def link(self, view_name):
return queryAdapter(
self._context.sourcepackage, IPathAdapter, 'fmt').link(view_name)
class DurationFormatterAPI:
"""Adapter from timedelta objects to a formatted string."""
implements(ITraversable)
def __init__(self, duration):
self._duration = duration
def traverse(self, name, furtherPath):
if name == 'exactduration':
return self.exactduration()
elif name == 'approximateduration':
return self.approximateduration()
elif name == 'millisecondduration':
return self.millisecondduration()
else:
raise TraversalError(name)
def exactduration(self):
"""Format timedeltas as "v days, w hours, x minutes, y.z seconds"."""
parts = []
minutes, seconds = divmod(self._duration.seconds, 60)
hours, minutes = divmod(minutes, 60)
seconds = seconds + (float(self._duration.microseconds) / 10 ** 6)
if self._duration.days > 0:
if self._duration.days == 1:
parts.append('%d day' % self._duration.days)
else:
parts.append('%d days' % self._duration.days)
if parts or hours > 0:
if hours == 1:
parts.append('%d hour' % hours)
else:
parts.append('%d hours' % hours)
if parts or minutes > 0:
if minutes == 1:
parts.append('%d minute' % minutes)
else:
parts.append('%d minutes' % minutes)
if parts or seconds > 0:
parts.append('%0.1f seconds' % seconds)
return ', '.join(parts)
def approximateduration(self):
"""Return a nicely-formatted approximate duration.
E.g. '1 hour', '3 minutes', '1 hour 10 minutes' and so forth.
See https://launchpad.canonical.com/PresentingLengthsOfTime.
"""
# NOTE: There are quite a few "magic numbers" in this
# implementation; they are generally just figures pulled
# directly out of the PresentingLengthsOfTime spec, and so
        # it's not particularly easy to give each and every number
# a useful name. It's also unlikely that these numbers will be
# changed.
seconds = total_seconds(self._duration)
# First we'll try to calculate an approximate number of
# seconds up to a minute. We'll start by defining a sorted
# list of (boundary, display value) tuples. We want to show
# the display value corresponding to the lowest boundary that
# 'seconds' is less than, if one exists.
representation_in_seconds = [
(1.5, '1 second'),
(2.5, '2 seconds'),
(3.5, '3 seconds'),
(4.5, '4 seconds'),
(7.5, '5 seconds'),
(12.5, '10 seconds'),
(17.5, '15 seconds'),
(22.5, '20 seconds'),
(27.5, '25 seconds'),
(35, '30 seconds'),
(45, '40 seconds'),
(55, '50 seconds'),
(90, '1 minute'),
]
# Break representation_in_seconds into two pieces, to simplify
# finding the correct display value, through the use of the
# built-in bisect module.
second_boundaries, display_values = zip(*representation_in_seconds)
# Is seconds small enough that we can produce a representation
        # in seconds (up to '1 minute')?
if seconds < second_boundaries[-1]:
# Use the built-in bisection algorithm to locate the index
# of the item which "seconds" sorts after.
matching_element_index = bisect(second_boundaries, seconds)
# Return the corresponding display value.
return display_values[matching_element_index]
# Convert seconds into minutes, and round it.
minutes, remaining_seconds = divmod(seconds, 60)
minutes += remaining_seconds / 60.0
minutes = int(round(minutes))
if minutes <= 59:
return "%d minutes" % minutes
# Is the duration less than an hour and 5 minutes?
if seconds < (60 + 5) * 60:
return "1 hour"
# Next phase: try and calculate an approximate duration
# greater than one hour, but fewer than ten hours, to a 10
# minute granularity.
hours, remaining_seconds = divmod(seconds, 3600)
ten_minute_chunks = int(round(remaining_seconds / 600.0))
minutes = ten_minute_chunks * 10
hours += (minutes / 60)
minutes %= 60
if hours < 10:
if minutes:
# If there is a minutes portion to display, the number
# of hours is always shown as a digit.
if hours == 1:
return "1 hour %s minutes" % minutes
else:
return "%d hours %s minutes" % (hours, minutes)
else:
return "%d hours" % hours
# Is the duration less than ten and a half hours?
if seconds < (10.5 * 3600):
return '10 hours'
# Try to calculate the approximate number of hours, to a
# maximum of 47.
hours = int(round(seconds / 3600.0))
if hours <= 47:
return "%d hours" % hours
        # Is the duration less than two and a half days?
if seconds < (2.5 * 24 * 3600):
return '2 days'
# Try to approximate to day granularity, up to a maximum of 13
# days.
days = int(round(seconds / (24 * 3600)))
if days <= 13:
return "%s days" % days
        # Is the duration less than two and a half weeks?
if seconds < (2.5 * 7 * 24 * 3600):
return '2 weeks'
# If we've made it this far, we'll calculate the duration to a
# granularity of weeks, once and for all.
weeks = int(round(seconds / (7 * 24 * 3600.0)))
return "%d weeks" % weeks
def millisecondduration(self):
return '%sms' % (total_seconds(self._duration) * 1000,)
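# A minimal sketch (not part of the original module) of the rounding
# behaviour implemented above; _demo_duration_formatting is a hypothetical
# helper added only for illustration, using plain timedelta inputs.
def _demo_duration_formatting():
    def fmt(**kw):
        return DurationFormatterAPI(timedelta(**kw))
    # Values under 90 seconds come from the boundary table.
    assert fmt(seconds=30).approximateduration() == '30 seconds'
    # Under an hour, durations round to whole minutes.
    assert fmt(minutes=5).approximateduration() == '5 minutes'
    # Between 1 and 10 hours, the remainder rounds to 10-minute chunks.
    assert fmt(hours=1, minutes=30).approximateduration() == '1 hour 30 minutes'
    # Multi-day durations round to whole days (up to 13).
    assert fmt(days=3).approximateduration() == '3 days'
    # millisecondduration reports total elapsed milliseconds.
    assert fmt(seconds=1, microseconds=500000).millisecondduration() == '1500.0ms'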
class LinkFormatterAPI(ObjectFormatterAPI):
"""Adapter from Link objects to a formatted anchor."""
final_traversable_names = {
'icon': 'icon',
'icon-link': 'link',
'link-icon': 'link',
}
final_traversable_names.update(ObjectFormatterAPI.final_traversable_names)
def icon(self):
"""Return the icon representation of the link."""
request = get_current_browser_request()
return getMultiAdapter(
(self._context, request), name="+inline-icon")()
def link(self, view_name=None, rootsite=None):
"""Return the default representation of the link."""
return self._context.render()
def url(self, view_name=None, rootsite=None):
"""Return the URL representation of the link."""
if self._context.enabled:
return self._context.url
else:
return u''
class RevisionAuthorFormatterAPI(ObjectFormatterAPI):
"""Adapter for `IRevisionAuthor` links."""
traversable_names = {'link': 'link'}
def link(self, view_name=None, rootsite='mainsite'):
"""See `ObjectFormatterAPI`."""
context = self._context
if context.person is not None:
return PersonFormatterAPI(self._context.person).link(
view_name, rootsite)
elif context.name_without_email:
return html_escape(context.name_without_email)
elif context.email and getUtility(ILaunchBag).user is not None:
return html_escape(context.email)
elif context.email:
return html_escape("<email address hidden>")
else:
# The RevisionAuthor name and email is None.
return ''
def clean_path_segments(request):
"""Returns list of path segments, excluding system-related segments."""
proto_host_port = request.getApplicationURL()
clean_url = request.getURL()
clean_path = clean_url[len(proto_host_port):]
clean_path_split = clean_path.split('/')
return clean_path_split
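# Illustrative sketch (not part of the original module): for a request to
# https://launchpad.net/firefox/+bug/1, getApplicationURL() returns the
# protocol/host/port prefix, so the function yields
# ['', 'firefox', '+bug', '1'] (the leading empty string comes from
# splitting a path that starts with '/').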
class PermissionRequiredQuery:
"""Check if the logged in user has a given permission on a given object.
Example usage::
tal:condition="person/required:launchpad.Edit"
"""
implements(ITraversable)
def __init__(self, context):
self.context = context
def traverse(self, name, furtherPath):
if len(furtherPath) > 0:
raise TraversalError(
"There should be no further path segments after "
"required:permission")
return check_permission(name, self.context)
class IMainTemplateFile(Interface):
path = TextLine(title=u'The absolute path to this main template.')
class LaunchpadLayerToMainTemplateAdapter:
adapts(LaunchpadLayer)
implements(IMainTemplateFile)
def __init__(self, context):
here = os.path.dirname(os.path.realpath(__file__))
self.path = os.path.join(here, '../templates/base-layout.pt')
class PageMacroDispatcher:
"""Selects a macro, while storing information about page layout.
view/macro:page
view/macro:page/main_side
view/macro:page/main_only
view/macro:page/searchless
view/macro:pagehas/applicationtabs
view/macro:pagehas/globalsearch
view/macro:pagehas/portlets
view/macro:pagehas/main
view/macro:pagetype
view/macro:is-page-contentless
view/macro:has-watermark
"""
implements(ITraversable)
def __init__(self, context):
# The context of this object is a view object.
self.context = context
@property
def base(self):
return ViewPageTemplateFile(
IMainTemplateFile(self.context.request).path)
def traverse(self, name, furtherPath):
if name == 'page':
if len(furtherPath) == 1:
pagetype = furtherPath.pop()
elif not furtherPath:
pagetype = 'default'
else:
raise TraversalError("Max one path segment after macro:page")
return self.page(pagetype)
elif name == 'pagehas':
if len(furtherPath) != 1:
raise TraversalError(
"Exactly one path segment after macro:haspage")
layoutelement = furtherPath.pop()
return self.haspage(layoutelement)
elif name == 'pagetype':
return self.pagetype()
elif name == 'is-page-contentless':
return self.isPageContentless()
elif name == 'has-watermark':
return self.hasWatermark()
else:
raise TraversalError(name)
def page(self, pagetype):
if pagetype not in self._pagetypes:
raise TraversalError('unknown pagetype: %s' % pagetype)
self.context.__pagetype__ = pagetype
return self.base.macros['master']
def haspage(self, layoutelement):
pagetype = getattr(self.context, '__pagetype__', None)
if pagetype is None:
pagetype = 'unset'
return self._pagetypes[pagetype][layoutelement]
def hasWatermark(self):
"""Does the page havethe watermark block.
The default value is True, but the view can provide has_watermark
to force the page not render the standard location information.
"""
return getattr(self.context, 'has_watermark', True)
def isPageContentless(self):
"""Should the template avoid rendering detailed information.
Circumstances such as not possessing launchpad.View on a private
context require the template to not render detailed information. The
user may only know identifying information about the context.
"""
view_context = self.context.context
privacy = IPrivacy(view_context, None)
if privacy is None or not privacy.private:
return False
return not (
check_permission('launchpad.SubscriberView', view_context) or
check_permission('launchpad.View', view_context))
def pagetype(self):
return getattr(self.context, '__pagetype__', 'unset')
class LayoutElements:
def __init__(self,
applicationtabs=False,
globalsearch=False,
portlets=False,
pagetypewasset=True,
):
self.elements = vars()
def __getitem__(self, name):
return self.elements[name]
_pagetypes = {
'main_side':
LayoutElements(
applicationtabs=True,
globalsearch=True,
portlets=True),
'main_only':
LayoutElements(
applicationtabs=True,
globalsearch=True,
portlets=False),
'searchless':
LayoutElements(
applicationtabs=True,
globalsearch=False,
portlets=False),
}
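# A minimal sketch (not part of the original module): the macro:pagehas
# lookups above reduce to plain dictionary access on _pagetypes, e.g.
#
#   _pagetypes['main_side']['portlets']       # -> True
#   _pagetypes['searchless']['globalsearch']  # -> False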
class TranslationGroupFormatterAPI(ObjectFormatterAPI):
"""Adapter for `ITranslationGroup` objects to a formatted string."""
traversable_names = {
'link': 'link',
'url': 'url',
'displayname': 'displayname',
}
def url(self, view_name=None, rootsite='translations'):
"""See `ObjectFormatterAPI`."""
return super(TranslationGroupFormatterAPI, self).url(
view_name, rootsite)
def link(self, view_name, rootsite='translations'):
"""See `ObjectFormatterAPI`."""
group = self._context
url = self.url(view_name, rootsite)
return structured('<a href="%s">%s</a>', url, group.title).escapedtext
def displayname(self, view_name, rootsite=None):
"""Return the displayname as a string."""
return self._context.title
class LanguageFormatterAPI(ObjectFormatterAPI):
"""Adapter for `ILanguage` objects to a formatted string."""
traversable_names = {
'link': 'link',
'url': 'url',
'displayname': 'displayname',
}
def url(self, view_name=None, rootsite='translations'):
"""See `ObjectFormatterAPI`."""
return super(LanguageFormatterAPI, self).url(view_name, rootsite)
def link(self, view_name, rootsite='translations'):
"""See `ObjectFormatterAPI`."""
url = self.url(view_name, rootsite)
return structured(
'<a href="%s" class="sprite language">%s</a>',
url, self._context.englishname).escapedtext
def displayname(self, view_name, rootsite=None):
"""See `ObjectFormatterAPI`."""
return self._context.englishname
class POFileFormatterAPI(ObjectFormatterAPI):
"""Adapter for `IPOFile` objects to a formatted string."""
traversable_names = {
'link': 'link',
'url': 'url',
'displayname': 'displayname',
}
def url(self, view_name=None, rootsite='translations'):
"""See `ObjectFormatterAPI`."""
return super(POFileFormatterAPI, self).url(view_name, rootsite)
def link(self, view_name, rootsite='translations'):
"""See `ObjectFormatterAPI`."""
pofile = self._context
url = self.url(view_name, rootsite)
return structured('<a href="%s">%s</a>', url, pofile.title).escapedtext
def displayname(self, view_name, rootsite=None):
"""Return the displayname as a string."""
return self._context.title
def download_link(url, description, file_size):
"""Return HTML for downloading an item."""
file_size = NumberFormatterAPI(file_size).bytes()
formatted = structured(
'<a href="%s">%s</a> (%s)', url, description, file_size)
return formatted.escapedtext
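# Illustrative sketch (not part of the original module), combining
# download_link with NumberFormatterAPI.bytes; the URL and description
# are made-up sample values:
#
#   download_link('http://example.com/f.tar.gz', 'Tarball', 2048)
#   => '<a href="http://example.com/f.tar.gz">Tarball</a> (2.0 KiB)'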
class PackageDiffFormatterAPI(ObjectFormatterAPI):
def link(self, view_name, rootsite=None):
diff = self._context
if not diff.date_fulfilled:
return structured('%s (pending)', diff.title).escapedtext
else:
return download_link(
diff.diff_content.http_url, diff.title,
diff.diff_content.content.filesize)
class CSSFormatter:
"""A tales path adapter used for CSS rules.
Using an expression like this:
value/css:select/visible/hidden
You will get "visible" if value evaluates to true, and "hidden" if the
value evaluates to false.
"""
implements(ITraversable)
def __init__(self, context):
self.context = context
def select(self, furtherPath):
if len(furtherPath) < 2:
raise TraversalError('select needs two subsequent path elements.')
true_value = furtherPath.pop()
false_value = furtherPath.pop()
if self.context:
return true_value
else:
return false_value
def traverse(self, name, furtherPath):
try:
return getattr(self, name)(furtherPath)
except AttributeError:
raise TraversalError(name)
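# A minimal sketch (not part of the original module): traversal hands the
# remaining path segments to select() as a stack, so the *last* element is
# popped first. For the expression value/css:select/visible/hidden:
#
#   CSSFormatter(True).select(['hidden', 'visible'])   # -> 'visible'
#   CSSFormatter(False).select(['hidden', 'visible'])  # -> 'hidden'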
class IRCNicknameFormatterAPI(ObjectFormatterAPI):
"""Adapter from IrcID objects to a formatted string."""
implements(ITraversable)
traversable_names = {
'displayname': 'displayname',
'formatted_displayname': 'formatted_displayname',
}
def displayname(self, view_name=None):
return "%s on %s" % (self._context.nickname, self._context.network)
def formatted_displayname(self, view_name=None):
return structured(
dedent("""\
<strong>%s</strong>
<span class="lesser"> on </span>
<strong>%s</strong>
"""),
self._context.nickname, self._context.network).escapedtext
|
kawamon/hue
|
refs/heads/master
|
desktop/core/ext-py/SQLAlchemy-1.3.17/test/ext/test_extendedattr.py
|
3
|
import sqlalchemy as sa
from sqlalchemy import event
from sqlalchemy import util
from sqlalchemy.ext import instrumentation
from sqlalchemy.orm import attributes
from sqlalchemy.orm import class_mapper
from sqlalchemy.orm import clear_mappers
from sqlalchemy.orm import events
from sqlalchemy.orm.attributes import del_attribute
from sqlalchemy.orm.attributes import get_attribute
from sqlalchemy.orm.attributes import set_attribute
from sqlalchemy.orm.instrumentation import is_instrumented
from sqlalchemy.orm.instrumentation import manager_of_class
from sqlalchemy.orm.instrumentation import register_class
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import ne_
from sqlalchemy.testing.util import decorator
@decorator
def modifies_instrumentation_finders(fn, *args, **kw):
pristine = instrumentation.instrumentation_finders[:]
try:
fn(*args, **kw)
finally:
del instrumentation.instrumentation_finders[:]
instrumentation.instrumentation_finders.extend(pristine)
class _ExtBase(object):
@classmethod
def teardown_class(cls):
instrumentation._reinstall_default_lookups()
class MyTypesManager(instrumentation.InstrumentationManager):
def instrument_attribute(self, class_, key, attr):
pass
def install_descriptor(self, class_, key, attr):
pass
def uninstall_descriptor(self, class_, key):
pass
def instrument_collection_class(self, class_, key, collection_class):
return MyListLike
def get_instance_dict(self, class_, instance):
return instance._goofy_dict
def initialize_instance_dict(self, class_, instance):
instance.__dict__["_goofy_dict"] = {}
def install_state(self, class_, instance, state):
instance.__dict__["_my_state"] = state
def state_getter(self, class_):
return lambda instance: instance.__dict__["_my_state"]
class MyListLike(list):
# add @appender, @remover decorators as needed
_sa_iterator = list.__iter__
_sa_linker = None
_sa_converter = None
def _sa_appender(self, item, _sa_initiator=None):
if _sa_initiator is not False:
self._sa_adapter.fire_append_event(item, _sa_initiator)
list.append(self, item)
append = _sa_appender
def _sa_remover(self, item, _sa_initiator=None):
self._sa_adapter.fire_pre_remove_event(_sa_initiator)
if _sa_initiator is not False:
self._sa_adapter.fire_remove_event(item, _sa_initiator)
list.remove(self, item)
remove = _sa_remover
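# A brief sketch (not part of the original test module): with MyTypesManager
# installed, instrumented state lives in instance.__dict__['_my_state'] and
# attribute values in instance.__dict__['_goofy_dict'] rather than directly
# in __dict__ -- which is exactly what test_instance_dict below asserts.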
MyBaseClass, MyClass = None, None
class UserDefinedExtensionTest(_ExtBase, fixtures.ORMTest):
@classmethod
def setup_class(cls):
global MyBaseClass, MyClass
class MyBaseClass(object):
__sa_instrumentation_manager__ = (
instrumentation.InstrumentationManager
)
class MyClass(object):
# This proves that a staticmethod will work here; don't
# flatten this back to a class assignment!
def __sa_instrumentation_manager__(cls):
return MyTypesManager(cls)
__sa_instrumentation_manager__ = staticmethod(
__sa_instrumentation_manager__
)
# This proves SA can handle a class with non-string dict keys
if not util.pypy and not util.jython:
locals()[42] = 99 # Don't remove this line!
def __init__(self, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
def __getattr__(self, key):
if is_instrumented(self, key):
return get_attribute(self, key)
else:
try:
return self._goofy_dict[key]
except KeyError:
raise AttributeError(key)
def __setattr__(self, key, value):
if is_instrumented(self, key):
set_attribute(self, key, value)
else:
self._goofy_dict[key] = value
def __hasattr__(self, key):
if is_instrumented(self, key):
return True
else:
return key in self._goofy_dict
def __delattr__(self, key):
if is_instrumented(self, key):
del_attribute(self, key)
else:
del self._goofy_dict[key]
def teardown(self):
clear_mappers()
def test_instance_dict(self):
class User(MyClass):
pass
register_class(User)
attributes.register_attribute(
User, "user_id", uselist=False, useobject=False
)
attributes.register_attribute(
User, "user_name", uselist=False, useobject=False
)
attributes.register_attribute(
User, "email_address", uselist=False, useobject=False
)
u = User()
u.user_id = 7
u.user_name = "john"
u.email_address = "lala@123.com"
eq_(
u.__dict__,
{
"_my_state": u._my_state,
"_goofy_dict": {
"user_id": 7,
"user_name": "john",
"email_address": "lala@123.com",
},
},
)
def test_basic(self):
for base in (object, MyBaseClass, MyClass):
class User(base):
pass
register_class(User)
attributes.register_attribute(
User, "user_id", uselist=False, useobject=False
)
attributes.register_attribute(
User, "user_name", uselist=False, useobject=False
)
attributes.register_attribute(
User, "email_address", uselist=False, useobject=False
)
u = User()
u.user_id = 7
u.user_name = "john"
u.email_address = "lala@123.com"
eq_(u.user_id, 7)
eq_(u.user_name, "john")
eq_(u.email_address, "lala@123.com")
attributes.instance_state(u)._commit_all(
attributes.instance_dict(u)
)
eq_(u.user_id, 7)
eq_(u.user_name, "john")
eq_(u.email_address, "lala@123.com")
u.user_name = "heythere"
u.email_address = "foo@bar.com"
eq_(u.user_id, 7)
eq_(u.user_name, "heythere")
eq_(u.email_address, "foo@bar.com")
def test_deferred(self):
for base in (object, MyBaseClass, MyClass):
class Foo(base):
pass
data = {"a": "this is a", "b": 12}
def loader(state, keys):
for k in keys:
state.dict[k] = data[k]
return attributes.ATTR_WAS_SET
manager = register_class(Foo)
manager.deferred_scalar_loader = loader
attributes.register_attribute(
Foo, "a", uselist=False, useobject=False
)
attributes.register_attribute(
Foo, "b", uselist=False, useobject=False
)
if base is object:
assert Foo not in (
instrumentation._instrumentation_factory._state_finders
)
else:
assert Foo in (
instrumentation._instrumentation_factory._state_finders
)
f = Foo()
attributes.instance_state(f)._expire(
attributes.instance_dict(f), set()
)
eq_(f.a, "this is a")
eq_(f.b, 12)
f.a = "this is some new a"
attributes.instance_state(f)._expire(
attributes.instance_dict(f), set()
)
eq_(f.a, "this is a")
eq_(f.b, 12)
attributes.instance_state(f)._expire(
attributes.instance_dict(f), set()
)
f.a = "this is another new a"
eq_(f.a, "this is another new a")
eq_(f.b, 12)
attributes.instance_state(f)._expire(
attributes.instance_dict(f), set()
)
eq_(f.a, "this is a")
eq_(f.b, 12)
del f.a
eq_(f.a, None)
eq_(f.b, 12)
attributes.instance_state(f)._commit_all(
attributes.instance_dict(f)
)
eq_(f.a, None)
eq_(f.b, 12)
def test_inheritance(self):
"""tests that attributes are polymorphic"""
for base in (object, MyBaseClass, MyClass):
class Foo(base):
pass
class Bar(Foo):
pass
register_class(Foo)
register_class(Bar)
def func1(state, passive):
return "this is the foo attr"
def func2(state, passive):
return "this is the bar attr"
def func3(state, passive):
return "this is the shared attr"
attributes.register_attribute(
Foo, "element", uselist=False, callable_=func1, useobject=True
)
attributes.register_attribute(
Foo, "element2", uselist=False, callable_=func3, useobject=True
)
attributes.register_attribute(
Bar, "element", uselist=False, callable_=func2, useobject=True
)
x = Foo()
y = Bar()
assert x.element == "this is the foo attr"
assert y.element == "this is the bar attr", y.element
assert x.element2 == "this is the shared attr"
assert y.element2 == "this is the shared attr"
def test_collection_with_backref(self):
for base in (object, MyBaseClass, MyClass):
class Post(base):
pass
class Blog(base):
pass
register_class(Post)
register_class(Blog)
attributes.register_attribute(
Post,
"blog",
uselist=False,
backref="posts",
trackparent=True,
useobject=True,
)
attributes.register_attribute(
Blog,
"posts",
uselist=True,
backref="blog",
trackparent=True,
useobject=True,
)
b = Blog()
(p1, p2, p3) = (Post(), Post(), Post())
b.posts.append(p1)
b.posts.append(p2)
b.posts.append(p3)
self.assert_(b.posts == [p1, p2, p3])
self.assert_(p2.blog is b)
p3.blog = None
self.assert_(b.posts == [p1, p2])
p4 = Post()
p4.blog = b
self.assert_(b.posts == [p1, p2, p4])
p4.blog = b
p4.blog = b
self.assert_(b.posts == [p1, p2, p4])
# assert no failure removing None
p5 = Post()
p5.blog = None
del p5.blog
def test_history(self):
for base in (object, MyBaseClass, MyClass):
class Foo(base):
pass
class Bar(base):
pass
register_class(Foo)
register_class(Bar)
attributes.register_attribute(
Foo, "name", uselist=False, useobject=False
)
attributes.register_attribute(
Foo, "bars", uselist=True, trackparent=True, useobject=True
)
attributes.register_attribute(
Bar, "name", uselist=False, useobject=False
)
f1 = Foo()
f1.name = "f1"
eq_(
attributes.get_state_history(
attributes.instance_state(f1), "name"
),
(["f1"], (), ()),
)
b1 = Bar()
b1.name = "b1"
f1.bars.append(b1)
eq_(
attributes.get_state_history(
attributes.instance_state(f1), "bars"
),
([b1], [], []),
)
attributes.instance_state(f1)._commit_all(
attributes.instance_dict(f1)
)
attributes.instance_state(b1)._commit_all(
attributes.instance_dict(b1)
)
eq_(
attributes.get_state_history(
attributes.instance_state(f1), "name"
),
((), ["f1"], ()),
)
eq_(
attributes.get_state_history(
attributes.instance_state(f1), "bars"
),
((), [b1], ()),
)
f1.name = "f1mod"
b2 = Bar()
b2.name = "b2"
f1.bars.append(b2)
eq_(
attributes.get_state_history(
attributes.instance_state(f1), "name"
),
(["f1mod"], (), ["f1"]),
)
eq_(
attributes.get_state_history(
attributes.instance_state(f1), "bars"
),
([b2], [b1], []),
)
f1.bars.remove(b1)
eq_(
attributes.get_state_history(
attributes.instance_state(f1), "bars"
),
([b2], [], [b1]),
)
def test_null_instrumentation(self):
class Foo(MyBaseClass):
pass
register_class(Foo)
attributes.register_attribute(
Foo, "name", uselist=False, useobject=False
)
attributes.register_attribute(
Foo, "bars", uselist=True, trackparent=True, useobject=True
)
assert Foo.name == attributes.manager_of_class(Foo)["name"]
assert Foo.bars == attributes.manager_of_class(Foo)["bars"]
def test_alternate_finders(self):
"""Ensure the generic finder front-end deals with edge cases."""
class Unknown(object):
pass
class Known(MyBaseClass):
pass
register_class(Known)
k, u = Known(), Unknown()
assert instrumentation.manager_of_class(Unknown) is None
assert instrumentation.manager_of_class(Known) is not None
assert instrumentation.manager_of_class(None) is None
assert attributes.instance_state(k) is not None
assert_raises((AttributeError, KeyError), attributes.instance_state, u)
assert_raises(
(AttributeError, KeyError), attributes.instance_state, None
)
def test_unmapped_not_type_error(self):
"""extension version of the same test in test_mapper.
fixes #3408
"""
assert_raises_message(
sa.exc.ArgumentError,
"Class object expected, got '5'.",
class_mapper,
5,
)
def test_unmapped_not_type_error_iter_ok(self):
"""extension version of the same test in test_mapper.
fixes #3408
"""
assert_raises_message(
sa.exc.ArgumentError,
r"Class object expected, got '\(5, 6\)'.",
class_mapper,
(5, 6),
)
class FinderTest(_ExtBase, fixtures.ORMTest):
def test_standard(self):
class A(object):
pass
register_class(A)
eq_(type(manager_of_class(A)), instrumentation.ClassManager)
def test_nativeext_interfaceexact(self):
class A(object):
__sa_instrumentation_manager__ = (
instrumentation.InstrumentationManager
)
register_class(A)
ne_(type(manager_of_class(A)), instrumentation.ClassManager)
def test_nativeext_submanager(self):
class Mine(instrumentation.ClassManager):
pass
class A(object):
__sa_instrumentation_manager__ = Mine
register_class(A)
eq_(type(manager_of_class(A)), Mine)
@modifies_instrumentation_finders
def test_customfinder_greedy(self):
class Mine(instrumentation.ClassManager):
pass
class A(object):
pass
def find(cls):
return Mine
instrumentation.instrumentation_finders.insert(0, find)
register_class(A)
eq_(type(manager_of_class(A)), Mine)
@modifies_instrumentation_finders
def test_customfinder_pass(self):
class A(object):
pass
def find(cls):
return None
instrumentation.instrumentation_finders.insert(0, find)
register_class(A)
eq_(type(manager_of_class(A)), instrumentation.ClassManager)
class InstrumentationCollisionTest(_ExtBase, fixtures.ORMTest):
def test_none(self):
class A(object):
pass
register_class(A)
def mgr_factory(cls):
return instrumentation.ClassManager(cls)
class B(object):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
register_class(B)
class C(object):
__sa_instrumentation_manager__ = instrumentation.ClassManager
register_class(C)
def test_single_down(self):
class A(object):
pass
register_class(A)
def mgr_factory(cls):
return instrumentation.ClassManager(cls)
class B(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
assert_raises_message(
TypeError,
"multiple instrumentation implementations",
register_class,
B,
)
def test_single_up(self):
class A(object):
pass
# delay registration
def mgr_factory(cls):
return instrumentation.ClassManager(cls)
class B(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
register_class(B)
assert_raises_message(
TypeError,
"multiple instrumentation implementations",
register_class,
A,
)
def test_diamond_b1(self):
def mgr_factory(cls):
return instrumentation.ClassManager(cls)
class A(object):
pass
class B1(A):
pass
class B2(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
class C(object):
pass
assert_raises_message(
TypeError,
"multiple instrumentation implementations",
register_class,
B1,
)
def test_diamond_b2(self):
def mgr_factory(cls):
return instrumentation.ClassManager(cls)
class A(object):
pass
class B1(A):
pass
class B2(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
class C(object):
pass
register_class(B2)
assert_raises_message(
TypeError,
"multiple instrumentation implementations",
register_class,
B1,
)
def test_diamond_c_b(self):
def mgr_factory(cls):
return instrumentation.ClassManager(cls)
class A(object):
pass
class B1(A):
pass
class B2(A):
__sa_instrumentation_manager__ = staticmethod(mgr_factory)
class C(object):
pass
register_class(C)
assert_raises_message(
TypeError,
"multiple instrumentation implementations",
register_class,
B1,
)
class ExtendedEventsTest(_ExtBase, fixtures.ORMTest):
"""Allow custom Events implementations."""
@modifies_instrumentation_finders
def test_subclassed(self):
class MyEvents(events.InstanceEvents):
pass
class MyClassManager(instrumentation.ClassManager):
dispatch = event.dispatcher(MyEvents)
instrumentation.instrumentation_finders.insert(
0, lambda cls: MyClassManager
)
class A(object):
pass
register_class(A)
manager = instrumentation.manager_of_class(A)
assert issubclass(manager.dispatch._events, MyEvents)
|
datamade/yournextmp-popit
|
refs/heads/master
|
cached_counts/admin.py
|
13848
|
from django.contrib import admin
# Register your models here.
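# A registration sketch (assumption: this app defines a CachedCount model in
# models.py; the names below are illustrative, not verified):
#
#   from .models import CachedCount
#   admin.site.register(CachedCount)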
|
bckwltn/SickRage
|
refs/heads/master
|
lib/github/StatsCommitActivity.py
|
74
|
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class StatsCommitActivity(github.GithubObject.NonCompletableGithubObject):
"""
This class represents statistics of commit activity. The reference can be found here http://developer.github.com/v3/repos/statistics/#get-the-last-year-of-commit-activity-data
"""
@property
def week(self):
"""
:type: datetime.datetime
"""
return self._week.value
@property
def total(self):
"""
:type: int
"""
return self._total.value
@property
def days(self):
"""
:type: list of int
"""
return self._days.value
def _initAttributes(self):
self._week = github.GithubObject.NotSet
self._total = github.GithubObject.NotSet
self._days = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "week" in attributes: # pragma no branch
self._week = self._makeTimestampAttribute(attributes["week"])
if "total" in attributes: # pragma no branch
self._total = self._makeIntAttribute(attributes["total"])
if "days" in attributes: # pragma no branch
self._days = self._makeListOfIntsAttribute(attributes["days"])
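# A minimal consumption sketch (assumptions: a valid API token, and the
# standard PyGithub Repository.get_stats_commit_activity() entry point,
# which yields one StatsCommitActivity per week of the last year):
#
#   from github import Github
#   g = Github("<token>")
#   repo = g.get_repo("jacquev6/PyGithub")
#   for week_stats in repo.get_stats_commit_activity() or []:
#       print(week_stats.week, week_stats.total, week_stats.days)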
|
erinn/ansible
|
refs/heads/devel
|
v1/ansible/runner/action_plugins/debug.py
|
119
|
# Copyright 2012, Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import ansible
from ansible import utils
from ansible.utils import template
from ansible.runner.return_data import ReturnData
class ActionModule(object):
''' Print statements during execution '''
TRANSFERS_FILES = False
def __init__(self, runner):
self.runner = runner
self.basedir = runner.basedir
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
args = {}
if complex_args:
args.update(complex_args)
# attempt to prevent confusing messages when the variable didn't interpolate
module_args = module_args.replace("{{ ","{{").replace(" }}","}}")
kv = utils.parse_kv(module_args)
args.update(kv)
        if 'msg' not in args and 'var' not in args:
args['msg'] = 'Hello world!'
result = {}
if 'msg' in args:
if 'fail' in args and utils.boolean(args['fail']):
result = dict(failed=True, msg=args['msg'])
else:
result = dict(msg=args['msg'])
elif 'var' in args and not utils.LOOKUP_REGEX.search(args['var']):
results = template.template(self.basedir, args['var'], inject, convert_bare=True)
result['var'] = { args['var']: results }
# force flag to make debug output module always verbose
result['verbose_always'] = True
return ReturnData(conn=conn, result=result)
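# Playbook usage sketch (standard debug syntax, plus the extra 'fail' flag
# this v1 plugin honors alongside msg):
#
#   - debug: msg="hello from {{ inventory_hostname }}"
#   - debug: var=play_hosts
#   - debug: msg="forced failure for testing" fail=yes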
|
yod/google-appengine-wx-launcher-gnome
|
refs/heads/master
|
launcher/taskcontroller.py
|
28
|
#!/usr/bin/env python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import os
import subprocess
import webbrowser
import wx
import launcher
# TODO: rename this file task_controller.py in a big renameathon
class TaskController(object):
"""Main conroller (MVC) for running tasks.
Tasks are running instances of App Engine projects.
"""
def __init__(self, app_controller):
"""Create a new TaskController.
Args:
app_controller: the main application controller.
"""
self._app_controller = app_controller
# self._frame: the main frame for project display
    # self._threads: an array of threads for running App Engine applications
# self._consoles: an array of LogConsoles for App Engine applications
self._frame = None
self._threads = []
self._consoles = []
self._runtime = None
self._platform = launcher.Platform()
self._preferences = None
def SetModelsViews(self, frame=None, runtime=None, platform=None,
preferences=None):
"""Set models and views (MVC) for this controller.
    We need a pointer to the main frame. We can't do this in __init__
    since those objects want a pointer to this controller as well, and one must
come first. Convention for launcher is for model/view to take
controllers in their __init__, and have the controller accept it
later with a call to SetModelsViews().
Args:
frame: the main frame (MainFrame) for the app
runtime: a launcher.Runtime
platform: a launcher.Platform
preferences: a launcher.Preferences
"""
if frame:
self._frame = frame
if runtime:
self._runtime = runtime
if platform:
self._platform = platform
if preferences:
self._preferences = preferences
def _GenericRun(self, extra_flags=None):
"""Run the project(s) selected in the main frame.
Args:
extra_flags: a list of extra command line flags for the run command
"""
for project in self._frame.SelectedProjects():
cmd = None
err = ""
try:
if self._FindThreadForProject(project):
logging.warning('Already running a task for %s!' % project.path)
else:
cmd = self._runtime.DevAppServerCommand(project,
extra_flags=extra_flags)
except launcher.RuntimeException, r:
err = r.message
if not cmd or err:
logging.error(err + '\n'
'Cannot run project %s. Please confirm '
'these values in your Preferences, or take an '
'appropriate measure to fix it (e.g. install Python).'
% project.path)
else:
t = self._CreateTaskThreadForProject(project, cmd)
t.start()
self._threads.append(t)
def _OpenFile(self, path, run_open_cmd):
"""Open file in browser.
Will launch external browser in platform dependent manner.
Args:
path: Absolute path to open.
"""
opencmd = self._platform.OpenCommand(path)
if not opencmd:
logging.warning('Could not form an open command, sorry')
return
run_open_cmd(opencmd)
def OpenSDK(self, event, run_open_cmd=subprocess.Popen):
"""Open SDK in browser.
Called from UI menu.
"""
sdk_dir = self._platform.AppEngineBaseDirectory()
self._OpenFile(sdk_dir, run_open_cmd)
def Run(self, event):
"""Run the project(s) selected in the main frame.
Called directly from UI.
"""
self._GenericRun()
def RunStrict(self, event):
"""Run the project(s) selected in the main frame, strictly.
Called directly from UI.
"""
self._GenericRun(['--require_indexes'])
def _CreateTaskThreadForProject(self, project, cmd):
"""Create and return a task thread, for executing cmd on project.
Assumes the task thread is for running dev_appserver.
    Split into a separate method to make unit testing of self.Run() easier.
Args:
project: the Project that needs a thread
cmd: list of exec and args; the command to execute,
associated with the project
"""
return launcher.DevAppServerTaskThread(self, project, cmd)
def Stop(self, event):
"""Stop the project(s) selected in the main frame.
Called directly from UI.
"""
for project in self._frame.SelectedProjects():
thread = self._FindThreadForProject(project)
if not thread:
if project.runstate == launcher.Project.STATE_DIED:
# Just clearing out a stop.
project.runstate = launcher.Project.STATE_STOP
self.RunStateChanged(project)
else:
logging.warning('Cannot find a running task for %s!' % project.path)
else:
thread.stop() # async
def Browse(self, event):
"""Browse the project(s) selected in the main frame if they are running.
Called directly from UI.
"""
project_list = [p for p in self._frame.SelectedProjects()
if p.runstate == launcher.Project.STATE_RUN]
if not project_list:
logging.warning('No selected projects are running ' +
'so we have nothing to Browse.')
return
for project in project_list:
self._BrowseProject(project)
def _FindOrCreateConsole(self, project):
"""Find and return the launcher.LogConsole for project; create if needed.
Args:
project: the Project associated (or to be associated with) the LogConsole
"""
for console in self._consoles:
if project == console.project:
return console
console = launcher.LogConsole(project)
self._consoles.append(console)
return console
def StopAll(self, _=None):
"""Stop all projects.
Args:
_: not used (made consistent with Stop/Run for easier testing)
"""
[t.stop() for t in self._threads] # t.stop() is async.
def _FindThreadForProject(self, project):
"""Find and return the launcher.TaskThread for project, or None.
Args:
project: the project whose thread we are looking for
"""
for thread in self._threads:
if thread.project == project:
return thread
return None
def Logs(self, event):
"""Display the Console window for the project(s) selected in the main frame.
Called directly from UI.
"""
for project in self._frame.SelectedProjects():
console = self._FindOrCreateConsole(project)
console.DisplayAndBringToFront()
def SdkConsole(self, event):
"""Opens the local SDK Administration console.
The Console is opened for the project(s) selected in the main frame.
The URL looks something like http://localhost:PORT/_ah/admin.
Called directly from UI.
"""
project_list = [p for p in self._frame.SelectedProjects()
if p.runstate == launcher.Project.STATE_RUN]
if not project_list:
logging.warning('No selected projects are running ' +
'so we have no Admin Console to go to.')
return
for project in project_list:
self._BrowseAdminConsoleForProject(project)
def Edit(self, event, run_edit_cmd=subprocess.Popen):
"""Opens, for edit, the project(s) selected in the main frame.
Called directly from UI.
Args:
event: a wxPython event (for all Bind()ings)
run_edit_cmd: the command used to run the actual tuple edit command.
Only ever set to the non-default in a unit test.
"""
for project in self._frame.SelectedProjects():
editor = self._preferences[launcher.Preferences.PREF_EDITOR]
editcmd = self._platform.EditCommand(editor, project.path)
if not editcmd:
logging.warning('Could not form an edit command, sorry')
return
run_edit_cmd(editcmd)
def Open(self, event, run_open_cmd=subprocess.Popen):
"""Opens (in Explorer) the the project(s) selected in the main frame.
Called directly from UI.
Args:
event: a wxPython event (for all Bind()ings)
run_open_cmd: the command used to run the actual tuple open command.
Only ever set to the non-default in a unit test.
"""
for project in self._frame.SelectedProjects():
self._OpenFile(project.path, run_open_cmd)
def Deploy(self, event, deploy_controller=None):
"""Initiates a deploy to Google of the project selected in the main frame.
Called directly from UI.
Args:
event: the wx.Event that initiated the transaction
deploy_controller: if not None, the controller to be used for
deployment. If None, a default is used
(launcher.DeployController). Only non-None in a unit test.
"""
project_list = self._frame.SelectedProjects()
if not project_list:
logging.warning('No projects selected for deployment.')
return
dc = deploy_controller or launcher.DeployController(self._runtime,
launcher.Preferences(),
project_list)
dc.InitiateDeployment()
def Dashboard(self, event):
"""Opens the App Engine Dashboard for the currently selected project(s).
The Dashboard is a System Status page for a deployed application
that lives on a Google server. See
http://code.google.com/appengine/kb/status.html for more info.
A typical URL is https://appengine.google.com/dashboard?app_id=ID
Called directly from UI.
"""
for project in self._frame.SelectedProjects():
self._BrowseDashboardForProject(project)
def RunStateChanged(self, project):
"""Called when the runstate of a project was changed and UI update is needed.
Args:
project: the project whose run state has changed
"""
self._app_controller.RefreshMainView()
self._DeleteThreadIfNeeded(project)
def _DeleteThreadIfNeeded(self, project):
"""If we have a thread for the project and it isn't running, delete it.
Args:
project: the project whose thread is no longer needed
"""
if project.runstate in (launcher.Project.STATE_STOP,
launcher.Project.STATE_DIED):
thread = self._FindThreadForProject(project)
if thread:
self._threads.remove(thread)
def _PlatformObject(self):
"""Return a platform object.
Split out for easier unit testing.
"""
return launcher.Platform()
def _BrowseProject(self, project, browsefunc=webbrowser.open):
"""Unconditionally browse the specified project.
Args:
project: the project we want to browse
browsefunc: if set, use as a browsing function that takes 1 arg, a URL
"""
browsefunc('http://localhost:%d' % project.port)
def _BrowseAdminConsoleForProject(self, project, browsefunc=webbrowser.open):
"""Unconditionally browse the SDK Administration Console for the project.
Args:
project: the project whose admin console we want to browse
browsefunc: if set, use as a browsing function that takes 1 arg, a URL
"""
browsefunc('http://localhost:%d/_ah/admin' % project.port)
def _BrowseDashboardForProject(self, project, browsefunc=webbrowser.open):
"""Unconditionally browse the Dashoard for the project.
The Dashboard is a System Status page for a deployed application
that lives on a Google server.
Args:
project: the project whose Dashboard we want to browse
browsefunc: if set, use as a browsing function that takes 1 arg, a URL
"""
server = (self._preferences[launcher.Preferences.PREF_DEPLOY_SERVER] or
'appengine.google.com')
    # TODO(jrg): make the path configurable on a per-project basis
browsefunc('https://%s/dashboard?app_id=%s'
% (server, str(project.name)))
def DisplayProjectOutput(self, project, text):
"""For the output from |project|, send to the appropriate UI.
Args:
project: the project whose output we now have
text: the output from the project that needs display
"""
console = self._FindOrCreateConsole(project)
console.AppendText(text)
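# Wiring sketch for the SetModelsViews() convention described above
# (assumption: launcher.MainFrame is the view class; the names mirror the
# docstring's contract rather than verified entry points):
#
#   controller = TaskController(app_controller)
#   frame = launcher.MainFrame(controller)  # view takes the controller first
#   controller.SetModelsViews(frame=frame,
#                             runtime=launcher.Runtime(),
#                             platform=launcher.Platform(),
#                             preferences=launcher.Preferences())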
|
chenc10/Spark-PAF
|
refs/heads/master
|
dist/ec2/lib/boto-2.34.0/boto/pyami/__init__.py
|
396
|
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
|
Jrribas/InstantScale
|
refs/heads/master
|
processImage.py
|
1
|
import os
import pytesseract
from re import compile
from cv2 import imread, imwrite, cvtColor, threshold, COLOR_BGR2GRAY, THRESH_BINARY, ADAPTIVE_THRESH_GAUSSIAN_C
from cv2 import adaptiveThreshold
from PIL import Image, ImageFont, ImageDraw
import shutil
def getBar(img):
height, width, channels = img.shape
startRow = None
cropRow = None
    # Look pixel by pixel for the white bar
try:
for i in reversed(range(height)):
if list(img[i, width - 3]) >= [254, 254, 254] and startRow is None:
startRow = i
if list(img[i, width - 3]) <= [250, 250, 250] and startRow is not None:
cropRow = i
if startRow - cropRow < 50:
return 0, 0, 0
break
# Cropping image
crop_img = img[0:cropRow, 0::]
bar_img = img[cropRow + 1:startRow, 1:width]
barSize = (height - cropRow) * 100 / height + 1
except TypeError:
return 0, 0, 0
return crop_img, bar_img, barSize
def cropImage(img, cropPercentage, position):
# Cropping image function if manual is selected
height, width, channels = img.shape
if position == "Bottom":
cropRow = int((height * (100 - cropPercentage)) / 100)
crop_image = img[0:cropRow, 0::]
else:
cropRow = int((height * (100 - cropPercentage)) / 100)
crop_image = img[height - cropRow::, 0::]
return crop_image
def getScale(bar_img):
    # Scan the bar image for a run of dark pixels longer than 30 px and return their coordinates
k = []
for i in range(len(bar_img)): # len(bar_img) -> height
for j in range(len(bar_img[i])):
if list(bar_img[i, j]) < [50, 50, 50]:
k.append([i, j])
else:
if len(k) > 30:
scale = k
return scale
k = []
def getNumber(bar_img, bar_img_res, exePath):
# Get path from copy of original image
path = exePath + "\\images\\"
# Transform image in gray "colour"
bar_img = cvtColor(bar_img, COLOR_BGR2GRAY)
units_dict = {"mm": 0, "um": 1, "nm": 2}
for i in range(0, 100, 10):
# Loops through thresh values in order to help tesseract read the scale number
thresh = i
max_Value = 255
th, imga = threshold(bar_img, thresh, max_Value, THRESH_BINARY)
os.chdir(exePath)
if not os.path.exists(path):
os.makedirs(path)
imwrite(path + "/thres.tif", imga)
# Tesseract
scalenumb = pytesseract.image_to_string(Image.open(path + "/thres.tif"))
# Find scale unit
findSize = compile(r'(?<!\.)(\d+)\s?(nm|mm|µm|um)')
mo = findSize.search(scalenumb)
        if mo is not None and mo.group(1) != '0':
return mo.group(1), units_dict[mo.group(2)]
    # If no scale number or unit was found so far, retry with an improved adaptive threshold
bar_img_res = cvtColor(bar_img_res, COLOR_BGR2GRAY)
original_bar_img = bar_img_res
for j in range(0, len(bar_img_res[0]), 100):
x = [69, 71, 73, 75, 77, 79, 81, 83, 85]
for w in x:
bar_img_th = adaptiveThreshold(bar_img_res, 255, ADAPTIVE_THRESH_GAUSSIAN_C,
THRESH_BINARY, w, 4)
os.chdir(exePath)
if not os.path.exists(path):
os.makedirs(path)
imwrite(path + "\\thres.tif", bar_img_th)
scalenumb = pytesseract.image_to_string(Image.open(path + "\\thres.tif"), lang='eng')
findSize = compile(r'(?<!\.)(\d+)\s?(nm|mm|µm|um)')
mo = findSize.search(scalenumb)
            if mo is not None and mo.group(1) != '0':
return mo.group(1), units_dict[mo.group(2)]
bar_img_res = original_bar_img[1:200, j:j + 250]
imwrite(path + "HoldImages\\resize_im1.tif", bar_img_res)
temp = Image.open(path + "HoldImages\\resize_im1.tif")
temp = temp.resize((600, 750), Image.ANTIALIAS)
temp.save(path + "HoldImages\\resize_im1.tif", dpi=(600, 600))
bar_img_res = imread(path + "HoldImages\\resize_im1.tif")
bar_img_res = cvtColor(bar_img_res, COLOR_BGR2GRAY)
def cleanPathFiles(path, exePath):
Cpath = [""] * len(path)
# Create temp directory
exePath = exePath + "\\images\\"
    # Copy the images to a simpler working directory
for x in path:
x = x.replace('/', '\\')
path1, file = os.path.split(x)
shutil.copyfile(x, exePath + file)
# Clean file name of strange characters
for x in range(len(path)):
filename, fileExtension = os.path.splitext(os.path.basename(path[x]))
intab = "êéèíìîáàãâõñúùóòôç?!ÇÓÒÚÙÑÕÔÂÃÁÀÎÍÌÉÉʪº%"
outtab = "eeeiiiaaaaonuuoooc__COOUUNOOAAAAIIIEEE___"
trantab = str.maketrans(intab, outtab)
new_filename = filename.translate(trantab)
Cpath[x] = exePath + new_filename + fileExtension
if os.path.isfile(Cpath[x]) and exePath + filename + fileExtension != Cpath[x]:
os.remove(Cpath[x])
os.rename(exePath + filename + fileExtension, Cpath[x])
return Cpath
def drawScale(img, scale, scaleNumb, units, exePath, position, sizeOfScale,
fontColor=(0, 0, 0), bgColor=(255, 255, 255), targetValue=0, targetUnits=''):
# Draw the new scale in the image
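    # When targetUnits is given, the detected scale is converted into the
    # requested unit via conv_dict and validated: the redrawn bar must be at
    # least ~30 px and no more than 80% of the image width. Otherwise the
    # largest "nice" value (500, 200, ..., 0.01) is picked so the bar lands
    # between 8% and 20% of the width, stepping down a unit (mm -> µm -> nm)
    # when the chosen value is fractional.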
height, width, channels = img.shape
minpixels = 0.08 * width
maxpixels = 0.20 * width
val = None
sizeOfScale = sizeOfScale * height / 1000
if targetUnits != "":
conv_dict = {"mmµm": 1000, "mmnm": 1000000, "µmmm": 0.001, "µmnm": 1000, "nmmm": 0.000001, "nmµm": 0.001,
"µmµm": 1, "nmnm": 1, "mmmm": 1}
key = units + targetUnits
check = (((1 / conv_dict[key]) * scale) / scaleNumb)
if conv_dict[key] < 1 or (conv_dict[key] == 1 and scaleNumb < targetValue):
if check * targetValue > 0.8 * width:
message = "max"
maxValue = (0.8 * width) / scale * scaleNumb
return message + " value is : " + str(round(maxValue - maxValue * 0.005)) + " " + units
elif conv_dict[key] > 1 or (conv_dict[key] == 1 and scaleNumb > targetValue):
if check * targetValue < 30:
message = "min"
return message + " value is : " + str(round(30 / check) + 1) + " " + targetUnits
newScaleNumb = targetValue
units = targetUnits
newScale = check * targetValue
else:
values = [500, 200, 100, 50, 20, 10, 5, 2, 1, 0.5, 0.2, 0.1, 0.05, 0.02, 0.01]
unit_dict = {"mm": "µm", "µm": "nm"}
conv = 1
if scaleNumb == 1 and scale > maxpixels and units == "nm":
newScale = scale
newScaleNumb = scaleNumb
else:
if scaleNumb != 1:
scale /= scaleNumb
for val in values:
if minpixels < val * scale < maxpixels:
if val < 1:
conv = 1000
units = unit_dict[units]
break
else:
break
newScale = round(val * scale)
newScaleNumb = int(val * conv)
os.chdir(exePath)
path = "images/cropImages"
if not os.path.exists(path):
os.makedirs(path)
imwrite(path + "/crop_rect.png", img)
im = Image.open(path + "/crop_rect.png")
draw = ImageDraw.Draw(im)
fontsize = round(13 * sizeOfScale)
font = ImageFont.truetype("arial.ttf", fontsize)
scaletext = str(newScaleNumb) + ' ' + units
# Draw scale in the cropped image
w, h = draw.textsize(scaletext, font)
if position == 0:
sD = [round(width * 0.0235), round(height * 0.9636) - (20 * sizeOfScale / 3 + 3 * sizeOfScale + h),
(round(width * 0.0235) + newScale) + (20 * sizeOfScale / 3), round(height * 0.9636)] # X0,Y0,X1,Y1
elif position == 1:
sD = [(round(width * 0.9765) - newScale) - (20 * sizeOfScale / 3),
round(height * 0.9636) - (20 * sizeOfScale / 3 + 3 * sizeOfScale + h), round(width * 0.9765),
round(height * 0.9636)] # X0,Y0,X1,Y1
elif position == 2:
sD = [round(width * 0.0235), round(height * 0.0364),
(round(width * 0.0235) + newScale) + (20 * sizeOfScale / 3),
round(height * 0.0364) + (20 * sizeOfScale / 3 + 3 * sizeOfScale + h)] # X0,Y0,X1,Y1
else:
sD = [round((width * 0.9765) - newScale - (20 * sizeOfScale / 3)), round(height * 0.0364),
round(width * 0.9765),
round((height * 0.0364) + ((20 * sizeOfScale / 3) + (3 * sizeOfScale + h)))] # X0,Y0,X1,Y1
if position == 0 or position == 2:
textDimensions = [x + y for x, y in zip(sD, [0, 0, int(-newScale + w), 0])]
else:
textDimensions = [x + y for x, y in zip(sD, [int(+newScale - w), 0, 0, 0])]
if newScale > w:
draw.rectangle(sD, fill=bgColor, outline=bgColor)
draw.text(((((sD[2] - sD[0]) / 2) - w / 2) + sD[0], sD[1] + 7 * sizeOfScale), scaletext, font=font,
fill=fontColor)
draw.line([((sD[2] - sD[0]) / 2) - newScale / 2 + sD[0], sD[1] + 5 * sizeOfScale,
sD[0] + ((sD[2] - sD[0]) / 2) + newScale / 2, sD[1] + 5 * sizeOfScale], fill=fontColor,
width=round(3 * sizeOfScale))
else:
draw.rectangle(textDimensions, fill=bgColor, outline=bgColor)
draw.text(((((textDimensions[2] - textDimensions[0]) / 2) - w / 2) + textDimensions[0],
textDimensions[1] + 7 * sizeOfScale), scaletext, font=font, fill=fontColor)
draw.line([((textDimensions[2] - textDimensions[0]) / 2) - newScale / 2 + textDimensions[0],
textDimensions[1] + 5 * sizeOfScale,
textDimensions[0] + ((textDimensions[2] - textDimensions[0]) / 2) + newScale / 2,
textDimensions[1] + 5 * sizeOfScale], fill=fontColor, width=round(3 * sizeOfScale))
del draw
return im
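# End-to-end sketch of the pipeline above (a hedged example; assumes an image
# with a bottom info bar, Tesseract on PATH, and exe_path pointing at this
# tool's working directory -- all names illustrative):
#
#   img = imread("sem_image.tif")
#   crop_img, bar_img, bar_size = getBar(img)
#   scale_px = len(getScale(bar_img))               # bar length in pixels
#   number, unit_idx = getNumber(bar_img, bar_img, exe_path)
#   out = drawScale(crop_img, scale_px, int(number),
#                   ["mm", "µm", "nm"][unit_idx], exe_path,
#                   position=0, sizeOfScale=10)
#   out.save("sem_image_scaled.png")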
|
acsone/stock-logistics-workflow
|
refs/heads/8.0
|
stock_inventory_retry_assign/__init__.py
|
28
|
# -*- coding: utf-8 -*-
from . import stock
|
pilou-/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/f5/bigiq_device_facts.py
|
12
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigiq_device_facts
short_description: Collect facts from F5 BIG-IQ devices
description:
- Collect facts from F5 BIG-IQ devices.
version_added: 2.8
options:
gather_subset:
description:
- When supplied, this argument will restrict the facts returned to a given subset.
- Can specify a list of values to include a larger subset.
- Values can also be used with an initial C(!) to specify that a specific subset
should not be collected.
type: list
required: True
choices:
- all
- applications
- managed-devices
- purchased-pool-licenses
- regkey-pools
- system-info
- vlans
- "!all"
- "!applications"
- "!managed-devices"
- "!purchased-pool-licenses"
- "!regkey-pools"
- "!system-info"
- "!vlans"
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Collect BIG-IQ facts
bigiq_device_facts:
gather_subset:
- system-info
- vlans
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Collect all BIG-IQ facts
bigiq_device_facts:
gather_subset:
- all
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Collect all BIG-IQ facts except VLANs
bigiq_device_facts:
gather_subset:
- all
- "!trunks"
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
'''
RETURN = r'''
applications:
description: Application related facts
  returned: When C(applications) is specified in C(gather_subset).
type: complex
contains:
protection_mode:
description:
- The type of F5 Web Application Security Service protection on the application.
returned: changed
type: str
sample: Not Protected
id:
description:
- ID of the application as known to the BIG-IQ.
returned: changed
type: str
sample: 996baae8-5d1d-3662-8a2d-3612fa2aceae
name:
description:
- Name of the application.
returned: changed
type: str
sample: site12http.example.com
status:
description:
- Current state of the application.
returned: changed
type: str
sample: DEPLOYED
transactions_per_second:
description:
- Current measurement of Transactions Per second being handled by the application.
returned: changed
type: float
sample: 0.87
connections:
description:
- Current number of connections established to the application.
returned: changed
type: float
sample: 3.06
new_connections:
description:
- Number of new connections being established per second.
returned: changed
type: float
sample: 0.35
response_time:
description:
- Measured response time of the application in milliseconds.
returned: changed
type: float
sample: 0.02
health:
description:
- Health of the application.
returned: changed
type: str
sample: Good
active_alerts:
description:
- Number of alerts active on the application.
returned: changed
type: int
sample: 0
bad_traffic:
description:
- Percent of traffic to application that is determined to be 'bad'.
- This value is dependent on C(protection_mode) being enabled.
returned: changed
type: float
sample: 1.7498
enhanced_analytics:
description:
- Whether enhanced analytics is enabled for the application or not.
returned: changed
type: bool
sample: yes
bad_traffic_growth:
description:
- Whether or not Bad Traffic Growth alerts are configured to be triggered or not.
returned: changed
type: bool
sample: no
sample: hash/dictionary of values
managed_devices:
description: Managed device related facts.
returned: When C(managed-devices) is specified in C(gather_subset).
type: complex
contains:
address:
description:
- Address where the device was discovered.
returned: changed
type: str
sample: 10.10.10.10
build:
description:
- Build of the version.
returned: changed
type: str
sample: 0.0.4
device_uri:
description:
- URI to reach the management interface of the device.
returned: changed
type: str
sample: "https://10.10.10.10:443"
edition:
description:
- Edition string of the product version.
returned: changed
type: str
sample: Final
group_name:
description:
- BIG-IQ group that the device is a member of.
returned: changed
type: str
sample: cm-bigip-allBigIpDevices
hostname:
description:
- Discovered hostname of the device.
returned: changed
type: str
sample: tier2labB1.lab.fp.foo.com
https_port:
description:
- HTTPS port available on the management interface of the device.
returned: changed
type: int
sample: 443
is_clustered:
description:
- Whether the device is clustered or not.
returned: changed
type: bool
sample: no
is_license_expired:
description:
- Whether the license on the device is expired or not.
returned: changed
type: bool
sample: yes
is_virtual:
description:
- Whether the device is a virtual edition or not.
returned: changed
type: bool
sample: yes
machine_id:
description:
- Machine specific ID assigned to this device by BIG-IQ.
returned: changed
type: str
sample: c141bc88-f734-4434-be64-a3e9ea98356e
management_address:
description:
- IP address of the management interface on the device.
returned: changed
type: str
sample: 10.10.10.10
mcp_device_name:
description:
- Device name as known by MCPD on the BIG-IP.
returned: changed
type: str
sample: /Common/tier2labB1.lab.fp.foo.com
product:
description:
- Product that the managed device is identified as.
returned: changed
type: str
sample: BIG-IP
rest_framework_version:
description:
        - REST framework version running on the device.
returned: changed
type: str
sample: 13.1.1-0.0.4
self_link:
description:
- Internal reference to the managed device in BIG-IQ.
returned: changed
type: str
sample: "https://localhost/mgmt/shared/resolver/device-groups/cm-bigip-allBigIpDevices/devices/c141bc88-f734-4434-be64-a3e9ea98356e"
slots:
description:
- Volumes on the device and versions of software installed in those volumes.
returned: changed
type: complex
sample: {"volume": "HD1.1", "product": "BIG-IP", "version": "13.1.1", "build": "0.0.4", "isActive": "yes"}
state:
description:
- State of the device.
returned: changed
type: str
sample: ACTIVE
tags:
description:
- Misc tags that are assigned to the device.
returned: changed
type: complex
sample: {'BIGIQ_tier_2_device': '2018-08-22T13:30:47.693-07:00', 'BIGIQ_SSG_name': 'tim-ssg'}
trust_domain_guid:
description:
- GUID of the trust domain the device is part of.
returned: changed
type: str
sample: 40ddf541-e604-4905-bde3005056813e36
uuid:
description:
- UUID of the device in BIG-IQ.
returned: changed
type: str
sample: c141bc88-f734-4434-be64-a3e9ea98356e
version:
description:
- Version of TMOS installed on the device.
returned: changed
type: str
sample: 13.1.1
sample: hash/dictionary of values
purchased_pool_licenses:
description: Purchased Pool License related facts.
returned: When C(purchased-pool-licenses) is specified in C(gather_subset).
type: complex
contains:
base_reg_key:
description:
        - Base registration key of the purchased pool.
returned: changed
type: str
sample: XXXXX-XXXXX-XXXXX-XXXXX-XXXXXXX
dossier:
description:
        - Dossier of the purchased pool license.
returned: changed
type: str
sample: d6bd4b8ba5...e9a1a1199b73af9932948a
free_device_licenses:
description:
- Number of free licenses remaining.
returned: changed
type: int
sample: 34
name:
description:
        - Name of the purchased pool.
returned: changed
type: str
sample: my-pool1
state:
description:
        - State of the purchased pool license.
returned: changed
type: str
sample: LICENSED
total_device_licenses:
description:
- Total number of licenses in the pool.
returned: changed
type: int
sample: 40
uuid:
description:
        - UUID of the purchased pool license.
returned: changed
type: str
sample: b2112329-cba7-4f1f-9a26-fab9be416d60
vendor:
description:
        - Vendor who provided the license.
returned: changed
type: str
sample: F5 Networks, Inc
licensed_date_time:
description:
- Timestamp that the pool was licensed.
returned: changed
type: str
sample: "2018-09-10T00:00:00-07:00"
licensed_version:
description:
- Version of BIG-IQ that is licensed.
returned: changed
type: str
sample: 6.0.1
evaluation_start_date_time:
description:
- Date that evaluation license starts.
returned: changed
type: str
sample: "2018-09-09T00:00:00-07:00"
evaluation_end_date_time:
description:
- Date that evaluation license ends.
returned: changed
type: str
sample: "2018-10-11T00:00:00-07:00"
license_end_date_time:
description:
- Date that the license expires.
returned: changed
type: str
sample: "2018-10-11T00:00:00-07:00"
license_start_date_time:
description:
- Date that the license starts.
returned: changed
type: str
sample: "2018-09-09T00:00:00-07:00"
registration_key:
description:
- Purchased pool license key.
returned: changed
type: str
sample: XXXXX-XXXXX-XXXXX-XXXXX-XXXXXXX
sample: hash/dictionary of values
regkey_pools:
description: Regkey Pool related facts.
returned: When C(regkey-pools) is specified in C(gather_subset).
type: complex
contains:
name:
description:
- Name of the regkey pool.
returned: changed
type: str
sample: pool1
id:
description:
- ID of the regkey pool.
returned: changed
type: str
sample: 4f9b565c-0831-4657-b6c2-6dde6182a502
total_offerings:
description:
        - Total number of offerings in the pool.
returned: changed
type: int
sample: 10
offerings:
description: List of the offerings in the pool.
type: complex
contains:
dossier:
description:
- Dossier of the license.
returned: changed
type: str
sample: d6bd4b8ba5...e9a1a1199b73af9932948a
name:
description:
- Name of the regkey.
returned: changed
type: str
sample: regkey1
state:
description:
            - State of the regkey license.
returned: changed
type: str
sample: LICENSED
licensed_date_time:
description:
- Timestamp that the regkey was licensed.
returned: changed
type: str
sample: "2018-09-10T00:00:00-07:00"
licensed_version:
description:
- Version of BIG-IQ that is licensed.
returned: changed
type: str
sample: 6.0.1
evaluation_start_date_time:
description:
- Date that evaluation license starts.
returned: changed
type: str
sample: "2018-09-09T00:00:00-07:00"
evaluation_end_date_time:
description:
- Date that evaluation license ends.
returned: changed
type: str
sample: "2018-10-11T00:00:00-07:00"
license_end_date_time:
description:
- Date that the license expires.
returned: changed
type: str
sample: "2018-10-11T00:00:00-07:00"
license_start_date_time:
description:
- Date that the license starts.
returned: changed
type: str
sample: "2018-09-09T00:00:00-07:00"
registration_key:
description:
- Registration license key.
returned: changed
type: str
sample: XXXXX-XXXXX-XXXXX-XXXXX-XXXXXXX
sample: hash/dictionary of values
sample: hash/dictionary of values
system_info:
description: System info related facts.
returned: When C(system-info) is specified in C(gather_subset).
type: complex
contains:
base_mac_address:
description:
- Media Access Control address (MAC address) of the device.
returned: changed
type: str
sample: "fa:16:3e:c3:42:6f"
marketing_name:
description:
- Marketing name of the device platform.
returned: changed
type: str
sample: BIG-IQ Virtual Edition
time:
description:
- Mapping of the current time information to specific time-named keys.
returned: changed
type: complex
contains:
day:
description:
- The current day of the month, in numeric form.
returned: changed
type: int
sample: 7
hour:
description:
- The current hour of the day in 24-hour form.
returned: changed
type: int
sample: 18
minute:
description:
- The current minute of the hour.
returned: changed
type: int
sample: 16
month:
description:
- The current month, in numeric form.
returned: changed
type: int
sample: 6
second:
description:
- The current second of the minute.
returned: changed
type: int
sample: 51
year:
description:
- The current year in 4-digit form.
returned: changed
type: int
sample: 2018
hardware_information:
description:
- Information related to the hardware (drives and CPUs) of the system.
type: complex
returned: changed
contains:
model:
description:
- The model of the hardware.
type: str
sample: Virtual Disk
name:
description:
- The name of the hardware.
type: str
sample: HD1
type:
description:
- The type of hardware.
type: str
sample: physical-disk
versions:
description:
            - Hardware-specific properties.
type: complex
contains:
name:
description:
                - Name of the property.
type: str
sample: Size
version:
description:
                - Value of the property.
type: str
sample: 154.00G
is_admin_password_changed:
description:
- Whether the admin password was changed from its default or not.
returned: changed
type: bool
sample: yes
is_root_password_changed:
description:
- Whether the root password was changed from its default or not.
returned: changed
type: bool
sample: no
is_system_setup:
description:
- Whether the system has been setup or not.
returned: changed
type: bool
sample: yes
package_edition:
description:
- Displays the software edition.
returned: changed
type: str
sample: Point Release 7
package_version:
description:
- A string combining the C(product_build) and C(product_build_date).
type: str
sample: "Build 0.0.1 - Tue May 15 15:26:30 PDT 2018"
product_code:
description:
- Code identifying the product.
type: str
sample: BIG-IQ
product_build:
description:
- Build version of the release version.
type: str
sample: 0.0.1
product_version:
description:
- Major product version of the running software.
type: str
sample: 6.0.0
product_built:
description:
- Unix timestamp of when the product was built.
type: int
sample: 180515152630
product_build_date:
description:
- Human readable build date.
type: str
sample: "Tue May 15 15:26:30 PDT 2018"
product_changelist:
description:
- Changelist that product branches from.
type: int
sample: 2557198
product_jobid:
description:
- ID of the job that built the product version.
type: int
sample: 1012030
chassis_serial:
description:
        - Serial of the chassis.
type: str
sample: 11111111-2222-3333-444444444444
host_board_part_revision:
description:
- Revision of the host board.
type: str
host_board_serial:
description:
- Serial of the host board.
type: str
platform:
description:
- Platform identifier.
type: str
sample: Z100
switch_board_part_revision:
description:
- Switch board revision.
type: str
switch_board_serial:
description:
- Serial of the switch board.
type: str
uptime:
description:
- Time, in seconds, since the system booted.
type: int
sample: 603202
sample: hash/dictionary of values
vlans:
description: List of VLAN facts.
returned: When C(vlans) is specified in C(gather_subset).
type: complex
contains:
auto_lasthop:
description:
- Allows the system to send return traffic to the MAC address that transmitted the
request, even if the routing table points to a different network or interface.
returned: changed
type: str
sample: enabled
cmp_hash_algorithm:
description:
- Specifies how the traffic on the VLAN will be disaggregated.
returned: changed
type: str
sample: default
description:
description:
- Description of the VLAN.
returned: changed
type: str
sample: My vlan
failsafe_action:
description:
- Action for the system to take when the fail-safe mechanism is triggered.
returned: changed
type: str
sample: reboot
failsafe_enabled:
description:
- Whether failsafe is enabled or not.
returned: changed
type: bool
sample: yes
failsafe_timeout:
description:
- Number of seconds that an active unit can run without detecting network traffic
on this VLAN before it starts a failover.
returned: changed
type: int
sample: 90
if_index:
description:
- Index assigned to this VLAN. It is a unique identifier assigned for all objects
displayed in the SNMP IF-MIB.
returned: changed
type: int
sample: 176
learning_mode:
description:
- Whether switch ports placed in the VLAN are configured for switch learning,
forwarding only, or dropped.
returned: changed
type: str
sample: enable-forward
interfaces:
description:
- List of tagged or untagged interfaces and trunks that you want to configure for the VLAN.
returned: changed
type: complex
contains:
full_path:
description:
- Full name of the resource as known to BIG-IP.
returned: changed
type: str
sample: 1.3
name:
description:
- Relative name of the resource in BIG-IP.
returned: changed
type: str
sample: 1.3
tagged:
description:
- Whether the interface is tagged or not.
returned: changed
type: bool
sample: no
mtu:
description:
- Specific maximum transition unit (MTU) for the VLAN.
returned: changed
type: int
sample: 1500
sflow_poll_interval:
description:
- Maximum interval in seconds between two pollings.
returned: changed
type: int
sample: 0
sflow_poll_interval_global:
description:
        - Whether the global VLAN poll-interval setting overrides the object-level
          poll-interval setting.
returned: changed
type: bool
sample: no
sflow_sampling_rate:
description:
- Ratio of packets observed to the samples generated.
returned: changed
type: int
sample: 0
sflow_sampling_rate_global:
description:
        - Whether the global VLAN sampling-rate setting overrides the object-level
          sampling-rate setting.
returned: changed
type: bool
sample: yes
source_check_enabled:
description:
- Specifies that only connections that have a return route in the routing table are accepted.
returned: changed
type: bool
sample: yes
true_mac_address:
description:
- Media access control (MAC) address for the lowest-numbered interface assigned to this VLAN.
returned: changed
type: str
sample: "fa:16:3e:10:da:ff"
tag:
description:
- Tag number for the VLAN.
returned: changed
type: int
sample: 30
sample: hash/dictionary of values
'''
import datetime
import math
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.six import string_types
try:
from library.module_utils.network.f5.bigiq import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.ipaddress import is_valid_ip
from library.module_utils.network.f5.common import transform_name
except ImportError:
from ansible.module_utils.network.f5.bigiq import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
from ansible.module_utils.network.f5.common import transform_name
def parseStats(entry):
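    # Recursively flatten an F5 REST "stats" tree: leaf entries carry either
    # a 'description' or a 'value'; children live under 'entries' or under
    # 'nestedStats' -> 'entries'. Purely numeric keys accumulate into a list,
    # dotted keys (e.g. "1.3") become nested dicts, and self-link URL keys
    # are reduced to their final path segment.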
if 'description' in entry:
return entry['description']
elif 'value' in entry:
return entry['value']
    elif 'entries' in entry or ('nestedStats' in entry and 'entries' in entry['nestedStats']):
if 'entries' in entry:
entries = entry['entries']
else:
entries = entry['nestedStats']['entries']
result = None
for name in entries:
entry = entries[name]
if 'https://localhost' in name:
name = name.split('/')
name = name[-1]
if result and isinstance(result, list):
result.append(parseStats(entry))
elif result and isinstance(result, dict):
result[name] = parseStats(entry)
else:
try:
int(name)
result = list()
result.append(parseStats(entry))
except ValueError:
result = dict()
result[name] = parseStats(entry)
else:
if '.' in name:
names = name.split('.')
key = names[0]
value = names[1]
                    if not isinstance(result, dict):
                        result = dict()
                    if not result.get(key):
                        result[key] = {}
result[key][value] = parseStats(entry)
else:
if result and isinstance(result, list):
result.append(parseStats(entry))
elif result and isinstance(result, dict):
result[name] = parseStats(entry)
else:
try:
int(name)
result = list()
result.append(parseStats(entry))
except ValueError:
result = dict()
result[name] = parseStats(entry)
return result
class BaseManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.kwargs = kwargs
def exec_module(self):
results = []
facts = self.read_facts()
for item in facts:
attrs = item.to_return()
results.append(attrs)
return results
class Parameters(AnsibleF5Parameters):
@property
def gather_subset(self):
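        # Normalize to a sorted, de-duplicated list so a single string,
        # repeated entries, and ordering differences all behave identically.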
if isinstance(self._values['gather_subset'], string_types):
self._values['gather_subset'] = [self._values['gather_subset']]
elif not isinstance(self._values['gather_subset'], list):
raise F5ModuleError(
"The specified gather_subset must be a list."
)
tmp = list(set(self._values['gather_subset']))
tmp.sort()
self._values['gather_subset'] = tmp
return self._values['gather_subset']
class BaseParameters(Parameters):
@property
def enabled(self):
return flatten_boolean(self._values['enabled'])
@property
def disabled(self):
return flatten_boolean(self._values['disabled'])
def _remove_internal_keywords(self, resource):
resource.pop('kind', None)
resource.pop('generation', None)
resource.pop('selfLink', None)
resource.pop('isSubcollection', None)
resource.pop('fullPath', None)
def to_return(self):
result = {}
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
class ApplicationsParameters(BaseParameters):
api_map = {
'protectionMode': 'protection_mode',
'transactionsPerSecond': 'transactions_per_second',
'newConnections': 'new_connections',
'responseTime': 'response_time',
'activeAlerts': 'active_alerts',
'badTraffic': 'bad_traffic',
'enhancedAnalytics': 'enhanced_analytics',
'badTrafficGrowth': 'bad_traffic_growth'
}
returnables = [
'protection_mode',
'id',
'name',
'status',
'transactions_per_second',
'connections',
'new_connections',
'response_time',
'health',
'active_alerts',
'bad_traffic',
'enhanced_analytics',
'bad_traffic_growth',
]
@property
def enhanced_analytics(self):
return flatten_boolean(self._values['enhanced_analytics'])
@property
def bad_traffic_growth(self):
return flatten_boolean(self._values['bad_traffic_growth'])
class ApplicationsFactManager(BaseManager):
def __init__(self, *args, **kwargs):
self.client = kwargs.get('client', None)
self.module = kwargs.get('module', None)
super(ApplicationsFactManager, self).__init__(**kwargs)
self.want = ApplicationsParameters(params=self.module.params)
def exec_module(self):
facts = self._exec_module()
result = dict(applications=facts)
return result
def _exec_module(self):
results = []
facts = self.read_facts()
for item in facts:
attrs = item.to_return()
results.append(attrs)
results = sorted(results, key=lambda k: k['name'])
return results
def read_facts(self):
results = []
collection = self.read_collection_from_device()
for resource in collection:
params = ApplicationsParameters(params=resource)
results.append(params)
return results
def read_collection_from_device(self):
uri = "https://{0}:{1}/mgmt/ap/query/v1/tenants/default/reports/AllApplicationsList".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
try:
return response['result']['items']
except KeyError:
return []
class ManagedDevicesParameters(BaseParameters):
api_map = {
'deviceUri': 'device_uri',
'groupName': 'group_name',
'httpsPort': 'https_port',
'isClustered': 'is_clustered',
'isLicenseExpired': 'is_license_expired',
'isVirtual': 'is_virtual',
'machineId': 'machine_id',
'managementAddress': 'management_address',
'mcpDeviceName': 'mcp_device_name',
'restFrameworkVersion': 'rest_framework_version',
'selfLink': 'self_link',
'trustDomainGuid': 'trust_domain_guid',
}
returnables = [
'address',
'build',
'device_uri',
'edition',
'group_name',
'hostname',
'https_port',
'is_clustered',
'is_license_expired',
'is_virtual',
'machine_id',
'management_address',
'mcp_device_name',
'product',
'rest_framework_version',
'self_link',
'slots',
'state',
'tags',
'trust_domain_guid',
'uuid',
'version',
]
@property
def slots(self):
result = []
if self._values['slots'] is None:
return None
for x in self._values['slots']:
x['is_active'] = flatten_boolean(x.pop('isActive', False))
result.append(x)
return result
@property
def tags(self):
if self._values['tags'] is None:
return None
result = dict((x['name'], x['value']) for x in self._values['tags'])
return result
@property
def https_port(self):
return int(self._values['https_port'])
@property
def is_clustered(self):
return flatten_boolean(self._values['is_clustered'])
@property
def is_license_expired(self):
return flatten_boolean(self._values['is_license_expired'])
@property
def is_virtual(self):
return flatten_boolean(self._values['is_virtual'])
class ManagedDevicesFactManager(BaseManager):
def __init__(self, *args, **kwargs):
self.client = kwargs.get('client', None)
self.module = kwargs.get('module', None)
super(ManagedDevicesFactManager, self).__init__(**kwargs)
self.want = ManagedDevicesParameters(params=self.module.params)
def exec_module(self):
facts = self._exec_module()
result = dict(managed_devices=facts)
return result
def _exec_module(self):
results = []
facts = self.read_facts()
for item in facts:
attrs = item.to_return()
results.append(attrs)
results = sorted(results, key=lambda k: k['hostname'])
return results
def read_facts(self):
results = []
collection = self.read_collection_from_device()
for resource in collection:
params = ManagedDevicesParameters(params=resource)
results.append(params)
return results
def read_collection_from_device(self):
uri = "https://{0}:{1}/mgmt/shared/resolver/device-groups/cm-bigip-allBigIpDevices/devices".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
if 'items' not in response:
return []
result = response['items']
return result
class PurchasedPoolLicensesParameters(BaseParameters):
api_map = {
'baseRegKey': 'base_reg_key',
'freeDeviceLicenses': 'free_device_licenses',
'licenseState': 'license_state',
'totalDeviceLicenses': 'total_device_licenses',
}
returnables = [
'base_reg_key',
'dossier',
'free_device_licenses',
'name',
'state',
'total_device_licenses',
'uuid',
# license_state facts
'vendor',
'licensed_date_time',
'licensed_version',
'evaluation_start_date_time',
'evaluation_end_date_time',
'license_end_date_time',
'license_start_date_time',
'registration_key',
]
@property
def registration_key(self):
try:
return self._values['license_state']['registrationKey']
except KeyError:
return None
@property
def license_start_date_time(self):
try:
return self._values['license_state']['licenseStartDateTime']
except KeyError:
return None
@property
def license_end_date_time(self):
try:
return self._values['license_state']['licenseEndDateTime']
except KeyError:
return None
@property
def evaluation_end_date_time(self):
try:
return self._values['license_state']['evaluationEndDateTime']
except KeyError:
return None
@property
def evaluation_start_date_time(self):
try:
return self._values['license_state']['evaluationStartDateTime']
except KeyError:
return None
@property
def licensed_version(self):
try:
return self._values['license_state']['licensedVersion']
except KeyError:
return None
@property
def licensed_date_time(self):
try:
return self._values['license_state']['licensedDateTime']
except KeyError:
return None
@property
def vendor(self):
try:
return self._values['license_state']['vendor']
except KeyError:
return None
class PurchasedPoolLicensesFactManager(BaseManager):
def __init__(self, *args, **kwargs):
self.client = kwargs.get('client', None)
self.module = kwargs.get('module', None)
super(PurchasedPoolLicensesFactManager, self).__init__(**kwargs)
self.want = PurchasedPoolLicensesParameters(params=self.module.params)
def exec_module(self):
facts = self._exec_module()
result = dict(purchased_pool_licenses=facts)
return result
def _exec_module(self):
results = []
facts = self.read_facts()
for item in facts:
attrs = item.to_return()
results.append(attrs)
results = sorted(results, key=lambda k: k['name'])
return results
def read_facts(self):
results = []
collection = self.read_collection_from_device()
for resource in collection:
params = PurchasedPoolLicensesParameters(params=resource)
results.append(params)
return results
def read_collection_from_device(self):
uri = "https://{0}:{1}/mgmt/cm/device/licensing/pool/purchased-pool/licenses".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
try:
return response['items']
except KeyError:
return []
class RegkeyPoolsParameters(BaseParameters):
api_map = {
}
returnables = [
'name',
'id',
'offerings',
'total_offerings',
]
class RegkeyPoolsOfferingParameters(BaseParameters):
api_map = {
'regKey': 'registration_key',
'licenseState': 'license_state',
'status': 'state',
}
returnables = [
'name',
'dossier',
'state',
        # license_state facts
        'vendor',
'licensed_date_time',
'licensed_version',
'evaluation_start_date_time',
'evaluation_end_date_time',
'license_end_date_time',
'license_start_date_time',
'registration_key',
]
@property
def registration_key(self):
try:
return self._values['license_state']['registrationKey']
except KeyError:
return None
@property
def license_start_date_time(self):
try:
return self._values['license_state']['licenseStartDateTime']
except KeyError:
return None
@property
def license_end_date_time(self):
try:
return self._values['license_state']['licenseEndDateTime']
except KeyError:
return None
@property
def evaluation_end_date_time(self):
try:
return self._values['license_state']['evaluationEndDateTime']
except KeyError:
return None
@property
def evaluation_start_date_time(self):
try:
return self._values['license_state']['evaluationStartDateTime']
except KeyError:
return None
@property
def licensed_version(self):
try:
return self._values['license_state']['licensedVersion']
except KeyError:
return None
@property
def licensed_date_time(self):
try:
return self._values['license_state']['licensedDateTime']
except KeyError:
return None
@property
def vendor(self):
try:
return self._values['license_state']['vendor']
except KeyError:
return None
class RegkeyPoolsFactManager(BaseManager):
def __init__(self, *args, **kwargs):
self.client = kwargs.get('client', None)
self.module = kwargs.get('module', None)
super(RegkeyPoolsFactManager, self).__init__(**kwargs)
self.want = RegkeyPoolsParameters(params=self.module.params)
def exec_module(self):
facts = self._exec_module()
result = dict(regkey_pools=facts)
return result
def _exec_module(self):
results = []
facts = self.read_facts()
for item in facts:
attrs = item.to_return()
results.append(attrs)
results = sorted(results, key=lambda k: k['name'])
return results
def read_facts(self):
results = []
collection = self.read_collection_from_device()
for resource in collection:
params = RegkeyPoolsParameters(params=resource)
offerings = self.read_offerings_from_device(resource['id'])
params.update({'total_offerings': len(offerings)})
            # Accumulate every offering; updating 'offerings' inside the loop
            # would retain only the last one.
            members = [
                RegkeyPoolsOfferingParameters(params=offering).to_return()
                for offering in offerings
            ]
            params.update({'offerings': members})
results.append(params)
return results
def read_collection_from_device(self):
uri = "https://{0}:{1}/mgmt/cm/device/licensing/pool/regkey/licenses".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
try:
return response['items']
except KeyError:
return []
def read_offerings_from_device(self, license):
uri = "https://{0}:{1}/mgmt/cm/device/licensing/pool/regkey/licenses/{2}/offerings".format(
self.client.provider['server'],
self.client.provider['server_port'],
license,
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
try:
return response['items']
except KeyError:
return []
class SystemInfoParameters(BaseParameters):
api_map = {
'isSystemSetup': 'is_system_setup',
'isAdminPasswordChanged': 'is_admin_password_changed',
'isRootPasswordChanged': 'is_root_password_changed'
}
returnables = [
'base_mac_address',
'chassis_serial',
'hardware_information',
'host_board_part_revision',
'host_board_serial',
'is_admin_password_changed',
'is_root_password_changed',
'is_system_setup',
'marketing_name',
'package_edition',
'package_version',
'platform',
'product_build',
'product_build_date',
'product_built',
'product_changelist',
'product_code',
'product_information',
'product_jobid',
'product_version',
'switch_board_part_revision',
'switch_board_serial',
'time',
'uptime',
]
@property
def is_admin_password_changed(self):
return flatten_boolean(self._values['is_admin_password_changed'])
@property
def is_root_password_changed(self):
return flatten_boolean(self._values['is_root_password_changed'])
@property
def is_system_setup(self):
if self._values['is_system_setup'] is None:
return 'no'
return flatten_boolean(self._values['is_system_setup'])
@property
def chassis_serial(self):
if self._values['system-info'] is None:
return None
# Yes, this is still called "bigip" even though this is querying the BIG-IQ
# product. This is likely due to BIG-IQ inheriting TMOS.
if 'bigipChassisSerialNum' not in self._values['system-info'][0]:
return None
return self._values['system-info'][0]['bigipChassisSerialNum']
@property
def switch_board_serial(self):
if self._values['system-info'] is None:
return None
if 'switchBoardSerialNum' not in self._values['system-info'][0]:
return None
if self._values['system-info'][0]['switchBoardSerialNum'].strip() == '':
return None
return self._values['system-info'][0]['switchBoardSerialNum']
@property
def switch_board_part_revision(self):
if self._values['system-info'] is None:
return None
if 'switchBoardPartRevNum' not in self._values['system-info'][0]:
return None
if self._values['system-info'][0]['switchBoardPartRevNum'].strip() == '':
return None
return self._values['system-info'][0]['switchBoardPartRevNum']
@property
def platform(self):
if self._values['system-info'] is None:
return None
return self._values['system-info'][0]['platform']
@property
def host_board_serial(self):
if self._values['system-info'] is None:
return None
if 'hostBoardSerialNum' not in self._values['system-info'][0]:
return None
if self._values['system-info'][0]['hostBoardSerialNum'].strip() == '':
return None
return self._values['system-info'][0]['hostBoardSerialNum']
@property
def host_board_part_revision(self):
if self._values['system-info'] is None:
return None
if 'hostBoardPartRevNum' not in self._values['system-info'][0]:
return None
if self._values['system-info'][0]['hostBoardPartRevNum'].strip() == '':
return None
return self._values['system-info'][0]['hostBoardPartRevNum']
@property
def package_edition(self):
return self._values['Edition']
@property
def package_version(self):
return 'Build {0} - {1}'.format(self._values['Build'], self._values['Date'])
@property
def product_build(self):
return self._values['Build']
@property
def product_build_date(self):
return self._values['Date']
@property
def product_built(self):
if 'version_info' not in self._values:
return None
if 'Built' in self._values['version_info']:
return int(self._values['version_info']['Built'])
@property
def product_changelist(self):
if 'version_info' not in self._values:
return None
if 'Changelist' in self._values['version_info']:
return int(self._values['version_info']['Changelist'])
@property
def product_jobid(self):
if 'version_info' not in self._values:
return None
if 'JobID' in self._values['version_info']:
return int(self._values['version_info']['JobID'])
@property
def product_code(self):
return self._values['Product']
@property
def product_version(self):
return self._values['Version']
@property
def hardware_information(self):
if self._values['hardware-version'] is None:
return None
self._transform_name_attribute(self._values['hardware-version'])
result = [v for k, v in iteritems(self._values['hardware-version'])]
return result
def _transform_name_attribute(self, entry):
if isinstance(entry, dict):
for k, v in iteritems(entry):
if k == 'tmName':
entry['name'] = entry.pop('tmName')
self._transform_name_attribute(v)
        elif isinstance(entry, list):
            # A list has no keys to rename; recurse into each element so any
            # nested dicts still get their 'tmName' keys transformed.
            for k in entry:
                self._transform_name_attribute(k)
else:
return
@property
def time(self):
if self._values['fullDate'] is None:
return None
date = datetime.datetime.strptime(self._values['fullDate'], "%Y-%m-%dT%H:%M:%SZ")
result = dict(
day=date.day,
hour=date.hour,
minute=date.minute,
month=date.month,
second=date.second,
year=date.year
)
return result
@property
def marketing_name(self):
if self._values['platform'] is None:
return None
return self._values['platform'][0]['marketingName']
@property
def base_mac_address(self):
if self._values['platform'] is None:
return None
return self._values['platform'][0]['baseMac']
class SystemInfoFactManager(BaseManager):
def __init__(self, *args, **kwargs):
self.client = kwargs.get('client', None)
self.module = kwargs.get('module', None)
super(SystemInfoFactManager, self).__init__(**kwargs)
self.want = SystemInfoParameters(params=self.module.params)
def exec_module(self):
facts = self._exec_module()
result = dict(system_info=facts)
return result
def _exec_module(self):
facts = self.read_facts()
results = facts.to_return()
return results
def read_facts(self):
collection = self.read_collection_from_device()
params = SystemInfoParameters(params=collection)
return params
def read_collection_from_device(self):
result = dict()
tmp = self.read_hardware_info_from_device()
if tmp:
result.update(tmp)
tmp = self.read_system_setup_from_device()
if tmp:
result.update(tmp)
tmp = self.read_clock_info_from_device()
if tmp:
result.update(tmp)
tmp = self.read_version_info_from_device()
if tmp:
result.update(tmp)
tmp = self.read_uptime_info_from_device()
if tmp:
result.update(tmp)
tmp = self.read_version_file_info_from_device()
if tmp:
result.update(tmp)
return result
def read_system_setup_from_device(self):
uri = "https://{0}:{1}/mgmt/shared/system/setup".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response
def read_version_file_info_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/util/bash".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
args = dict(
command='run',
utilCmdArgs='-c "cat /VERSION"'
)
resp = self.client.api.post(uri, json=args)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
try:
pattern = r'^(?P<key>(Product|Build|Sequence|BaseBuild|Edition|Date|Built|Changelist|JobID))\:(?P<value>.*)'
result = response['commandResult'].strip()
except KeyError:
return None
if 'No such file or directory' in result:
return None
lines = response['commandResult'].split("\n")
result = dict()
for line in lines:
if not line:
continue
matches = re.match(pattern, line)
if matches:
result[matches.group('key')] = matches.group('value').strip()
if result:
return dict(
version_info=result
)
def read_uptime_info_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/util/bash".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
args = dict(
command='run',
utilCmdArgs='-c "cat /proc/uptime"'
)
resp = self.client.api.post(uri, json=args)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
try:
parts = response['commandResult'].strip().split(' ')
return dict(
uptime=math.floor(float(parts[0]))
)
except KeyError:
pass
def read_hardware_info_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/sys/hardware".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
result = parseStats(response)
return result
def read_clock_info_from_device(self):
"""Parses clock info from the REST API
The clock stat returned from the REST API (at the time of 13.1.0.7)
is similar to the following.
{
"kind": "tm:sys:clock:clockstats",
"selfLink": "https://localhost/mgmt/tm/sys/clock?ver=13.1.0.4",
"entries": {
"https://localhost/mgmt/tm/sys/clock/0": {
"nestedStats": {
"entries": {
"fullDate": {
"description": "2018-06-05T13:38:33Z"
}
}
}
}
}
}
Parsing this data using the ``parseStats`` method, yields a list of
the clock stats in a format resembling that below.
[{'fullDate': '2018-06-05T13:41:05Z'}]
Therefore, this method cherry-picks the first entry from this list
and returns it. There can be no other items in this list.
Returns:
A dict mapping keys to the corresponding clock stats. For
example:
{'fullDate': '2018-06-05T13:41:05Z'}
            A clock stat should always be present, unless a future version
            of the API removes it or moves it to a different endpoint.
Raises:
F5ModuleError: A non-successful HTTP code was returned or a JSON
response was not found.
"""
uri = "https://{0}:{1}/mgmt/tm/sys/clock".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
result = parseStats(response)
if result is None:
return None
return result[0]
def read_version_info_from_device(self):
"""Parses version info from the REST API
The version stat returned from the REST API (at the time of 13.1.0.7)
is similar to the following.
{
"kind": "tm:sys:version:versionstats",
"selfLink": "https://localhost/mgmt/tm/sys/version?ver=13.1.0.4",
"entries": {
"https://localhost/mgmt/tm/sys/version/0": {
"nestedStats": {
"entries": {
"Build": {
"description": "0.0.6"
},
"Date": {
"description": "Tue Mar 13 20:10:42 PDT 2018"
},
"Edition": {
"description": "Point Release 4"
},
"Product": {
"description": "BIG-IP"
},
"Title": {
"description": "Main Package"
},
"Version": {
"description": "13.1.0.4"
}
}
}
}
}
}
Parsing this data using the ``parseStats`` method, yields a list of
the clock stats in a format resembling that below.
[{'Build': '0.0.6', 'Date': 'Tue Mar 13 20:10:42 PDT 2018',
'Edition': 'Point Release 4', 'Product': 'BIG-IP', 'Title': 'Main Package',
'Version': '13.1.0.4'}]
Therefore, this method cherry-picks the first entry from this list
and returns it. There can be no other items in this list.
Returns:
A dict mapping keys to the corresponding clock stats. For
example:
{'Build': '0.0.6', 'Date': 'Tue Mar 13 20:10:42 PDT 2018',
'Edition': 'Point Release 4', 'Product': 'BIG-IP', 'Title': 'Main Package',
'Version': '13.1.0.4'}
            A version stat should always be present, unless a future version
            of the API removes it or moves it to a different endpoint.
Raises:
F5ModuleError: A non-successful HTTP code was returned or a JSON
response was not found.
"""
uri = "https://{0}:{1}/mgmt/tm/sys/version".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
result = parseStats(response)
if result is None:
return None
return result[0]
class VlansParameters(BaseParameters):
api_map = {
'autoLasthop': 'auto_lasthop',
'cmpHash': 'cmp_hash_algorithm',
'failsafeAction': 'failsafe_action',
'failsafe': 'failsafe_enabled',
'failsafeTimeout': 'failsafe_timeout',
'ifIndex': 'if_index',
'learning': 'learning_mode',
'interfacesReference': 'interfaces',
'sourceChecking': 'source_check_enabled',
'fullPath': 'full_path'
}
returnables = [
'full_path',
'name',
'auto_lasthop',
'cmp_hash_algorithm',
'description',
'failsafe_action',
'failsafe_enabled',
'failsafe_timeout',
'if_index',
'learning_mode',
'interfaces',
'mtu',
'sflow_poll_interval',
'sflow_poll_interval_global',
'sflow_sampling_rate',
'sflow_sampling_rate_global',
'source_check_enabled',
'true_mac_address',
'tag',
]
@property
def interfaces(self):
if self._values['interfaces'] is None:
return None
if 'items' not in self._values['interfaces']:
return None
result = []
for item in self._values['interfaces']['items']:
tmp = dict(
name=item['name'],
full_path=item['fullPath']
)
if 'tagged' in item:
tmp['tagged'] = 'yes'
else:
tmp['tagged'] = 'no'
result.append(tmp)
return result
@property
def sflow_poll_interval(self):
return int(self._values['sflow']['pollInterval'])
@property
def sflow_poll_interval_global(self):
return flatten_boolean(self._values['sflow']['pollIntervalGlobal'])
@property
def sflow_sampling_rate(self):
return int(self._values['sflow']['samplingRate'])
@property
def sflow_sampling_rate_global(self):
return flatten_boolean(self._values['sflow']['samplingRateGlobal'])
@property
    def source_check_enabled(self):
        # 'sourceChecking' is mapped to 'source_check_enabled' in api_map and
        # listed in returnables, so the property name must match to be used.
        return flatten_boolean(self._values['source_check_enabled'])
@property
def true_mac_address(self):
if self._values['stats']['macTrue'] in [None, 'none']:
return None
return self._values['stats']['macTrue']
@property
def tag(self):
return self._values['stats']['id']
@property
def failsafe_enabled(self):
return flatten_boolean(self._values['failsafe_enabled'])
class VlansFactManager(BaseManager):
def __init__(self, *args, **kwargs):
self.client = kwargs.get('client', None)
self.module = kwargs.get('module', None)
super(VlansFactManager, self).__init__(**kwargs)
self.want = VlansParameters(params=self.module.params)
def exec_module(self):
facts = self._exec_module()
result = dict(vlans=facts)
return result
def _exec_module(self):
results = []
facts = self.read_facts()
for item in facts:
attrs = item.to_return()
results.append(attrs)
results = sorted(results, key=lambda k: k['full_path'])
return results
def read_facts(self):
results = []
collection = self.read_collection_from_device()
for resource in collection:
resource.update(self.read_stats(resource['fullPath']))
params = VlansParameters(params=resource)
results.append(params)
return results
def read_stats(self, resource):
uri = "https://{0}:{1}/mgmt/tm/net/vlan/{2}/stats".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(name=resource)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
result = parseStats(response)
return result
def read_collection_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/net/vlan/?expandSubcollections=true".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
if 'items' not in response:
return []
result = response['items']
return result
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.kwargs = kwargs
self.want = Parameters(params=self.module.params)
self.managers = {
'applications': dict(
manager=ApplicationsFactManager,
client=F5RestClient,
),
'managed-devices': dict(
manager=ManagedDevicesFactManager,
client=F5RestClient,
),
'purchased-pool-licenses': dict(
manager=PurchasedPoolLicensesFactManager,
client=F5RestClient,
),
'regkey-pools': dict(
manager=RegkeyPoolsFactManager,
client=F5RestClient,
),
'system-info': dict(
manager=SystemInfoFactManager,
client=F5RestClient,
),
'vlans': dict(
manager=VlansFactManager,
client=F5RestClient,
),
}
def exec_module(self):
self.handle_all_keyword()
res = self.check_valid_gather_subset(self.want.gather_subset)
if res:
invalid = ','.join(res)
raise F5ModuleError(
"The specified 'gather_subset' options are invalid: {0}".format(invalid)
)
result = self.filter_excluded_facts()
managers = []
for name in result:
manager = self.get_manager(name)
if manager:
managers.append(manager)
if not managers:
result = dict(
changed=False
)
return result
result = self.execute_managers(managers)
if result:
result['changed'] = True
else:
result['changed'] = False
return result
def filter_excluded_facts(self):
# Remove the excluded entries from the list of possible facts
exclude = [x[1:] for x in self.want.gather_subset if x[0] == '!']
include = [x for x in self.want.gather_subset if x[0] != '!']
result = [x for x in include if x not in exclude]
return result
def handle_all_keyword(self):
if 'all' not in self.want.gather_subset:
return
managers = list(self.managers.keys()) + self.want.gather_subset
managers.remove('all')
self.want.update({'gather_subset': managers})
def check_valid_gather_subset(self, includes):
"""Check that the specified subset is valid
The ``gather_subset`` parameter is specified as a "raw" field which means that
        any Python type could technically be provided.

        :param includes: the list of subset names supplied by the user.
        :return: a list of the invalid subset names; empty when all are valid.
"""
keys = self.managers.keys()
result = []
for x in includes:
if x not in keys:
if x[0] == '!':
if x[1:] not in keys:
result.append(x)
else:
result.append(x)
return result
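        # Illustrative example (hypothetical input): with the managers defined
        # above, check_valid_gather_subset(['vlans', '!system-info', 'bogus'])
        # returns ['bogus']; negations of known keys count as valid choices.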
def execute_managers(self, managers):
results = dict()
for manager in managers:
result = manager.exec_module()
results.update(result)
return results
def get_manager(self, which):
result = {}
info = self.managers.get(which, None)
if not info:
return result
kwargs = dict()
kwargs.update(self.kwargs)
manager = info.get('manager', None)
client = info.get('client', None)
kwargs['client'] = client(**self.module.params)
result = manager(**kwargs)
return result
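    # Illustrative note: get_manager('vlans') returns a VlansFactManager bound
    # to a fresh F5RestClient, while an unknown subset name yields an empty
    # dict, which exec_module() treats as falsey and silently skips.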
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = False
argument_spec = dict(
gather_subset=dict(
type='list',
required=True,
choices=[
# Meta choices
'all',
# Non-meta choices
'applications',
'managed-devices',
'purchased-pool-licenses',
'regkey-pools',
'system-info',
'vlans',
# Negations of meta choices
'!all',
# Negations of non-meta-choices
'!applications',
'!managed-devices',
'!purchased-pool-licenses',
'!regkey-pools',
'!system-info',
'!vlans',
]
),
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
igorcompuff/ns-3.26
|
refs/heads/master
|
src/aodv/test/examples-to-run.py
|
199
|
#! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("aodv", "True", "True"),
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
# (example_name, do_run).
#
# See test.py for more information.
python_examples = []
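# A hypothetical entry (illustrative script name only) would look like:
# python_examples = [("sample-aodv.py", "True")]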
|
rampage644/impala-cut
|
refs/heads/executor
|
shell/ext-py/prettytable-0.7.1/setup.py
|
80
|
#!/usr/bin/env python
from setuptools import setup
from prettytable import __version__ as version
setup(
name='prettytable',
version=version,
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: BSD License',
'Topic :: Text Processing'
],
license="BSD (3 clause)",
description='A simple Python library for easily displaying tabular data in a visually appealing ASCII table format',
author='Luke Maurits',
author_email='luke@maurits.id.au',
url='http://code.google.com/p/prettytable',
py_modules=['prettytable'],
test_suite = "prettytable_test"
)
|
un33k/robotframework
|
refs/heads/master
|
utest/utils/test_robotenv.py
|
21
|
import unittest
import os
from robot.utils.asserts import assert_equals, assert_not_none, assert_none, assert_true
from robot.utils import get_env_var, set_env_var, del_env_var, get_env_vars
TEST_VAR = 'TeST_EnV_vAR'
TEST_VAL = 'original value'
NON_ASCII_VAR = u'\xe4iti'
NON_ASCII_VAL = u'is\xe4'
class TestRobotEnv(unittest.TestCase):
def setUp(self):
os.environ[TEST_VAR] = TEST_VAL
def tearDown(self):
if TEST_VAR in os.environ:
del os.environ[TEST_VAR]
def test_get_env_var(self):
assert_not_none(get_env_var('PATH'))
assert_equals(get_env_var(TEST_VAR), TEST_VAL)
assert_none(get_env_var('NoNeXiStInG'))
assert_equals(get_env_var('NoNeXiStInG', 'default'), 'default')
def test_set_env_var(self):
set_env_var(TEST_VAR, 'new value')
assert_equals(os.getenv(TEST_VAR), 'new value')
def test_del_env_var(self):
old = del_env_var(TEST_VAR)
assert_none(os.getenv(TEST_VAR))
assert_equals(old, TEST_VAL)
assert_none(del_env_var(TEST_VAR))
def test_get_set_del_non_ascii_vars(self):
set_env_var(NON_ASCII_VAR, NON_ASCII_VAL)
assert_equals(get_env_var(NON_ASCII_VAR), NON_ASCII_VAL)
assert_equals(del_env_var(NON_ASCII_VAR), NON_ASCII_VAL)
assert_none(get_env_var(NON_ASCII_VAR))
def test_get_env_vars(self):
set_env_var(NON_ASCII_VAR, NON_ASCII_VAL)
vars = get_env_vars()
assert_true('PATH' in vars)
assert_equals(vars[self._upper_on_windows(TEST_VAR)], TEST_VAL)
assert_equals(vars[self._upper_on_windows(NON_ASCII_VAR)], NON_ASCII_VAL)
for k, v in vars.items():
assert_true(isinstance(k, unicode) and isinstance(v, unicode))
def _upper_on_windows(self, name):
return name if os.sep == '/' else name.upper()
if __name__ == '__main__':
unittest.main()
|
etrepum/pgproxy
|
refs/heads/master
|
pgproxy/twistd.py
|
2
|
from twisted.scripts.twistd import ServerOptions, runApp
from twisted.application import app
class Options(ServerOptions):
optParameters = [
('listen-port', '', 5433, 'The port to listen on.', int),
('server-host', '', 'localhost', 'The host of the postgres server.'),
('server-port', '', 5432, 'The port of the postgres server.', int),
]
def run():
app.run(runApp, Options)
if __name__ == '__main__':
run()
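# Usage sketch (hypothetical invocation): running this module as
# `python -m pgproxy.twistd --listen-port 5433 --server-host localhost`
# parses the three options declared above before handing control to twistd's
# runApp; the remaining standard twistd ServerOptions flags stay available.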
|
mayankcu/Django-social
|
refs/heads/master
|
venv/Lib/site-packages/django/contrib/sessions/tests.py
|
73
|
from __future__ import with_statement
from datetime import datetime, timedelta
import shutil
import string
import tempfile
import warnings
from django.conf import settings
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import SessionStore as CacheDBSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import SessionStore as CookieSession
from django.contrib.sessions.models import Session
from django.contrib.sessions.middleware import SessionMiddleware
from django.core.cache.backends.base import CacheKeyWarning
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.http import HttpResponse
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings, get_warnings_state, restore_warnings_state
from django.utils import timezone
from django.utils import unittest
class SessionTestsMixin(object):
# This does not inherit from TestCase to avoid any tests being run with this
# class, which wouldn't work, and to allow different TestCase subclasses to
# be used.
backend = None # subclasses must specify
def setUp(self):
self.session = self.backend()
def tearDown(self):
# NB: be careful to delete any sessions created; stale sessions fill up
# the /tmp (with some backends) and eventually overwhelm it after lots
# of runs (think buildbots)
self.session.delete()
def test_new_session(self):
self.assertFalse(self.session.modified)
self.assertFalse(self.session.accessed)
def test_get_empty(self):
self.assertEqual(self.session.get('cat'), None)
def test_store(self):
self.session['cat'] = "dog"
self.assertTrue(self.session.modified)
self.assertEqual(self.session.pop('cat'), 'dog')
def test_pop(self):
self.session['some key'] = 'exists'
# Need to reset these to pretend we haven't accessed it:
        self.session.accessed = False
        self.session.modified = False
self.assertEqual(self.session.pop('some key'), 'exists')
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
self.assertEqual(self.session.get('some key'), None)
def test_pop_default(self):
self.assertEqual(self.session.pop('some key', 'does not exist'),
'does not exist')
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_setdefault(self):
self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
def test_update(self):
self.session.update({'update key': 1})
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
self.assertEqual(self.session.get('update key', None), 1)
def test_has_key(self):
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertTrue('some key' in self.session)
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
def test_values(self):
self.assertEqual(self.session.values(), [])
self.assertTrue(self.session.accessed)
self.session['some key'] = 1
self.assertEqual(self.session.values(), [1])
def test_iterkeys(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
i = self.session.iterkeys()
self.assertTrue(hasattr(i, '__iter__'))
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
self.assertEqual(list(i), ['x'])
def test_itervalues(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
i = self.session.itervalues()
self.assertTrue(hasattr(i, '__iter__'))
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
self.assertEqual(list(i), [1])
def test_iteritems(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
i = self.session.iteritems()
self.assertTrue(hasattr(i, '__iter__'))
self.assertTrue(self.session.accessed)
self.assertFalse(self.session.modified)
self.assertEqual(list(i), [('x', 1)])
def test_clear(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(self.session.items(), [('x', 1)])
self.session.clear()
self.assertEqual(self.session.items(), [])
self.assertTrue(self.session.accessed)
self.assertTrue(self.session.modified)
def test_save(self):
self.session.save()
self.assertTrue(self.session.exists(self.session.session_key))
def test_delete(self):
self.session.save()
self.session.delete(self.session.session_key)
self.assertFalse(self.session.exists(self.session.session_key))
def test_flush(self):
self.session['foo'] = 'bar'
self.session.save()
prev_key = self.session.session_key
self.session.flush()
self.assertFalse(self.session.exists(prev_key))
self.assertNotEqual(self.session.session_key, prev_key)
self.assertTrue(self.session.modified)
self.assertTrue(self.session.accessed)
def test_cycle(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_key = self.session.session_key
prev_data = self.session.items()
self.session.cycle_key()
self.assertNotEqual(self.session.session_key, prev_key)
self.assertEqual(self.session.items(), prev_data)
def test_invalid_key(self):
# Submitting an invalid session key (either by guessing, or if the db has
# removed the key) results in a new key being generated.
try:
session = self.backend('1')
try:
session.save()
except AttributeError:
self.fail("The session object did not save properly. Middleware may be saving cache items without namespaces.")
self.assertNotEqual(session.session_key, '1')
self.assertEqual(session.get('cat'), None)
session.delete()
finally:
# Some backends leave a stale cache entry for the invalid
# session key; make sure that entry is manually deleted
session.delete('1')
def test_session_key_is_read_only(self):
def set_session_key(session):
session.session_key = session._get_new_session_key()
self.assertRaises(AttributeError, set_session_key, self.session)
# Custom session expiry
def test_default_expiry(self):
# A normal session has a max age equal to settings
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
# So does a custom session with an idle expiration time of 0 (but it'll
# expire at browser close)
self.session.set_expiry(0)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_custom_expiry_seconds(self):
# Using seconds
self.session.set_expiry(10)
delta = self.session.get_expiry_date() - timezone.now()
self.assertTrue(delta.seconds in (9, 10))
age = self.session.get_expiry_age()
self.assertTrue(age in (9, 10))
def test_custom_expiry_timedelta(self):
# Using timedelta
self.session.set_expiry(timedelta(seconds=10))
delta = self.session.get_expiry_date() - timezone.now()
self.assertTrue(delta.seconds in (9, 10))
age = self.session.get_expiry_age()
self.assertTrue(age in (9, 10))
def test_custom_expiry_datetime(self):
# Using fixed datetime
self.session.set_expiry(timezone.now() + timedelta(seconds=10))
delta = self.session.get_expiry_date() - timezone.now()
self.assertTrue(delta.seconds in (9, 10))
age = self.session.get_expiry_age()
self.assertTrue(age in (9, 10))
def test_custom_expiry_reset(self):
self.session.set_expiry(None)
self.session.set_expiry(10)
self.session.set_expiry(None)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_get_expire_at_browser_close(self):
# Tests get_expire_at_browser_close with different settings and different
# set_expiry calls
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
self.session.set_expiry(10)
self.assertFalse(self.session.get_expire_at_browser_close())
self.session.set_expiry(0)
self.assertTrue(self.session.get_expire_at_browser_close())
self.session.set_expiry(None)
self.assertFalse(self.session.get_expire_at_browser_close())
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
self.session.set_expiry(10)
self.assertFalse(self.session.get_expire_at_browser_close())
self.session.set_expiry(0)
self.assertTrue(self.session.get_expire_at_browser_close())
self.session.set_expiry(None)
self.assertTrue(self.session.get_expire_at_browser_close())
def test_decode(self):
# Ensure we can decode what we encode
data = {'a test key': 'a test value'}
encoded = self.session.encode(data)
self.assertEqual(self.session.decode(encoded), data)
class DatabaseSessionTests(SessionTestsMixin, TestCase):
backend = DatabaseSession
def test_session_get_decoded(self):
"""
        Test that Session.get_decoded retrieves data stored in the normal way.
"""
self.session['x'] = 1
self.session.save()
s = Session.objects.get(session_key=self.session.session_key)
self.assertEqual(s.get_decoded(), {'x': 1})
def test_sessionmanager_save(self):
"""
Test SessionManager.save method
"""
# Create a session
self.session['y'] = 1
self.session.save()
s = Session.objects.get(session_key=self.session.session_key)
# Change it
Session.objects.save(s.session_key, {'y': 2}, s.expire_date)
# Clear cache, so that it will be retrieved from DB
del self.session._session_cache
self.assertEqual(self.session['y'], 2)
DatabaseSessionWithTimeZoneTests = override_settings(USE_TZ=True)(DatabaseSessionTests)
class CacheDBSessionTests(SessionTestsMixin, TestCase):
backend = CacheDBSession
def test_exists_searches_cache_first(self):
self.session.save()
with self.assertNumQueries(0):
self.assertTrue(self.session.exists(self.session.session_key))
def test_load_overlong_key(self):
warnings_state = get_warnings_state()
warnings.filterwarnings('ignore',
category=CacheKeyWarning)
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
restore_warnings_state(warnings_state)
CacheDBSessionWithTimeZoneTests = override_settings(USE_TZ=True)(CacheDBSessionTests)
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class FileSessionTests(SessionTestsMixin, unittest.TestCase):
backend = FileSession
def setUp(self):
super(FileSessionTests, self).setUp()
# Do file session tests in an isolated directory, and kill it after we're done.
self.original_session_file_path = settings.SESSION_FILE_PATH
self.temp_session_store = settings.SESSION_FILE_PATH = tempfile.mkdtemp()
def tearDown(self):
settings.SESSION_FILE_PATH = self.original_session_file_path
shutil.rmtree(self.temp_session_store)
super(FileSessionTests, self).tearDown()
@override_settings(
SESSION_FILE_PATH="/if/this/directory/exists/you/have/a/weird/computer")
def test_configuration_check(self):
# Make sure the file backend checks for a good storage dir
self.assertRaises(ImproperlyConfigured, self.backend)
def test_invalid_key_backslash(self):
# Ensure we don't allow directory-traversal
self.assertRaises(SuspiciousOperation,
self.backend("a\\b\\c").load)
def test_invalid_key_forwardslash(self):
# Ensure we don't allow directory-traversal
self.assertRaises(SuspiciousOperation,
self.backend("a/b/c").load)
class CacheSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CacheSession
def test_load_overlong_key(self):
warnings_state = get_warnings_state()
warnings.filterwarnings('ignore',
category=CacheKeyWarning)
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
restore_warnings_state(warnings_state)
class SessionMiddlewareTests(unittest.TestCase):
@override_settings(SESSION_COOKIE_SECURE=True)
def test_secure_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
        # Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertTrue(
response.cookies[settings.SESSION_COOKIE_NAME]['secure'])
@override_settings(SESSION_COOKIE_HTTPONLY=True)
def test_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
        # Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertTrue(
response.cookies[settings.SESSION_COOKIE_NAME]['httponly'])
self.assertIn('httponly',
str(response.cookies[settings.SESSION_COOKIE_NAME]))
@override_settings(SESSION_COOKIE_HTTPONLY=False)
def test_no_httponly_session_cookie(self):
request = RequestFactory().get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
        # Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
# If it isn't in the cookie, that's fine (Python 2.5)
        if 'httponly' in response.cookies[settings.SESSION_COOKIE_NAME]:
self.assertFalse(
response.cookies[settings.SESSION_COOKIE_NAME]['httponly'])
self.assertNotIn('httponly',
str(response.cookies[settings.SESSION_COOKIE_NAME]))
class CookieSessionTests(SessionTestsMixin, TestCase):
backend = CookieSession
def test_save(self):
"""
This test tested exists() in the other session backends, but that
doesn't make sense for us.
"""
pass
def test_cycle(self):
"""
This test tested cycle_key() which would create a new session
key for the same session data. But we can't invalidate previously
signed cookies (other than letting them expire naturally) so
testing for this behavior is meaningless.
"""
pass
|
uberamd/NGECore2
|
refs/heads/master
|
scripts/object/mobile/meatlump_f_officer_01_nonvendor_vendor.py
|
85615
|
import sys
def setup(core, object):
return
|
uberamd/NGECore2
|
refs/heads/master
|
scripts/object/tangible/loot/creature_loot/collections/space/engine_mark_02_koensayr.py
|
85615
|
import sys
def setup(core, object):
return
|
palaniyappanBala/thug
|
refs/heads/master
|
src/ActiveX/modules/SilverLight.py
|
8
|
import logging
log = logging.getLogger("Thug")
def isVersionSupported(self, version):
shockwave = log.ThugVulnModules.shockwave_flash.split('.')
sversion = version.split('?')
if len(sversion) == 1:
sversion = version.split('.')
if len(sversion) != 4:
return False
for i in range(0, 4):
if int(sversion[i]) > int(shockwave[i]):
return False
return True
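# Worked example (illustrative version strings): if
# log.ThugVulnModules.shockwave_flash is '10.0.64.0', then
# isVersionSupported(self, '9.0.0.0') -> True (no component exceeds it),
# isVersionSupported(self, '11.0.0.0') -> False (11 > 10), and
# isVersionSupported(self, '9.0') -> False (four dotted components required).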
|
thesuperzapper/tensorflow
|
refs/heads/master
|
tensorflow/contrib/learn/python/learn/utils/gc.py
|
66
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""System for specifying garbage collection (GC) of path based data.
This framework allows for GC of data specified by path names, for example files
on disk. gc.Path objects each represent a single item stored at a path and may
be a base directory,
/tmp/exports/0/...
/tmp/exports/1/...
...
or a fully qualified file,
/tmp/train-1.ckpt
/tmp/train-2.ckpt
...
A gc filter function takes and returns a list of gc.Path items. Filter
functions are responsible for selecting Path items for preservation or deletion.
Note that functions should always return a sorted list.
For example,
base_dir = "/tmp"
# create the directories
for e in xrange(10):
os.mkdir("%s/%d" % (base_dir, e), 0o755)
# create a simple parser that pulls the export_version from the directory
def parser(path):
match = re.match("^" + base_dir + "/(\\d+)$", path.path)
if not match:
return None
return path._replace(export_version=int(match.group(1)))
  all_paths = gc.get_paths("/tmp", parser)  # contains all ten Paths
  every_fifth = gc.mod_export_version(5)
  print every_fifth(all_paths)    # shows ["/tmp/0", "/tmp/5"]
  largest_three = gc.largest_export_versions(3)
  print largest_three(all_paths)  # shows ["/tmp/7", "/tmp/8", "/tmp/9"]
both = gc.union(every_fifth, largest_three)
print both(all_paths) # shows ["/tmp/0", "/tmp/5",
# "/tmp/7", "/tmp/8", "/tmp/9"]
# delete everything not in 'both'
to_delete = gc.negation(both)
for p in to_delete(all_paths):
gfile.DeleteRecursively(p.path) # deletes: "/tmp/1", "/tmp/2",
# "/tmp/3", "/tmp/4", "/tmp/6",
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import heapq
import math
import os
from tensorflow.python.platform import gfile
Path = collections.namedtuple('Path', 'path export_version')
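# For example, Path(path='/tmp/exports/100', export_version=100). The
# export_version field is populated by the user-supplied parser; filters such
# as one_of_every_n_export_versions skip paths where it is left as None.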
def largest_export_versions(n):
"""Creates a filter that keeps the largest n export versions.
Args:
n: number of versions to keep.
Returns:
A filter function that keeps the n largest paths.
"""
def keep(paths):
heap = []
for idx, path in enumerate(paths):
if path.export_version is not None:
heapq.heappush(heap, (path.export_version, idx))
keepers = [paths[i] for _, i in heapq.nlargest(n, heap)]
return sorted(keepers)
return keep
def one_of_every_n_export_versions(n):
"""Creates a filter that keeps one of every n export versions.
Args:
n: interval size.
Returns:
A filter function that keeps exactly one path from each interval
[0, n], (n, 2n], (2n, 3n], etc... If more than one path exists in an
    interval, the largest is kept.
"""
def keep(paths):
"""A filter function that keeps exactly one out of every n paths."""
keeper_map = {} # map from interval to largest path seen in that interval
for p in paths:
if p.export_version is None:
# Skip missing export_versions.
continue
# Find the interval (with a special case to map export_version = 0 to
      # interval 0).
interval = math.floor(
(p.export_version - 1) / n) if p.export_version else 0
existing = keeper_map.get(interval, None)
if (not existing) or (existing.export_version < p.export_version):
keeper_map[interval] = p
return sorted(keeper_map.values())
return keep
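# Worked example (illustrative): with n=2 and export versions [0, 1, 2, 3, 4],
# interval [0, 2] holds versions {0, 1, 2} and (2, 4] holds {3, 4}; keeping the
# largest of each interval, the filter returns the paths for versions 2 and 4.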
def mod_export_version(n):
"""Creates a filter that keeps every export that is a multiple of n.
Args:
n: step size.
Returns:
A filter function that keeps paths where export_version % n == 0.
"""
def keep(paths):
keepers = []
for p in paths:
if p.export_version % n == 0:
keepers.append(p)
return sorted(keepers)
return keep
def union(lf, rf):
"""Creates a filter that keeps the union of two filters.
Args:
lf: first filter
rf: second filter
Returns:
    A filter function that keeps the union of the paths kept by lf and rf.
"""
def keep(paths):
l = set(lf(paths))
r = set(rf(paths))
return sorted(list(l|r))
return keep
def negation(f):
"""Negate a filter.
Args:
f: filter function to invert
Returns:
A filter function that returns the negation of f.
"""
def keep(paths):
l = set(paths)
r = set(f(paths))
return sorted(list(l-r))
return keep
def get_paths(base_dir, parser):
"""Gets a list of Paths in a given directory.
Args:
base_dir: directory.
parser: a function which gets the raw Path and can augment it with
information such as the export_version, or ignore the path by returning
None. An example parser may extract the export version from a path
      such as "/tmp/exports/100" and another may extract from a full file
name such as "/tmp/checkpoint-99.out".
Returns:
A list of Paths contained in the base directory with the parsing function
applied.
By default the following fields are populated,
- Path.path
The parsing function is responsible for populating,
- Path.export_version
"""
raw_paths = gfile.ListDirectory(base_dir)
paths = []
for r in raw_paths:
p = parser(Path(os.path.join(base_dir, r), None))
if p:
paths.append(p)
return sorted(paths)
|
bikong2/django
|
refs/heads/master
|
tests/migrations/test_migrations_no_changes/0001_initial.py
|
2995
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
operations = [
migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=255)),
("slug", models.SlugField(null=True)),
("age", models.IntegerField(default=0)),
("silly_field", models.BooleanField(default=False)),
],
),
migrations.CreateModel(
"Tribble",
[
("id", models.AutoField(primary_key=True)),
("fluffy", models.BooleanField(default=True)),
],
)
]
|
gluster-rhsc/nagios-plugin
|
refs/heads/master
|
nagios/plugins/check_disk_and_inode.py
|
1
|
#!/usr/bin/python
# sadf.py -- nagios plugin uses sadf output for perf data
# Copyright (C) 2014 Red Hat Inc
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
#
import re
import sys
import commands
from optparse import OptionParser
def getUsageAndFree(command, lvm):
status = commands.getstatusoutput(command)[1].split()
path = status[-1]
usagePer = status[-2]
availSpace = status[-3]
usedSpace = status[-4]
device = status[-6].split("-")[-1]
dmatch = re.compile('[0-9]+').match(usagePer)
if (dmatch):
        # int() is enough here; eval() on command output is needlessly unsafe
        usage = int(dmatch.group(0))
return (float(usage), float(100 - usage), usedSpace,
availSpace, device, path)
else:
return None, None, None, None, None, None
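# Parsing sketch (illustrative df output): for an output line ending in
#   '/dev/mapper/vg-lv  10G  4.2G  5.8G  42% /data'
# the negative indexes above yield path='/data', usagePer='42%',
# availSpace='5.8G', usedSpace='4.2G' and device='lv'.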
def getDisk(path, readable=False, lvm=False):
if readable:
return getUsageAndFree("df -m %s" % path, lvm)
else:
return getUsageAndFree("df -kh %s" % path, lvm)
def getInode(path, readable=False, lvm=False):
return getUsageAndFree("df -i %s" % path, lvm)
def appendStatus(lst, level, typ, device, mpath, usage):
if 2 == level:
level = "crit"
elif 1 == level:
level = "warn"
else:
level = "ok"
    lst.append("%s:%s:%s;%s;%s" % (level, typ, device, mpath, usage))
def getMounts(searchQuery=None, excludeList=[]):
mountPaths = []
f = open("/etc/mtab")
for i in f.readlines():
if searchQuery and i.startswith(searchQuery):
if not excludeList:
mountPaths.append(i.split()[0])
else:
device = i.split()
                if device[0] not in excludeList and \
                        device[1] not in excludeList:
mountPaths.append(device[0])
f.close()
return mountPaths
def parse_input():
parser = OptionParser()
parser.add_option('-w', '--warning', action='store', type='int',
dest='warn', help='Warning count in %', default=80)
parser.add_option('-c', '--critical', action='store', type='int',
dest='crit', help='Critical count in %', default=90)
parser.add_option('-u', '--usage', action="store_true", dest='usage',
help='Output disk and inode usage', default=False)
parser.add_option('-l', '--lvm', action="store_true",
dest='lvm', help='List lvm mounts', default=False)
parser.add_option('-a', '--all', action="store_true",
dest='all', help='List all mounts', default=False)
parser.add_option('-n', '--ignore', action="store_true",
dest='ignore', help='Ignore errors', default=False)
parser.add_option('-i', '--include', action='append', type='string',
dest='mountPath', help='Mount path', default=[])
parser.add_option('-x', '--exclude', action="append", type='string',
dest='exclude', help='Exclude disk')
return parser.parse_args()
if __name__ == '__main__':
disk = []
warnList = []
critList = []
diskList = []
mounts = []
level = -1
(options, args) = parse_input()
if len(args) > 2:
if args[0].isdigit() and args[1].isdigit():
warn = int(args[0])
crit = int(args[1])
options.mountPath = args[2:]
else:
warn = 80
crit = 90
options.mountPath = args
else:
crit = options.crit
warn = options.warn
if options.lvm:
searchQuery = "/dev/mapper"
elif options.all:
searchQuery = None
else:
searchQuery = "/"
if not options.mountPath or options.lvm or options.all:
options.mountPath += getMounts(searchQuery, options.exclude)
#if not options.mountPath:
# parser.print_help()
# sys.exit(1)
for path in options.mountPath:
diskUsage, diskFree, used, avail, dev, mpath = getDisk(path,
options.usage,
options.lvm)
inodeUsage, inodeFree, iused, iavail, idev, ipath = getInode(path,
options.usage,
options.lvm)
if mpath in mounts:
continue
if not used or not iused:
if options.ignore:
continue
else:
sys.exit(3)
mounts.append(mpath)
if options.usage:
total = (float(used) + float(avail)) / 1000
itot = (float(iused) + float(iavail)) / 1000
disk.append("%s=%.1f;%.1f;%.1f;0;%.1f %s=%.1f;%.1f;%.1f;0;%.1f" % (
mpath, float(used)/1000, warn*total/100, crit*total/100, total,
ipath, float(iused)/1000, warn*itot/100, crit*itot/100, itot))
else:
disk.append("%s=%.2f;%s;%s;0;100 %s=%.2f;%s;%s;0;100" % (
mpath, diskUsage, warn, crit, ipath, inodeUsage, warn, crit))
if diskUsage >= crit or inodeUsage >= crit:
if diskUsage >= crit:
critList.append("crit:disk:%s;%s;%s" % (dev, mpath, diskUsage))
else:
critList.append("crit:inode:%s;%s;%s" % (idev, ipath, inodeUsage))
if not level > 1:
level = 2
elif (diskUsage >= warn and diskUsage < crit) or (
inodeUsage >= warn and inodeUsage < crit):
if diskUsage >= warn:
warnList.append("warn:disk:%s;%s;%s" % (dev, mpath, diskUsage))
else:
warnList.append("warn:inode:%s;%s;%s" % (idev, ipath, inodeUsage))
if not level > 0:
level = 1
else:
diskList.append("%s:%s" % (dev, mpath))
msg = " ".join(critList + warnList)
if not msg:
msg += " disks:mounts:(" + ",".join(diskList) + ")"
if 2 == level:
print "CRITICAL : %s | %s" % (msg, " ".join(disk))
sys.exit(2)
elif 1 == level:
print "WARNING : %s | %s" % (msg, " ".join(disk))
sys.exit(1)
else:
print "OK : %s | %s" % (msg, " ".join(disk))
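# Output sketch (values illustrative): a healthy run prints a line such as
#   OK :  disks:mounts:(/dev/sda1:/) | /=42.00;80;90;0;100 /=3.10;80;90;0;100
# and exits 0; breaching warn or crit switches the prefix to WARNING or
# CRITICAL and the exit code to 1 or 2, per Nagios plugin conventions.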
|
balloob/home-assistant
|
refs/heads/dev
|
tests/components/ring/test_init.py
|
14
|
"""The tests for the Ring component."""
from datetime import timedelta
import homeassistant.components.ring as ring
from homeassistant.setup import async_setup_component
from tests.common import load_fixture
ATTRIBUTION = "Data provided by Ring.com"
VALID_CONFIG = {
"ring": {"username": "foo", "password": "bar", "scan_interval": timedelta(10)}
}
async def test_setup(hass, requests_mock):
"""Test the setup."""
await async_setup_component(hass, ring.DOMAIN, {})
requests_mock.post(
"https://oauth.ring.com/oauth/token", text=load_fixture("ring_oauth.json")
)
requests_mock.post(
"https://api.ring.com/clients_api/session",
text=load_fixture("ring_session.json"),
)
requests_mock.get(
"https://api.ring.com/clients_api/ring_devices",
text=load_fixture("ring_devices.json"),
)
requests_mock.get(
"https://api.ring.com/clients_api/chimes/999999/health",
text=load_fixture("ring_chime_health_attrs.json"),
)
requests_mock.get(
"https://api.ring.com/clients_api/doorbots/987652/health",
text=load_fixture("ring_doorboot_health_attrs.json"),
)
assert await ring.async_setup(hass, VALID_CONFIG)
|
Araneidae/cothread
|
refs/heads/master
|
old_tests/timing/timing.py
|
2
|
#!/usr/bin/env python
from __future__ import print_function
import time
import sys
import os
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')))
import cothread
from cothread.catools import *
if sys.argv[1:]:
count = int(sys.argv[1])
else:
count = 0
callbacks = 0
def callback(value):
global callbacks
callbacks += 1
if value.update_count != 1:
print('update_count', value.update_count)
camonitor("ARAVISCAM1:ARR:ArrayData", callback, count=count, all_updates=True)
@cothread.Spawn
def timer():
last = time.time()
last_callbacks = 0
while True:
cothread.Sleep(1)
now = time.time()
print("%d callbacks" % callbacks, callbacks - last_callbacks)
last = now
last_callbacks = callbacks
cothread.WaitForQuit()
|
mysql/mysql-utilities
|
refs/heads/master
|
mysql/utilities/common/tools.py
|
1
|
#
# Copyright (c) 2010, 2016, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""
This module contains methods for working with mysql server tools.
"""
import inspect
import os
import re
import sys
import shlex
import shutil
import socket
import subprocess
import time
try:
import ctypes
except ImportError:
pass
from mysql.utilities import (PYTHON_MIN_VERSION, PYTHON_MAX_VERSION,
CONNECTOR_MIN_VERSION)
from mysql.utilities.exception import UtilError
def _add_basedir(search_paths, path_str):
"""Add a basedir and all known sub directories
This method builds a list of possible paths for a basedir for locating
special MySQL files like mysqld (mysqld.exe), etc.
search_paths[inout] List of paths to append
path_str[in] The basedir path to append
"""
search_paths.append(path_str)
search_paths.append(os.path.join(path_str, "sql")) # for source trees
search_paths.append(os.path.join(path_str, "client")) # for source trees
search_paths.append(os.path.join(path_str, "share"))
search_paths.append(os.path.join(path_str, "scripts"))
search_paths.append(os.path.join(path_str, "bin"))
search_paths.append(os.path.join(path_str, "libexec"))
search_paths.append(os.path.join(path_str, "mysql"))
def get_tool_path(basedir, tool, fix_ext=True, required=True,
defaults_paths=None, search_PATH=False, quote=False):
"""Search for a MySQL tool and return the full path
basedir[in] The initial basedir to search (from mysql server)
tool[in] The name of the tool to find
fix_ext[in] If True (default is True), add .exe if running on
Windows.
    required[in]        If True (default is True), an error will be
                        generated and the utility aborted if the tool is
                        not found.
defaults_paths[in] Default list of paths to search for the tool.
By default an empty list is assumed, i.e. [].
search_PATH[in] Boolean value that indicates if the paths specified by
the PATH environment variable will be used to search
for the tool. By default the PATH will not be searched,
i.e. search_PATH=False.
quote[in] If True, the result path is surrounded with the OS
quotes.
Returns (string) full path to tool
"""
if not defaults_paths:
defaults_paths = []
search_paths = []
if quote:
if os.name == "posix":
quote_char = "'"
else:
quote_char = '"'
else:
quote_char = ''
if basedir:
# Add specified basedir path to search paths
_add_basedir(search_paths, basedir)
if defaults_paths and len(defaults_paths):
# Add specified default paths to search paths
for path in defaults_paths:
search_paths.append(path)
else:
# Add default basedir paths to search paths
_add_basedir(search_paths, "/usr/local/mysql/")
_add_basedir(search_paths, "/usr/sbin/")
_add_basedir(search_paths, "/usr/share/")
# Search in path from the PATH environment variable
if search_PATH:
for path in os.environ['PATH'].split(os.pathsep):
search_paths.append(path)
if os.name == "nt" and fix_ext:
tool = tool + ".exe"
# Search for the tool
for path in search_paths:
norm_path = os.path.normpath(path)
if os.path.isdir(norm_path):
toolpath = os.path.join(norm_path, tool)
if os.path.isfile(toolpath):
return r"%s%s%s" % (quote_char, toolpath, quote_char)
else:
if tool == "mysqld.exe":
toolpath = os.path.join(norm_path, "mysqld-nt.exe")
if os.path.isfile(toolpath):
return r"%s%s%s" % (quote_char, toolpath, quote_char)
if required:
raise UtilError("Cannot find location of %s." % tool)
return None
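# Hedged usage sketch (not part of the original module; the basedir value
# is an illustrative assumption): look for the mysqld binary, also
# searching the PATH, without aborting when it is missing.
if __name__ == "__main__":  # demo only; never runs on import
    demo_tool = get_tool_path("/usr/local/mysql/", "mysqld",
                              required=False, search_PATH=True)
    print("mysqld: %s" % demo_tool)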
def delete_directory(path):
"""Remove a directory (folder) and its contents.
path[in] target directory
"""
if os.path.exists(path):
# It can take up to 10 seconds for Windows to 'release' a directory
# once a process has terminated. We wait...
if os.name == "nt":
stop = 10
i = 1
while i < stop and os.path.exists(path):
shutil.rmtree(path, True)
time.sleep(1)
i += 1
else:
shutil.rmtree(path, True)
def estimate_free_space(path, unit_multiple=2):
"""Estimated free space for the given path.
Calculates free space for the given path, returning the value
on the size given by the unit_multiple.
path[in] the path to calculate the free space for.
unit_multiple[in] the unit size given as a multiple.
Accepts int values > to zero.
Size unit_multiple
bytes 0
Kilobytes 1
Megabytes 2
Gigabytes 3
and so on...
    Returns folder/drive free space in the unit given by unit_multiple
"""
unit_size = 1024 ** unit_multiple
if os.name == 'nt':
free_bytes = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(path),
None, None,
ctypes.pointer(free_bytes))
return free_bytes.value / unit_size
else:
st = os.statvfs(path) # pylint: disable=E1101
return st.f_bavail * st.f_frsize / unit_size
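# Hedged usage sketch (the path is an illustrative assumption): megabytes
# free with the default unit_multiple=2, gigabytes with unit_multiple=3.
if __name__ == "__main__":  # demo only; never runs on import
    print("MB free: %s" % estimate_free_space("/tmp"))
    print("GB free: %s" % estimate_free_space("/tmp", unit_multiple=3))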
def execute_script(run_cmd, filename=None, options=None, verbosity=False):
"""Execute a script.
This method spawns a subprocess to execute a script. If a file is
specified, it will direct output to that file else it will suppress
all output from the script.
run_cmd[in] command/script to execute
filename[in] file path name to file, os.stdout, etc.
Default is None (do not log/write output)
options[in] arguments for script
Default is no arguments ([])
verbosity[in] show result of script
Default is False
Returns int - result from process execution
"""
if options is None:
options = []
if verbosity:
f_out = sys.stdout
else:
if not filename:
filename = os.devnull
f_out = open(filename, 'w')
is_posix = (os.name == "posix")
command = shlex.split(run_cmd, posix=is_posix)
if options:
command.extend([str(opt) for opt in options])
if verbosity:
print("# SCRIPT EXECUTED: {0}".format(" ".join(command)))
try:
proc = subprocess.Popen(command, shell=False,
stdout=f_out, stderr=f_out)
except:
_, err, _ = sys.exc_info()
raise UtilError(str(err))
ret_val = proc.wait()
if not verbosity:
f_out.close()
return ret_val
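# Hedged usage sketch (the command is an illustrative assumption): run a
# command, sending its output to a log file, and inspect the exit status.
if __name__ == "__main__":  # demo only; never runs on import
    status = execute_script("echo hello", filename="/tmp/echo.log")
    print("exit status: %d" % status)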
def ping_host(host, timeout):
"""Execute 'ping' against host to see if it is alive.
host[in] hostname or IP to ping
timeout[in] timeout in seconds to wait
returns bool - True = host is reachable via ping
"""
if sys.platform == "darwin":
run_cmd = "ping -o -t %s %s" % (timeout, host)
elif os.name == "posix":
run_cmd = "ping -w %s %s" % (timeout, host)
else: # must be windows
run_cmd = "ping -n %s %s" % (timeout, host)
ret_val = execute_script(run_cmd)
return (ret_val == 0)
def parse_mysqld_version(vers_str):
""" Parse the MySQL version string.
vers_str[in] MySQL Version from client
    Returns tuple - (major, minor, release), or None if it cannot be parsed
"""
pattern = r"mysqld(?:\.exe)?\s+Ver\s+(\d+\.\d+\.\S+)\s"
match = re.search(pattern, vers_str)
if not match:
return None
version = match.group(1)
try:
# get the version digits. If more than 2, we get first 3 parts
# pylint: disable=W0612
maj_ver, min_ver, dev = version.split(".", 2)
rel = dev.split("-", 1)
return (maj_ver, min_ver, rel[0])
except:
return None
def get_mysqld_version(mysqld_path):
"""Return the version number for a mysqld executable.
mysqld_path[in] location of the mysqld executable
Returns tuple - (major, minor, release), or None if error
"""
out = open("version_check", 'w')
proc = subprocess.Popen("%s --version" % mysqld_path,
stdout=out, stderr=out, shell=True)
proc.wait()
out.close()
out = open("version_check", 'r')
line = None
for line in out.readlines():
if "Ver" in line:
break
out.close()
try:
os.unlink('version_check')
except:
pass
if line is None:
return None
    # strip the path prefix for long, unusual paths that contain version
    # numbers (str.replace is used because str.strip() removes a set of
    # characters, not a prefix, and could eat version digits)
    fixed_str = "{0} {1}".format("mysqld", line.replace(mysqld_path, "").strip())
return parse_mysqld_version(fixed_str)
def show_file_statistics(file_name, wild=False, out_format="GRID"):
"""Show file statistics for file name specified
file_name[in] target file name and path
wild[in] if True, get file statistics for all files with prefix of
file_name. Default is False
out_format[in] output format to print file statistics. Default is GRID.
"""
def _get_file_stats(path, file_name):
"""Return file stats
"""
stats = os.stat(os.path.join(path, file_name))
return ((file_name, stats.st_size, time.ctime(stats.st_ctime),
time.ctime(stats.st_mtime)))
columns = ["File", "Size", "Created", "Last Modified"]
rows = []
path, filename = os.path.split(file_name)
if wild:
for _, _, files in os.walk(path):
for f in files:
if f.startswith(filename):
rows.append(_get_file_stats(path, f))
else:
rows.append(_get_file_stats(path, filename))
    # Local import is needed because of Python compatibility issues
from mysql.utilities.common.format import print_list
print_list(sys.stdout, out_format, columns, rows)
def remote_copy(filepath, user, host, local_path, verbosity=0):
"""Copy a file from a remote machine to the localhost.
    filepath[in]       The full path and file name of the file on the remote
                       machine
    user[in]           Remote login
    host[in]           Hostname or IP of the remote machine
    local_path[in]     The path to where the file is to be copied
    verbosity[in]      Verbosity level. Default is 0.
    Returns bool - True = success, False = failure or exception
"""
if os.name == "posix": # use scp
run_cmd = "scp %s@%s:%s %s" % (user, host, filepath, local_path)
if verbosity > 1:
print("# Command =%s" % run_cmd)
print("# Copying file from %s:%s to %s:" %
(host, filepath, local_path))
proc = subprocess.Popen(run_cmd, shell=True)
proc.wait()
else:
print("Remote copy not supported. Please use UNC paths and omit "
"the --remote-login option to use a local copy operation.")
return True
def check_python_version(min_version=PYTHON_MIN_VERSION,
max_version=PYTHON_MAX_VERSION,
raise_exception_on_fail=False,
name=None, print_on_fail=True,
exit_on_fail=True,
return_error_msg=False):
"""Check the Python version compatibility.
By default this method uses constants to define the minimum and maximum
Python versions required. It's possible to override this by passing new
values on ``min_version`` and ``max_version`` parameters.
It will run a ``sys.exit`` or raise a ``UtilError`` if the version of
    Python detected is not compatible.
min_version[in] Tuple with the minimum Python version
required (inclusive).
max_version[in] Tuple with the maximum Python version
required (exclusive).
raise_exception_on_fail[in] Boolean, it will raise a ``UtilError`` if
True and Python detected is not compatible.
name[in] String for a custom name, if not provided
will get the module name from where this
function was called.
print_on_fail[in] If True, print error else do not print
error on failure.
exit_on_fail[in] If True, issue exit() else do not exit()
on failure.
return_error_msg[in] If True, and is not compatible
returns (result, error_msg) tuple.
"""
# Only use the fields: major, minor and micro
sys_version = sys.version_info[:3]
# Test min version compatibility
is_compat = min_version <= sys_version
# Test max version compatibility if it's defined
if is_compat and max_version:
is_compat = sys_version < max_version
if not is_compat:
if not name:
# Get the utility name by finding the module
# name from where this function was called
frm = inspect.stack()[1]
mod = inspect.getmodule(frm[0])
mod_name = os.path.splitext(
os.path.basename(mod.__file__))[0]
name = '%s utility' % mod_name
# Build the error message
if max_version:
max_version_error_msg = 'or higher and lower than %s' % \
'.'.join([str(el) for el in max_version])
else:
max_version_error_msg = 'or higher'
error_msg = (
'The %(name)s requires Python version %(min_version)s '
'%(max_version_error_msg)s. The version of Python detected was '
'%(sys_version)s. You may need to install or redirect the '
'execution of this utility to an environment that includes a '
'compatible Python version.'
) % {
'name': name,
'sys_version': '.'.join([str(el) for el in sys_version]),
'min_version': '.'.join([str(el) for el in min_version]),
'max_version_error_msg': max_version_error_msg
}
if raise_exception_on_fail:
raise UtilError(error_msg)
if print_on_fail:
print('ERROR: %s' % error_msg)
if exit_on_fail:
sys.exit(1)
if return_error_msg:
return is_compat, error_msg
return is_compat
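# Hedged usage sketch: probe compatibility without printing or exiting.
# Note that a bare bool comes back when compatible; the (is_compat,
# error_msg) tuple is only returned on failure, even with
# return_error_msg=True.
if __name__ == "__main__":  # demo only; never runs on import
    result = check_python_version(name="demo tool", print_on_fail=False,
                                  exit_on_fail=False, return_error_msg=True)
    compatible = result if isinstance(result, bool) else result[0]
    print("compatible: %s" % compatible)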
def check_port_in_use(host, port):
"""Check to see if port is in use.
host[in] Hostname or IP to check
port[in] Port number to check
    Returns bool - True = port is free, False = port is in use
"""
try:
sock = socket.create_connection((host, port))
except socket.error:
return True
sock.close()
return False
def requires_encoding(orig_str):
r"""Check to see if a string requires encoding
This method will check to see if a string requires encoding to be used
as a MySQL file name (r"[\w$]*").
orig_str[in] original string
Returns bool - True = requires encoding, False = does not require encoding
"""
ok_chars = re.compile(r"[\w$]*")
parts = ok_chars.findall(orig_str)
return len(parts) > 2 and parts[1].strip() == ''
def encode(orig_str):
r"""Encode a string containing non-MySQL observed characters
This method will take a string containing characters other than those
    recognized by MySQL (r"[\w$]*") and convert them to embedded ascii values.
    For example, "this.has.periods" becomes "this@002ehas@002eperiods"
orig_str[in] original string
Returns string - encoded string or original string
"""
# First, find the parts that match valid characters
ok_chars = re.compile(r"[\w$]*")
parts = ok_chars.findall(orig_str)
# Now find each part that does not match the list of valid characters
# Save the good parts
i = 0
encode_parts = []
good_parts = []
for part in parts:
if not len(part):
continue
good_parts.append(part)
if i == 0:
i = len(part)
else:
j = orig_str[i:].find(part)
encode_parts.append(orig_str[i:i + j])
i += len(part) + j
# Next, convert the non-valid parts to the form @NNNN (hex)
encoded_parts = []
for part in encode_parts:
new_part = "".join(["@%04x" % ord(c) for c in part])
encoded_parts.append(new_part)
# Take the good parts and the encoded parts and reform the string
i = 0
new_parts = []
for part in good_parts[:len(good_parts) - 1]:
new_parts.append(part)
new_parts.append(encoded_parts[i])
i += 1
new_parts.append(good_parts[len(good_parts) - 1])
# Return the new string
return "".join(new_parts)
def requires_decoding(orig_str):
"""Check to if a string required decoding
This method will check to see if a string requires decoding to be used
as a filename (has @NNNN entries)
orig_str[in] original string
Returns bool - True = requires decoding, False = does not require decoding
"""
return '@' in orig_str
def decode(orig_str):
r"""Decode a string containing @NNNN entries
    This method will take a string containing @NNNN entries (as produced
    by encode()) and convert them back to character values.
    For example, "this@002ehas@002eperiods" becomes "this.has.periods".
orig_str[in] original string
Returns string - decoded string or original string
"""
parts = orig_str.split('@')
if len(parts) == 1:
return orig_str
new_parts = [parts[0]]
for part in parts[1:]:
# take first four positions and convert to ascii
new_parts.append(chr(int(part[0:4], 16)))
new_parts.append(part[4:])
return "".join(new_parts)
def check_connector_python(print_error=True,
min_version=CONNECTOR_MIN_VERSION):
"""Check to see if Connector Python is installed and accessible and
meets minimum required version.
By default this method uses constants to define the minimum
C/Python version required. It's possible to override this by passing a new
value to ``min_version`` parameter.
print_error[in] If True, print error else do not print
error on failure.
min_version[in] Tuple with the minimum C/Python version
required (inclusive).
"""
is_compatible = True
try:
import mysql.connector # pylint: disable=W0612
except ImportError:
if print_error:
print("ERROR: The MySQL Connector/Python module was not found. "
"MySQL Utilities requires the connector to be installed. "
"Please check your paths or download and install the "
"Connector/Python from http://dev.mysql.com.")
return False
else:
try:
sys_version = mysql.connector.version.VERSION[:3]
except AttributeError:
is_compatible = False
if is_compatible and sys_version >= min_version:
return True
else:
if print_error:
print("ERROR: The MYSQL Connector/Python module was found "
"but it is either not properly installed or it is an "
"old version. MySQL Utilities requires Connector/Python "
"version > '{0}'. Download and install Connector/Python "
"from http://dev.mysql.com.".format(min_version))
return False
def print_elapsed_time(start_time):
"""Print the elapsed time to stdout (screen)
start_time[in] The starting time of the test
"""
stop_time = time.time()
display_time = stop_time - start_time
print("Time: {0:.2f} sec\n".format(display_time))
def join_and_build_str(list_of_strings, sep=', ', last_sep='and'):
"""Buils and returns a string from a list of elems.
list_of_strings[in] the list of strings that will be joined into a
single string.
sep[in] the separator that will be used to group all strings
except the last one.
    last_sep[in]        the separator that is used before the last
                        element.
"""
if list_of_strings:
if len(list_of_strings) > 1:
res_str = "{0} {1} {2}".format(
sep.join(list_of_strings[:-1]), last_sep, list_of_strings[-1])
else: # list has a single elem
res_str = list_of_strings[0]
else: # if list_of_str is empty, return empty string
res_str = ""
return res_str
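# Worked examples of the joining behaviour described above:
if __name__ == "__main__":  # demo only; never runs on import
    assert join_and_build_str(["a", "b", "c"]) == "a, b and c"
    assert join_and_build_str(["a"]) == "a"
    assert join_and_build_str([]) == ""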
|
femtoghoti/PubSubServer
|
refs/heads/master
|
PubSubServer/kommunication_server.py
|
1
|
from twisted.internet import protocol
from twisted.protocols import basic
class PubProtocol(basic.LineReceiver):
def __init__(self, factory):
self.factory = factory
def connectionMade(self):
self.factory.clients.add(self)
def connectionLost(self, reason):
self.factory.clients.remove(self)
def lineReceived(self, line):
for c in self.factory.clients:
c.sendLine(line)
class PubFactory(protocol.Factory):
def __init__(self):
self.clients = set()
def buildProtocol(self, addr):
return PubProtocol(self)
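# Minimal usage sketch (the port number is an illustrative assumption):
# broadcast every received line to all connected clients.
if __name__ == "__main__":  # demo only; never runs on import
    from twisted.internet import reactor
    reactor.listenTCP(1025, PubFactory())
    reactor.run()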
|
SlateScience/MozillaJS
|
refs/heads/master
|
js/src/python/mock-1.0.0/tests/testsentinel.py
|
111
|
# Copyright (C) 2007-2012 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# http://www.voidspace.org.uk/python/mock/
from tests.support import unittest2
from mock import sentinel, DEFAULT
class SentinelTest(unittest2.TestCase):
def testSentinels(self):
self.assertEqual(sentinel.whatever, sentinel.whatever,
'sentinel not stored')
self.assertNotEqual(sentinel.whatever, sentinel.whateverelse,
'sentinel should be unique')
def testSentinelName(self):
self.assertEqual(str(sentinel.whatever), 'sentinel.whatever',
'sentinel name incorrect')
def testDEFAULT(self):
self.assertTrue(DEFAULT is sentinel.DEFAULT)
def testBases(self):
# If this doesn't raise an AttributeError then help(mock) is broken
self.assertRaises(AttributeError, lambda: sentinel.__bases__)
if __name__ == '__main__':
unittest2.main()
|
momingsong/ns-3
|
refs/heads/master
|
src/aodv/bindings/callbacks_list.py
|
95
|
callback_classes = [
['void', 'ns3::Ptr<ns3::Packet const>', 'ns3::Ipv4Header const&', 'ns3::Socket::SocketErrno', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Ipv4Route>', 'ns3::Ptr<ns3::Packet const>', 'ns3::Ipv4Header const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::WifiMacHeader const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ipv4Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::ArpCache const>', 'ns3::Ipv4Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
rebolinho/liveit.repository
|
refs/heads/master
|
plugin.video.SportsDevil/lib/downloader.py
|
25
|
# -*- coding: utf-8 -*-
import common
import urllib
import os.path
import xbmc, xbmcgui
class Downloader(object):
def __init__(self):
self.pDialog = None
def downloadWithJDownloader(self, url, title):
common.runPlugin('plugin://plugin.program.jdownloader/?action=addlink&url=' + url)
        common.showNotification('Sent to JDownloader: ' + title)
def downloadMovie(self, url, path, title, extension):
if not os.path.exists(path):
common.log('Path does not exist')
return None
if title == '':
common.log('No title given')
return None
file_path = xbmc.makeLegalFilename(os.path.join(path, title + extension))
file_path = urllib.unquote_plus(file_path)
# Overwrite existing file?
if os.path.isfile(file_path):
self.pDialog = xbmcgui.Dialog()
if not common.ask('File already exists. Overwrite?\n' + os.path.basename(file_path)):
title = common.showOSK(urllib.unquote_plus(title), common.translate(30102))
if not title:
return None
file_path = xbmc.makeLegalFilename(os.path.join(path, title + extension))
file_path = urllib.unquote_plus(file_path)
success = self.__download(url, file_path)
if success:
return file_path
else:
return None
def __download(self, url, file_path):
try:
# Setup progress dialog and download
self.pDialog = xbmcgui.DialogProgress()
self.pDialog.create('SportsDevil', common.translate(30050), common.translate(30051))
urllib.urlretrieve(url, file_path, self.video_report_hook)
self.pDialog.close()
return True
        except IOError:
            self.pDialog.close()
            common.showError(common.translate(30053))
            return False
except KeyboardInterrupt:
self.pDialog.close()
return False
def video_report_hook(self, count, blocksize, totalsize):
percent = int(float(count * blocksize * 100) / totalsize)
self.pDialog.update(percent, common.translate(30050), common.translate(30051))
if self.pDialog.iscanceled():
raise KeyboardInterrupt
|
neumerance/cloudloon2
|
refs/heads/master
|
.venv/lib/python2.7/site-packages/django/contrib/messages/__init__.py
|
311
|
from __future__ import absolute_import
from django.contrib.messages.api import *
from django.contrib.messages.constants import *
|
super7ramp/pulseaudio-dlna
|
refs/heads/master
|
pulseaudio_dlna/workarounds.py
|
3
|
#!/usr/bin/python
# This file is part of pulseaudio-dlna.
# pulseaudio-dlna is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# pulseaudio-dlna is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with pulseaudio-dlna. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import logging
from lxml import etree
import requests
import urlparse
import traceback
logger = logging.getLogger('pulseaudio_dlna.workarounds')
class BaseWorkaround(object):
"""
    Define functions which are called at specific points during the
    application's life cycle.
Those may be:
- before_register
- after_register
- before_play
- after_play
- before_stop
- after_stop
This may be extended in the future.
"""
ENABLED = True
def __init__(self):
pass
def run(self, method_name, *args, **kwargs):
method = getattr(self, method_name, None)
if self.ENABLED and method and callable(method):
logger.info('Running workaround "{}".'.format(method_name))
method(*args, **kwargs)
class YamahaWorkaround(BaseWorkaround):
# Misc constants
REQUEST_TIMEOUT = 5
ENCODING = 'utf-8'
URL_FORMAT = 'http://{ip}:{port}{url}'
# MediaRenderer constants
MR_YAMAHA_PREFIX = 'yamaha'
MR_YAMAHA_DEVICE = MR_YAMAHA_PREFIX + ':' + 'X_device'
MR_YAMAHA_URLBASE = MR_YAMAHA_PREFIX + ':' + 'X_URLBase'
MR_YAMAHA_SERVICELIST = MR_YAMAHA_PREFIX + ':' + 'X_serviceList'
MR_YAMAHA_SERVICE = MR_YAMAHA_PREFIX + ':' + 'X_service'
MR_YAMAHA_CONTROLURL = MR_YAMAHA_PREFIX + ':' + 'X_controlURL'
MR_YAMAHA_URLBASE_PATH = '/'.join([MR_YAMAHA_DEVICE, MR_YAMAHA_URLBASE])
MR_YAMAHA_CONTROLURL_PATH = '/'.join(
[MR_YAMAHA_DEVICE, MR_YAMAHA_SERVICELIST, MR_YAMAHA_SERVICE,
MR_YAMAHA_CONTROLURL])
# YamahaRemoteControl constants
YRC_TAG_ROOT = 'YAMAHA_AV'
YRC_KEY_RC = 'RC'
YRC_CMD_GETPARAM = 'GetParam'
YRC_BASEPATH_CONFIG = 'Config'
YRC_BASEPATH_BASICSTATUS = 'Basic_Status'
YRC_BASEPATH_FEATURES = 'Feature_Existence'
YRC_BASEPATH_INPUTNAMES = 'Name/Input'
YRC_BASEPATH_POWER = 'Power_Control/Power'
YRC_BASEPATH_SOURCE = 'Input/Input_Sel'
YRC_VALUE_POWER_ON = 'On'
YRC_VALUE_POWER_OFF = 'Standby'
YRC_REQUEST_CONTENTTYPE = 'text/xml; charset="{encoding}"'.format(
encoding=ENCODING)
YRC_REQUEST_TEMPLATE = \
'<?xml version="1.0" encoding="{encoding}"?>' \
'<YAMAHA_AV cmd="{cmd}">{request}</YAMAHA_AV>'
# Known server modes
YRC_SERVER_MODES = ['SERVER', 'PC']
def __init__(self, xml):
BaseWorkaround.__init__(self)
self.enabled = False
self.control_url = None
self.ip = None
self.port = None
self.zones = None
self.sources = None
self.server_mode_zone = None
self.server_mode_source = None
try:
# Initialize YamahaRemoteControl interface
if (not self._detect_remotecontrolinterface(xml)):
raise Exception()
self.enabled = True
except:
logger.warning(
'The YamahaWorkaround initialization failed. '
'Automatic source switching will not be enabled'
' - Please switch to server mode manually to enable UPnP'
' streaming')
logger.debug(traceback.format_exc())
def _detect_remotecontrolinterface(self, xml):
# Check for YamahaRemoteControl support
if (not self._parse_xml(xml)):
logger.info('No Yamaha RemoteControl interface detected')
return False
logger.info('Yamaha RemoteControl found: ' + self.URL_FORMAT.format(
ip=self.ip, port=self.port, url=self.control_url))
# Get supported features
self.zones, self.sources = self._query_supported_features()
if ((self.zones is None) or (self.sources is None)):
logger.error('Failed to query features')
return False
# Determine main zone
logger.info('Supported zones: ' + ', '.join(self.zones))
self.server_mode_zone = self.zones[0]
logger.info('Using \'{zone}\' as main zone'.format(
zone=self.server_mode_zone
))
# Determine UPnP server source
if (self.sources):
logger.info('Supported sources: ' + ', '.join(self.sources))
for source in self.YRC_SERVER_MODES:
if (source not in self.sources):
continue
self.server_mode_source = source
break
else:
logger.warning('Querying supported features failed')
if (not self.server_mode_source):
logger.warning('Unable to determine UPnP server mode source')
return False
logger.info('Using \'{source}\' as UPnP server mode source'.format(
source=self.server_mode_source
))
return True
def _parse_xml(self, xml):
# Parse MediaRenderer description XML
xml_root = etree.fromstring(xml)
namespaces = xml_root.nsmap
namespaces.pop(None, None)
# Determine AVRC URL
url_base = xml_root.find(self.MR_YAMAHA_URLBASE_PATH, namespaces)
control_url = xml_root.find(self.MR_YAMAHA_CONTROLURL_PATH, namespaces)
if ((url_base is None) or (control_url is None)):
return False
ip, port = urlparse.urlparse(url_base.text).netloc.split(':')
if ((not ip) or (not port)):
return False
self.ip = ip
self.port = port
self.control_url = control_url.text
return True
def _generate_request(self, cmd, root, path, value):
# Generate headers
headers = {
'Content-Type': self.YRC_REQUEST_CONTENTTYPE,
}
# Generate XML request
tags = path.split('/')
if (root):
tags = [root] + tags
request = ''
for tag in tags:
request += '<{tag}>'.format(tag=tag)
request += value
for tag in reversed(tags):
request += '</{tag}>'.format(tag=tag)
body = self.YRC_REQUEST_TEMPLATE.format(
encoding=self.ENCODING,
cmd=cmd,
request=request,
)
# Construct URL
url = self.URL_FORMAT.format(
ip=self.ip,
port=self.port,
url=self.control_url,
)
return headers, body, url
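    # Worked example (values are illustrative): root='Main_Zone',
    # path='Power_Control/Power', value='On' yields the request body
    #   <?xml version="1.0" encoding="utf-8"?>
    #   <YAMAHA_AV cmd="PUT"><Main_Zone><Power_Control><Power>On</Power>
    #   </Power_Control></Main_Zone></YAMAHA_AV>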
def _get(self, root, path, value, filter_path=None):
# Generate request
headers, data, url = self._generate_request('GET', root, path, value)
# POST request
try:
logger.debug('Yamaha RC request: '+data)
response = requests.post(
url, data.encode(self.ENCODING),
headers=headers, timeout=self.REQUEST_TIMEOUT)
logger.debug('Yamaha RC response: ' + response.text)
if response.status_code != 200:
logger.error(
'Yamaha RC request failed - Status code: {code}'.format(
code=response.status_code))
return None
except requests.exceptions.Timeout:
logger.error('Yamaha RC request failed - Connection timeout')
return None
# Parse response
xml_root = etree.fromstring(response.content)
if (xml_root.tag != self.YRC_TAG_ROOT):
logger.error("Malformed response: Root tag missing")
return None
# Parse response code
rc = xml_root.get(self.YRC_KEY_RC)
if (not rc):
logger.error("Malformed response: RC attribute missing")
return None
rc = int(rc)
if (rc > 0):
logger.error(
'Yamaha RC request failed - Response code: {code}'.format(
code=rc))
return rc
# Only return subtree
result_path = []
if (root):
result_path.append(root)
result_path.append(path)
if (filter_path):
result_path.append(filter_path)
result_path = '/'.join(result_path)
return xml_root.find(result_path)
def _put(self, root, path, value):
# Generate request
headers, data, url = self._generate_request('PUT', root, path, value)
# POST request
try:
logger.debug('Yamaha RC request: '+data)
response = requests.post(
url, data.encode(self.ENCODING),
headers=headers, timeout=self.REQUEST_TIMEOUT)
logger.debug('Yamaha RC response: ' + response.text)
if response.status_code != 200:
logger.error(
'Yamaha RC request failed - Status code: {code}'.format(
code=response.status_code))
return False
except requests.exceptions.Timeout:
logger.error('Yamaha RC request failed - Connection timeout')
            return False  # mirror the status-code failure path above
# Parse response
xml_root = etree.fromstring(response.content)
if (xml_root.tag != self.YRC_TAG_ROOT):
logger.error("Malformed response: Root tag missing")
return None
# Parse response code
rc = xml_root.get(self.YRC_KEY_RC)
if (not rc):
logger.error("Malformed response: RC attribute missing")
return None
rc = int(rc)
if (rc > 0):
logger.error(
'Yamaha RC request failed - Response code: {code}'.format(
code=rc))
return rc
return 0
def _query_supported_features(self):
xml_response = self._get('System', 'Config', self.YRC_CMD_GETPARAM)
if (xml_response is None):
return None, None
xml_features = xml_response.find(self.YRC_BASEPATH_FEATURES)
if (xml_features is None):
logger.debug('Failed to find feature description')
return None, None
# Features can be retrieved in different ways, most probably
        # depending on the receiver's firmware / protocol version
# Here are the different responses known up to now:
#
# 1. Comma-separated list of all features in one single tag, containing
# all input sources
# 2. Each feature is enclosed by a tag along with context information
# depending on the XML path:
# - YRC_BASEPATH_FEATURES: availability and/or support
# (0 == not supported, 1 == supported)
# - YRC_BASEPATH_INPUTNAMES: input/source name
        # Every feature is an input source if it does not contain the
# substring 'Zone'. Otherwise, it is a zone supported by the
# receiver.
zones = []
sources = []
if (xml_features.text):
# Format 1:
sources = xml_features.text.split(',')
else:
# Format 2:
for child in xml_features.getchildren():
if ((not child.text) or (int(child.text) == 0)):
continue
if ('Zone' in child.tag):
zones.append(child.tag)
else:
sources.append(child.tag)
xml_names = xml_response.find(self.YRC_BASEPATH_INPUTNAMES)
if (xml_names is not None):
for child in xml_names.getchildren():
sources.append(child.tag)
        # If we got no zones up to now, we have to assume that the receiver
        # has no multi-zone support. Thus there can be only one!
        # Let's call it "System" and pray for the best!
if (len(zones) == 0):
zones.append('System')
return zones, sources
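        # Illustrative responses for the two formats described above
        # (element names are assumptions based on the parsing code):
        #   Format 1: <Feature_Existence>SERVER,NET_RADIO,Tuner</Feature_Existence>
        #   Format 2: <Feature_Existence><SERVER>1</SERVER><Zone_2>0</Zone_2>
        #             </Feature_Existence>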
def _set_source(self, value, zone=None):
if (not zone):
zone = self.server_mode_zone
self._put(zone, self.YRC_BASEPATH_SOURCE, value)
def before_register(self):
if (not self.enabled):
return
logger.info('Switching to UPnP server mode')
self._set_source(self.server_mode_source)
|
lptorres/noah-inasafe
|
refs/heads/master
|
web_api/third_party/raven/__init__.py
|
1
|
"""
raven
~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception, e:
VERSION = 'unknown'
from raven.base import *
from raven.conf import *
|
JenSte/pyqtgraph
|
refs/heads/develop
|
pyqtgraph/flowchart/library/Display.py
|
52
|
# -*- coding: utf-8 -*-
from ..Node import Node
import weakref
from ...Qt import QtCore, QtGui
from ...graphicsItems.ScatterPlotItem import ScatterPlotItem
from ...graphicsItems.PlotCurveItem import PlotCurveItem
from ... import PlotDataItem, ComboBox
from .common import *
import numpy as np
class PlotWidgetNode(Node):
"""Connection to PlotWidget. Will plot arrays, metaarrays, and display event lists."""
nodeName = 'PlotWidget'
sigPlotChanged = QtCore.Signal(object)
def __init__(self, name):
Node.__init__(self, name, terminals={'In': {'io': 'in', 'multi': True}})
self.plot = None # currently selected plot
        self.plots = {} # dict of available plots the user may select from
self.ui = None
self.items = {}
def disconnected(self, localTerm, remoteTerm):
if localTerm is self['In'] and remoteTerm in self.items:
self.plot.removeItem(self.items[remoteTerm])
del self.items[remoteTerm]
def setPlot(self, plot):
#print "======set plot"
if plot == self.plot:
return
# clear data from previous plot
if self.plot is not None:
for vid in list(self.items.keys()):
self.plot.removeItem(self.items[vid])
del self.items[vid]
self.plot = plot
self.updateUi()
self.update()
self.sigPlotChanged.emit(self)
def getPlot(self):
return self.plot
def process(self, In, display=True):
if display and self.plot is not None:
items = set()
# Add all new input items to selected plot
for name, vals in In.items():
if vals is None:
continue
if type(vals) is not list:
vals = [vals]
for val in vals:
vid = id(val)
if vid in self.items and self.items[vid].scene() is self.plot.scene():
# Item is already added to the correct scene
# possible bug: what if two plots occupy the same scene? (should
# rarely be a problem because items are removed from a plot before
# switching).
items.add(vid)
else:
# Add the item to the plot, or generate a new item if needed.
if isinstance(val, QtGui.QGraphicsItem):
self.plot.addItem(val)
item = val
else:
item = self.plot.plot(val)
self.items[vid] = item
items.add(vid)
# Any left-over items that did not appear in the input must be removed
for vid in list(self.items.keys()):
if vid not in items:
self.plot.removeItem(self.items[vid])
del self.items[vid]
def processBypassed(self, args):
if self.plot is None:
return
for item in list(self.items.values()):
self.plot.removeItem(item)
self.items = {}
def ctrlWidget(self):
if self.ui is None:
self.ui = ComboBox()
self.ui.currentIndexChanged.connect(self.plotSelected)
self.updateUi()
return self.ui
def plotSelected(self, index):
self.setPlot(self.ui.value())
def setPlotList(self, plots):
"""
Specify the set of plots (PlotWidget or PlotItem) that the user may
select from.
*plots* must be a dictionary of {name: plot} pairs.
"""
self.plots = plots
self.updateUi()
def updateUi(self):
# sets list and automatically preserves previous selection
self.ui.setItems(self.plots)
try:
self.ui.setValue(self.plot)
except ValueError:
pass
class CanvasNode(Node):
"""Connection to a Canvas widget."""
nodeName = 'CanvasWidget'
def __init__(self, name):
Node.__init__(self, name, terminals={'In': {'io': 'in', 'multi': True}})
self.canvas = None
self.items = {}
def disconnected(self, localTerm, remoteTerm):
        if localTerm is self['In'] and remoteTerm in self.items:
self.canvas.removeItem(self.items[remoteTerm])
del self.items[remoteTerm]
def setCanvas(self, canvas):
self.canvas = canvas
def getCanvas(self):
return self.canvas
def process(self, In, display=True):
if display:
items = set()
for name, vals in In.items():
if vals is None:
continue
if type(vals) is not list:
vals = [vals]
for val in vals:
vid = id(val)
if vid in self.items:
items.add(vid)
else:
self.canvas.addItem(val)
item = val
self.items[vid] = item
items.add(vid)
for vid in list(self.items.keys()):
if vid not in items:
#print "remove", self.items[vid]
self.canvas.removeItem(self.items[vid])
del self.items[vid]
class PlotCurve(CtrlNode):
"""Generates a plot curve from x/y data"""
nodeName = 'PlotCurve'
uiTemplate = [
('color', 'color'),
]
def __init__(self, name):
CtrlNode.__init__(self, name, terminals={
'x': {'io': 'in'},
'y': {'io': 'in'},
'plot': {'io': 'out'}
})
self.item = PlotDataItem()
def process(self, x, y, display=True):
#print "scatterplot process"
if not display:
return {'plot': None}
self.item.setData(x, y, pen=self.ctrls['color'].color())
return {'plot': self.item}
class ScatterPlot(CtrlNode):
"""Generates a scatter plot from a record array or nested dicts"""
nodeName = 'ScatterPlot'
uiTemplate = [
('x', 'combo', {'values': [], 'index': 0}),
('y', 'combo', {'values': [], 'index': 0}),
('sizeEnabled', 'check', {'value': False}),
('size', 'combo', {'values': [], 'index': 0}),
('absoluteSize', 'check', {'value': False}),
('colorEnabled', 'check', {'value': False}),
('color', 'colormap', {}),
('borderEnabled', 'check', {'value': False}),
('border', 'colormap', {}),
]
def __init__(self, name):
CtrlNode.__init__(self, name, terminals={
'input': {'io': 'in'},
'plot': {'io': 'out'}
})
self.item = ScatterPlotItem()
self.keys = []
#self.ui = QtGui.QWidget()
#self.layout = QtGui.QGridLayout()
#self.ui.setLayout(self.layout)
#self.xCombo = QtGui.QComboBox()
#self.yCombo = QtGui.QComboBox()
def process(self, input, display=True):
#print "scatterplot process"
if not display:
return {'plot': None}
self.updateKeys(input[0])
x = str(self.ctrls['x'].currentText())
y = str(self.ctrls['y'].currentText())
size = str(self.ctrls['size'].currentText())
pen = QtGui.QPen(QtGui.QColor(0,0,0,0))
points = []
for i in input:
pt = {'pos': (i[x], i[y])}
if self.ctrls['sizeEnabled'].isChecked():
pt['size'] = i[size]
if self.ctrls['borderEnabled'].isChecked():
pt['pen'] = QtGui.QPen(self.ctrls['border'].getColor(i))
else:
pt['pen'] = pen
if self.ctrls['colorEnabled'].isChecked():
pt['brush'] = QtGui.QBrush(self.ctrls['color'].getColor(i))
points.append(pt)
self.item.setPxMode(not self.ctrls['absoluteSize'].isChecked())
self.item.setPoints(points)
return {'plot': self.item}
def updateKeys(self, data):
if isinstance(data, dict):
keys = list(data.keys())
elif isinstance(data, list) or isinstance(data, tuple):
keys = data
elif isinstance(data, np.ndarray) or isinstance(data, np.void):
keys = data.dtype.names
else:
print("Unknown data type:", type(data), data)
return
for c in self.ctrls.values():
c.blockSignals(True)
for c in [self.ctrls['x'], self.ctrls['y'], self.ctrls['size']]:
cur = str(c.currentText())
c.clear()
for k in keys:
c.addItem(k)
if k == cur:
c.setCurrentIndex(c.count()-1)
for c in [self.ctrls['color'], self.ctrls['border']]:
c.setArgList(keys)
for c in self.ctrls.values():
c.blockSignals(False)
self.keys = keys
def saveState(self):
state = CtrlNode.saveState(self)
return {'keys': self.keys, 'ctrls': state}
def restoreState(self, state):
self.updateKeys(state['keys'])
CtrlNode.restoreState(self, state['ctrls'])
#class ImageItem(Node):
#"""Creates an ImageItem for display in a canvas from a file handle."""
#nodeName = 'Image'
#def __init__(self, name):
#Node.__init__(self, name, terminals={
#'file': {'io': 'in'},
#'image': {'io': 'out'}
#})
#self.imageItem = graphicsItems.ImageItem()
#self.handle = None
#def process(self, file, display=True):
#if not display:
#return {'image': None}
#if file != self.handle:
#self.handle = file
#data = file.read()
#self.imageItem.updateImage(data)
#pos = file.
|
SiviVuk/inflection
|
refs/heads/master
|
docs/conf.py
|
4
|
# -*- coding: utf-8 -*-
#
# inflection documentation build configuration file, created by
# sphinx-quickstart on Wed Feb 22 22:51:13 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
from inflection import __version__
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'inflection'
copyright = u'2012-2015, Janne Vanhala'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default' if on_rtd else 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'inflectiondoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'inflection.tex', u'inflection Documentation',
u'Janne Vanhala', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'inflection', u'inflection Documentation',
[u'Janne Vanhala'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'inflection', u'inflection Documentation',
u'Janne Vanhala', 'inflection', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
OpenXT/seabios
|
refs/heads/master
|
tools/transdump.py
|
121
|
#!/usr/bin/env python
# This script is useful for taking the output of memdump() and
# converting it back into binary output. This can be useful, for
# example, when one wants to push that data into other tools like
# objdump or hexdump.
#
# (C) Copyright 2010 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import sys
import struct
def unhex(str):
return int(str, 16)
def parseMem(filehdl):
mem = []
for line in filehdl:
parts = line.split(':')
if len(parts) < 2:
continue
try:
vaddr = unhex(parts[0])
parts = parts[1].split()
mem.extend([unhex(v) for v in parts])
except ValueError:
continue
return mem
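# Example input line (illustrative) that parseMem() accepts -- a hex
# address, a colon, then whitespace-separated hex words:
#     000f1234: deadbeef 00c0ffee 12345678 9abcdef0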
def printUsage():
sys.stderr.write("Usage:\n %s <file | ->\n"
% (sys.argv[0],))
sys.exit(1)
def main():
if len(sys.argv) != 2:
printUsage()
filename = sys.argv[1]
if filename == '-':
filehdl = sys.stdin
else:
filehdl = open(filename, 'r')
mem = parseMem(filehdl)
for i in mem:
sys.stdout.write(struct.pack("<I", i))
if __name__ == '__main__':
main()
|
jvkops/django
|
refs/heads/master
|
tests/admin_scripts/app_raising_warning/models.py
|
391
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core import checks
from django.db import models
class ModelRaisingMessages(models.Model):
@classmethod
def check(self, **kwargs):
return [
checks.Warning(
'A warning',
hint=None,
),
]
|
ahartz1/python_koans
|
refs/heads/master
|
python3/koans/about_triangle_project.py
|
130
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
# You need to write the triangle method in the file 'triangle.py'
from .triangle import *
class AboutTriangleProject(Koan):
def test_equilateral_triangles_have_equal_sides(self):
self.assertEqual('equilateral', triangle(2, 2, 2))
self.assertEqual('equilateral', triangle(10, 10, 10))
def test_isosceles_triangles_have_exactly_two_sides_equal(self):
self.assertEqual('isosceles', triangle(3, 4, 4))
self.assertEqual('isosceles', triangle(4, 3, 4))
self.assertEqual('isosceles', triangle(4, 4, 3))
self.assertEqual('isosceles', triangle(10, 10, 2))
def test_scalene_triangles_have_no_equal_sides(self):
self.assertEqual('scalene', triangle(3, 4, 5))
self.assertEqual('scalene', triangle(10, 11, 12))
self.assertEqual('scalene', triangle(5, 4, 2))
|
NCBI-Hackathons/PhenVar
|
refs/heads/master
|
generateVCF/lanprosForVCF.py
|
1
|
import nltk
from Bio import Entrez
import time # We need this if we want to filter based on age
from urllib2 import HTTPError
import xml.etree.ElementTree as ET
from collections import Counter
import numpy as np
import operator
from os import path
import random
def tokenize_abstracts(RS_pmids_abstracts_dict):
""" Takes in a dictionary where key is the RSID, value is a dictionary where key is the pmid and value is abstract. Breaks up each abstract into tokens. Return: dictionary where key is RSID, value is a dictionary where key is the pmid and value is a list of tokenized abstracts """
RS_pmids_tokenizedabstracts_dict = {}
for each_RS in RS_pmids_abstracts_dict:
pmids_tokenizedabstracts = {}
pmids_abstracts = RS_pmids_abstracts_dict[each_RS]
for pmid in pmids_abstracts:
tokenizedabtracts_list = []
tokens = nltk.word_tokenize(pmids_abstracts[pmid])
tokenizedabtracts_list.append(tokens)
pmids_tokenizedabstracts[pmid] = tokenizedabtracts_list
RS_pmids_tokenizedabstracts_dict[each_RS] = pmids_tokenizedabstracts
return RS_pmids_tokenizedabstracts_dict
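# Hedged usage sketch (toy data; the RSID and pmid are illustrative, and
# NLTK's 'punkt' tokenizer data must already be downloaded):
if __name__ == "__main__":  # demo only; never runs on import
    _demo = {"rs123": {"100001": "BRCA1 variants alter cancer risk."}}
    print(tokenize_abstracts(_demo))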
def tagged_abstracts(RS_pmids_tokenizedabstracts_dict):
""" Takes a dict of tokenized abstracts
and tags them using the NLTK module for Natural Language Entities.
Input dictionary: key is the RS ID, value is a dictionary where key is the pmid and value is a list of tokens"""
RS_pmids_taggedabstracts_dict = {}
for each_RS in RS_pmids_tokenizedabstracts_dict:
pmids_taggedabstracts = {}
pmids_tokenizedabstracts = RS_pmids_tokenizedabstracts_dict[each_RS]
for pmid in pmids_tokenizedabstracts:
taggedabstracts_list = []
for token in pmids_tokenizedabstracts[pmid]:
tagged = nltk.pos_tag(token)
taggedabstracts_list.append(tagged)
pmids_taggedabstracts[pmid] = taggedabstracts_list
RS_pmids_taggedabstracts_dict[each_RS] = pmids_taggedabstracts
return RS_pmids_taggedabstracts_dict
def extract_nouns(RS_pmids_taggedabstracts_dict):
"""Takes a dict where key is the RS ID, values is a dict where key is the pmid and value is list of tuples of the form (word, tag).
Return a dictionary of counts for each
word with tag "NN", "NNS", "NNP" or "NNPS" """
# noun_counter = []
# all_abstract_noun_counts = []
# normalized_all_counts = {}
RS_pmids_nounsabstracts_dict = {}
for each_RS in RS_pmids_taggedabstracts_dict:
pmids_nounsabstracts = {}
pmids_taggedabstracts = RS_pmids_taggedabstracts_dict[each_RS]
for pmid in pmids_taggedabstracts:
nounsabstracts = []
for tags in pmids_taggedabstracts[pmid]:
for tag in tags:
if tag[1] == "NN" or tag[1] == "NNS" or tag[1] == "NNP" or tag[1] == "NNPS":
nounsabstracts.append(str(tag[0].encode('ascii', 'ignore')))
nounsabstracts_count = dict(Counter(nounsabstracts))
nouns = sorted(nounsabstracts_count, key=nounsabstracts_count.__getitem__, reverse=True)
#nouns = nounsabstracts_count.keys()
# pmids_nounsabstracts[pmid] = nounsabstracts_count
pmids_nounsabstracts[pmid] = nouns
RS_pmids_nounsabstracts_dict[each_RS] = pmids_nounsabstracts
return RS_pmids_nounsabstracts_dict
# def obtain_all_abtracts_counts(RS_pmids_nounsabstracts_dict):
# all_abstract_noun_counts = {}
# for each_RS in RS_pmids_nounsabstracts_dict:
# pmids_nounsabstracts_dict = RS_pmids_nounsabstracts_dict[each_RS]
# for pmid in pmids_nounsabstracts_dict:
# for each_noun in pmids_nounsabstracts_dict[pmid]:
# if each_noun not in all_abstract_noun_counts.keys():
# all_abstract_noun_counts[each_noun] = pmids_nounsabstracts_dict[pmid][each_noun]
# else:
# all_abstract_noun_counts[each_noun] += pmids_nounsabstracts_dict[pmid][each_noun]
# print len(all_abstract_noun_counts)
# for tags in taggedabstracts_list:
# per_abstract_noun_counts = []
# for tag in tags:
# if tag[1] == "NN" or tag[1] == "NNS" or tag[1] == "NNP" or tag[1] == "NNPS":
# per_abstract_noun_counts.append(str(tag[0].encode('ascii', 'ignore')))
# noun_counter.append(str(tag[0].encode('ascii', 'ignore')))
# all_abstract_noun_counts.append(dict(Counter(per_abstract_noun_counts)))
# all_counts = dict(Counter(noun_counter))
# num_abstracts = float(len(taggedabstracts_list))
# for key in all_counts.keys():
# total_occurrences = float(all_counts[key])
# for each_abstract in all_abstract_noun_counts:
# if key in each_abstract:
# single_abstract_count = float(each_abstract[key])
# # if def_tags_per_abs != 0:
# # if (single_abstract_count/total_occurrences) < def_tags_per_abs:
# # normalized_all_counts[key] = float(all_counts[key])/num_abstracts
# # else:
# # normalized_all_counts[key] = float(all_counts[key])/num_abstracts
# return normalized_all_counts
|
Debian/openjfx
|
refs/heads/master
|
modules/web/src/main/native/Tools/QueueStatusServer/filters/__init__.py
|
6014
|
# Required for Python to search this directory for module files
|
TheTypoMaster/chromium-crosswalk
|
refs/heads/master
|
build/android/pylib/utils/md5sum_test.py
|
30
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import unittest
from pylib import cmd_helper
from pylib import constants
from pylib.utils import md5sum
sys.path.append(
os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
import mock
TEST_OUT_DIR = os.path.join('test', 'out', 'directory')
HOST_MD5_EXECUTABLE = os.path.join(TEST_OUT_DIR, 'md5sum_bin_host')
class Md5SumTest(unittest.TestCase):
def setUp(self):
self._patchers = [
mock.patch('pylib.constants.GetOutDirectory',
new=mock.Mock(return_value=TEST_OUT_DIR)),
mock.patch('os.path.exists',
new=mock.Mock(return_value=True)),
]
for p in self._patchers:
p.start()
def tearDown(self):
for p in self._patchers:
p.stop()
def testCalculateHostMd5Sums_singlePath(self):
test_path = '/test/host/file.dat'
mock_get_cmd_output = mock.Mock(
return_value='0123456789abcdeffedcba9876543210 /test/host/file.dat')
with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
out = md5sum.CalculateHostMd5Sums(test_path)
self.assertEquals(1, len(out))
self.assertTrue('/test/host/file.dat' in out)
self.assertEquals('0123456789abcdeffedcba9876543210',
out['/test/host/file.dat'])
mock_get_cmd_output.assert_called_once_with(
[HOST_MD5_EXECUTABLE, '/test/host/file.dat'])
def testCalculateHostMd5Sums_list(self):
test_paths = ['/test/host/file0.dat', '/test/host/file1.dat']
mock_get_cmd_output = mock.Mock(
return_value='0123456789abcdeffedcba9876543210 /test/host/file0.dat\n'
'123456789abcdef00fedcba987654321 /test/host/file1.dat\n')
with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
out = md5sum.CalculateHostMd5Sums(test_paths)
self.assertEquals(2, len(out))
self.assertTrue('/test/host/file0.dat' in out)
self.assertEquals('0123456789abcdeffedcba9876543210',
out['/test/host/file0.dat'])
self.assertTrue('/test/host/file1.dat' in out)
self.assertEquals('123456789abcdef00fedcba987654321',
out['/test/host/file1.dat'])
mock_get_cmd_output.assert_called_once_with(
[HOST_MD5_EXECUTABLE, '/test/host/file0.dat',
'/test/host/file1.dat'])
def testCalculateHostMd5Sums_generator(self):
test_paths = ('/test/host/' + p for p in ['file0.dat', 'file1.dat'])
mock_get_cmd_output = mock.Mock(
return_value='0123456789abcdeffedcba9876543210 /test/host/file0.dat\n'
'123456789abcdef00fedcba987654321 /test/host/file1.dat\n')
with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
out = md5sum.CalculateHostMd5Sums(test_paths)
self.assertEquals(2, len(out))
self.assertTrue('/test/host/file0.dat' in out)
self.assertEquals('0123456789abcdeffedcba9876543210',
out['/test/host/file0.dat'])
self.assertTrue('/test/host/file1.dat' in out)
self.assertEquals('123456789abcdef00fedcba987654321',
out['/test/host/file1.dat'])
mock_get_cmd_output.assert_called_once_with(
[HOST_MD5_EXECUTABLE, '/test/host/file0.dat', '/test/host/file1.dat'])
def testCalculateDeviceMd5Sums_singlePath(self):
test_path = '/storage/emulated/legacy/test/file.dat'
device = mock.NonCallableMock()
device.adb = mock.NonCallableMock()
device.adb.Push = mock.Mock()
device_md5sum_output = [
'0123456789abcdeffedcba9876543210 '
'/storage/emulated/legacy/test/file.dat',
]
device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
mock_temp_file = mock.mock_open()
mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
mock_device_temp_file = mock.mock_open()
mock_device_temp_file.return_value.name = (
'/data/local/tmp/test/script/file.sh')
with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
new=mock_device_temp_file)):
out = md5sum.CalculateDeviceMd5Sums(test_path, device)
self.assertEquals(1, len(out))
self.assertTrue('/storage/emulated/legacy/test/file.dat' in out)
self.assertEquals('0123456789abcdeffedcba9876543210',
out['/storage/emulated/legacy/test/file.dat'])
device.adb.Push.assert_called_once_with(
'/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
device.RunShellCommand.assert_called_once_with(
['sh', '/data/local/tmp/test/script/file.sh'])
def testCalculateDeviceMd5Sums_list(self):
test_path = ['/storage/emulated/legacy/test/file0.dat',
'/storage/emulated/legacy/test/file1.dat']
device = mock.NonCallableMock()
device.adb = mock.NonCallableMock()
device.adb.Push = mock.Mock()
device_md5sum_output = [
'0123456789abcdeffedcba9876543210 '
'/storage/emulated/legacy/test/file0.dat',
'123456789abcdef00fedcba987654321 '
'/storage/emulated/legacy/test/file1.dat',
]
device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
mock_temp_file = mock.mock_open()
mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
mock_device_temp_file = mock.mock_open()
mock_device_temp_file.return_value.name = (
'/data/local/tmp/test/script/file.sh')
with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
new=mock_device_temp_file)):
out = md5sum.CalculateDeviceMd5Sums(test_path, device)
self.assertEquals(2, len(out))
self.assertTrue('/storage/emulated/legacy/test/file0.dat' in out)
self.assertEquals('0123456789abcdeffedcba9876543210',
out['/storage/emulated/legacy/test/file0.dat'])
self.assertTrue('/storage/emulated/legacy/test/file1.dat' in out)
self.assertEquals('123456789abcdef00fedcba987654321',
out['/storage/emulated/legacy/test/file1.dat'])
device.adb.Push.assert_called_once_with(
'/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
device.RunShellCommand.assert_called_once_with(
['sh', '/data/local/tmp/test/script/file.sh'])
def testCalculateDeviceMd5Sums_generator(self):
test_path = ('/storage/emulated/legacy/test/file%d.dat' % n
for n in xrange(0, 2))
device = mock.NonCallableMock()
device.adb = mock.NonCallableMock()
device.adb.Push = mock.Mock()
device_md5sum_output = [
'0123456789abcdeffedcba9876543210 '
'/storage/emulated/legacy/test/file0.dat',
'123456789abcdef00fedcba987654321 '
'/storage/emulated/legacy/test/file1.dat',
]
device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
mock_temp_file = mock.mock_open()
mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
mock_device_temp_file = mock.mock_open()
mock_device_temp_file.return_value.name = (
'/data/local/tmp/test/script/file.sh')
with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
new=mock_device_temp_file)):
out = md5sum.CalculateDeviceMd5Sums(test_path, device)
self.assertEquals(2, len(out))
self.assertTrue('/storage/emulated/legacy/test/file0.dat' in out)
self.assertEquals('0123456789abcdeffedcba9876543210',
out['/storage/emulated/legacy/test/file0.dat'])
self.assertTrue('/storage/emulated/legacy/test/file1.dat' in out)
self.assertEquals('123456789abcdef00fedcba987654321',
out['/storage/emulated/legacy/test/file1.dat'])
device.adb.Push.assert_called_once_with(
'/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
device.RunShellCommand.assert_called_once_with(
['sh', '/data/local/tmp/test/script/file.sh'])
def testCalculateDeviceMd5Sums_singlePath_linkerWarning(self):
# See crbug/479966
test_path = '/storage/emulated/legacy/test/file.dat'
device = mock.NonCallableMock()
device.adb = mock.NonCallableMock()
device.adb.Push = mock.Mock()
device_md5sum_output = [
'WARNING: linker: /data/local/tmp/md5sum/md5sum_bin: '
'unused DT entry: type 0x1d arg 0x15db',
'THIS_IS_NOT_A_VALID_CHECKSUM_ZZZ some random text',
'0123456789abcdeffedcba9876543210 '
'/storage/emulated/legacy/test/file.dat',
]
device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
mock_temp_file = mock.mock_open()
mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
mock_device_temp_file = mock.mock_open()
mock_device_temp_file.return_value.name = (
'/data/local/tmp/test/script/file.sh')
with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
new=mock_device_temp_file)):
out = md5sum.CalculateDeviceMd5Sums(test_path, device)
self.assertEquals(1, len(out))
self.assertTrue('/storage/emulated/legacy/test/file.dat' in out)
self.assertEquals('0123456789abcdeffedcba9876543210',
out['/storage/emulated/legacy/test/file.dat'])
device.adb.Push.assert_called_once_with(
'/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
device.RunShellCommand.assert_called_once_with(
['sh', '/data/local/tmp/test/script/file.sh'])
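# A minimal sketch (an assumption, not the actual pylib md5sum internals) of
# the filtering behaviour the linker-warning test above relies on: only lines
# that begin with a 32-character hex digest are treated as checksum output,
# so linker warnings and other junk lines are silently dropped.
import re
_MD5SUM_LINE_RE = re.compile('^([0-9a-f]{32}) +(.+)$')
def _parse_md5sum_output(output_lines):
  """Return a dict mapping path -> digest for well-formed output lines."""
  result = {}
  for line in output_lines:
    match = _MD5SUM_LINE_RE.match(line)
    if match:
      result[match.group(2)] = match.group(1)
  return result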
if __name__ == '__main__':
unittest.main(verbosity=2)
|
mbeacom/locust
|
refs/heads/master
|
locust/test/test_stats.py
|
1
|
import csv
import time
import unittest
import re
import os
import json
import gevent
import mock
import locust
from locust import HttpUser, TaskSet, task, User, constant
from locust.env import Environment
from locust.rpc.protocol import Message
from locust.stats import CachedResponseTimes, RequestStats, StatsEntry, diff_response_time_dicts, PERCENTILES_TO_REPORT
from locust.stats import StatsCSVFileWriter
from locust.stats import stats_history
from locust.test.testcases import LocustTestCase
from locust.user.inspectuser import get_task_ratio_dict
from .testcases import WebserverTestCase
from .test_runners import mocked_rpc
_TEST_CSV_STATS_INTERVAL_SEC = 0.2
_TEST_CSV_STATS_INTERVAL_WAIT_SEC = _TEST_CSV_STATS_INTERVAL_SEC + 0.1
def _write_csv_files(environment, stats_base_name, full_history=False):
"""Spawn CVS writer and exit loop after first iteration."""
stats_writer = StatsCSVFileWriter(environment, PERCENTILES_TO_REPORT, stats_base_name, full_history=full_history)
greenlet = gevent.spawn(stats_writer)
gevent.sleep(_TEST_CSV_STATS_INTERVAL_WAIT_SEC)
gevent.kill(greenlet)
stats_writer.close_files()
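# Note (derived from StatsCSVFileWriter usage below): for a given
# stats_base_name the writer produces "<base>_stats.csv",
# "<base>_stats_history.csv" and "<base>_failures.csv", which is what the
# TestCsvStats assertions check for.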
class TestRequestStats(unittest.TestCase):
def setUp(self):
locust.stats.PERCENTILES_TO_REPORT = PERCENTILES_TO_REPORT
self.stats = RequestStats()
def log(response_time, size):
self.stats.log_request("GET", "test_entry", response_time, size)
def log_error(exc):
self.stats.log_error("GET", "test_entry", exc)
log(45, 1)
log(135, 1)
log(44, 1)
log(None, 1)
log_error(Exception("dummy fail"))
log_error(Exception("dummy fail"))
log(375, 1)
log(601, 1)
log(35, 1)
log(79, 1)
log(None, 1)
log_error(Exception("dummy fail"))
self.s = self.stats.get("test_entry", "GET")
def test_percentile(self):
s = StatsEntry(self.stats, "percentile_test", "GET")
for x in range(100):
s.log(x, 0)
self.assertEqual(s.get_response_time_percentile(0.5), 50)
self.assertEqual(s.get_response_time_percentile(0.6), 60)
self.assertEqual(s.get_response_time_percentile(0.95), 95)
def test_median(self):
self.assertEqual(self.s.median_response_time, 79)
def test_median_out_of_min_max_bounds(self):
s = StatsEntry(self.stats, "median_test", "GET")
s.log(6034, 0)
self.assertEqual(s.median_response_time, 6034)
s.reset()
s.log(6099, 0)
self.assertEqual(s.median_response_time, 6099)
def test_total_rps(self):
self.stats.log_request("GET", "other_endpoint", 1337, 1337)
s2 = self.stats.get("other_endpoint", "GET")
s2.start_time = 2.0
s2.last_request_timestamp = 6.0
self.s.start_time = 1.0
self.s.last_request_timestamp = 4.0
self.stats.total.start_time = 1.0
self.stats.total.last_request_timestamp = 6.0
self.assertEqual(self.s.total_rps, 9 / 5.0)
self.assertAlmostEqual(s2.total_rps, 1 / 5.0)
self.assertEqual(self.stats.total.total_rps, 10 / 5.0)
def test_rps_less_than_one_second(self):
s = StatsEntry(self.stats, "percentile_test", "GET")
for i in range(10):
s.log(i, 0)
self.assertGreater(s.total_rps, 10)
def test_current_rps(self):
self.stats.total.last_request_timestamp = int(time.time()) + 4
self.assertEqual(self.s.current_rps, 4.5)
self.stats.total.last_request_timestamp = int(time.time()) + 25
self.assertEqual(self.s.current_rps, 0)
def test_current_fail_per_sec(self):
self.stats.total.last_request_timestamp = int(time.time()) + 4
self.assertEqual(self.s.current_fail_per_sec, 1.5)
self.stats.total.last_request_timestamp = int(time.time()) + 12
self.assertEqual(self.s.current_fail_per_sec, 0.3)
self.stats.total.last_request_timestamp = int(time.time()) + 25
self.assertEqual(self.s.current_fail_per_sec, 0)
def test_num_reqs_fails(self):
self.assertEqual(self.s.num_requests, 9)
self.assertEqual(self.s.num_failures, 3)
def test_avg(self):
self.assertEqual(self.s.avg_response_time, 187.71428571428572)
def test_total_content_length(self):
self.assertEqual(self.s.total_content_length, 9)
def test_reset(self):
self.s.reset()
self.s.log(756, 0)
self.s.log_error(Exception("dummy fail after reset"))
self.s.log(85, 0)
self.assertGreater(self.s.total_rps, 2)
self.assertEqual(self.s.num_requests, 2)
self.assertEqual(self.s.num_failures, 1)
self.assertEqual(self.s.avg_response_time, 420.5)
self.assertEqual(self.s.median_response_time, 85)
self.assertNotEqual(None, self.s.last_request_timestamp)
self.s.reset()
self.assertEqual(None, self.s.last_request_timestamp)
def test_avg_only_none(self):
self.s.reset()
self.s.log(None, 123)
self.assertEqual(self.s.avg_response_time, 0)
self.assertEqual(self.s.median_response_time, 0)
self.assertEqual(self.s.get_response_time_percentile(0.5), 0)
def test_reset_min_response_time(self):
self.s.reset()
self.s.log(756, 0)
self.assertEqual(756, self.s.min_response_time)
def test_aggregation(self):
s1 = StatsEntry(self.stats, "aggregate me!", "GET")
s1.log(12, 0)
s1.log(12, 0)
s1.log(38, 0)
s1.log_error("Dummy exception")
s2 = StatsEntry(self.stats, "aggregate me!", "GET")
s2.log_error("Dummy exception")
s2.log_error("Dummy exception")
s2.log(12, 0)
s2.log(99, 0)
s2.log(14, 0)
s2.log(55, 0)
s2.log(38, 0)
s2.log(55, 0)
s2.log(97, 0)
s = StatsEntry(self.stats, "GET", "")
s.extend(s1)
s.extend(s2)
self.assertEqual(s.num_requests, 10)
self.assertEqual(s.num_failures, 3)
self.assertEqual(s.median_response_time, 38)
self.assertEqual(s.avg_response_time, 43.2)
def test_aggregation_with_rounding(self):
s1 = StatsEntry(self.stats, "round me!", "GET")
s1.log(122, 0) # (rounded 120) min
s1.log(992, 0) # (rounded 990) max
s1.log(142, 0) # (rounded 140)
s1.log(552, 0) # (rounded 550)
s1.log(557, 0) # (rounded 560)
s1.log(387, 0) # (rounded 390)
s1.log(557, 0) # (rounded 560)
s1.log(977, 0) # (rounded 980)
self.assertEqual(s1.num_requests, 8)
self.assertEqual(s1.median_response_time, 550)
self.assertEqual(s1.avg_response_time, 535.75)
self.assertEqual(s1.min_response_time, 122)
self.assertEqual(s1.max_response_time, 992)
def test_aggregation_with_decimal_rounding(self):
s1 = StatsEntry(self.stats, "round me!", "GET")
s1.log(1.1, 0)
s1.log(1.99, 0)
s1.log(3.1, 0)
self.assertEqual(s1.num_requests, 3)
self.assertEqual(s1.median_response_time, 2)
self.assertEqual(s1.avg_response_time, (1.1 + 1.99 + 3.1) / 3)
self.assertEqual(s1.min_response_time, 1.1)
self.assertEqual(s1.max_response_time, 3.1)
def test_aggregation_min_response_time(self):
s1 = StatsEntry(self.stats, "min", "GET")
s1.log(10, 0)
self.assertEqual(10, s1.min_response_time)
s2 = StatsEntry(self.stats, "min", "GET")
s1.extend(s2)
self.assertEqual(10, s1.min_response_time)
def test_aggregation_last_request_timestamp(self):
s1 = StatsEntry(self.stats, "r", "GET")
s2 = StatsEntry(self.stats, "r", "GET")
s1.extend(s2)
self.assertEqual(None, s1.last_request_timestamp)
s1 = StatsEntry(self.stats, "r", "GET")
s2 = StatsEntry(self.stats, "r", "GET")
s1.last_request_timestamp = 666
s1.extend(s2)
self.assertEqual(666, s1.last_request_timestamp)
s1 = StatsEntry(self.stats, "r", "GET")
s2 = StatsEntry(self.stats, "r", "GET")
s2.last_request_timestamp = 666
s1.extend(s2)
self.assertEqual(666, s1.last_request_timestamp)
s1 = StatsEntry(self.stats, "r", "GET")
s2 = StatsEntry(self.stats, "r", "GET")
s1.last_request_timestamp = 666
s1.last_request_timestamp = 700
s1.extend(s2)
self.assertEqual(700, s1.last_request_timestamp)
def test_percentile_rounded_down(self):
s1 = StatsEntry(self.stats, "rounding down!", "GET")
s1.log(122, 0) # (rounded 120) min
actual_percentile = s1.percentile().split()
self.assertEqual(actual_percentile, ["GET", "rounding", "down!"] + ["120"] * len(PERCENTILES_TO_REPORT) + ["1"])
def test_percentile_rounded_up(self):
s2 = StatsEntry(self.stats, "rounding up!", "GET")
s2.log(127, 0) # (rounded 130) min
actual_percentile = s2.percentile().split()
self.assertEqual(actual_percentile, ["GET", "rounding", "up!"] + ["130"] * len(PERCENTILES_TO_REPORT) + ["1"])
def test_custom_percentile_list(self):
s = StatsEntry(self.stats, "custom_percentiles", "GET")
custom_percentile_list = [0.50, 0.90, 0.95, 0.99]
locust.stats.PERCENTILES_TO_REPORT = custom_percentile_list
s.log(150, 0)
actual_percentile = s.percentile().split()
self.assertEqual(
actual_percentile, ["GET", "custom_percentiles"] + ["150"] * len(custom_percentile_list) + ["1"]
)
def test_error_grouping(self):
# reset stats
self.stats = RequestStats()
self.stats.log_error("GET", "/some-path", Exception("Exception!"))
self.stats.log_error("GET", "/some-path", Exception("Exception!"))
self.assertEqual(1, len(self.stats.errors))
self.assertEqual(2, list(self.stats.errors.values())[0].occurrences)
self.stats.log_error("GET", "/some-path", Exception("Another exception!"))
self.stats.log_error("GET", "/some-path", Exception("Another exception!"))
self.stats.log_error("GET", "/some-path", Exception("Third exception!"))
self.assertEqual(3, len(self.stats.errors))
def test_error_grouping_errors_with_memory_addresses(self):
# reset stats
self.stats = RequestStats()
class Dummy:
pass
self.stats.log_error("GET", "/", Exception("Error caused by %r" % Dummy()))
self.assertEqual(1, len(self.stats.errors))
def test_serialize_through_message(self):
"""
Serialize a RequestStats instance, then serialize it through a Message,
and unserialize the whole thing again. This is done "IRL" when stats are sent
from workers to master.
"""
s1 = StatsEntry(self.stats, "test", "GET")
s1.log(10, 0)
s1.log(20, 0)
s1.log(40, 0)
u1 = StatsEntry.unserialize(s1.serialize())
data = Message.unserialize(Message("dummy", s1.serialize(), "none").serialize()).data
u1 = StatsEntry.unserialize(data)
self.assertEqual(20, u1.median_response_time)
class TestStatsPrinting(LocustTestCase):
def test_print_percentile_stats(self):
stats = RequestStats()
for i in range(100):
stats.log_request("GET", "test_entry", i, 2000 + i)
locust.stats.print_percentile_stats(stats)
info = self.mocked_log.info
self.assertEqual(7, len(info))
# check that headline contains same number of column as the value rows
headlines = info[1].replace("# reqs", "#reqs").split()
self.assertEqual(len(headlines), len(info[3].split()))
self.assertEqual(len(headlines), len(info[5].split()))
class TestCsvStats(LocustTestCase):
STATS_BASE_NAME = "test"
STATS_FILENAME = "{}_stats.csv".format(STATS_BASE_NAME)
STATS_HISTORY_FILENAME = "{}_stats_history.csv".format(STATS_BASE_NAME)
STATS_FAILURES_FILENAME = "{}_failures.csv".format(STATS_BASE_NAME)
def setUp(self):
super().setUp()
self.remove_file_if_exists(self.STATS_FILENAME)
self.remove_file_if_exists(self.STATS_HISTORY_FILENAME)
self.remove_file_if_exists(self.STATS_FAILURES_FILENAME)
def tearDown(self):
self.remove_file_if_exists(self.STATS_FILENAME)
self.remove_file_if_exists(self.STATS_HISTORY_FILENAME)
self.remove_file_if_exists(self.STATS_FAILURES_FILENAME)
def remove_file_if_exists(self, filename):
if os.path.exists(filename):
os.remove(filename)
def test_write_csv_files(self):
_write_csv_files(self.environment, self.STATS_BASE_NAME)
self.assertTrue(os.path.exists(self.STATS_FILENAME))
self.assertTrue(os.path.exists(self.STATS_HISTORY_FILENAME))
self.assertTrue(os.path.exists(self.STATS_FAILURES_FILENAME))
def test_write_csv_files_full_history(self):
_write_csv_files(self.environment, self.STATS_BASE_NAME, full_history=True)
self.assertTrue(os.path.exists(self.STATS_FILENAME))
self.assertTrue(os.path.exists(self.STATS_HISTORY_FILENAME))
self.assertTrue(os.path.exists(self.STATS_FAILURES_FILENAME))
@mock.patch("locust.stats.CSV_STATS_INTERVAL_SEC", new=_TEST_CSV_STATS_INTERVAL_SEC)
def test_csv_stats_writer(self):
_write_csv_files(self.environment, self.STATS_BASE_NAME)
self.assertTrue(os.path.exists(self.STATS_FILENAME))
self.assertTrue(os.path.exists(self.STATS_HISTORY_FILENAME))
self.assertTrue(os.path.exists(self.STATS_FAILURES_FILENAME))
with open(self.STATS_HISTORY_FILENAME) as f:
reader = csv.DictReader(f)
rows = [r for r in reader]
self.assertEqual(2, len(rows))
self.assertEqual("Aggregated", rows[0]["Name"])
self.assertEqual("Aggregated", rows[1]["Name"])
@mock.patch("locust.stats.CSV_STATS_INTERVAL_SEC", new=_TEST_CSV_STATS_INTERVAL_SEC)
def test_csv_stats_writer_full_history(self):
stats_writer = StatsCSVFileWriter(
self.environment, PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True
)
self.runner.stats.log_request("GET", "/", 10, content_length=666)
greenlet = gevent.spawn(stats_writer)
gevent.sleep(_TEST_CSV_STATS_INTERVAL_WAIT_SEC)
gevent.kill(greenlet)
stats_writer.close_files()
self.assertTrue(os.path.exists(self.STATS_FILENAME))
self.assertTrue(os.path.exists(self.STATS_HISTORY_FILENAME))
self.assertTrue(os.path.exists(self.STATS_FAILURES_FILENAME))
with open(self.STATS_HISTORY_FILENAME) as f:
reader = csv.DictReader(f)
rows = [r for r in reader]
self.assertEqual(4, len(rows))
self.assertEqual("/", rows[0]["Name"])
self.assertEqual("Aggregated", rows[1]["Name"])
self.assertEqual("/", rows[2]["Name"])
self.assertEqual("Aggregated", rows[3]["Name"])
def test_csv_stats_on_master_from_aggregated_stats(self):
# Failing test for: https://github.com/locustio/locust/issues/1315
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
environment = Environment()
stats_writer = StatsCSVFileWriter(
environment, PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True
)
master = environment.create_master_runner(master_bind_host="*", master_bind_port=0)
greenlet = gevent.spawn(stats_writer)
gevent.sleep(_TEST_CSV_STATS_INTERVAL_WAIT_SEC)
server.mocked_send(Message("client_ready", None, "fake_client"))
master.stats.get("/", "GET").log(100, 23455)
master.stats.get("/", "GET").log(800, 23455)
master.stats.get("/", "GET").log(700, 23455)
data = {"user_count": 1}
environment.events.report_to_master.fire(client_id="fake_client", data=data)
master.stats.clear_all()
server.mocked_send(Message("stats", data, "fake_client"))
s = master.stats.get("/", "GET")
self.assertEqual(700, s.median_response_time)
gevent.kill(greenlet)
stats_writer.close_files()
self.assertTrue(os.path.exists(self.STATS_FILENAME))
self.assertTrue(os.path.exists(self.STATS_HISTORY_FILENAME))
self.assertTrue(os.path.exists(self.STATS_FAILURES_FILENAME))
@mock.patch("locust.stats.CSV_STATS_INTERVAL_SEC", new=_TEST_CSV_STATS_INTERVAL_SEC)
def test_user_count_in_csv_history_stats(self):
start_time = int(time.time())
class TestUser(User):
wait_time = constant(10)
@task
def t(self):
self.environment.runner.stats.log_request("GET", "/", 10, 10)
environment = Environment(user_classes=[TestUser])
stats_writer = StatsCSVFileWriter(environment, PERCENTILES_TO_REPORT, self.STATS_BASE_NAME, full_history=True)
runner = environment.create_local_runner()
runner.start(3, 5) # spawn a user every _TEST_CSV_STATS_INTERVAL_SEC second
gevent.sleep(0.1)
greenlet = gevent.spawn(stats_writer)
gevent.sleep(0.6)
gevent.kill(greenlet)
stats_writer.close_files()
runner.stop()
with open(self.STATS_HISTORY_FILENAME) as f:
reader = csv.DictReader(f)
rows = [r for r in reader]
self.assertEqual(6, len(rows))
for i in range(3):
row = rows.pop(0)
self.assertEqual("%i" % (i + 1), row["User Count"])
self.assertEqual("/", row["Name"])
self.assertEqual("%i" % (i + 1), row["Total Request Count"])
self.assertGreaterEqual(int(row["Timestamp"]), start_time)
row = rows.pop(0)
self.assertEqual("%i" % (i + 1), row["User Count"])
self.assertEqual("Aggregated", row["Name"])
self.assertEqual("%i" % (i + 1), row["Total Request Count"])
self.assertGreaterEqual(int(row["Timestamp"]), start_time)
def test_requests_csv_quote_escaping(self):
with mock.patch("locust.rpc.rpc.Server", mocked_rpc()) as server:
environment = Environment()
master = environment.create_master_runner(master_bind_host="*", master_bind_port=0)
server.mocked_send(Message("client_ready", None, "fake_client"))
request_name_dict = {
"scenario": "get cashes",
"path": "/cash/[amount]",
"arguments": [{"size": 1}],
}
request_name_str = json.dumps(request_name_dict)
master.stats.get(request_name_str, "GET").log(100, 23455)
data = {"user_count": 1}
environment.events.report_to_master.fire(client_id="fake_client", data=data)
master.stats.clear_all()
server.mocked_send(Message("stats", data, "fake_client"))
_write_csv_files(environment, self.STATS_BASE_NAME, full_history=True)
with open(self.STATS_FILENAME) as f:
reader = csv.DictReader(f)
rows = [r for r in reader]
csv_request_name = rows[0].get("Name")
self.assertEqual(request_name_str, csv_request_name)
def test_stats_history(self):
env1 = Environment(events=locust.events, catch_exceptions=False)
runner1 = env1.create_master_runner("127.0.0.1", 5558)
env2 = Environment(events=locust.events, catch_exceptions=False)
runner2 = env2.create_worker_runner("127.0.0.1", 5558)
greenlet1 = gevent.spawn(stats_history, runner1)
greenlet2 = gevent.spawn(stats_history, runner2)
gevent.sleep(1)
hs1 = runner1.stats.history
hs2 = runner2.stats.history
gevent.kill(greenlet1)
gevent.kill(greenlet2)
self.assertEqual(1, len(hs1))
self.assertEqual(0, len(hs2))
class TestStatsEntryResponseTimesCache(unittest.TestCase):
def setUp(self, *args, **kwargs):
super().setUp(*args, **kwargs)
self.stats = RequestStats()
def test_response_times_cached(self):
s = StatsEntry(self.stats, "/", "GET", use_response_times_cache=True)
self.assertEqual(1, len(s.response_times_cache))
s.log(11, 1337)
self.assertEqual(1, len(s.response_times_cache))
s.last_request_timestamp -= 1
s.log(666, 1337)
self.assertEqual(2, len(s.response_times_cache))
self.assertEqual(
CachedResponseTimes(
response_times={11: 1},
num_requests=1,
),
s.response_times_cache[int(s.last_request_timestamp) - 1],
)
def test_response_times_not_cached_if_not_enabled(self):
s = StatsEntry(self.stats, "/", "GET")
s.log(11, 1337)
self.assertEqual(None, s.response_times_cache)
s.last_request_timestamp -= 1
s.log(666, 1337)
self.assertEqual(None, s.response_times_cache)
def test_latest_total_response_times_pruned(self):
"""
Check that RequestStats.latest_total_response_times are pruned when exceeding 20 entries
"""
s = StatsEntry(self.stats, "/", "GET", use_response_times_cache=True)
t = int(time.time())
for i in reversed(range(2, 30)):
s.response_times_cache[t - i] = CachedResponseTimes(response_times={}, num_requests=0)
self.assertEqual(29, len(s.response_times_cache))
s.log(17, 1337)
s.last_request_timestamp -= 1
s.log(1, 1)
self.assertEqual(20, len(s.response_times_cache))
self.assertEqual(
CachedResponseTimes(response_times={17: 1}, num_requests=1),
s.response_times_cache.popitem(last=True)[1],
)
def test_get_current_response_time_percentile(self):
s = StatsEntry(self.stats, "/", "GET", use_response_times_cache=True)
t = int(time.time())
s.response_times_cache[t - 10] = CachedResponseTimes(
response_times={i: 1 for i in range(100)}, num_requests=200
)
s.response_times_cache[t - 10].response_times[1] = 201
s.response_times = {i: 2 for i in range(100)}
s.response_times[1] = 202
s.num_requests = 300
self.assertEqual(95, s.get_current_response_time_percentile(0.95))
def test_diff_response_times_dicts(self):
self.assertEqual(
{1: 5, 6: 8},
diff_response_time_dicts(
{1: 6, 6: 16, 2: 2},
{1: 1, 6: 8, 2: 2},
),
)
self.assertEqual(
{},
diff_response_time_dicts(
{},
{},
),
)
self.assertEqual(
{10: 15},
diff_response_time_dicts(
{10: 15},
{},
),
)
self.assertEqual(
{10: 10},
diff_response_time_dicts(
{10: 10},
{},
),
)
self.assertEqual(
{},
diff_response_time_dicts(
{1: 1},
{1: 1},
),
)
class TestStatsEntry(unittest.TestCase):
def parse_string_output(self, text):
tokenlist = re.split(r"[\s\(\)%|]+", text.strip())
tokens = {
"method": tokenlist[0],
"name": tokenlist[1],
"request_count": int(tokenlist[2]),
"failure_count": int(tokenlist[3]),
"failure_percentage": float(tokenlist[4]),
}
return tokens
def setUp(self, *args, **kwargs):
super().setUp(*args, **kwargs)
self.stats = RequestStats()
def test_fail_ratio_with_no_failures(self):
REQUEST_COUNT = 10
FAILURE_COUNT = 0
EXPECTED_FAIL_RATIO = 0.0
s = StatsEntry(self.stats, "/", "GET")
s.num_requests = REQUEST_COUNT
s.num_failures = FAILURE_COUNT
self.assertAlmostEqual(s.fail_ratio, EXPECTED_FAIL_RATIO)
output_fields = self.parse_string_output(str(s))
self.assertEqual(output_fields["request_count"], REQUEST_COUNT)
self.assertEqual(output_fields["failure_count"], FAILURE_COUNT)
self.assertAlmostEqual(output_fields["failure_percentage"], EXPECTED_FAIL_RATIO * 100)
def test_fail_ratio_with_all_failures(self):
REQUEST_COUNT = 10
FAILURE_COUNT = 10
EXPECTED_FAIL_RATIO = 1.0
s = StatsEntry(self.stats, "/", "GET")
s.num_requests = REQUEST_COUNT
s.num_failures = FAILURE_COUNT
self.assertAlmostEqual(s.fail_ratio, EXPECTED_FAIL_RATIO)
output_fields = self.parse_string_output(str(s))
self.assertEqual(output_fields["request_count"], REQUEST_COUNT)
self.assertEqual(output_fields["failure_count"], FAILURE_COUNT)
self.assertAlmostEqual(output_fields["failure_percentage"], EXPECTED_FAIL_RATIO * 100)
def test_fail_ratio_with_half_failures(self):
REQUEST_COUNT = 10
FAILURE_COUNT = 5
EXPECTED_FAIL_RATIO = 0.5
s = StatsEntry(self.stats, "/", "GET")
s.num_requests = REQUEST_COUNT
s.num_failures = FAILURE_COUNT
self.assertAlmostEqual(s.fail_ratio, EXPECTED_FAIL_RATIO)
output_fields = self.parse_string_output(str(s))
self.assertEqual(output_fields["request_count"], REQUEST_COUNT)
self.assertEqual(output_fields["failure_count"], FAILURE_COUNT)
self.assertAlmostEqual(output_fields["failure_percentage"], EXPECTED_FAIL_RATIO * 100)
class TestRequestStatsWithWebserver(WebserverTestCase):
def setUp(self):
super().setUp()
class MyUser(HttpUser):
host = "http://127.0.0.1:%i" % self.port
self.locust = MyUser(self.environment)
def test_request_stats_content_length(self):
self.locust.client.get("/ultra_fast")
self.assertEqual(
self.runner.stats.get("/ultra_fast", "GET").avg_content_length, len("This is an ultra fast response")
)
self.locust.client.get("/ultra_fast")
self.assertEqual(
self.runner.stats.get("/ultra_fast", "GET").avg_content_length, len("This is an ultra fast response")
)
def test_request_stats_no_content_length(self):
path = "/no_content_length"
self.locust.client.get(path)
self.assertEqual(
self.runner.stats.get(path, "GET").avg_content_length,
len("This response does not have content-length in the header"),
)
def test_request_stats_no_content_length_streaming(self):
path = "/no_content_length"
self.locust.client.get(path, stream=True)
self.assertEqual(0, self.runner.stats.get(path, "GET").avg_content_length)
def test_request_stats_named_endpoint(self):
self.locust.client.get("/ultra_fast", name="my_custom_name")
self.assertEqual(1, self.runner.stats.get("my_custom_name", "GET").num_requests)
def test_request_stats_query_variables(self):
self.locust.client.get("/ultra_fast?query=1")
self.assertEqual(1, self.runner.stats.get("/ultra_fast?query=1", "GET").num_requests)
def test_request_stats_put(self):
self.locust.client.put("/put")
self.assertEqual(1, self.runner.stats.get("/put", "PUT").num_requests)
def test_request_connection_error(self):
class MyUser(HttpUser):
host = "http://localhost:1"
locust = MyUser(self.environment)
response = locust.client.get("/", timeout=0.1)
self.assertEqual(response.status_code, 0)
self.assertEqual(1, self.runner.stats.get("/", "GET").num_failures)
self.assertEqual(1, self.runner.stats.get("/", "GET").num_requests)
class MyTaskSet(TaskSet):
@task(75)
def root_task(self):
pass
@task(25)
class MySubTaskSet(TaskSet):
@task
def task1(self):
pass
@task
def task2(self):
pass
class TestInspectUser(unittest.TestCase):
def test_get_task_ratio_dict_relative(self):
ratio = get_task_ratio_dict([MyTaskSet])
self.assertEqual(1.0, ratio["MyTaskSet"]["ratio"])
self.assertEqual(0.75, ratio["MyTaskSet"]["tasks"]["root_task"]["ratio"])
self.assertEqual(0.25, ratio["MyTaskSet"]["tasks"]["MySubTaskSet"]["ratio"])
self.assertEqual(0.5, ratio["MyTaskSet"]["tasks"]["MySubTaskSet"]["tasks"]["task1"]["ratio"])
self.assertEqual(0.5, ratio["MyTaskSet"]["tasks"]["MySubTaskSet"]["tasks"]["task2"]["ratio"])
def test_get_task_ratio_dict_total(self):
ratio = get_task_ratio_dict([MyTaskSet], total=True)
self.assertEqual(1.0, ratio["MyTaskSet"]["ratio"])
self.assertEqual(0.75, ratio["MyTaskSet"]["tasks"]["root_task"]["ratio"])
self.assertEqual(0.25, ratio["MyTaskSet"]["tasks"]["MySubTaskSet"]["ratio"])
self.assertEqual(0.125, ratio["MyTaskSet"]["tasks"]["MySubTaskSet"]["tasks"]["task1"]["ratio"])
self.assertEqual(0.125, ratio["MyTaskSet"]["tasks"]["MySubTaskSet"]["tasks"]["task2"]["ratio"])
|
baoboa/pyqt5
|
refs/heads/master
|
pyuic/uic/widget-plugins/qscintilla.py
|
3
|
#############################################################################
##
## Copyright (c) 2018 Riverbank Computing Limited <info@riverbankcomputing.com>
##
## This file is part of PyQt5.
##
## This file may be used under the terms of the GNU General Public License
## version 3.0 as published by the Free Software Foundation and appearing in
## the file LICENSE included in the packaging of this file. Please review the
## following information to ensure the GNU General Public License version 3.0
## requirements will be met: http://www.gnu.org/copyleft/gpl.html.
##
## If you do not wish to use this file under the terms of the GPL version 3.0
## then you may purchase a commercial license. For more information contact
## info@riverbankcomputing.com.
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
# If pluginType is MODULE, the plugin loader will call moduleInformation. The
# variable MODULE is inserted into the local namespace by the plugin loader.
pluginType = MODULE
# moduleInformation() must return a tuple (module, widget_list). If "module"
# is "A" and any widget from this module is used, the code generator will write
# "import A". If "module" is "A[.B].C", the code generator will write
# "from A[.B] import C". Each entry in "widget_list" must be unique.
def moduleInformation():
return "PyQt5.Qsci", ("QsciScintilla", )
|
alexbruy/QGIS
|
refs/heads/master
|
python/plugins/processing/algs/qgis/SetVectorStyle.py
|
3
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
SetVectorStyle.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.outputs import OutputVector
from processing.core.parameters import ParameterFile
from processing.tools import dataobjects
from qgis.utils import iface
class SetVectorStyle(GeoAlgorithm):
INPUT = 'INPUT'
STYLE = 'STYLE'
OUTPUT = 'OUTPUT'
def defineCharacteristics(self):
# self.allowOnlyOpenedLayers = True
self.name, self.i18n_name = self.trAlgorithm('Set style for vector layer')
self.group, self.i18n_group = self.trAlgorithm('Vector general tools')
self.addParameter(ParameterVector(self.INPUT,
self.tr('Vector layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addParameter(ParameterFile(self.STYLE,
self.tr('Style file'), False, False, 'qml'))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Styled'), True))
def processAlgorithm(self, progress):
filename = self.getParameterValue(self.INPUT)
style = self.getParameterValue(self.STYLE)
layer = dataobjects.getObjectFromUri(filename, False)
if layer is None:
dataobjects.load(filename, os.path.basename(filename), style=style)
self.getOutputFromName(self.OUTPUT).open = False
else:
layer.loadNamedStyle(style)
iface.mapCanvas().refresh()
iface.legendInterface().refreshLayerSymbology(layer)
layer.triggerRepaint()
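# Behaviour summary (derived from processAlgorithm above): if the input layer
# is not already loaded, it is opened with the .qml style applied at load
# time; otherwise the style is applied in place and the canvas, legend and
# layer are refreshed.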
|
RaoUmer/django
|
refs/heads/master
|
django/utils/hashcompat.py
|
124
|
"""
The md5 and sha modules are deprecated since Python 2.5, replaced by the
hashlib module containing both hash algorithms. Here, we provide a common
interface to the md5 and sha constructors, depending on system version.
"""
import warnings
warnings.warn("django.utils.hashcompat is deprecated; use hashlib instead",
DeprecationWarning)
import hashlib
md5_constructor = hashlib.md5
md5_hmac = md5_constructor
sha_constructor = hashlib.sha1
sha_hmac = sha_constructor
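# Usage sketch (illustrative only; both aliases are plain hashlib callables):
def _hashcompat_example():
    md5_digest = md5_constructor(b'hello').hexdigest()
    sha_digest = sha_constructor(b'hello').hexdigest()
    return md5_digest, sha_digest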
|
zjj/trac_hack
|
refs/heads/master
|
trac/admin/tests/__init__.py
|
8
|
import unittest
from trac.admin.tests import console
from trac.admin.tests.functional import functionalSuite
def suite():
suite = unittest.TestSuite()
suite.addTest(console.suite())
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
podhmo/boto
|
refs/heads/develop
|
boto/sts/credentials.py
|
153
|
# Copyright (c) 2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import os
import datetime
import boto.utils
from boto.compat import json
class Credentials(object):
"""
:ivar access_key: The AccessKeyID.
:ivar secret_key: The SecretAccessKey.
:ivar session_token: The session token that must be passed with
requests to use the temporary credentials
:ivar expiration: The timestamp for when the credentials will expire
"""
def __init__(self, parent=None):
self.parent = parent
self.access_key = None
self.secret_key = None
self.session_token = None
self.expiration = None
self.request_id = None
@classmethod
def from_json(cls, json_doc):
"""
Create and return a new Session Token based on the contents
of a JSON document.
:type json_doc: str
:param json_doc: A string containing a JSON document with a
previously saved Credentials object.
"""
d = json.loads(json_doc)
token = cls()
token.__dict__.update(d)
return token
@classmethod
def load(cls, file_path):
"""
Create and return a new Session Token based on the contents
of a previously saved JSON-format file.
:type file_path: str
:param file_path: The fully qualified path to the JSON-format
file containing the previously saved Session Token information.
"""
fp = open(file_path)
json_doc = fp.read()
fp.close()
return cls.from_json(json_doc)
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'AccessKeyId':
self.access_key = value
elif name == 'SecretAccessKey':
self.secret_key = value
elif name == 'SessionToken':
self.session_token = value
elif name == 'Expiration':
self.expiration = value
elif name == 'RequestId':
self.request_id = value
else:
pass
def to_dict(self):
"""
Return a Python dict containing the important information
about this Session Token.
"""
return {'access_key': self.access_key,
'secret_key': self.secret_key,
'session_token': self.session_token,
'expiration': self.expiration,
'request_id': self.request_id}
def save(self, file_path):
"""
Persist a Session Token to a file in JSON format.
:type file_path: str
:param file_path: The fully qualified path to the file where the
Session Token data should be written. Any previous
data in the file will be overwritten. To help protect
the credentials contained in the file, the permissions
of the file will be set to readable/writable by owner only.
"""
fp = open(file_path, 'w')
json.dump(self.to_dict(), fp)
fp.close()
os.chmod(file_path, 0o600)
def is_expired(self, time_offset_seconds=0):
"""
Checks to see if the Session Token is expired or not. By default
it will check to see if the Session Token is expired as of the
moment the method is called. However, you can supply an
optional parameter which is the number of seconds of offset
into the future for the check. For example, if you supply
a value of 5, this method will return True if the Session
Token will be expired 5 seconds from this moment.
:type time_offset_seconds: int
:param time_offset_seconds: The number of seconds into the future
to test the Session Token for expiration.
"""
now = datetime.datetime.utcnow()
if time_offset_seconds:
now = now + datetime.timedelta(seconds=time_offset_seconds)
ts = boto.utils.parse_ts(self.expiration)
delta = ts - now
return delta.total_seconds() <= 0
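# Typical round-trip (illustrative; the file path is hypothetical):
#
#   creds = Credentials.load('/tmp/sts_credentials.json')
#   if creds.is_expired(time_offset_seconds=60):
#       # the token lapses within a minute; request fresh credentials
#       ...
#   creds.save('/tmp/sts_credentials.json')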
class FederationToken(object):
"""
:ivar credentials: A Credentials object containing the credentials.
:ivar federated_user_arn: ARN specifying federated user using credentials.
:ivar federated_user_id: The ID of the federated user using credentials.
:ivar packed_policy_size: A percentage value indicating the size of
the policy in packed form
"""
def __init__(self, parent=None):
self.parent = parent
self.credentials = None
self.federated_user_arn = None
self.federated_user_id = None
self.packed_policy_size = None
self.request_id = None
def startElement(self, name, attrs, connection):
if name == 'Credentials':
self.credentials = Credentials()
return self.credentials
else:
return None
def endElement(self, name, value, connection):
if name == 'Arn':
self.federated_user_arn = value
elif name == 'FederatedUserId':
self.federated_user_id = value
elif name == 'PackedPolicySize':
self.packed_policy_size = int(value)
elif name == 'RequestId':
self.request_id = value
else:
pass
class AssumedRole(object):
"""
:ivar user: The assumed role user.
:ivar credentials: A Credentials object containing the credentials.
"""
def __init__(self, connection=None, credentials=None, user=None):
self._connection = connection
self.credentials = credentials
self.user = user
def startElement(self, name, attrs, connection):
if name == 'Credentials':
self.credentials = Credentials()
return self.credentials
elif name == 'AssumedRoleUser':
self.user = User()
return self.user
def endElement(self, name, value, connection):
pass
class User(object):
"""
:ivar arn: The arn of the user assuming the role.
:ivar assume_role_id: The identifier of the assumed role.
"""
def __init__(self, arn=None, assume_role_id=None):
self.arn = arn
self.assume_role_id = assume_role_id
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Arn':
self.arn = value
elif name == 'AssumedRoleId':
self.assume_role_id = value
class DecodeAuthorizationMessage(object):
"""
:ivar request_id: The request ID.
:ivar decoded_message: The decoded authorization message (may be JSON).
"""
def __init__(self, request_id=None, decoded_message=None):
self.request_id = request_id
self.decoded_message = decoded_message
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'requestId':
self.request_id = value
elif name == 'DecodedMessage':
self.decoded_message = value
|
yohei-washizaki/perlin
|
refs/heads/master
|
util.py
|
1
|
#!/usr/bin/env python3
import math
def normalize_vector(x, y):
length = math.sqrt(x * x + y * y)
if math.isclose(length, 0):
return (0, 0)
else:
inv = 1.0 / length
return (x * inv, y * inv)
def dot_vector(v1, v2):
return v1[0] * v2[0] + v1[1] * v2[1]
def fade(t):
return t * t * t * (t * (t * 6 - 15) + 10)
def lerp(a, b, t):
return a + t * (b - a)
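def _sanity_demo():
    # Illustrative checks for the helpers above (not part of the original
    # module): fade() is the Perlin ease curve 6t^5 - 15t^4 + 10t^3, which
    # pins both endpoints, and lerp() is plain linear interpolation.
    assert fade(0.0) == 0.0 and fade(1.0) == 1.0
    assert lerp(0.0, 10.0, 0.25) == 2.5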
def next_power_of_2(x):
if x == 1:
return 1
if x == 2:
return 4
return int(math.pow(2, math.ceil(math.log2(float(x)))))
def power_to_int(x):
return int(math.pow(x, 2))
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--powerof2", type=int, help="power of 2")
parser.add_argument("--power", type=int, help="power")
args = parser.parse_args()
if args.powerof2:
print(next_power_of_2(args.powerof2))
elif args.power:
print(power_to_int(args.power))
|
touilleMan/authomatic
|
refs/heads/master
|
tests/functional_tests/expected_values/bitly.py
|
4
|
import fixtures
import constants
from authomatic.providers import oauth2
conf = fixtures.get_configuration('bitly')
LINK = 'http://bitly.com/u/{0}'.format(conf.user_id)
PICTURE = 'http://bitly.com/u/{0}.png'.format(conf.user_id)
CONFIG = {
'pre_login_xpaths': [
'//*[@id="sign-in"]/div[2]/fieldset/div/a',
# '//*[@id="sign-in"]/div[3]/fieldset/label/a',
],
'login_xpath': '//*[@id="sign-in"]/div[3]/fieldset/label[1]/input',
'password_xpath': '//*[@id="sign-in"]/div[3]/fieldset/label[2]/input',
'consent_xpaths': [
'//*[@id="sign-in"]/div[3]/fieldset/input[2]',
'//*[@id="oauth_access"]/form/button[1]',
],
'consent_wait_seconds': 1,
'class_': oauth2.Bitly,
'scope': oauth2.Bitly.user_info_scope,
'user': {
'id': conf.user_id,
'email': None,
'username': conf.user_username_reverse,
'name': conf.user_name,
'first_name': None,
'last_name': None,
'nickname': None,
'birth_date': None,
'city': None,
'country': None,
'gender': None,
'link': LINK,
'locale': None,
'phone': None,
'picture': PICTURE,
'postal_code': None,
'timezone': None,
},
'content_should_contain': [
conf.user_id,
conf.user_username_reverse,
conf.user_name,
# User info JSON keys
'status_code', 'data', 'apiKey', 'domain_options', 'member_since',
'enterprise_permissions', 'has_master', 'profile_image',
'share_accounts', 'numeric_id', 'account_login', 'account_type',
'account_id', 'primary', 'visible', 'is_delegated', 'full_name',
'account_name', 'is_enterprise', 'tracking_domains',
'default_link_privacy', 'display_name', 'custom_short_domain',
'login', 'is_verified', 'profile_url', 'status_txt',
],
# Case insensitive
'content_should_not_contain': conf.no_phone + conf.no_birth_date +
conf.no_email + conf.no_location +
conf.no_gender + conf.no_locale +
conf.no_first_name + conf.no_last_name,
# True means that any truthy value is expected
'credentials': {
'token_type': None,
'provider_type_id': '2-2',
'_expiration_time': None,
'consumer_key': None,
'provider_id': None,
'consumer_secret': None,
'token': True,
'token_secret': None,
'_expire_in': True,
'provider_name': 'bitly',
'refresh_token': None,
'provider_type': 'authomatic.providers.oauth2.OAuth2',
'refresh_status': constants.CREDENTIALS_REFRESH_NOT_SUPPORTED,
},
}
|
Russell-IO/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/vmware/vmware_guest_snapshot_facts.py
|
15
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_guest_snapshot_facts
short_description: Gather facts about virtual machine's snapshots in vCenter
description:
- This module can be used to gather facts about virtual machine's snapshots.
version_added: 2.6
author:
- Abhijeet Kasurde (@akasurde) <akasurde@redhat.com>
notes:
- Tested on vSphere 6.0 and 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
name:
description:
- Name of the VM to work with.
- This is required if C(uuid) is not supplied.
uuid:
description:
- UUID of the instance to manage if known, this value is VMware's unique identifier.
- This is required if C(name) is not supplied.
- The C(folder) is ignored, if C(uuid) is provided.
folder:
description:
- Destination folder, absolute or relative path to find an existing guest.
- This is required only if multiple virtual machines with the same name are found on the given vCenter.
- The folder should include the datacenter. ESX's datacenter is ha-datacenter.
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
datacenter:
description:
- Name of the datacenter.
required: True
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Gather facts about the virtual machine in given vCenter
vmware_guest_snapshot_facts:
hostname: 192.168.1.209
username: administrator@vsphere.local
password: vmware
datacenter: datacenter_name
name: dummy_vm
delegate_to: localhost
register: snapshot_facts
'''
RETURN = """
guest_snapshots:
description: metadata about the snapshot facts
returned: always
type: dict
sample: {
"current_snapshot": {
"creation_time": "2018-02-10T14:48:31.999459+00:00",
"description": "",
"id": 28,
"name": "snap_0003",
"state": "poweredOff"
},
"snapshots": [
{
"creation_time": "2018-02-10T14:48:31.999459+00:00",
"description": "",
"id": 28,
"name": "snap_0003",
"state": "poweredOff"
}
]
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, list_snapshots, vmware_argument_spec
class PyVmomiHelper(PyVmomi):
def __init__(self, module):
super(PyVmomiHelper, self).__init__(module)
@staticmethod
def gather_guest_snapshot_facts(vm_obj=None):
"""
Return snapshot-related facts about the given virtual machine.
Args:
vm_obj: Virtual Machine Managed object
Returns: Dictionary containing snapshot facts
"""
if vm_obj is None:
return {}
return list_snapshots(vm=vm_obj)
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
name=dict(type='str'),
uuid=dict(type='str'),
folder=dict(type='str'),
datacenter=dict(required=True, type='str'),
)
module = AnsibleModule(argument_spec=argument_spec,
required_together=[['name', 'folder']],
required_one_of=[['name', 'uuid']],
)
if module.params['folder']:
# FindByInventoryPath() does not require an absolute path
# so we should leave the input folder path unmodified
module.params['folder'] = module.params['folder'].rstrip('/')
pyv = PyVmomiHelper(module)
# Check if the VM exists before continuing
vm = pyv.get_vm()
if not vm:
# If UUID is set, get_vm selects by UUID; adjust the error message accordingly.
module.fail_json(msg="Unable to gather facts about snapshots for"
" non-existing VM ['%s']" % (module.params.get('uuid') or
module.params.get('name')))
results = dict(changed=False, guest_snapshots=pyv.gather_guest_snapshot_facts(vm_obj=vm))
module.exit_json(**results)
if __name__ == '__main__':
main()
|
tictakk/servo
|
refs/heads/ticbranch
|
tests/wpt/web-platform-tests/html/semantics/embedded-content/media-elements/track/track-element/cors/support/cors-tester.py
|
238
|
from wptserve.handlers import HTTPException
import urllib
def main(request, response):
if request.method != "GET":
raise HTTPException(400, message="Method was not GET")
if not "id" in request.GET:
raise HTTPException(400, message="No id")
id = request.GET['id']
if "read" in request.GET:
data = request.server.stash.take(id)
if data is None:
response.set_error(404, "Tried to read data not yet set")
return
return [("Content-Type", "text/plain")], data
elif "cleanup" in request.GET:
request.server.stash.take(id)
return "OK"
elif "delete-cookie" in request.GET:
response.delete_cookie(id)
return [("Content-Type", "text/plain")], "OK"
if "origin" in request.GET:
response.headers.set('Access-Control-Allow-Origin', request.GET['origin'])
response.headers.set('Access-Control-Allow-Credentials', 'true')
cors = request.headers.get("origin", "no")
cookie = request.cookies.first(id, "no")
line = 'cors = ' + cors + ' | cookie = ' + cookie.value
data = request.server.stash.take(id)
if data is not None:
line = data + "\n" + line
request.server.stash.put(id, line)
if "redirect" in request.GET:
response.status = 302
response.headers.set('Location', request.GET['redirect'])
else:
return """WEBVTT
00:00:00.000 --> 00:00:10.000
Test"""
|
magnunor/hyperspy
|
refs/heads/RELEASE_next_minor
|
hyperspy/ui_registry.py
|
1
|
'''Registry of user interface widgets.
Format {"tool_key" : {"toolkit" : <function(obj, display, **kwargs)>}}
The ``tool_key`` is defined by the "model function" to which the widget
provides a user interface. That function gets the widget function from this
registry and executes it, passing the ``obj``, ``display`` and any extra
keyword arguments. When ``display`` is True, the function displays the
widget. If ``False``, it returns a dictionary with whatever is needed to
display the widgets externally (usually for testing or customisation
purposes).
'''
import functools
import types
from hyperspy.misc.utils import isiterable
UI_REGISTRY = {}
TOOLKIT_REGISTRY = set()
KNOWN_TOOLKITS = set(("ipywidgets", "traitsui"))
def register_widget(toolkit, toolkey):
"""Decorator to register a UI widget.
Parameters
----------
f: function
Function that returns or displays the UI widget. The signature must
include ``obj``, ``display`` and ``**kwargs``.
toolkit: string
The name of the widget toolkit e.g. ipywidgets
toolkey: string
The "key" of the tool for which the widget provides an interface. If
the toolkey is not in the ``UI_REGISTRY`` dictionary a ``NameError``
is raised.
Returns
-------
widgets: dictionary or None
Dictionary containing the widget objects if display is False, else None.
"""
if toolkey not in UI_REGISTRY:
raise NameError("%s is not a registered toolkey" % toolkey)
TOOLKIT_REGISTRY.add(toolkit)
def decorator(f):
UI_REGISTRY[toolkey][toolkit] = f
return f
return decorator
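# Illustrative registration (a sketch; the widget body is hypothetical, the
# toolkey is one registered at the bottom of this module):
#
#   @register_widget(toolkit="ipywidgets", toolkey="navigation_sliders")
#   def navigation_sliders_ipy(obj, display=True, **kwargs):
#       ...build the widget; show it if display is True, else return it...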
def register_toolkey(toolkey):
"""Register a toolkey.
Parameters
----------
toolkey: string
"""
if toolkey in UI_REGISTRY:
raise NameError(
"Another tool has been registered with the same name.")
UI_REGISTRY[toolkey] = {}
def _toolkits_to_string(toolkits):
if isinstance(toolkits, str):
return "{} toolkit".format(toolkits)
else:
toolkits = tuple(toolkits)
if len(toolkits) == 1:
return "{} toolkit".format(toolkits[0])
elif len(toolkits) == 2:
return " and ".join(toolkits) + " toolkits"
else: # > 2
txt = ", ".join(toolkits[:-1])
return txt + " and {}".format(toolkits[-1]) + " toolkits"
def get_gui(self, toolkey, display=True, toolkit=None, **kwargs):
if not TOOLKIT_REGISTRY:
raise ImportError(
"No toolkit registered. Install hyperspy_gui_ipywidgets or "
"hyperspy_gui_traitsui GUI elements. If hyperspy_gui_traits"
"is installed, initialize a toolkit supported by traitsui "
"before importing HyperSpy."
)
from hyperspy.defaults_parser import preferences
if isinstance(toolkit, str):
toolkit = (toolkit,)
if isiterable(toolkit):
toolkits = set()
for tk in toolkit:
if tk in TOOLKIT_REGISTRY:
toolkits.add(tk)
else:
raise ValueError(
"{} is not a registered toolkit.".format(tk)
)
elif toolkit is None:
toolkits = set()
available_disabled_toolkits = set()
if "ipywidgets" in TOOLKIT_REGISTRY:
if preferences.GUIs.enable_ipywidgets_gui:
toolkits.add("ipywidgets")
else:
available_disabled_toolkits.add("ipywidgets")
if "traitsui" in TOOLKIT_REGISTRY:
if preferences.GUIs.enable_traitsui_gui:
toolkits.add("traitsui")
else:
available_disabled_toolkits.add("traitsui")
if not toolkits and available_disabled_toolkits:
is_or_are = "is" if len(
available_disabled_toolkits) == 1 else "are"
them_or_it = ("it" if len(available_disabled_toolkits) == 1
else "them")
raise ValueError(
"No toolkit available. The {} {} installed but "
"disabled in `preferences`. Enable {} in `preferences` or "
"manually select a toolkit with the `toolkit` argument.".format(
_toolkits_to_string(available_disabled_toolkits),
is_or_are, them_or_it)
)
else:
raise ValueError(
"`toolkit` must be a string, an iterable of strings or None.")
if toolkey not in UI_REGISTRY or not UI_REGISTRY[toolkey]:
propose = KNOWN_TOOLKITS - TOOLKIT_REGISTRY
if propose:
propose = ["hyperspy_gui_{}".format(tk) for tk in propose]
if len(propose) > 1:
propose_ = ", ".join(propose[:-1])
propose = propose_ + " and/or {}".format(propose[-1])
else:
propose = propose.pop()
raise NotImplementedError(
"There is no user interface registered for this feature."
"Try installing {}.".format(propose))
if not display:
widgets = {}
available_toolkits = set()
used_toolkits = set()
for toolkit, f in UI_REGISTRY[toolkey].items():
if toolkit in toolkits:
used_toolkits.add(toolkit)
thisw = f(obj=self, display=display, **kwargs)
if not display:
widgets[toolkit] = thisw
else:
available_toolkits.add(toolkit)
if not used_toolkits and available_toolkits:
is_or_are = "is" if len(toolkits) == 1 else "are"
raise NotImplementedError(
"The {} {} not available for this functionality,try with "
"the {}.".format(
_toolkits_to_string(toolkits),
is_or_are,
_toolkits_to_string(available_toolkits)))
if not display:
return widgets
def get_partial_gui(toolkey):
def pg(self, display=True, toolkit=None, **kwargs):
return get_gui(self, toolkey=toolkey, display=display,
toolkit=toolkit, **kwargs)
return pg
DISPLAY_DT = """display: bool
If True, display the user interface widgets. If False, return the widgets
container in a dictionary, usually for customisation or testing."""
TOOLKIT_DT = """toolkit: str, iterable of strings or None
If None (default), all available widgets are displayed or returned. If
string, only the widgets of the selected toolkit are displayed if available.
If an iterable of toolkit strings, the widgets of all listed toolkits are
displayed or returned."""
GUI_DT = """Display or return interactive GUI element if available.
Parameters
----------
%s
%s
""" % (DISPLAY_DT, TOOLKIT_DT)
def add_gui_method(toolkey):
def decorator(cls):
register_toolkey(toolkey)
# Not using functools.partialmethod because it is not possible to set
# the docstring that way.
setattr(cls, "gui", get_partial_gui(toolkey))
setattr(cls.gui, "__doc__", GUI_DT)
return cls
return decorator
register_toolkey("interactive_range_selector")
register_toolkey("navigation_sliders")
register_toolkey("load")
|
yfried/ansible
|
refs/heads/devel
|
test/sanity/validate-modules/utils.py
|
35
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Matt Martz <matt@sivel.net>
# Copyright (C) 2015 Rackspace US, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ast
import sys
from io import BytesIO, TextIOWrapper
import yaml
import yaml.reader
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
class AnsibleTextIOWrapper(TextIOWrapper):
def write(self, s):
super(AnsibleTextIOWrapper, self).write(to_text(s, self.encoding, errors='replace'))
def find_globals(g, tree):
"""Uses AST to find globals in an ast tree"""
for child in tree:
if hasattr(child, 'body') and isinstance(child.body, list):
find_globals(g, child.body)
elif isinstance(child, (ast.FunctionDef, ast.ClassDef)):
g.add(child.name)
continue
elif isinstance(child, ast.Assign):
try:
g.add(child.targets[0].id)
except (IndexError, AttributeError):
pass
elif isinstance(child, ast.Import):
g.add(child.names[0].name)
elif isinstance(child, ast.ImportFrom):
for name in child.names:
g_name = name.asname or name.name
if g_name == '*':
continue
g.add(g_name)
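# Illustrative sketch, not part of the original file: collect names assigned
# or imported in a parsed module (the snippet below is hypothetical).
# names = set()
# find_globals(names, ast.parse("import os\nVERSION = '1.0'\n").body)
# # names now contains 'os' and 'VERSION'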
class CaptureStd():
"""Context manager to handle capturing stderr and stdout"""
def __enter__(self):
self.sys_stdout = sys.stdout
self.sys_stderr = sys.stderr
sys.stdout = self.stdout = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stdout.encoding)
sys.stderr = self.stderr = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stderr.encoding)
return self
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.sys_stdout
sys.stderr = self.sys_stderr
def get(self):
"""Return ``(stdout, stderr)``"""
return self.stdout.buffer.getvalue(), self.stderr.buffer.getvalue()
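# Hedged usage sketch, not part of the original file (a flush may be needed
# before reading, since TextIOWrapper buffers):
# with CaptureStd() as capture:
#     print('hello')
#     sys.stdout.flush()
# stdout, stderr = capture.get()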
def parse_yaml(value, lineno, module, name, load_all=False):
traces = []
errors = []
data = None
if load_all:
loader = yaml.safe_load_all
else:
loader = yaml.safe_load
try:
data = loader(value)
if load_all:
data = list(data)
except yaml.MarkedYAMLError as e:
e.problem_mark.line += lineno - 1
e.problem_mark.name = '%s.%s' % (module, name)
errors.append({
'msg': '%s is not valid YAML' % name,
'line': e.problem_mark.line + 1,
'column': e.problem_mark.column + 1
})
traces.append(e)
except yaml.reader.ReaderError as e:
traces.append(e)
# TODO: Better line/column detection
errors.append({
'msg': ('%s is not valid YAML. Character '
'0x%x at position %d.' % (name, e.character, e.position)),
'line': lineno
})
except yaml.YAMLError as e:
traces.append(e)
errors.append({
'msg': '%s is not valid YAML: %s: %s' % (name, type(e), e),
'line': lineno
})
return data, errors, traces
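# Illustrative sketch, not part of the original file; the module and section
# names are hypothetical.
# data, errors, traces = parse_yaml('short_description: demo', 1,
#                                   'my_module', 'DOCUMENTATION')
# # data == {'short_description': 'demo'}, errors == [], traces == []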
def is_empty(value):
"""Evaluate null like values excluding False"""
if value is False:
return False
return not bool(value)
def compare_unordered_lists(a, b):
"""Safe list comparisons
Supports:
- unordered lists
- unhashable elements
"""
return len(a) == len(b) and all(x in b for x in a)
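# For example (illustrative, not in the original file):
# compare_unordered_lists([{'a': 1}, 2], [2, {'a': 1}])  # True
# compare_unordered_lists([1, 2], [1, 3])                # False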
class NoArgsAnsibleModule(AnsibleModule):
"""AnsibleModule that does not actually load params. This is used to get access to the
methods within AnsibleModule without having to fake a bunch of data
"""
def _load_params(self):
self.params = {'_ansible_selinux_special_fs': [], '_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False, '_ansible_check_mode': False}
|
AyoubZahid/odoo
|
refs/heads/9.0
|
openerp/addons/test_workflow/tests/test_workflow.py
|
392
|
# -*- coding: utf-8 -*-
import openerp
from openerp import SUPERUSER_ID
from openerp.tests import common
class test_workflows(common.TransactionCase):
def check_activities(self, model_name, i, names):
""" Check that the record i has workitems in the given activity names.
"""
instance = self.registry('workflow.instance')
workitem = self.registry('workflow.workitem')
# Given the workflow instance associated to the record ...
instance_id = instance.search(
self.cr, SUPERUSER_ID,
[('res_type', '=', model_name), ('res_id', '=', i)])
self.assertTrue(instance_id, 'A workflow instance is expected.')
# ... get all its workitems ...
workitem_ids = workitem.search(
self.cr, SUPERUSER_ID,
[('inst_id', '=', instance_id[0])])
self.assertTrue(
workitem_ids,
'The workflow instance should have workitems.')
# ... and check the activities they are in against the provided names.
workitem_records = workitem.browse(
self.cr, SUPERUSER_ID, workitem_ids)
self.assertEqual(
sorted([item.act_id.name for item in workitem_records]),
sorted(names))
def check_value(self, model_name, i, value):
""" Check that the record i has the given value.
"""
model = self.registry(model_name)
record = model.read(self.cr, SUPERUSER_ID, [i], ['value'])[0]
self.assertEqual(record['value'], value)
def test_workflow(self):
model = self.registry('test.workflow.model')
trigger = self.registry('test.workflow.trigger')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
# a -> b is just a signal.
model.signal_workflow(self.cr, SUPERUSER_ID, [i], 'a-b')
self.check_activities(model._name, i, ['b'])
# b -> c is a trigger (which is False),
# so we remain in the b activity.
model.trigger(self.cr, SUPERUSER_ID)
self.check_activities(model._name, i, ['b'])
# b -> c is a trigger (which is set to True).
# so we go in c when the trigger is called.
trigger.write(self.cr, SUPERUSER_ID, [1], {'value': True})
model.trigger(self.cr, SUPERUSER_ID)
self.check_activities(model._name, i, ['c'])
self.assertEqual(
True,
True)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_a(self):
model = self.registry('test.workflow.model.a')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 0)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_b(self):
model = self.registry('test.workflow.model.b')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_c(self):
model = self.registry('test.workflow.model.c')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 0)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_d(self):
model = self.registry('test.workflow.model.d')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_e(self):
model = self.registry('test.workflow.model.e')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['b'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_f(self):
model = self.registry('test.workflow.model.f')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.signal_workflow(self.cr, SUPERUSER_ID, [i], 'a-b')
self.check_activities(model._name, i, ['b'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_g(self):
model = self.registry('test.workflow.model.g')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_h(self):
model = self.registry('test.workflow.model.h')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['b', 'c'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_i(self):
model = self.registry('test.workflow.model.i')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['b'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_j(self):
model = self.registry('test.workflow.model.j')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['a'])
self.check_value(model._name, i, 1)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_k(self):
model = self.registry('test.workflow.model.k')
i = model.create(self.cr, SUPERUSER_ID, {})
# Non-deterministic: can be b or c
# self.check_activities(model._name, i, ['b'])
# self.check_activities(model._name, i, ['c'])
self.check_value(model._name, i, 2)
model.unlink(self.cr, SUPERUSER_ID, [i])
def test_workflow_l(self):
model = self.registry('test.workflow.model.l')
i = model.create(self.cr, SUPERUSER_ID, {})
self.check_activities(model._name, i, ['c', 'c', 'd'])
self.check_value(model._name, i, 3)
model.unlink(self.cr, SUPERUSER_ID, [i])
|
tamac-io/jenkins-job-builder
|
refs/heads/master
|
tests/cmd/test_cmd.py
|
11
|
import os
from jenkins_jobs.cli import entry
from tests import base
from tests.base import mock
class CmdTestsBase(base.BaseTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
def setUp(self):
super(CmdTestsBase, self).setUp()
# Testing the cmd module can sometimes result in the JobCache class
# attempting to create the cache directory multiple times as the tests
# are run in parallel. Stub out the JobCache to ensure that each
# test can safely create the cache directory without risk of
# interference.
cache_patch = mock.patch('jenkins_jobs.builder.JobCache',
autospec=True)
self.cache_mock = cache_patch.start()
self.addCleanup(cache_patch.stop)
self.default_config_file = os.path.join(self.fixtures_path,
'empty_builder.ini')
def execute_jenkins_jobs_with_args(self, args):
jenkins_jobs = entry.JenkinsJobs(args)
jenkins_jobs.execute()
class TestCmd(CmdTestsBase):
def test_with_empty_args(self):
"""
User passes no args, should fail with SystemExit
"""
with mock.patch('sys.stderr'):
self.assertRaises(SystemExit, entry.JenkinsJobs, [])
|
johnny-bui/pygments-sablecc
|
refs/heads/master
|
pygments/styles/friendly.py
|
75
|
# -*- coding: utf-8 -*-
"""
pygments.styles.friendly
~~~~~~~~~~~~~~~~~~~~~~~~
A modern style based on the VIM pyte theme.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class FriendlyStyle(Style):
"""
A modern style based on the VIM pyte theme.
"""
background_color = "#f0f0f0"
default_style = ""
styles = {
Whitespace: "#bbbbbb",
Comment: "italic #60a0b0",
Comment.Preproc: "noitalic #007020",
Comment.Special: "noitalic bg:#fff0f0",
Keyword: "bold #007020",
Keyword.Pseudo: "nobold",
Keyword.Type: "nobold #902000",
Operator: "#666666",
Operator.Word: "bold #007020",
Name.Builtin: "#007020",
Name.Function: "#06287e",
Name.Class: "bold #0e84b5",
Name.Namespace: "bold #0e84b5",
Name.Exception: "#007020",
Name.Variable: "#bb60d5",
Name.Constant: "#60add5",
Name.Label: "bold #002070",
Name.Entity: "bold #d55537",
Name.Attribute: "#4070a0",
Name.Tag: "bold #062873",
Name.Decorator: "bold #555555",
String: "#4070a0",
String.Doc: "italic",
String.Interpol: "italic #70a0d0",
String.Escape: "bold #4070a0",
String.Regex: "#235388",
String.Symbol: "#517918",
String.Other: "#c65d09",
Number: "#40a070",
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
Generic.Deleted: "#A00000",
Generic.Inserted: "#00A000",
Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold #c65d09",
Generic.Output: "#888",
Generic.Traceback: "#04D",
Error: "border:#FF0000"
}
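# Hedged usage sketch, not part of the original module:
# from pygments import highlight
# from pygments.lexers import PythonLexer
# from pygments.formatters import HtmlFormatter
# print(highlight('print(1)', PythonLexer(), HtmlFormatter(style=FriendlyStyle)))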
|
google-code/anntools
|
refs/heads/master
|
anntools/common.py
|
3
|
#!/usr/bin/python
# -*- coding: ascii -*-
'''
Common utilities.
(C) 2007-2008 - Viktor Ferenczi (python@cx.hu) - Licence: GNU LGPL
'''
#============================================================================
import sys
#============================================================================
__all__ = ['wraps', 'get_function_argument_names']
#============================================================================
# Function wrapper decorator
if sys.version_info[:2]<(2,5):
# Compatible implementation for Python versions <2.5
def wraps(wrapped):
'''Wrap a function preserving the module, name and docstring of the
original one. This function is only a simple replacement for the
wraps function defined in Python 2.5's functools module that is
missing from Python 2.4. It is not intended for external use.
@param wrapped: original (wrapped) function whose module, name and docstring are copied onto the wrapper
'''
def update_wrapper(wrapper):
for attr in ('__module__', '__name__', '__doc__'):
setattr(wrapper, attr, getattr(wrapped, attr))
wrapper.__dict__.update(getattr(wrapped, '__dict__', {}))
return wrapper
return update_wrapper
else:
# Use the implementation from the standard library
from functools import wraps
#============================================================================
# Retrieving the list of argument names for a function
if sys.version_info[:2]<(3,0):
from inspect import getargspec
def get_function_argument_names(fn):
return getargspec(fn)[0]
else:
from inspect import getfullargspec
def get_function_argument_names(fn):
return getfullargspec(fn)[0]
#============================================================================
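# Illustrative sketch, not part of the original module:
# def f(a, b, c=1):
#     pass
# get_function_argument_names(f)  # -> ['a', 'b', 'c']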
|
pratikmallya/hue
|
refs/heads/master
|
desktop/core/ext-py/pysqlite/doc/includes/sqlite3/text_factory.py
|
49
|
from pysqlite2 import dbapi2 as sqlite3
con = sqlite3.connect(":memory:")
cur = con.cursor()
# Create the table
con.execute("create table person(lastname, firstname)")
AUSTRIA = u"\xd6sterreich"
# by default, rows are returned as Unicode
cur.execute("select ?", (AUSTRIA,))
row = cur.fetchone()
assert row[0] == AUSTRIA
# but we can make pysqlite always return bytestrings ...
con.text_factory = str
cur.execute("select ?", (AUSTRIA,))
row = cur.fetchone()
assert type(row[0]) == str
# the bytestrings will be encoded in UTF-8, unless you stored garbage in the
# database ...
assert row[0] == AUSTRIA.encode("utf-8")
# we can also implement a custom text_factory ...
# here we implement one that will ignore Unicode characters that cannot be
# decoded from UTF-8
con.text_factory = lambda x: unicode(x, "utf-8", "ignore")
cur.execute("select ?", ("this is latin1 and would normally create errors" + u"\xe4\xf6\xfc".encode("latin1"),))
row = cur.fetchone()
assert type(row[0]) == unicode
# pysqlite offers a builtin optimized text_factory that will return bytestring
# objects, if the data is in ASCII only, and otherwise return unicode objects
con.text_factory = sqlite3.OptimizedUnicode
cur.execute("select ?", (AUSTRIA,))
row = cur.fetchone()
assert type(row[0]) == unicode
cur.execute("select ?", ("Germany",))
row = cur.fetchone()
assert type(row[0]) == str
|
Microvellum/Fluid-Designer
|
refs/heads/master
|
win64-vc/2.78/scripts/templates_py/ui_menu.py
|
8
|
import bpy
class CustomMenu(bpy.types.Menu):
bl_label = "Custom Menu"
bl_idname = "OBJECT_MT_custom_menu"
def draw(self, context):
layout = self.layout
layout.operator("wm.open_mainfile")
layout.operator("wm.save_as_mainfile").copy = True
layout.operator("object.shade_smooth")
layout.label(text="Hello world!", icon='WORLD_DATA')
# use an operator enum property to populate a sub-menu
layout.operator_menu_enum("object.select_by_type",
property="type",
text="Select All by Type...",
)
# call another menu
layout.operator("wm.call_menu", text="Unwrap").name = "VIEW3D_MT_uv_map"
def draw_item(self, context):
layout = self.layout
layout.menu(CustomMenu.bl_idname)
def register():
bpy.utils.register_class(CustomMenu)
# let's add ourselves to the main header
bpy.types.INFO_HT_header.append(draw_item)
def unregister():
bpy.utils.unregister_class(CustomMenu)
bpy.types.INFO_HT_header.remove(draw_item)
if __name__ == "__main__":
register()
# The menu can also be called from scripts
bpy.ops.wm.call_menu(name=CustomMenu.bl_idname)
|
rtucker-mozilla/inventory
|
refs/heads/master
|
api_v3/models.py
|
10644
|
from django.db import models
# Create your models here.
|
febinstephen/python_koans_solutions
|
refs/heads/master
|
python3/runner/mockable_test_result.py
|
278
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
# Needed to stop unittest.TestResult itself getting Mocked out of existence,
# which is a problem when testing the helper classes! (It confuses the runner)
class MockableTestResult(unittest.TestResult):
pass
|
mbrondani/django-scheduler
|
refs/heads/develop
|
schedule/models/events.py
|
2
|
from __future__ import division, unicode_literals
from six.moves.builtins import object
from six import with_metaclass
# -*- coding: utf-8 -*-
from django.conf import settings as django_settings
import pytz
from dateutil import rrule
from django.contrib.contenttypes import fields
from django.db import models
from django.db.models.base import ModelBase
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.template.defaultfilters import date
from django.utils.translation import ugettext, ugettext_lazy as _
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from schedule.conf import settings
from schedule.models.rules import Rule
from schedule.models.calendars import Calendar
from schedule.utils import OccurrenceReplacer
from schedule.utils import get_model_bases
class EventManager(models.Manager):
def get_for_object(self, content_object, distinction=None, inherit=True):
return EventRelation.objects.get_events_for_object(content_object, distinction, inherit)
@python_2_unicode_compatible
class Event(with_metaclass(ModelBase, *get_model_bases())):
'''
This model stores meta data for a date. You can relate this data to many
other models.
'''
start = models.DateTimeField(_("start"))
end = models.DateTimeField(_("end"), help_text=_("The end time must be later than the start time."))
title = models.CharField(_("title"), max_length=255)
description = models.TextField(_("description"), null=True, blank=True)
creator = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, verbose_name=_("creator"),
related_name='creator')
created_on = models.DateTimeField(_("created on"), auto_now_add=True)
updated_on = models.DateTimeField(_("updated on"), auto_now=True)
rule = models.ForeignKey(Rule, null=True, blank=True, verbose_name=_("rule"),
help_text=_("Select '----' for a one time only event."))
end_recurring_period = models.DateTimeField(_("end recurring period"), null=True, blank=True,
help_text=_("This date is ignored for one time only events."))
calendar = models.ForeignKey(Calendar, null=True, blank=True, verbose_name=_("calendar"))
objects = EventManager()
class Meta(object):
verbose_name = _('event')
verbose_name_plural = _('events')
app_label = 'schedule'
def __str__(self):
return ugettext('%(title)s: %(start)s - %(end)s') % {
'title': self.title,
'start': date(self.start, django_settings.DATE_FORMAT),
'end': date(self.end, django_settings.DATE_FORMAT),
}
@property
def seconds(self):
return (self.end - self.start).total_seconds()
@property
def minutes(self):
return float(self.seconds) / 60
@property
def hours(self):
return float(self.seconds) / 3600
def get_absolute_url(self):
return reverse('event', args=[self.id])
def get_occurrences(self, start, end):
"""
>>> rule = Rule(frequency = "MONTHLY", name = "Monthly")
>>> rule.save()
>>> event = Event(rule=rule, start=datetime.datetime(2008,1,1,tzinfo=pytz.utc), end=datetime.datetime(2008,1,2))
>>> event.rule
<Rule: Monthly>
>>> occurrences = event.get_occurrences(datetime.datetime(2008,1,24), datetime.datetime(2008,3,2))
>>> ["%s to %s" %(o.start, o.end) for o in occurrences]
['2008-02-01 00:00:00+00:00 to 2008-02-02 00:00:00+00:00', '2008-03-01 00:00:00+00:00 to 2008-03-02 00:00:00+00:00']
Ensure that if an event has no rule, that it appears only once.
>>> event = Event(start=datetime.datetime(2008,1,1,8,0), end=datetime.datetime(2008,1,1,9,0))
>>> occurrences = event.get_occurrences(datetime.datetime(2008,1,24), datetime.datetime(2008,3,2))
>>> ["%s to %s" %(o.start, o.end) for o in occurrences]
[]
"""
persisted_occurrences = self.occurrence_set.all()
occ_replacer = OccurrenceReplacer(persisted_occurrences)
occurrences = self._get_occurrence_list(start, end)
final_occurrences = []
for occ in occurrences:
# replace occurrences with their persisted counterparts
if occ_replacer.has_occurrence(occ):
p_occ = occ_replacer.get_occurrence(occ)
# ...but only if they are within this period
if p_occ.start < end and p_occ.end >= start:
final_occurrences.append(p_occ)
else:
final_occurrences.append(occ)
# then add persisted occurrences which originated outside of this period but now
# fall within it
final_occurrences += occ_replacer.get_additional_occurrences(start, end)
return final_occurrences
def get_rrule_object(self):
if self.rule is not None:
params = self.rule.get_params()
frequency = self.rule.rrule_frequency()
return rrule.rrule(frequency, dtstart=self.start, **params)
def _create_occurrence(self, start, end=None):
if end is None:
end = start + (self.end - self.start)
return Occurrence(event=self, start=start, end=end, original_start=start, original_end=end)
def get_occurrence(self, date):
if timezone.is_naive(date) and django_settings.USE_TZ:
date = timezone.make_aware(date, timezone.utc)
rule = self.get_rrule_object()
if rule:
next_occurrence = rule.after(date, inc=True)
else:
next_occurrence = self.start
if next_occurrence == date:
try:
return Occurrence.objects.get(event=self, original_start=date)
except Occurrence.DoesNotExist:
return self._create_occurrence(next_occurrence)
def _get_occurrence_list(self, start, end):
"""
returns a list of occurrences for this event from start to end.
"""
difference = (self.end - self.start)
if self.rule is not None:
occurrences = []
if self.end_recurring_period and self.end_recurring_period < end:
end = self.end_recurring_period
rule = self.get_rrule_object()
o_starts = []
o_starts.append(rule.between(start, end, inc=True))
o_starts.append(rule.between(start - (difference // 2), end - (difference // 2), inc=True))
o_starts.append(rule.between(start - difference, end - difference, inc=True))
for occ in o_starts:
for o_start in occ:
o_end = o_start + difference
occurrence = self._create_occurrence(o_start, o_end)
if occurrence not in occurrences:
occurrences.append(occurrence)
return occurrences
else:
# check if event is in the period
if self.start < end and self.end > start:
return [self._create_occurrence(self.start)]
else:
return []
def _occurrences_after_generator(self, after=None, tzinfo=pytz.utc):
"""
returns a generator that produces unpresisted occurrences after the
datetime ``after``.
"""
if after is None:
after = timezone.now()
rule = self.get_rrule_object()
if rule is None:
    if self.end > after:
        yield self._create_occurrence(self.start, self.end)
    # `raise StopIteration` inside a generator is an error on Python 3.7+
    # (PEP 479); a plain return ends the generator the same way.
    return
date_iter = iter(rule)
difference = self.end - self.start
while True:
    try:
        o_start = next(date_iter)
    except StopIteration:
        # the rrule is exhausted; end the generator (PEP 479)
        return
    if o_start > self.end_recurring_period:
        return
    o_end = o_start + difference
    if o_end > after:
        yield self._create_occurrence(o_start, o_end)
def occurrences_after(self, after=None):
"""
returns a generator that produces occurrences after the datetime
``after``. Includes all of the persisted Occurrences.
"""
occ_replacer = OccurrenceReplacer(self.occurrence_set.all())
generator = self._occurrences_after_generator(after)
while True:
    try:
        next_occurrence = next(generator)
    except StopIteration:
        # end alongside the underlying generator (PEP 479)
        return
    yield occ_replacer.get_occurrence(next_occurrence)
class EventRelationManager(models.Manager):
'''
>>> import datetime
>>> EventRelation.objects.all().delete()
>>> CalendarRelation.objects.all().delete()
>>> data = {
... 'title': 'Test1',
... 'start': datetime.datetime(2008, 1, 1),
... 'end': datetime.datetime(2008, 1, 11)
... }
>>> Event.objects.all().delete()
>>> event1 = Event(**data)
>>> event1.save()
>>> data['title'] = 'Test2'
>>> event2 = Event(**data)
>>> event2.save()
>>> user1 = User(username='alice')
>>> user1.save()
>>> user2 = User(username='bob')
>>> user2.save()
>>> event1.create_relation(user1, 'owner')
>>> event1.create_relation(user2, 'viewer')
>>> event2.create_relation(user1, 'viewer')
'''
# Currently not supported
# Multiple level reverse lookups of generic relations appears to be
# unsupported in Django, which makes sense.
#
# def get_objects_for_event(self, event, model, distinction=None):
# '''
# returns a queryset full of instances of model, if it has an EventRelation
# with event, and distinction
# >>> event = Event.objects.get(title='Test1')
# >>> EventRelation.objects.get_objects_for_event(event, User, 'owner')
# [<User: alice>]
# >>> EventRelation.objects.get_objects_for_event(event, User)
# [<User: alice>, <User: bob>]
# '''
# if distinction:
# dist_q = Q(eventrelation__distinction = distinction)
# else:
# dist_q = Q()
# ct = ContentType.objects.get_for_model(model)
# return model.objects.filter(
# dist_q,
# eventrelation__content_type = ct,
# eventrelation__event = event
# )
def get_events_for_object(self, content_object, distinction=None, inherit=True):
'''
returns a queryset full of events, that relate to the object through, the
distinction
If inherit is false it will not consider the calendars that the events
belong to. If inherit is true it will inherit all of the relations and
distinctions that any calendar that it belongs to has, as long as the
relation has inheritable set to True. (See Calendar)
>>> event = Event.objects.get(title='Test1')
>>> user = User.objects.get(username = 'alice')
>>> EventRelation.objects.get_events_for_object(user, 'owner', inherit=False)
[<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>]
If a distinction is not declared it will not vet the relations based on
distinction.
>>> EventRelation.objects.get_events_for_object(user, inherit=False)
[<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>, <Event: Test2: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>]
Now if there is a Calendar
>>> calendar = Calendar(name = 'MyProject')
>>> calendar.save()
And an event that belongs to that calendar
>>> event = Event.objects.get(title='Test2')
>>> calendar.events.add(event)
If we relate this calendar to some object with inheritable set to true,
that relation will be inherited
>>> user = User.objects.get(username='bob')
>>> cr = calendar.create_relation(user, 'viewer', True)
>>> EventRelation.objects.get_events_for_object(user, 'viewer')
[<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>, <Event: Test2: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>]
'''
ct = ContentType.objects.get_for_model(type(content_object))
if distinction:
dist_q = Q(eventrelation__distinction=distinction)
cal_dist_q = Q(calendar__calendarrelation__distinction=distinction)
else:
dist_q = Q()
cal_dist_q = Q()
if inherit:
inherit_q = Q(
cal_dist_q,
calendar__calendarrelation__object_id=content_object.id,
calendar__calendarrelation__content_type=ct,
calendar__calendarrelation__inheritable=True,
)
else:
inherit_q = Q()
event_q = Q(dist_q, Q(eventrelation__object_id=content_object.id), Q(eventrelation__content_type=ct))
return Event.objects.filter(inherit_q | event_q)
def create_relation(self, event, content_object, distinction=None):
"""
Creates a relation between event and content_object.
See EventRelation for help on distinction.
"""
ct = ContentType.objects.get_for_model(type(content_object))
object_id = content_object.id
er = EventRelation(
content_type=ct,
object_id=object_id,
event=event,
distinction=distinction,
content_object=content_object
)
er.save()
return er
@python_2_unicode_compatible
class EventRelation(with_metaclass(ModelBase, *get_model_bases())):
'''
This is for relating data to an Event, there is also a distinction, so that
data can be related in different ways. A good example would be, if you have
events that are only visible by certain users, you could create a relation
between events and users, with the distinction of 'visibility', or
'ownership'.
event: a foreign key relation to an Event model.
content_type: a foreign key relation to ContentType of the generic object
object_id: the id of the generic object
content_object: the generic foreign key to the generic object
distinction: a string representing a distinction of the relation, User could
have a 'viewer' relation and an 'owner' relation for example.
DISCLAIMER: while this model is a nice out of the box feature to have, it
may not scale well. If you use this keep that in mind.
'''
event = models.ForeignKey(Event, verbose_name=_("event"))
content_type = models.ForeignKey(ContentType)
object_id = models.IntegerField()
content_object = fields.GenericForeignKey('content_type', 'object_id')
distinction = models.CharField(_("distinction"), max_length=20, null=True)
objects = EventRelationManager()
class Meta(object):
verbose_name = _("event relation")
verbose_name_plural = _("event relations")
app_label = 'schedule'
def __str__(self):
return '%s(%s)-%s' % (self.event.title, self.distinction, self.content_object)
@python_2_unicode_compatible
class Occurrence(with_metaclass(ModelBase, *get_model_bases())):
event = models.ForeignKey(Event, verbose_name=_("event"))
title = models.CharField(_("title"), max_length=255, blank=True, null=True)
description = models.TextField(_("description"), blank=True, null=True)
start = models.DateTimeField(_("start"))
end = models.DateTimeField(_("end"))
cancelled = models.BooleanField(_("cancelled"), default=False)
original_start = models.DateTimeField(_("original start"))
original_end = models.DateTimeField(_("original end"))
created_on = models.DateTimeField(_("created on"), auto_now_add=True)
updated_on = models.DateTimeField(_("updated on"), auto_now=True)
class Meta(object):
verbose_name = _("occurrence")
verbose_name_plural = _("occurrences")
app_label = 'schedule'
def __init__(self, *args, **kwargs):
super(Occurrence, self).__init__(*args, **kwargs)
if self.title is None and self.event_id:
self.title = self.event.title
if self.description is None and self.event_id:
self.description = self.event.description
def moved(self):
return self.original_start != self.start or self.original_end != self.end
moved = property(moved)
def move(self, new_start, new_end):
self.start = new_start
self.end = new_end
self.save()
def cancel(self):
self.cancelled = True
self.save()
def uncancel(self):
self.cancelled = False
self.save()
def get_absolute_url(self):
if self.pk is not None:
return reverse('occurrence', kwargs={'occurrence_id': self.pk,
'event_id': self.event.id})
return reverse('occurrence_by_date', kwargs={
'event_id': self.event.id,
'year': self.start.year,
'month': self.start.month,
'day': self.start.day,
'hour': self.start.hour,
'minute': self.start.minute,
'second': self.start.second,
})
def get_cancel_url(self):
if self.pk is not None:
return reverse('cancel_occurrence', kwargs={'occurrence_id': self.pk,
'event_id': self.event.id})
return reverse('cancel_occurrence_by_date', kwargs={
'event_id': self.event.id,
'year': self.start.year,
'month': self.start.month,
'day': self.start.day,
'hour': self.start.hour,
'minute': self.start.minute,
'second': self.start.second,
})
def get_edit_url(self):
if self.pk is not None:
return reverse('edit_occurrence', kwargs={'occurrence_id': self.pk,
'event_id': self.event.id})
return reverse('edit_occurrence_by_date', kwargs={
'event_id': self.event.id,
'year': self.start.year,
'month': self.start.month,
'day': self.start.day,
'hour': self.start.hour,
'minute': self.start.minute,
'second': self.start.second,
})
def __str__(self):
return ugettext("%(start)s to %(end)s") % {
'start': date(self.start, django_settings.DATE_FORMAT),
'end': date(self.end, django_settings.DATE_FORMAT)
}
def __lt__(self, other):
return self.end < other.end
def __eq__(self, other):
return (isinstance(other, Occurrence) and
self.original_start == other.original_start and self.original_end == other.original_end)
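# Hedged usage sketch, not part of the original module; assumes a configured
# Django project, and the field values below are hypothetical.
# import datetime
# rule = Rule.objects.create(name='Weekly', frequency='WEEKLY')
# event = Event.objects.create(title='standup',
#                              start=timezone.now(),
#                              end=timezone.now() + datetime.timedelta(hours=1),
#                              rule=rule)
# next_occurrence = next(event.occurrences_after())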
|
calandryll/transcriptome
|
refs/heads/master
|
scripts/old/poly_cut.py
|
1
|
#!/usr/bin/python -tt
# Can't do this with bash why not python!
# Remove poly-a and poly-t from shortened samples
# Uses fastx_clipper to remove poly-a and poly-t
# Website: http://hannonlab.cshl.edu/fastx_toolkit/commandline.html#fastx_clipper_usage
# Import OS features to run external programs
import os
import glob
v = "Version 0.1"
# Versions:
# 0.1 - Simple script to run fastx_clipper on all of the files
# Adapter listing
polya = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
polyt = "TTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT"
fastq_indir = "/home/chris/transcriptome/fastq/shorten"
fastq_outdir = "/home/chris/transcriptome/fastq/poly"
# keep a minimum length of 35 (-l 35); remove poly-a and poly-t sequences by
# running fastx_clipper twice, once per adapter. Note: the calls below use -a
# (3' clipping), not -b (clip anywhere).
# The original script repeated the same three commands for every sample and
# lane; the loop below performs the identical operations for samples 1-8,
# lanes L001 and L002.
for sample in range(1, 9):
    for lane in ("L001", "L002"):
        base = "Sample_%d_%s" % (sample, lane)
        label = "Sample %d" % sample if lane == "L001" else "Sample %d_2" % sample
        print "Analyzing %s...Removing poly-a..." % label
        os.system("fastx_clipper -v -Q 32 -l 35 -a %s -o %s/%s_temp.fastq -i %s/%s_shorten.fastq > %s/%s_temp.log" % (polya, fastq_outdir, base, fastq_indir, base, fastq_outdir, base))
        print "Analyzing %s...Removing poly-t..." % label
        os.system("fastx_clipper -v -Q 32 -l 35 -a %s -o %s/%s_poly.fastq -i %s/%s_temp.fastq > %s/%s_poly.log" % (polyt, fastq_outdir, base, fastq_outdir, base, fastq_outdir, base))
        print "Removing temporary file..."
        os.system("rm %s/%s_temp.fastq" % (fastq_outdir, base))
|
jgonthier/psi4
|
refs/heads/master
|
samples/python/mints9/input.py
|
3
|
from __future__ import print_function
import psi4
from psi4.driver import qcdb
#! A test of the basis specification. Various basis sets are specified outright and in blocks, both
#! orbital and auxiliary. Constructs libmints BasisSet objects through the constructor that calls
#! qcdb.BasisSet infrastructure. Checks that the resulting bases are of the right size and checks
#! that symmetry of the Molecule observes the basis assignment to atoms.
# cc-pvdz aug-cc-pvdz
# BASIS H 5/ 5 C 14/15 H +4/ 4 C +9/10
# RIFIT H 14/15 C 56/66 H +9/10 C +16/20
# JKFIT H 23/25 C 70/81 H +9/10 C +16/20
psi4.set_output_file("output.dat", False)
mymol = psi4.geometry("""
C 0.0 0.0 0.0
O 1.4 0.0 0.0
H_r -0.5 -0.7 0.0
H_l -0.5 0.7 0.0
""")
psi4.set_options({'basis': 'cc-pvdz'})
print('[1] <<< uniform cc-pVDZ >>>')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
mymol.print_out()
print('[2] <<< RIFIT (default) >>>')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', '', 'RIFIT', psi4.core.get_global_option('BASIS'))
mymol.print_out()
print('[3] <<< cc-pVDZ w/ aug-cc-pVDZ on C >>>')
psi4.basis_helper("""
assign cc-pvdz
assign c aug-cc-pvdz
""", name='dz_PLUS')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
mymol.print_out()
print('[4] <<< RIFIT (default) >>>')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', '', 'RIFIT', psi4.core.get_global_option('BASIS'))
mymol.print_out()
wert.print_out()
mymol.print_out()
print('[5] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H_R >>>')
psi4.basis_helper("""
assign cc-pvdz
assign c aug-cc-pvdz
assign h_r aug-cc-pvdz
""",
name='dz_PLUSplus',
key='BASis')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
mymol.print_out()
print('[6] <<< RIFIT (custom: force cc-pVDZ on H, default on C, O) >>>')
psi4.basis_helper("""
assign h cc-pvdz-ri
""",
name='dz_PLUSplusRI',
key='df_basis_mp2')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_MP2', psi4.core.get_global_option('DF_BASIS_MP2'), 'RIFIT', psi4.core.get_global_option('BASIS'))
mymol.print_out()
mymol.print_out()
print('[7] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H >>>')
psi4.basis_helper("""
assign cc-pvdz
assign c aug-cc-pvdz
assign h aug-cc-pvdz
""",
name = 'dz_PLUSplusplus')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
mymol.print_out()
print('[8] <<< JKFIT (default) >>>')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
mymol.print_out()
psi4.set_options({'basis': 'aug-cc-pvdz'})
print('[9] <<< aug-cc-pVDZ >>>')
wert = psi4.core.BasisSet.build(mymol, 'BASIS', psi4.core.get_global_option('BASIS'))
mymol.print_out()
print('[10] <<< JKFIT (default) >>>')
wert = psi4.core.BasisSet.build(mymol, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
mymol.print_out()
mymol2 = psi4.geometry("""
0 2
C 0.0 0.0 0.0
O 1.4 0.0 0.0
H_r -0.5 -0.6 0.3
H_l -0.5 0.6 0.3
H_c -0.5 0.0 0.7
""")
psi4.set_options({'basis': 'dz_plusplusplus'})
print('[11] <<< cc-pVDZ w/ aug-cc-pVDZ on C, H >>>')
wert = psi4.core.BasisSet.build(mymol2, 'BASIS', psi4.core.get_global_option('BASIS'))
mymol2.print_out()
hene = psi4.geometry("""
He
Ne 1 2.0
""")
psi4.basis_helper("""
assign cc-pv5z
""", name='disguised5z')
psi4.core.set_global_option('DF_BASIS_MP2', '') # clear df_basis_mp2 {...} to get autoaux below
print('[12] <<< cc-pV5Z on HeNe >>>')
wert = psi4.core.BasisSet.build(hene, 'BASIS', psi4.core.get_global_option('BASIS'))
hene.print_out()
print('[13] <<< RI for cc-pV5Z on HeNe >>>')
wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_MP2', '', 'RIFIT', psi4.core.get_global_option('BASIS'))
hene.print_out()
print('[14] <<< impossible JK for cc-pV5Z on HeNe >>>')
error_tripped = 0
try:
wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
except qcdb.BasisSetNotFound:
error_tripped = 1
psi4.basis_helper(key='df_basis_scf', name='uggh', block="""
assign he DEF2-QZVPP-JKFIT
""")
hene.print_out()
print('[15] <<< forced JK for cc-pV5Z on HeNe >>>')
wert = psi4.core.BasisSet.build(hene, 'DF_BASIS_SCF', '', 'JKFIT', psi4.core.get_global_option('BASIS'))
|
stackforge/tricircle
|
refs/heads/master
|
tricircle/db/migrate_repo/versions/005_fix_cached_endpoints_pod_id_length.py
|
1
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import migrate
from sqlalchemy import MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
cached_endpoints = Table('cached_endpoints', meta, autoload=True)
pods = Table('pods', meta, autoload=True)
col_pod_id_fkey = cached_endpoints.c.pod_id
col_pod_id_pkey = pods.c.pod_id
# In the migration script 001_init.py, the pod_id string length in
# cached_endpoints table is 64, but pod_id string length in pods table
# is 36. The string length in foreign key and primary key isn't the same
if col_pod_id_fkey.type.length != col_pod_id_pkey.type.length:
# Delete the old constraint. If it exists, we can't modify the
# pod_id length.
migrate.ForeignKeyConstraint(columns=[cached_endpoints.c.pod_id],
refcolumns=[pods.c.pod_id]).drop()
col_pod_id_fkey.alter(type=String(col_pod_id_pkey.type.length))
# Create the foreign key constraint
migrate.ForeignKeyConstraint(columns=[cached_endpoints.c.pod_id],
refcolumns=[pods.c.pod_id]).create()
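# For reference (illustrative, not part of the original migration), the column
# change above corresponds roughly to, e.g. on MySQL:
# ALTER TABLE cached_endpoints MODIFY pod_id VARCHAR(36);
# bracketed by dropping and re-creating the foreign key constraint.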
|
samuelhavron/heroku-buildpack-python
|
refs/heads/master
|
Python-3.4.3/Lib/_strptime.py
|
78
|
"""Strptime-related classes and functions.
CLASSES:
LocaleTime -- Discovers and stores locale-specific time information
TimeRE -- Creates regexes for pattern matching a string of text containing
time information
FUNCTIONS:
_getlang -- Figure out what language is being used for the locale
strptime -- Calculates the time struct represented by the passed-in string
"""
import time
import locale
import calendar
from re import compile as re_compile
from re import IGNORECASE
from re import escape as re_escape
from datetime import (date as datetime_date,
timedelta as datetime_timedelta,
timezone as datetime_timezone)
try:
from _thread import allocate_lock as _thread_allocate_lock
except ImportError:
from _dummy_thread import allocate_lock as _thread_allocate_lock
__all__ = []
def _getlang():
# Figure out what the current language is set to.
return locale.getlocale(locale.LC_TIME)
class LocaleTime(object):
"""Stores and handles locale-specific information related to time.
ATTRIBUTES:
f_weekday -- full weekday names (7-item list)
a_weekday -- abbreviated weekday names (7-item list)
f_month -- full month names (13-item list; dummy value in [0], which
is added by code)
a_month -- abbreviated month names (13-item list, dummy value in
[0], which is added by code)
am_pm -- AM/PM representation (2-item list)
LC_date_time -- format string for date/time representation (string)
LC_date -- format string for date representation (string)
LC_time -- format string for time representation (string)
timezone -- daylight- and non-daylight-savings timezone representation
(2-item list of sets)
lang -- Language used by instance (2-item tuple)
"""
def __init__(self):
"""Set all attributes.
Order of methods called matters for dependency reasons.
The locale language is set at the offset and then checked again before
exiting. This is to make sure that the attributes were not set with a
mix of information from more than one locale. This would most likely
happen when using threads where one thread calls a locale-dependent
function while another thread changes the locale while the function in
the other thread is still running. Proper coding would call for
locks to prevent changing the locale while locale-dependent code is
running. The check here is done in case someone does not think about
doing this.
Only other possible issue is if someone changed the timezone and did
not call time.tzset(). That is an issue for the programmer, though,
since changing the timezone is worthless without that call.
"""
self.lang = _getlang()
self.__calc_weekday()
self.__calc_month()
self.__calc_am_pm()
self.__calc_timezone()
self.__calc_date_time()
if _getlang() != self.lang:
raise ValueError("locale changed during initialization")
def __pad(self, seq, front):
# Add '' to seq at either the front (if front is True) or the back.
seq = list(seq)
if front:
seq.insert(0, '')
else:
seq.append('')
return seq
def __calc_weekday(self):
# Set self.a_weekday and self.f_weekday using the calendar
# module.
a_weekday = [calendar.day_abbr[i].lower() for i in range(7)]
f_weekday = [calendar.day_name[i].lower() for i in range(7)]
self.a_weekday = a_weekday
self.f_weekday = f_weekday
def __calc_month(self):
# Set self.f_month and self.a_month using the calendar module.
a_month = [calendar.month_abbr[i].lower() for i in range(13)]
f_month = [calendar.month_name[i].lower() for i in range(13)]
self.a_month = a_month
self.f_month = f_month
def __calc_am_pm(self):
# Set self.am_pm by using time.strftime().
# The magic date (1999,3,17,hour,44,55,2,76,0) is not really that
# magical; just happened to have used it everywhere else where a
# static date was needed.
am_pm = []
for hour in (1, 22):
time_tuple = time.struct_time((1999,3,17,hour,44,55,2,76,0))
am_pm.append(time.strftime("%p", time_tuple).lower())
self.am_pm = am_pm
def __calc_date_time(self):
# Set self.date_time, self.date, & self.time by using
# time.strftime().
# Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
# overloaded numbers is minimized. The order in which searches for
# values within the format string is very important; it eliminates
# possible ambiguity for what something represents.
time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
date_time = [None, None, None]
date_time[0] = time.strftime("%c", time_tuple).lower()
date_time[1] = time.strftime("%x", time_tuple).lower()
date_time[2] = time.strftime("%X", time_tuple).lower()
replacement_pairs = [('%', '%%'), (self.f_weekday[2], '%A'),
(self.f_month[3], '%B'), (self.a_weekday[2], '%a'),
(self.a_month[3], '%b'), (self.am_pm[1], '%p'),
('1999', '%Y'), ('99', '%y'), ('22', '%H'),
('44', '%M'), ('55', '%S'), ('76', '%j'),
('17', '%d'), ('03', '%m'), ('3', '%m'),
# '3' needed for when no leading zero.
('2', '%w'), ('10', '%I')]
replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
for tz in tz_values])
for offset,directive in ((0,'%c'), (1,'%x'), (2,'%X')):
current_format = date_time[offset]
for old, new in replacement_pairs:
# Must deal with possible lack of locale info
# manifesting itself as the empty string (e.g., Swedish's
# lack of AM/PM info) or a platform returning a tuple of empty
# strings (e.g., MacOS 9 having timezone as ('','')).
if old:
current_format = current_format.replace(old, new)
# If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
# 2005-01-03 occurs before the first Monday of the year. Otherwise
# %U is used.
time_tuple = time.struct_time((1999,1,3,1,1,1,6,3,0))
if '00' in time.strftime(directive, time_tuple):
U_W = '%W'
else:
U_W = '%U'
date_time[offset] = current_format.replace('11', U_W)
self.LC_date_time = date_time[0]
self.LC_date = date_time[1]
self.LC_time = date_time[2]
def __calc_timezone(self):
# Set self.timezone by using time.tzname.
# Do not worry about possibility of time.tzname[0] == time.tzname[1]
# and time.daylight; handle that in strptime.
try:
time.tzset()
except AttributeError:
pass
no_saving = frozenset(["utc", "gmt", time.tzname[0].lower()])
if time.daylight:
has_saving = frozenset([time.tzname[1].lower()])
else:
has_saving = frozenset()
self.timezone = (no_saving, has_saving)
class TimeRE(dict):
"""Handle conversion from format directives to regexes."""
def __init__(self, locale_time=None):
"""Create keys/values.
Order of execution is important for dependency reasons.
"""
if locale_time:
self.locale_time = locale_time
else:
self.locale_time = LocaleTime()
base = super()
base.__init__({
# The " \d" part of the regex is to make %c from ANSI C work
'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
'f': r"(?P<f>[0-9]{1,6})",
'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
'I': r"(?P<I>1[0-2]|0[1-9]|[1-9])",
'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
'M': r"(?P<M>[0-5]\d|\d)",
'S': r"(?P<S>6[0-1]|[0-5]\d|\d)",
'U': r"(?P<U>5[0-3]|[0-4]\d|\d)",
'w': r"(?P<w>[0-6])",
# W is set below by using 'U'
'y': r"(?P<y>\d\d)",
#XXX: Does 'Y' need to worry about having less or more than
# 4 digits?
'Y': r"(?P<Y>\d\d\d\d)",
'z': r"(?P<z>[+-]\d\d[0-5]\d)",
'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
for tz in tz_names),
'Z'),
'%': '%'})
base.__setitem__('W', base.__getitem__('U').replace('U', 'W'))
base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
base.__setitem__('x', self.pattern(self.locale_time.LC_date))
base.__setitem__('X', self.pattern(self.locale_time.LC_time))
def __seqToRE(self, to_convert, directive):
"""Convert a list to a regex string for matching a directive.
Want possible matching values to be from longest to shortest. This
prevents the possibility of a match occurring for a value that also
a substring of a larger value that should have matched (e.g., 'abc'
matching when 'abcdef' should have been the match).
"""
to_convert = sorted(to_convert, key=len, reverse=True)
for value in to_convert:
if value != '':
break
else:
return ''
regex = '|'.join(re_escape(stuff) for stuff in to_convert)
regex = '(?P<%s>%s' % (directive, regex)
return '%s)' % regex
def pattern(self, format):
"""Return regex pattern for the format string.
Need to make sure that any characters that might be interpreted as
regex syntax are escaped.
"""
processed_format = ''
# The sub() call escapes all characters that might be misconstrued
# as regex syntax. Cannot use re.escape since we have to deal with
# format directives (%m, etc.).
regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])")
format = regex_chars.sub(r"\\\1", format)
        whitespace_replacement = re_compile(r'\s+')
        format = whitespace_replacement.sub(r'\\s+', format)
while '%' in format:
directive_index = format.index('%')+1
processed_format = "%s%s%s" % (processed_format,
format[:directive_index-1],
self[format[directive_index]])
format = format[directive_index+1:]
return "%s%s" % (processed_format, format)
def compile(self, format):
"""Return a compiled re object for the format string."""
return re_compile(self.pattern(format), IGNORECASE)
_cache_lock = _thread_allocate_lock()
# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
# first!
_TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
_regex_cache = {}
def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
"""Calculate the Julian day based on the year, week of the year, and day of
the week, with week_start_day representing whether the week of the year
assumes the week starts on Sunday or Monday (6 or 0)."""
first_weekday = datetime_date(year, 1, 1).weekday()
# If we are dealing with the %U directive (week starts on Sunday), it's
# easier to just shift the view to Sunday being the first day of the
# week.
if not week_starts_Mon:
first_weekday = (first_weekday + 1) % 7
day_of_week = (day_of_week + 1) % 7
# Need to watch out for a week 0 (when the first day of the year is not
# the same as that specified by %U or %W).
week_0_length = (7 - first_weekday) % 7
if week_of_year == 0:
return 1 + day_of_week - first_weekday
else:
days_to_week = week_0_length + (7 * (week_of_year - 1))
return 1 + days_to_week + day_of_week
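# A worked example (added for illustration; not part of the original module):
# for _calc_julian_from_U_or_W(2005, 1, 0, True), 2005-01-01 is a Saturday,
# so first_weekday == 5 and week_0_length == (7 - 5) % 7 == 2. With
# week_of_year == 1, the result is 1 + (2 + 7 * 0) + 0 == 3, i.e. Monday
# 2005-01-03, the first Monday of the year, as expected for %W week 1.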
def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a 2-tuple consisting of a time struct and an int containing
the number of microseconds based on the input string and the
format string."""
for index, arg in enumerate([data_string, format]):
if not isinstance(arg, str):
msg = "strptime() argument {} must be str, not {}"
raise TypeError(msg.format(index, type(arg)))
global _TimeRE_cache, _regex_cache
with _cache_lock:
if _getlang() != _TimeRE_cache.locale_time.lang:
_TimeRE_cache = TimeRE()
_regex_cache.clear()
if len(_regex_cache) > _CACHE_MAX_SIZE:
_regex_cache.clear()
locale_time = _TimeRE_cache.locale_time
format_regex = _regex_cache.get(format)
if not format_regex:
try:
format_regex = _TimeRE_cache.compile(format)
# KeyError raised when a bad format is found; can be specified as
# \\, in which case it was a stray % but with a space after it
except KeyError as err:
bad_directive = err.args[0]
if bad_directive == "\\":
bad_directive = "%"
del err
raise ValueError("'%s' is a bad directive in format '%s'" %
(bad_directive, format)) from None
# IndexError only occurs when the format string is "%"
except IndexError:
raise ValueError("stray %% in format '%s'" % format) from None
_regex_cache[format] = format_regex
found = format_regex.match(data_string)
if not found:
raise ValueError("time data %r does not match format %r" %
(data_string, format))
if len(data_string) != found.end():
raise ValueError("unconverted data remains: %s" %
data_string[found.end():])
year = None
month = day = 1
hour = minute = second = fraction = 0
tz = -1
tzoffset = None
# Default to -1 to signify that values not known; not critical to have,
# though
week_of_year = -1
week_of_year_start = -1
# weekday and julian defaulted to -1 so as to signal need to calculate
# values
weekday = julian = -1
found_dict = found.groupdict()
for group_key in found_dict.keys():
# Directives not explicitly handled below:
# c, x, X
        #   handled by being built from other directives
# U, W
# worthless without day of the week
if group_key == 'y':
year = int(found_dict['y'])
            # Open Group specification for strptime() states that a %y
            # value in the range of [00, 68] is in the century 2000, while
            # [69, 99] is in the century 1900.
if year <= 68:
year += 2000
else:
year += 1900
elif group_key == 'Y':
year = int(found_dict['Y'])
elif group_key == 'm':
month = int(found_dict['m'])
elif group_key == 'B':
month = locale_time.f_month.index(found_dict['B'].lower())
elif group_key == 'b':
month = locale_time.a_month.index(found_dict['b'].lower())
elif group_key == 'd':
day = int(found_dict['d'])
elif group_key == 'H':
hour = int(found_dict['H'])
elif group_key == 'I':
hour = int(found_dict['I'])
ampm = found_dict.get('p', '').lower()
# If there was no AM/PM indicator, we'll treat this like AM
if ampm in ('', locale_time.am_pm[0]):
# We're in AM so the hour is correct unless we're
# looking at 12 midnight.
# 12 midnight == 12 AM == hour 0
if hour == 12:
hour = 0
elif ampm == locale_time.am_pm[1]:
# We're in PM so we need to add 12 to the hour unless
# we're looking at 12 noon.
# 12 noon == 12 PM == hour 12
if hour != 12:
hour += 12
elif group_key == 'M':
minute = int(found_dict['M'])
elif group_key == 'S':
second = int(found_dict['S'])
elif group_key == 'f':
s = found_dict['f']
# Pad to always return microseconds.
s += "0" * (6 - len(s))
fraction = int(s)
elif group_key == 'A':
weekday = locale_time.f_weekday.index(found_dict['A'].lower())
elif group_key == 'a':
weekday = locale_time.a_weekday.index(found_dict['a'].lower())
elif group_key == 'w':
weekday = int(found_dict['w'])
if weekday == 0:
weekday = 6
else:
weekday -= 1
elif group_key == 'j':
julian = int(found_dict['j'])
elif group_key in ('U', 'W'):
week_of_year = int(found_dict[group_key])
if group_key == 'U':
# U starts week on Sunday.
week_of_year_start = 6
else:
# W starts week on Monday.
week_of_year_start = 0
elif group_key == 'z':
z = found_dict['z']
tzoffset = int(z[1:3]) * 60 + int(z[3:5])
if z.startswith("-"):
tzoffset = -tzoffset
elif group_key == 'Z':
# Since -1 is default value only need to worry about setting tz if
# it can be something other than -1.
found_zone = found_dict['Z'].lower()
for value, tz_values in enumerate(locale_time.timezone):
if found_zone in tz_values:
# Deal with bad locale setup where timezone names are the
# same and yet time.daylight is true; too ambiguous to
# be able to tell what timezone has daylight savings
if (time.tzname[0] == time.tzname[1] and
time.daylight and found_zone not in ("utc", "gmt")):
break
else:
tz = value
break
leap_year_fix = False
if year is None and month == 2 and day == 29:
year = 1904 # 1904 is first leap year of 20th century
leap_year_fix = True
elif year is None:
year = 1900
# If we know the week of the year and what day of that week, we can figure
# out the Julian day of the year.
if julian == -1 and week_of_year != -1 and weekday != -1:
week_starts_Mon = True if week_of_year_start == 0 else False
julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
week_starts_Mon)
# Cannot pre-calculate datetime_date() since can change in Julian
# calculation and thus could have different value for the day of the week
# calculation.
if julian == -1:
# Need to add 1 to result since first day of the year is 1, not 0.
julian = datetime_date(year, month, day).toordinal() - \
datetime_date(year, 1, 1).toordinal() + 1
else: # Assume that if they bothered to include Julian day it will
# be accurate.
datetime_result = datetime_date.fromordinal((julian - 1) + datetime_date(year, 1, 1).toordinal())
year = datetime_result.year
month = datetime_result.month
day = datetime_result.day
if weekday == -1:
weekday = datetime_date(year, month, day).weekday()
# Add timezone info
tzname = found_dict.get("Z")
if tzoffset is not None:
gmtoff = tzoffset * 60
else:
gmtoff = None
if leap_year_fix:
# the caller didn't supply a year but asked for Feb 29th. We couldn't
# use the default of 1900 for computations. We set it back to ensure
# that February 29th is smaller than March 1st.
year = 1900
return (year, month, day,
hour, minute, second,
weekday, julian, tz, tzname, gmtoff), fraction
def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a time struct based on the input string and the
format string."""
tt = _strptime(data_string, format)[0]
return time.struct_time(tt[:time._STRUCT_TM_ITEMS])
def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"):
"""Return a class cls instance based on the input string and the
format string."""
tt, fraction = _strptime(data_string, format)
tzname, gmtoff = tt[-2:]
args = tt[:6] + (fraction,)
if gmtoff is not None:
tzdelta = datetime_timedelta(seconds=gmtoff)
if tzname:
tz = datetime_timezone(tzdelta, tzname)
else:
tz = datetime_timezone(tzdelta)
args += (tz,)
return cls(*args)
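# A minimal usage sketch (appended for illustration; not part of the original
# module). Guarded so that importing the module is unaffected:
if __name__ == '__main__':
    tt = _strptime_time("Mon Jan  3 12:30:45 2005")
    assert (tt.tm_year, tt.tm_mon, tt.tm_mday) == (2005, 1, 3)
    values, fraction = _strptime("2005-01-03 12:30:45.123",
                                 "%Y-%m-%d %H:%M:%S.%f")
    assert fraction == 123000  # %f input is right-padded to microseconds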
|
jkvas/eudaq
|
refs/heads/calice_ahcal_testbeam2018May
|
legacy/producers/explorer/srs-software/slow_control/read_counters.py
|
13
|
#! /usr/bin/env python
##
## code testing of class SlowControl
##
import SlowControl # slow control code
dHLVDS = SlowControl.SlowControl(0)
dADC = SlowControl.SlowControl(1)
SlowControl.read_burst(dHLVDS, 6039, 26, 4, False)
SlowControl.read_burst(dADC, 6039, 10, 4, False)
quit()
|
fmaguire/BayeHem
|
refs/heads/master
|
bayehem/rsem/detonate-1.11/ref-eval/boost/tools/build/v2/test/unit_test.py
|
44
|
#!/usr/bin/python
# Copyright 2003, 2004 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Test the unit_test rule.
import BoostBuild
t = BoostBuild.Tester(use_test_config=False)
# Create the needed files.
t.write("jamroot.jam", """
using testing ;
lib helper : helper.cpp ;
unit-test test : test.cpp : <library>helper ;
""")
t.write("test.cpp", """
void helper();
int main() { helper(); }
""")
t.write("helper.cpp", """
void
#if defined(_WIN32)
__declspec(dllexport)
#endif
helper() {}
""")
t.run_build_system(["link=static"])
t.expect_addition("bin/$toolset/debug/link-static/test.passed")
t.cleanup()
|
StefanRijnhart/server-tools
|
refs/heads/7.0
|
fetchmail_attach_from_folder/match_algorithm/email_domain.py
|
54
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013 Therp BV (<http://therp.nl>)
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from .email_exact import email_exact
class email_domain(email_exact):
    '''Search objects by domain name of email address.
    Beware of match_first here; this algorithm is the most likely to get it
    wrong (e.g. gmail, where many unrelated senders share one domain)'''
name = 'Domain of email address'
def search_matches(self, cr, uid, conf, mail_message, mail_message_org):
ids = super(email_domain, self).search_matches(
cr, uid, conf, mail_message, mail_message_org)
if not ids:
domains = []
for addr in self._get_mailaddresses(conf, mail_message):
domains.append(addr.split('@')[-1])
ids = conf.pool.get(conf.model_id.model).search(
cr, uid,
self._get_mailaddress_search_domain(
conf, mail_message,
operator='like',
values=['%@' + domain for domain in set(domains)]),
order=conf.model_order)
return ids
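# A minimal sketch of the domain matching performed above (hypothetical
# addresses; not part of the original module):
if __name__ == '__main__':
    addresses = ['alice@example.com', 'bob@mail.example.com']
    domains = [addr.split('@')[-1] for addr in addresses]
    # these values are handed to _get_mailaddress_search_domain with
    # operator='like', so any record whose email ends in the domain matches
    print(['%@' + domain for domain in set(domains)])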
|
aelarabawy/hostap
|
refs/heads/master
|
tests/hwsim/vm/parallel-vm.py
|
1
|
#!/usr/bin/env python2
#
# Parallel VM test case executor
# Copyright (c) 2014, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import curses
import fcntl
import os
import subprocess
import sys
import time
def get_results():
global vm
started = []
passed = []
failed = []
skipped = []
for i in range(0, num_servers):
lines = vm[i]['out'].splitlines()
started += [ l for l in lines if l.startswith('START ') ]
passed += [ l for l in lines if l.startswith('PASS ') ]
failed += [ l for l in lines if l.startswith('FAIL ') ]
skipped += [ l for l in lines if l.startswith('SKIP ') ]
return (started, passed, failed, skipped)
def show_progress(scr):
global num_servers
global vm
global dir
global timestamp
global tests
total_tests = len(tests)
scr.leaveok(1)
scr.addstr(0, 0, "Parallel test execution status", curses.A_BOLD)
for i in range(0, num_servers):
scr.addstr(i + 1, 0, "VM %d:" % (i + 1), curses.A_BOLD)
scr.addstr(i + 1, 10, "starting VM")
scr.addstr(num_servers + 1, 0, "Total:", curses.A_BOLD)
scr.addstr(num_servers + 1, 20, "TOTAL={} STARTED=0 PASS=0 FAIL=0 SKIP=0".format(total_tests))
scr.refresh()
while True:
running = False
updated = False
for i in range(0, num_servers):
if not vm[i]['proc']:
continue
if vm[i]['proc'].poll() is not None:
vm[i]['proc'] = None
scr.move(i + 1, 10)
scr.clrtoeol()
log = '{}/{}.srv.{}/console'.format(dir, timestamp, i + 1)
with open(log, 'r') as f:
if "Kernel panic" in f.read():
scr.addstr("kernel panic")
else:
scr.addstr("completed run")
updated = True
continue
running = True
try:
err = vm[i]['proc'].stderr.read()
vm[i]['err'] += err
except:
pass
try:
out = vm[i]['proc'].stdout.read()
if "READY" in out or "PASS" in out or "FAIL" in out or "SKIP" in out:
if not tests:
vm[i]['proc'].stdin.write('\n')
else:
name = tests.pop(0)
vm[i]['proc'].stdin.write(name + '\n')
except:
continue
#print("VM {}: '{}'".format(i, out))
vm[i]['out'] += out
lines = vm[i]['out'].splitlines()
last = [ l for l in lines if l.startswith('START ') ]
if len(last) > 0:
try:
info = last[-1].split(' ')
scr.move(i + 1, 10)
scr.clrtoeol()
scr.addstr(info[1])
updated = True
except:
pass
if not running:
break
if updated:
(started, passed, failed, skipped) = get_results()
scr.move(num_servers + 1, 10)
scr.clrtoeol()
scr.addstr("{} %".format(int(100.0 * (len(passed) + len(failed) + len(skipped)) / total_tests)))
scr.addstr(num_servers + 1, 20, "TOTAL={} STARTED={} PASS={} FAIL={} SKIP={}".format(total_tests, len(started), len(passed), len(failed), len(skipped)))
if len(failed) > 0:
scr.move(num_servers + 2, 0)
scr.clrtoeol()
scr.addstr("Failed test cases: ")
for f in failed:
scr.addstr(f.split(' ')[1])
scr.addstr(' ')
scr.refresh()
time.sleep(0.5)
scr.refresh()
time.sleep(0.3)
def main():
global num_servers
global vm
global dir
global timestamp
global tests
if len(sys.argv) < 2:
sys.exit("Usage: %s <number of VMs> [params..]" % sys.argv[0])
num_servers = int(sys.argv[1])
if num_servers < 1:
sys.exit("Too small number of VMs")
tests = []
cmd = [ '../run-tests.py', '-L' ] + sys.argv[2:]
lst = subprocess.Popen(cmd, stdout=subprocess.PIPE)
for l in lst.stdout.readlines():
name = l.split(' ')[0]
tests.append(name)
if len(tests) == 0:
sys.exit("No test cases selected")
dir = '/tmp/hwsim-test-logs'
try:
os.mkdir(dir)
except:
pass
timestamp = int(time.time())
vm = {}
for i in range(0, num_servers):
print("\rStarting virtual machine {}/{}".format(i + 1, num_servers)),
cmd = ['./vm-run.sh', '--timestamp', str(timestamp),
'--ext', 'srv.%d' % (i + 1),
'-i'] + sys.argv[2:]
vm[i] = {}
vm[i]['proc'] = subprocess.Popen(cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
vm[i]['out'] = ""
vm[i]['err'] = ""
for stream in [ vm[i]['proc'].stdout, vm[i]['proc'].stderr ]:
fd = stream.fileno()
fl = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
print
curses.wrapper(show_progress)
with open('{}/{}-parallel.log'.format(dir, timestamp), 'w') as f:
for i in range(0, num_servers):
f.write('VM {}\n{}\n{}\n'.format(i, vm[i]['out'], vm[i]['err']))
(started, passed, failed, skipped) = get_results()
if len(failed) > 0:
print "Failed test cases:"
for f in failed:
print f.split(' ')[1],
print
print("TOTAL={} PASS={} FAIL={} SKIP={}".format(len(started), len(passed), len(failed), len(skipped)))
print "Logs: " + dir + '/' + str(timestamp)
for i in range(0, num_servers):
log = '{}/{}.srv.{}/console'.format(dir, timestamp, i + 1)
with open(log, 'r') as f:
if "Kernel panic" in f.read():
print "Kernel panic in " + log
if __name__ == "__main__":
main()
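# Example invocation (hypothetical test names; run-tests.py -L prints the
# real list, as used above to build `tests`):
#
#   ./parallel-vm.py 4 ap_open scan_random_mac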
|
caktus/django-treenav
|
refs/heads/develop
|
treenav/urls.py
|
1
|
from django.urls import path
from .views import treenav_undefined_url
urlpatterns = [
path("item/<slug:item_slug>/", treenav_undefined_url, name="treenav_undefined_url"),
]
|
ahmedaljazzar/edx-platform
|
refs/heads/master
|
common/djangoapps/microsite_configuration/backends/database.py
|
23
|
"""
Microsite backend that reads the configuration from the database
"""
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from mako.template import Template
from microsite_configuration.backends.base import BaseMicrositeBackend, BaseMicrositeTemplateBackend
from microsite_configuration.microsite import get_value as microsite_get_value
from microsite_configuration.models import Microsite, MicrositeOrganizationMapping, MicrositeTemplate
from util.cache import cache
from util.memcache import fasthash
from util.url import strip_port_from_host
class DatabaseMicrositeBackend(BaseMicrositeBackend):
"""
Microsite backend that reads the microsites definitions
from a table in the database according to the models.py file
This backend would allow us to save microsite configurations
    into the database and load them into local storage when an HTTPRequest
    originates from a microsite.
E.g. we have setup a microsite with key `monster-university-academy` and
We would have a DB entry like this in table created by Microsite model.
key = monster-university-academy
subdomain = mua.edx.org
values = {
"platform_name": "Monster University Academy".
"course_org_filter: "MonsterX"
}
While using DatabaseMicrositeBackend any request coming from mua.edx.org
would get microsite configurations from `values` column.
"""
def has_configuration_set(self):
"""
Returns whether there is any Microsite configuration settings
"""
if Microsite.objects.all()[:1].exists():
return True
else:
return False
def set_config_by_domain(self, domain):
"""
For a given request domain, find a match in our microsite configuration
and then assign it to the thread local in order to make it available
to the complete Django request processing
"""
if not self.has_configuration_set() or not domain:
return
# look up based on the HTTP request domain name
# this will need to be a full domain name match,
# not a 'startswith' match
microsite = Microsite.get_microsite_for_domain(domain)
if not microsite:
# if no match, then try to find a 'default' key in Microsites
try:
microsite = Microsite.objects.get(key='default')
except Microsite.DoesNotExist:
pass
if microsite:
# if we have a match, then set up the microsite thread local
# data
self._set_microsite_config_from_obj(microsite.site.domain, domain, microsite)
def get_all_config(self):
"""
This returns all configuration for all microsites
"""
config = {}
candidates = Microsite.objects.all()
for microsite in candidates:
values = microsite.values
config[microsite.key] = values
return config
def get_value_for_org(self, org, val_name, default=None):
"""
This returns a configuration value for a microsite which has an org_filter that matches
what is passed in
"""
microsite = MicrositeOrganizationMapping.get_microsite_for_organization(org)
if not microsite:
return default
# cdodge: This approach will not leverage any caching, although I think only Studio calls
# this
config = microsite.values
return config.get(val_name, default)
def get_all_orgs(self):
"""
This returns a set of orgs that are considered within a microsite. This can be used,
for example, to do filtering
"""
# This should be cacheable (via memcache to keep consistent across a cluster)
# I believe this is called on the dashboard and catalog pages, so it'd be good to optimize
return set(MicrositeOrganizationMapping.objects.all().values_list('organization', flat=True))
def _set_microsite_config_from_obj(self, subdomain, domain, microsite_object):
"""
Helper internal method to actually find the microsite configuration
"""
config = microsite_object.values
config['subdomain'] = strip_port_from_host(subdomain)
config['site_domain'] = strip_port_from_host(domain)
config['microsite_config_key'] = microsite_object.key
# we take the list of ORGs associated with this microsite from the database mapping
# tables. NOTE, for now, we assume one ORG per microsite
organizations = microsite_object.get_organizations()
# we must have at least one ORG defined
if not organizations:
raise Exception(
'Configuration error. Microsite {key} does not have any ORGs mapped to it!'.format(
key=microsite_object.key
)
)
# just take the first one for now, we'll have to change the upstream logic to allow
# for more than one ORG binding
config['course_org_filter'] = organizations[0]
self.current_request_configuration.data = config
class DatabaseMicrositeTemplateBackend(BaseMicrositeTemplateBackend):
"""
Specialized class to pull templates from the database.
This Backend would allow us to save templates in DB and pull
them from there when required for a specific microsite.
This backend can be enabled by `MICROSITE_TEMPLATE_BACKEND` setting.
E.g. we have setup a microsite for subdomain `mua.edx.org` and
We have a DB entry like this in table created by MicrositeTemplate model.
microsite = Key for microsite(mua.edx.org)
template_uri = about.html
template = <html><body>Template from DB</body></html>
While using DatabaseMicrositeTemplateBackend any request coming from mua.edx.org/about.html
would get about.html template from DB and response would be the value of `template` column.
"""
def get_template_path(self, relative_path, **kwargs):
return relative_path
def get_template(self, uri):
"""
Override of the base class for us to look into the
database tables for a template definition, if we can't find
one we'll return None which means "use default means" (aka filesystem)
"""
cache_key = "template_cache." + fasthash(microsite_get_value('site_domain') + '.' + uri)
template_text = cache.get(cache_key) # pylint: disable=maybe-no-member
if not template_text:
# cache is empty so pull template from DB and fill cache.
template_obj = MicrositeTemplate.get_template_for_microsite(
microsite_get_value('site_domain'),
uri
)
if not template_obj:
# We need to set something in the cache to improve performance
# of the templates stored in the filesystem as well
cache.set( # pylint: disable=maybe-no-member
cache_key, '##none', settings.MICROSITE_DATABASE_TEMPLATE_CACHE_TTL
)
return None
template_text = template_obj.template
cache.set( # pylint: disable=maybe-no-member
cache_key, template_text, settings.MICROSITE_DATABASE_TEMPLATE_CACHE_TTL
)
if template_text == '##none':
return None
return Template(
text=template_text
)
@staticmethod
@receiver(post_save, sender=MicrositeTemplate)
def clear_cache(sender, instance, **kwargs): # pylint: disable=unused-argument
"""
Clear the cached template when the model is saved
"""
cache_key = "template_cache." + fasthash(instance.microsite.site.domain + '.' + instance.template_uri)
cache.delete(cache_key) # pylint: disable=maybe-no-member
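# A minimal usage sketch (hypothetical values, shown as comments because both
# backends need Django settings and database access to run):
#
#   backend = DatabaseMicrositeBackend()
#   backend.set_config_by_domain('mua.edx.org')  # loads `values` thread-locally
#   backend.get_value_for_org('MonsterX', 'platform_name', 'edX')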
|
omakk/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/util.py
|
497
|
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""WebSocket utilities.
"""
import array
import errno
# Import hash classes from a module available and recommended for each Python
# version and re-export those symbols. Use the sha and md5 modules in Python
# 2.4, and the hashlib module in Python 2.6.
try:
import hashlib
md5_hash = hashlib.md5
sha1_hash = hashlib.sha1
except ImportError:
import md5
import sha
md5_hash = md5.md5
sha1_hash = sha.sha
import StringIO
import logging
import os
import re
import socket
import traceback
import zlib
try:
from mod_pywebsocket import fast_masking
except ImportError:
pass
def get_stack_trace():
"""Get the current stack trace as string.
This is needed to support Python 2.3.
TODO: Remove this when we only support Python 2.4 and above.
Use traceback.format_exc instead.
"""
out = StringIO.StringIO()
traceback.print_exc(file=out)
return out.getvalue()
def prepend_message_to_exception(message, exc):
"""Prepend message to the exception."""
exc.args = (message + str(exc),)
return
def __translate_interp(interp, cygwin_path):
"""Translate interp program path for Win32 python to run cygwin program
(e.g. perl). Note that it doesn't support path that contains space,
which is typically true for Unix, where #!-script is written.
For Win32 python, cygwin_path is a directory of cygwin binaries.
Args:
interp: interp command line
cygwin_path: directory name of cygwin binary, or None
Returns:
translated interp command line.
"""
if not cygwin_path:
return interp
m = re.match('^[^ ]*/([^ ]+)( .*)?', interp)
if m:
cmd = os.path.join(cygwin_path, m.group(1))
return cmd + m.group(2)
return interp
def get_script_interp(script_path, cygwin_path=None):
"""Gets #!-interpreter command line from the script.
It also fixes command path. When Cygwin Python is used, e.g. in WebKit,
it could run "/usr/bin/perl -wT hello.pl".
When Win32 Python is used, e.g. in Chromium, it couldn't. So, fix
"/usr/bin/perl" to "<cygwin_path>\perl.exe".
Args:
script_path: pathname of the script
cygwin_path: directory name of cygwin binary, or None
Returns:
#!-interpreter command line, or None if it is not #!-script.
"""
fp = open(script_path)
line = fp.readline()
fp.close()
m = re.match('^#!(.*)', line)
if m:
return __translate_interp(m.group(1), cygwin_path)
return None
def wrap_popen3_for_win(cygwin_path):
"""Wrap popen3 to support #!-script on Windows.
Args:
cygwin_path: path for cygwin binary if command path is needed to be
translated. None if no translation required.
"""
__orig_popen3 = os.popen3
def __wrap_popen3(cmd, mode='t', bufsize=-1):
cmdline = cmd.split(' ')
interp = get_script_interp(cmdline[0], cygwin_path)
if interp:
cmd = interp + ' ' + cmd
return __orig_popen3(cmd, mode, bufsize)
os.popen3 = __wrap_popen3
def hexify(s):
return ' '.join(map(lambda x: '%02x' % ord(x), s))
def get_class_logger(o):
return logging.getLogger(
'%s.%s' % (o.__class__.__module__, o.__class__.__name__))
class NoopMasker(object):
"""A masking object that has the same interface as RepeatedXorMasker but
just returns the string passed in without making any change.
"""
def __init__(self):
pass
def mask(self, s):
return s
class RepeatedXorMasker(object):
"""A masking object that applies XOR on the string given to mask method
with the masking bytes given to the constructor repeatedly. This object
remembers the position in the masking bytes the last mask method call
ended and resumes from that point on the next mask method call.
"""
def __init__(self, masking_key):
self._masking_key = masking_key
self._masking_key_index = 0
def _mask_using_swig(self, s):
masked_data = fast_masking.mask(
s, self._masking_key, self._masking_key_index)
self._masking_key_index = (
(self._masking_key_index + len(s)) % len(self._masking_key))
return masked_data
def _mask_using_array(self, s):
result = array.array('B')
result.fromstring(s)
# Use temporary local variables to eliminate the cost to access
# attributes
masking_key = map(ord, self._masking_key)
masking_key_size = len(masking_key)
masking_key_index = self._masking_key_index
for i in xrange(len(result)):
result[i] ^= masking_key[masking_key_index]
masking_key_index = (masking_key_index + 1) % masking_key_size
self._masking_key_index = masking_key_index
return result.tostring()
if 'fast_masking' in globals():
mask = _mask_using_swig
else:
mask = _mask_using_array
# By making wbits option negative, we can suppress CMF/FLG (2 octet) and
# ADLER32 (4 octet) fields of zlib so that we can use zlib module just as
# deflate library. DICTID won't be added as far as we don't set dictionary.
# LZ77 window of 32K will be used for both compression and decompression.
# For decompression, we can just use 32K to cover any window size. For
# compression, we use 32K so receivers must use 32K.
#
# Compression level is Z_DEFAULT_COMPRESSION. We don't have to match level
# to decode.
#
# See zconf.h, deflate.cc, inflate.cc of zlib library, and zlibmodule.c of
# Python. See also RFC1950 (ZLIB 3.3).
class _Deflater(object):
def __init__(self, window_bits):
self._logger = get_class_logger(self)
self._compress = zlib.compressobj(
zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -window_bits)
def compress(self, bytes):
compressed_bytes = self._compress.compress(bytes)
self._logger.debug('Compress input %r', bytes)
self._logger.debug('Compress result %r', compressed_bytes)
return compressed_bytes
def compress_and_flush(self, bytes):
compressed_bytes = self._compress.compress(bytes)
compressed_bytes += self._compress.flush(zlib.Z_SYNC_FLUSH)
self._logger.debug('Compress input %r', bytes)
self._logger.debug('Compress result %r', compressed_bytes)
return compressed_bytes
def compress_and_finish(self, bytes):
compressed_bytes = self._compress.compress(bytes)
compressed_bytes += self._compress.flush(zlib.Z_FINISH)
self._logger.debug('Compress input %r', bytes)
self._logger.debug('Compress result %r', compressed_bytes)
return compressed_bytes
class _Inflater(object):
def __init__(self, window_bits):
self._logger = get_class_logger(self)
self._window_bits = window_bits
self._unconsumed = ''
self.reset()
def decompress(self, size):
if not (size == -1 or size > 0):
raise Exception('size must be -1 or positive')
data = ''
while True:
if size == -1:
data += self._decompress.decompress(self._unconsumed)
# See Python bug http://bugs.python.org/issue12050 to
# understand why the same code cannot be used for updating
# self._unconsumed for here and else block.
self._unconsumed = ''
else:
data += self._decompress.decompress(
self._unconsumed, size - len(data))
self._unconsumed = self._decompress.unconsumed_tail
if self._decompress.unused_data:
# Encountered a last block (i.e. a block with BFINAL = 1) and
# found a new stream (unused_data). We cannot use the same
# zlib.Decompress object for the new stream. Create a new
# Decompress object to decompress the new one.
#
# It's fine to ignore unconsumed_tail if unused_data is not
# empty.
self._unconsumed = self._decompress.unused_data
self.reset()
if size >= 0 and len(data) == size:
# data is filled. Don't call decompress again.
break
else:
# Re-invoke Decompress.decompress to try to decompress all
# available bytes before invoking read which blocks until
# any new byte is available.
continue
else:
# Here, since unused_data is empty, even if unconsumed_tail is
# not empty, bytes of requested length are already in data. We
# don't have to "continue" here.
break
if data:
self._logger.debug('Decompressed %r', data)
return data
def append(self, data):
self._logger.debug('Appended %r', data)
self._unconsumed += data
def reset(self):
self._logger.debug('Reset')
self._decompress = zlib.decompressobj(-self._window_bits)
# Compresses/decompresses given octets using the method introduced in RFC1979.
class _RFC1979Deflater(object):
"""A compressor class that applies DEFLATE to given byte sequence and
flushes using the algorithm described in the RFC1979 section 2.1.
"""
def __init__(self, window_bits, no_context_takeover):
self._deflater = None
if window_bits is None:
window_bits = zlib.MAX_WBITS
self._window_bits = window_bits
self._no_context_takeover = no_context_takeover
def filter(self, bytes, end=True, bfinal=False):
if self._deflater is None:
self._deflater = _Deflater(self._window_bits)
if bfinal:
result = self._deflater.compress_and_finish(bytes)
# Add a padding block with BFINAL = 0 and BTYPE = 0.
result = result + chr(0)
self._deflater = None
return result
result = self._deflater.compress_and_flush(bytes)
if end:
# Strip last 4 octets which is LEN and NLEN field of a
# non-compressed block added for Z_SYNC_FLUSH.
result = result[:-4]
if self._no_context_takeover and end:
self._deflater = None
return result
class _RFC1979Inflater(object):
"""A decompressor class for byte sequence compressed and flushed following
the algorithm described in the RFC1979 section 2.1.
"""
def __init__(self, window_bits=zlib.MAX_WBITS):
self._inflater = _Inflater(window_bits)
def filter(self, bytes):
# Restore stripped LEN and NLEN field of a non-compressed block added
# for Z_SYNC_FLUSH.
self._inflater.append(bytes + '\x00\x00\xff\xff')
return self._inflater.decompress(-1)
class DeflateSocket(object):
"""A wrapper class for socket object to intercept send and recv to perform
deflate compression and decompression transparently.
"""
# Size of the buffer passed to recv to receive compressed data.
_RECV_SIZE = 4096
def __init__(self, socket):
self._socket = socket
self._logger = get_class_logger(self)
self._deflater = _Deflater(zlib.MAX_WBITS)
self._inflater = _Inflater(zlib.MAX_WBITS)
def recv(self, size):
"""Receives data from the socket specified on the construction up
to the specified size. Once any data is available, returns it even
if it's smaller than the specified size.
"""
# TODO(tyoshino): Allow call with size=0. It should block until any
# decompressed data is available.
if size <= 0:
raise Exception('Non-positive size passed')
while True:
data = self._inflater.decompress(size)
if len(data) != 0:
return data
read_data = self._socket.recv(DeflateSocket._RECV_SIZE)
if not read_data:
return ''
self._inflater.append(read_data)
def sendall(self, bytes):
self.send(bytes)
def send(self, bytes):
self._socket.sendall(self._deflater.compress_and_flush(bytes))
return len(bytes)
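# A minimal round-trip sketch (not part of the original module; Python 2, to
# match the module itself):
if __name__ == '__main__':
    masker = RepeatedXorMasker('\x01\x02\x03\x04')
    masked = masker.mask('hello websocket')
    # XOR with the same key stream is its own inverse
    assert RepeatedXorMasker('\x01\x02\x03\x04').mask(masked) == 'hello websocket'
    deflater = _Deflater(zlib.MAX_WBITS)
    inflater = _Inflater(zlib.MAX_WBITS)
    inflater.append(deflater.compress_and_finish('hello websocket'))
    assert inflater.decompress(-1) == 'hello websocket'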
# vi:sts=4 sw=4 et
|
pastephens/pysal
|
refs/heads/master
|
pysal/examples/__init__.py
|
10
|
import os
import pysal
__all__ = ['get_path']
base = os.path.split(pysal.__file__)[0]
example_dir = os.path.join(base, "examples")
dirs = next(os.walk(example_dir))[1]
file_2_dir = {}
for d in dirs:
tmp = os.path.join(example_dir, d)
files_in_tmp = os.listdir(tmp)
for f in files_in_tmp:
file_2_dir[f] = tmp
def get_path(example_name):
"""
Get path of PySAL or example folders
"""
if type(example_name) != str:
try:
example_name = str(example_name)
except:
raise KeyError('Cannot coerce requested example name to string')
if example_name in dirs:
return os.path.join(example_dir, example_name, example_name)
elif example_name in file_2_dir:
d = file_2_dir[example_name]
return os.path.join(d, example_name)
elif example_name == "":
return os.path.join(base, 'examples', example_name)
else:
raise KeyError(example_name + ' not found in PySAL built-in examples.')
def available(verbose=False):
"""
List available datasets in pysal.examples
"""
base = get_path('')
examples = [os.path.join(get_path(''), d) for d in os.listdir(base)]
examples = [d for d in examples if os.path.isdir(d) and '__' not in d]
if not verbose:
return [os.path.split(d)[-1] for d in examples]
examples = [os.path.join(dty, 'README.md') for dty in examples]
descs = [_read_example(path) for path in examples]
return [{desc['name']:desc['description'] for desc in descs}]
def _read_example(pth):
try:
with open(pth, 'r') as io:
title = io.readline().strip('\n')
io.readline() # titling
io.readline() # pad
short = io.readline().strip('\n')
io.readline() # subtitling
io.readline() # pad
rest = io.readlines()
rest = [l.strip('\n') for l in rest if l.strip('\n') != '']
d = {'name': title, 'description': short, 'explanation': rest}
except IOError:
basename = os.path.split(pth)[-2]
dirname = os.path.split(basename)[-1]
d = {'name': dirname, 'description': None, 'explanation': None}
return d
def explain(name): # would be nice to use pandas for display here
"""
Explain a dataset by name
"""
path = os.path.split(pysal.examples.get_path(name))[0]
fpath = os.path.join(path, 'README.md')
return _read_example(fpath)
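# A minimal usage sketch (not part of the original module; assumes pysal is
# installed with its bundled example data, e.g. the classic columbus set):
#
#   import pysal.examples as ex
#   ex.get_path('columbus.shp')  # absolute path to a bundled example file
#   ex.available()               # names of the example dataset directories
#   ex.explain('columbus')       # {'name': ..., 'description': ..., ...}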
|
wking/pycalendar
|
refs/heads/master
|
pycalendar/property/timezone.py
|
1
|
## RFC 5545, section 3.8.3 (Time Zone Component Properties)
### RFC 5545, section 3.8.3.1 (Time Zone Identifier)
### RFC 5545, section 3.8.3.2 (Time Zone Name)
### RFC 5545, section 3.8.3.3 (Time Zone Offset From)
### RFC 5545, section 3.8.3.4 (Time Zone Offset To)
### RFC 5545, section 3.8.3.5 (Time Zone URL)
|
AnotherIvan/calibre
|
refs/heads/master
|
src/calibre/utils/ipc/server.py
|
11
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import sys, os, cPickle, time, tempfile, errno
from math import ceil
from threading import Thread, RLock
from Queue import Queue, Empty
from multiprocessing.connection import Listener, arbitrary_address
from collections import deque
from binascii import hexlify
from calibre.utils.ipc import eintr_retry_call
from calibre.utils.ipc.launch import Worker
from calibre.utils.ipc.worker import PARALLEL_FUNCS
from calibre import detect_ncpus as cpu_count
from calibre.constants import iswindows, DEBUG, islinux
from calibre.ptempfile import base_dir
_counter = 0
class ConnectedWorker(Thread):
def __init__(self, worker, conn, rfile):
Thread.__init__(self)
self.daemon = True
self.conn = conn
self.worker = worker
self.notifications = Queue()
self._returncode = 'dummy'
self.killed = False
self.log_path = worker.log_path
self.rfile = rfile
self.close_log_file = getattr(worker, 'close_log_file', None)
def start_job(self, job):
notification = PARALLEL_FUNCS[job.name][-1] is not None
eintr_retry_call(self.conn.send, (job.name, job.args, job.kwargs, job.description))
if notification:
self.start()
else:
self.conn.close()
self.job = job
def run(self):
while True:
try:
x = eintr_retry_call(self.conn.recv)
self.notifications.put(x)
except BaseException:
break
try:
self.conn.close()
except BaseException:
pass
def kill(self):
self.killed = True
try:
self.worker.kill()
except BaseException:
pass
@property
def is_alive(self):
return not self.killed and self.worker.is_alive
@property
def returncode(self):
if self._returncode != 'dummy':
return self._returncode
r = self.worker.returncode
if self.killed and r is None:
self._returncode = 1
return 1
if r is not None:
self._returncode = r
return r
class CriticalError(Exception):
pass
_name_counter = 0
if islinux:
import fcntl
class LinuxListener(Listener):
def __init__(self, *args, **kwargs):
Listener.__init__(self, *args, **kwargs)
# multiprocessing tries to call unlink even on abstract
# named sockets, prevent it from doing so.
self._listener._unlink.cancel()
# Prevent child processes from inheriting this socket
            # If we don't do this, child processes not created by calibre will
            # inherit this socket, preventing the calibre GUI from being
            # restarted.
# Examples of such processes are external viewers launched by Qt
# using openUrl().
fd = self._listener._socket.fileno()
old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
def close(self):
# To ensure that the socket is released, we have to call
# shutdown() not close(). This is needed to allow calibre to
# restart using the same socket address.
import socket
self._listener._socket.shutdown(socket.SHUT_RDWR)
self._listener._socket.close()
def accept(self, *args, **kwargs):
ans = Listener.accept(self, *args, **kwargs)
fd = ans.fileno()
old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
return ans
def create_listener(authkey, backlog=4):
# Use abstract named sockets on linux to avoid creating unnecessary temp files
global _name_counter
prefix = u'\0calibre-ipc-listener-%d-%%d' % os.getpid()
while True:
_name_counter += 1
address = (prefix % _name_counter).encode('ascii')
try:
l = LinuxListener(address=address, authkey=authkey, backlog=backlog)
return address, l
except EnvironmentError as err:
if err.errno == errno.EADDRINUSE:
continue
raise
else:
def create_listener(authkey, backlog=4):
address = arbitrary_address('AF_PIPE' if iswindows else 'AF_UNIX')
if iswindows and address[1] == ':':
address = address[2:]
listener = Listener(address=address, authkey=authkey, backlog=backlog)
return address, listener
class Server(Thread):
def __init__(self, notify_on_job_done=lambda x: x, pool_size=None,
limit=sys.maxint, enforce_cpu_limit=True):
Thread.__init__(self)
self.daemon = True
global _counter
self.id = _counter+1
_counter += 1
if enforce_cpu_limit:
limit = min(limit, cpu_count())
self.pool_size = limit if pool_size is None else pool_size
self.notify_on_job_done = notify_on_job_done
self.auth_key = os.urandom(32)
self.address, self.listener = create_listener(self.auth_key, backlog=4)
self.add_jobs_queue, self.changed_jobs_queue = Queue(), Queue()
self.kill_queue = Queue()
self.waiting_jobs = []
self.workers = deque()
self.launched_worker_count = 0
self._worker_launch_lock = RLock()
self.start()
def launch_worker(self, gui=False, redirect_output=None, job_name=None):
start = time.time()
with self._worker_launch_lock:
self.launched_worker_count += 1
id = self.launched_worker_count
fd, rfile = tempfile.mkstemp(prefix=u'ipc_result_%d_%d_'%(self.id, id),
dir=base_dir(), suffix=u'.pickle')
os.close(fd)
if redirect_output is None:
redirect_output = not gui
env = {
'CALIBRE_WORKER_ADDRESS' : hexlify(cPickle.dumps(self.listener.address, -1)),
'CALIBRE_WORKER_KEY' : hexlify(self.auth_key),
'CALIBRE_WORKER_RESULT' : hexlify(rfile.encode('utf-8')),
}
cw = self.do_launch(env, gui, redirect_output, rfile, job_name=job_name)
if isinstance(cw, basestring):
raise CriticalError('Failed to launch worker process:\n'+cw)
if DEBUG:
print 'Worker Launch took:', time.time() - start
return cw
def do_launch(self, env, gui, redirect_output, rfile, job_name=None):
w = Worker(env, gui=gui, job_name=job_name)
try:
w(redirect_output=redirect_output)
conn = eintr_retry_call(self.listener.accept)
if conn is None:
raise Exception('Failed to launch worker process')
except BaseException:
try:
w.kill()
except:
pass
import traceback
return traceback.format_exc()
return ConnectedWorker(w, conn, rfile)
def add_job(self, job):
job.done2 = self.notify_on_job_done
self.add_jobs_queue.put(job)
def run_job(self, job, gui=True, redirect_output=False):
w = self.launch_worker(gui=gui, redirect_output=redirect_output, job_name=getattr(job, 'name', None))
w.start_job(job)
def run(self):
while True:
try:
job = self.add_jobs_queue.get(True, 0.2)
if job is None:
break
self.waiting_jobs.insert(0, job)
except Empty:
pass
# Get notifications from worker process
for worker in self.workers:
while True:
try:
n = worker.notifications.get_nowait()
worker.job.notifications.put(n)
self.changed_jobs_queue.put(worker.job)
except Empty:
break
# Remove finished jobs
for worker in [w for w in self.workers if not w.is_alive]:
try:
worker.close_log_file()
except:
pass
self.workers.remove(worker)
job = worker.job
if worker.returncode != 0:
job.failed = True
job.returncode = worker.returncode
elif os.path.exists(worker.rfile):
try:
job.result = cPickle.load(open(worker.rfile, 'rb'))
os.remove(worker.rfile)
except:
pass
job.duration = time.time() - job.start_time
self.changed_jobs_queue.put(job)
# Start waiting jobs
sj = self.suitable_waiting_job()
if sj is not None:
job = self.waiting_jobs.pop(sj)
job.start_time = time.time()
if job.kill_on_start:
job.duration = 0.0
job.returncode = 1
job.killed = job.failed = True
job.result = None
else:
worker = self.launch_worker()
worker.start_job(job)
self.workers.append(worker)
job.log_path = worker.log_path
self.changed_jobs_queue.put(job)
while True:
try:
j = self.kill_queue.get_nowait()
self._kill_job(j)
except Empty:
break
def suitable_waiting_job(self):
available_workers = self.pool_size - len(self.workers)
for worker in self.workers:
job = worker.job
if job.core_usage == -1:
available_workers = 0
elif job.core_usage > 1:
available_workers -= job.core_usage - 1
if available_workers < 1:
return None
for i, job in enumerate(self.waiting_jobs):
if job.core_usage == -1:
if available_workers >= self.pool_size:
return i
elif job.core_usage <= available_workers:
return i
def kill_job(self, job):
self.kill_queue.put(job)
def killall(self):
for worker in self.workers:
self.kill_queue.put(worker.job)
def _kill_job(self, job):
if job.start_time is None:
job.kill_on_start = True
return
for worker in self.workers:
if job is worker.job:
worker.kill()
job.killed = True
break
def split(self, tasks):
'''
Split a list into a list of sub lists, with the number of sub lists being
no more than the number of workers this server supports. Each sublist contains
2-tuples of the form (i, x) where x is an element from the original list
and i is the index of the element x in the original list.
'''
ans, count, pos = [], 0, 0
delta = int(ceil(len(tasks)/float(self.pool_size)))
while count < len(tasks):
section = []
for t in tasks[pos:pos+delta]:
section.append((count, t))
count += 1
ans.append(section)
pos += delta
return ans
def close(self):
try:
self.add_jobs_queue.put(None)
except:
pass
try:
self.listener.close()
except:
pass
time.sleep(0.2)
for worker in list(self.workers):
try:
worker.kill()
except:
pass
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
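# A worked sketch of Server.split (illustrative only): with pool_size == 2 and
# tasks == ['a', 'b', 'c'], delta == int(ceil(3 / 2.0)) == 2, so the result is
# [[(0, 'a'), (1, 'b')], [(2, 'c')]]: index-tagged sublists, one per worker.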
|
masschallenge/impact-api
|
refs/heads/development
|
web/impact/impact/tests/test_program_family_list_view.py
|
1
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
import json
from jsonschema import Draft4Validator
from django.urls import reverse
from impact.tests.factories import ProgramFamilyFactory
from impact.tests.api_test_case import APITestCase
from impact.tests.test_program_family_detail_view import (
PROGRAM_FAMILY_GET_FIELDS,
)
from impact.tests.utils import assert_fields
from impact.v1.views import ProgramFamilyListView
class TestProgramFamilyListView(APITestCase):
url = reverse(ProgramFamilyListView.view_name)
def test_get(self):
count = 5
program_families = ProgramFamilyFactory.create_batch(count)
with self.login(email=self.basic_user().email):
response = self.client.get(self.url)
assert response.data["count"] == count
assert all([ProgramFamilyListView.serialize(program_family)
in response.data["results"]
for program_family in program_families])
def test_options(self):
with self.login(email=self.basic_user().email):
response = self.client.options(self.url)
assert response.status_code == 200
results = response.data["actions"]["GET"]["properties"]["results"]
get_options = results["item"]["properties"]
assert_fields(PROGRAM_FAMILY_GET_FIELDS, get_options)
def test_options_against_get(self):
with self.login(email=self.basic_user().email):
options_response = self.client.options(self.url)
get_response = self.client.get(self.url)
schema = options_response.data["actions"]["GET"]
validator = Draft4Validator(schema)
assert validator.is_valid(json.loads(get_response.content))
|
synthicity/activitysim
|
refs/heads/master
|
activitysim/core/mem.py
|
2
|
# ActivitySim
# See full license in LICENSE.txt.
from __future__ import (absolute_import, division, print_function, )
from future.standard_library import install_aliases
install_aliases() # noqa: E402
import time
import datetime
import psutil
import logging
import gc
from activitysim.core import config
logger = logging.getLogger(__name__)
MEM = {}
HWM = {}
DEFAULT_TICK_LEN = 30
def force_garbage_collect():
gc.collect()
def GB(bytes):
return (bytes / (1024 * 1024 * 1024.0))
def init_trace(tick_len=None, file_name="mem.csv"):
MEM['tick'] = 0
if file_name is not None:
MEM['file_name'] = file_name
if tick_len is None:
MEM['tick_len'] = DEFAULT_TICK_LEN
else:
MEM['tick_len'] = tick_len
logger.info("init_trace file_name %s" % file_name)
def trace_hwm(tag, value, timestamp, label):
hwm = HWM.setdefault(tag, {})
if value > hwm.get('mark', 0):
hwm['mark'] = value
hwm['timestamp'] = timestamp
hwm['label'] = label
def log_hwm():
for tag in HWM:
hwm = HWM[tag]
logger.info("high water mark %s: %.2f timestamp: %s label: %s" %
(tag, hwm['mark'], hwm['timestamp'], hwm['label']))
with config.open_log_file(MEM['file_name'], 'a') as log_file:
for tag in HWM:
hwm = HWM[tag]
print("high water mark %s: %.2f timestamp: %s label: %s" %
(tag, hwm['mark'], hwm['timestamp'], hwm['label']), file=log_file)
def trace_memory_info(event=''):
if not MEM:
return
last_tick = MEM['tick']
tick_len = MEM['tick_len'] or float('inf')
t = time.time()
if (t - last_tick < tick_len) and not event:
return
vmi = psutil.virtual_memory()
if last_tick == 0:
with config.open_log_file(MEM['file_name'], 'w') as log_file:
print("time,rss,used,available,percent,event", file=log_file)
MEM['tick'] = t
current_process = psutil.Process()
rss = current_process.memory_info().rss
for child in current_process.children(recursive=True):
try:
rss += child.memory_info().rss
except (psutil.NoSuchProcess, psutil.AccessDenied) as e:
pass
timestamp = datetime.datetime.now().strftime("%d/%m/%Y %H:%M:%S")
trace_hwm('rss', GB(rss), timestamp, event)
trace_hwm('used', GB(vmi.used), timestamp, event)
# logger.debug("memory_info: rss: %s available: %s percent: %s"
# % (GB(mi.rss), GB(vmi.available), GB(vmi.percent)))
with config.open_log_file(MEM['file_name'], 'a') as output_file:
print("%s, %.2f, %.2f, %.2f, %s%%, %s" %
(timestamp,
GB(rss),
GB(vmi.used),
GB(vmi.available),
vmi.percent,
event), file=output_file)
def get_memory_info():
mi = psutil.Process().memory_info()
# cur_mem = mi.vms
cur_mem = mi.rss
return cur_mem
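# A minimal usage sketch (not part of the original module; file output goes
# through activitysim's config.open_log_file, so it assumes a configured run):
#
#   init_trace(tick_len=10, file_name="mem.csv")
#   trace_memory_info("step_start")  # event forces a row despite the tick gate
#   log_hwm()                        # report rss/used high water marks so far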
|
rhyolight/nupic.research
|
refs/heads/master
|
projects/lateral_pooler/src/sp_wrapper.py
|
4
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2017, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from nupic.algorithms.spatial_pooler import SpatialPooler
import numpy as np
class SpatialPoolerWrapper(SpatialPooler):
"""
This is a wrapper for the spatial pooler.
It just collects more statistics, otherwise behaves the exact same.
"""
def __init__(self, **args):
super(SpatialPoolerWrapper, self).__init__(**args)
n = self._numColumns
inhibitionArea = ((2*self._inhibitionRadius + 1)** self._columnDimensions.size)
inhibitionArea = min(n, inhibitionArea)
density = float(self._numActiveColumnsPerInhArea) / inhibitionArea
self.sparsity = density
self.avgActivityPairs = np.ones((n,n))*(density**2)
np.fill_diagonal(self.avgActivityPairs, density)
def compute(self, inputVector, learn, activeArray):
"""
This method resembles the primary public method of the SpatialPooler class.
"""
super(SpatialPoolerWrapper, self).compute(inputVector, learn, activeArray)
self._updateAvgActivityPairs(activeArray)
def encode(self, X):
d = X.shape[1]
n = self._numColumns
Y = np.zeros((n,d))
for t in range(d):
self.compute(X[:,t], False, Y[:,t])
return Y
def _updateAvgActivityPairs(self, activeArray):
n, m = self.shape
Y = activeArray.reshape((n,1))
beta = 1.0 - 1.0/self._dutyCyclePeriod
# period = self._dutyCyclePeriod
# if (period > self._iterationNum):
# period = self._iterationNum
Q = np.dot(Y, Y.T)
self.avgActivityPairs = beta*self.avgActivityPairs + (1-beta)*Q
# self.avgActivityPairs = self._updateDutyCyclesHelper(
# self.avgActivityPairs,
# Q,
# period)
@property
def code_weight(self):
return self._numActiveColumnsPerInhArea
@property
def feedforward(self):
m = self._numInputs
n = self._numColumns
W = np.zeros((n, m))
for i in range(self._numColumns):
self.getPermanence(i, W[i, :])
return W
@property
def shape(self):
return self._numColumns, self._numInputs
@property
def avg_activity_pairs(self):
return self.avgActivityPairs
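# A minimal usage sketch (hypothetical parameters; not part of the original
# module, and it assumes a working nupic installation):
#
#   sp = SpatialPoolerWrapper(inputDimensions=(100,), columnDimensions=(64,))
#   X = (np.random.rand(100, 50) > 0.9).astype(np.float32)  # 50 binary inputs
#   Y = sp.encode(X)          # one SDR column per input column, no learning
#   sp.avg_activity_pairs     # running estimate of pairwise column activity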
|
MichaelKohler/bedrock
|
refs/heads/master
|
bedrock/externalfiles/models.py
|
18
|
from django.db import models
class ExternalFile(models.Model):
name = models.CharField(max_length=50, primary_key=True)
content = models.TextField()
last_modified = models.DateTimeField(auto_now=True)
|