| repo_name (string, 5–100 chars) | ref (string, 12–67 chars) | path (string, 4–244 chars) | copies (string, 1–8 chars) | content (string, 0–1.05M chars, nullable ⌀) |
|---|---|---|---|---|
| aricchen/openHR | refs/heads/master | openerp/addons/project/project.py | 4 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, date
from lxml import etree
import time
from openerp import SUPERUSER_ID
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.addons.base_status.base_stage import base_stage
from openerp.addons.resource.faces import task as Task
_TASK_STATE = [('draft', 'New'),('open', 'In Progress'),('pending', 'Pending'), ('done', 'Done'), ('cancelled', 'Cancelled')]
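# Task stages are the shared kanban columns for projects; each stage carries a
# workflow state from _TASK_STATE, so moving a task between stages also updates
# its related status.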
class project_task_type(osv.osv):
_name = 'project.task.type'
_description = 'Task Stage'
_order = 'sequence'
_columns = {
'name': fields.char('Stage Name', required=True, size=64, translate=True),
'description': fields.text('Description'),
'sequence': fields.integer('Sequence'),
'case_default': fields.boolean('Default for New Projects',
help="If you check this field, this stage will be proposed by default on each new project. It will not assign this stage to existing projects."),
'project_ids': fields.many2many('project.project', 'project_task_type_rel', 'type_id', 'project_id', 'Projects'),
'state': fields.selection(_TASK_STATE, 'Related Status', required=True,
help="The status of your document is automatically changed according to the selected stage. " \
"For example, if a stage is related to the status 'Close', when your document reaches this stage, it is automatically closed."),
'fold': fields.boolean('Folded by Default',
help="This stage is not visible, for example in the status bar or kanban view, when there are no records in that stage to display."),
}
def _get_default_project_id(self, cr, uid, ctx=None):
if ctx is None:
ctx = {}
proj = ctx.get('default_project_id', False)
if type(proj) is int:
return [proj]
return proj
_defaults = {
'sequence': 1,
'state': 'open',
'fold': False,
'case_default': False,
'project_ids': _get_default_project_id
}
_order = 'sequence'
def short_name(name):
"""Keep the first word(s) of name, so the result stays short
but distinctive"""
if not name: return name
# keep as many whole words as start within the first 7 characters
keep_words = name[:7].strip().split()
return ' '.join(name.split()[:len(keep_words)])
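# For illustration: short_name("Website Redesign Project") returns "Website",
# and short_name("My big project") returns "My big".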
class project(osv.osv):
_name = "project.project"
_description = "Project"
_inherits = {'account.analytic.account': "analytic_account_id",
"mail.alias": "alias_id"}
_inherit = ['mail.thread', 'ir.needaction_mixin']
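# Non-superusers searching with the 'user_preference' context key only see
# projects they manage (analytic account user) or are members of.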
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
if user == SUPERUSER_ID:
return super(project, self).search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
if context and context.get('user_preference'):
cr.execute("""SELECT project.id FROM project_project project
LEFT JOIN account_analytic_account account ON account.id = project.analytic_account_id
LEFT JOIN project_user_rel rel ON rel.project_id = project.id
WHERE (account.user_id = %s or rel.uid = %s)""", (user, user))
return [(r[0]) for r in cr.fetchall()]
return super(project, self).search(cr, user, args, offset=offset, limit=limit, order=order,
context=context, count=count)
def _complete_name(self, cr, uid, ids, name, args, context=None):
res = {}
for m in self.browse(cr, uid, ids, context=context):
res[m.id] = (m.parent_id and (m.parent_id.name + '/') or '') + m.name
return res
def onchange_partner_id(self, cr, uid, ids, part=False, context=None):
partner_obj = self.pool.get('res.partner')
if not part:
return {'value':{}}
val = {}
if 'pricelist_id' in self.fields_get(cr, uid, context=context):
pricelist = partner_obj.read(cr, uid, part, ['property_product_pricelist'], context=context)
pricelist_id = pricelist.get('property_product_pricelist', False) and pricelist.get('property_product_pricelist')[0] or False
val['pricelist_id'] = pricelist_id
return {'value': val}
def _get_projects_from_tasks(self, cr, uid, task_ids, context=None):
tasks = self.pool.get('project.task').browse(cr, uid, task_ids, context=context)
project_ids = [task.project_id.id for task in tasks if task.project_id]
return self.pool.get('project.project')._get_project_and_parents(cr, uid, project_ids, context)
def _get_project_and_parents(self, cr, uid, ids, context=None):
""" return the project ids and all their parent projects """
res = set(ids)
while ids:
cr.execute("""
SELECT DISTINCT parent.id
FROM project_project project, project_project parent, account_analytic_account account
WHERE project.analytic_account_id = account.id
AND parent.analytic_account_id = account.parent_id
AND project.id IN %s
""", (tuple(ids),))
ids = [t[0] for t in cr.fetchall()]
res.update(ids)
return list(res)
def _get_project_and_children(self, cr, uid, ids, context=None):
""" retrieve all children projects of project ids;
return a dictionary mapping each project to its parent project (or None)
"""
res = dict.fromkeys(ids, None)
while ids:
cr.execute("""
SELECT project.id, parent.id
FROM project_project project, project_project parent, account_analytic_account account
WHERE project.analytic_account_id = account.id
AND parent.analytic_account_id = account.parent_id
AND parent.id IN %s
""", (tuple(ids),))
dic = dict(cr.fetchall())
res.update(dic)
ids = dic.keys()
return res
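# Hours are rolled up bottom-up: each project's planned/total/effective hours
# include the tasks of all its child projects (linked through the analytic
# account hierarchy), and progress_rate is effective/total as a percentage.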
def _progress_rate(self, cr, uid, ids, names, arg, context=None):
child_parent = self._get_project_and_children(cr, uid, ids, context)
# compute planned_hours, total_hours, effective_hours specific to each project
cr.execute("""
SELECT project_id, COALESCE(SUM(planned_hours), 0.0),
COALESCE(SUM(total_hours), 0.0), COALESCE(SUM(effective_hours), 0.0)
FROM project_task WHERE project_id IN %s AND state <> 'cancelled'
GROUP BY project_id
""", (tuple(child_parent.keys()),))
# aggregate results into res
res = dict([(id, {'planned_hours':0.0,'total_hours':0.0,'effective_hours':0.0}) for id in ids])
for id, planned, total, effective in cr.fetchall():
# add the values specific to id to all parent projects of id in the result
while id:
if id in ids:
res[id]['planned_hours'] += planned
res[id]['total_hours'] += total
res[id]['effective_hours'] += effective
id = child_parent[id]
# compute progress rates
for id in ids:
if res[id]['total_hours']:
res[id]['progress_rate'] = round(100.0 * res[id]['effective_hours'] / res[id]['total_hours'], 2)
else:
res[id]['progress_rate'] = 0.0
return res
def unlink(self, cr, uid, ids, context=None):
alias_ids = []
mail_alias = self.pool.get('mail.alias')
for proj in self.browse(cr, uid, ids, context=context):
if proj.tasks:
raise osv.except_osv(_('Invalid Action!'),
_('You cannot delete a project containing tasks. You can either delete all the project\'s tasks and then delete the project or simply deactivate the project.'))
elif proj.alias_id:
alias_ids.append(proj.alias_id.id)
res = super(project, self).unlink(cr, uid, ids, context=context)
mail_alias.unlink(cr, uid, alias_ids, context=context)
return res
def _get_attached_docs(self, cr, uid, ids, field_name, arg, context):
res = {}
attachment = self.pool.get('ir.attachment')
task = self.pool.get('project.task')
for id in ids:
project_attachments = attachment.search(cr, uid, [('res_model', '=', 'project.project'), ('res_id', '=', id)], context=context, count=True)
task_ids = task.search(cr, uid, [('project_id', '=', id)], context=context)
task_attachments = attachment.search(cr, uid, [('res_model', '=', 'project.task'), ('res_id', 'in', task_ids)], context=context, count=True)
res[id] = (project_attachments or 0) + (task_attachments or 0)
return res
def _task_count(self, cr, uid, ids, field_name, arg, context=None):
if context is None:
context = {}
res = dict.fromkeys(ids, 0)
ctx = context.copy()
ctx['active_test'] = False
task_ids = self.pool.get('project.task').search(cr, uid, [('project_id', 'in', ids)], context=ctx)
for task in self.pool.get('project.task').browse(cr, uid, task_ids, context):
res[task.project_id.id] += 1
return res
def _get_alias_models(self, cr, uid, context=None):
"""Overriden in project_issue to offer more options"""
return [('project.task', "Tasks")]
def _get_visibility_selection(self, cr, uid, context=None):
""" Overriden in portal_project to offer more options """
return [('public', 'All Users'),
('employees', 'Employees Only'),
('followers', 'Followers Only')]
def attachment_tree_view(self, cr, uid, ids, context):
task_ids = self.pool.get('project.task').search(cr, uid, [('project_id', 'in', ids)])
domain = [
'|',
'&', ('res_model', '=', 'project.project'), ('res_id', 'in', ids),
'&', ('res_model', '=', 'project.task'), ('res_id', 'in', task_ids)
]
res_id = ids and ids[0] or False
return {
'name': _('Attachments'),
'domain': domain,
'res_model': 'ir.attachment',
'type': 'ir.actions.act_window',
'view_id': False,
'view_mode': 'tree,form',
'view_type': 'form',
'limit': 80,
'context': "{'default_res_model': '%s','default_res_id': %d}" % (self._name, res_id)
}
# Lambda indirection method to avoid passing a copy of the overridable method when declaring the field
_alias_models = lambda self, *args, **kwargs: self._get_alias_models(*args, **kwargs)
_visibility_selection = lambda self, *args, **kwargs: self._get_visibility_selection(*args, **kwargs)
_columns = {
'complete_name': fields.function(_complete_name, string="Project Name", type='char', size=250),
'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the project without removing it."),
'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of Projects."),
'analytic_account_id': fields.many2one('account.analytic.account', 'Contract/Analytic', help="Link this project to an analytic account if you need financial management on projects. It enables you to connect projects with budgets, planning, cost and revenue analysis, timesheets on projects, etc.", ondelete="cascade", required=True),
'priority': fields.integer('Sequence', help="Gives the sequence order when displaying the list of projects"),
'members': fields.many2many('res.users', 'project_user_rel', 'project_id', 'uid', 'Project Members',
help="Project members are users who can access the tasks related to this project.", states={'close':[('readonly',True)], 'cancelled':[('readonly',True)]}),
'tasks': fields.one2many('project.task', 'project_id', "Task Activities"),
'planned_hours': fields.function(_progress_rate, multi="progress", string='Planned Time', help="Sum of planned hours of all tasks related to this project and its child projects.",
store = {
'project.project': (_get_project_and_parents, ['tasks', 'parent_id', 'child_ids'], 10),
'project.task': (_get_projects_from_tasks, ['planned_hours', 'remaining_hours', 'work_ids', 'state'], 20),
}),
'effective_hours': fields.function(_progress_rate, multi="progress", string='Time Spent', help="Sum of spent hours of all tasks related to this project and its child projects.",
store = {
'project.project': (_get_project_and_parents, ['tasks', 'parent_id', 'child_ids'], 10),
'project.task': (_get_projects_from_tasks, ['planned_hours', 'remaining_hours', 'work_ids', 'state'], 20),
}),
'total_hours': fields.function(_progress_rate, multi="progress", string='Total Time', help="Sum of total hours of all tasks related to this project and its child projects.",
store = {
'project.project': (_get_project_and_parents, ['tasks', 'parent_id', 'child_ids'], 10),
'project.task': (_get_projects_from_tasks, ['planned_hours', 'remaining_hours', 'work_ids', 'state'], 20),
}),
'progress_rate': fields.function(_progress_rate, multi="progress", string='Progress', type='float', group_operator="avg", help="Percentage of time spent compared to the total time planned on the project's tasks.",
store = {
'project.project': (_get_project_and_parents, ['tasks', 'parent_id', 'child_ids'], 10),
'project.task': (_get_projects_from_tasks, ['planned_hours', 'remaining_hours', 'work_ids', 'state'], 20),
}),
'resource_calendar_id': fields.many2one('resource.calendar', 'Working Time', help="Timetable working hours to adjust the gantt diagram report", states={'close':[('readonly',True)]} ),
'type_ids': fields.many2many('project.task.type', 'project_task_type_rel', 'project_id', 'type_id', 'Tasks Stages', states={'close':[('readonly',True)], 'cancelled':[('readonly',True)]}),
'task_count': fields.function(_task_count, type='integer', string="Open Tasks"),
'color': fields.integer('Color Index'),
'alias_id': fields.many2one('mail.alias', 'Alias', ondelete="cascade", required=True,
help="Internal email associated with this project. Incoming emails are automatically synchronized "
"with Tasks (or optionally Issues if the Issue Tracker module is installed)."),
'alias_model': fields.selection(_alias_models, "Alias Model", select=True, required=True,
help="The kind of document created when an email is received on this project's email alias"),
'privacy_visibility': fields.selection(_visibility_selection, 'Privacy / Visibility', required=True),
'state': fields.selection([('template', 'Template'),('draft','New'),('open','In Progress'), ('cancelled', 'Cancelled'),('pending','Pending'),('close','Closed')], 'Status', required=True,),
'doc_count':fields.function(_get_attached_docs, string="Number of documents attached", type='int')
}
def _get_type_common(self, cr, uid, context):
ids = self.pool.get('project.task.type').search(cr, uid, [('case_default','=',1)], context=context)
return ids
_order = "sequence, id"
_defaults = {
'active': True,
'type': 'contract',
'state': 'open',
'priority': 1,
'sequence': 10,
'type_ids': _get_type_common,
'alias_model': 'project.task',
'privacy_visibility': 'employees',
'alias_domain': False, # always hide alias during creation
}
# TODO: why not use an SQL constraint?
def _check_dates(self, cr, uid, ids, context=None):
for project in self.read(cr, uid, ids, ['date_start', 'date'], context=context):
if project['date_start'] and project['date']:
if project['date_start'] > project['date']:
return False
return True
_constraints = [
(_check_dates, 'Error! Project start date must be earlier than project end date.', ['date_start', 'date'])
]
def set_template(self, cr, uid, ids, context=None):
res = self.setActive(cr, uid, ids, value=False, context=context)
return res
def set_done(self, cr, uid, ids, context=None):
task_obj = self.pool.get('project.task')
task_ids = task_obj.search(cr, uid, [('project_id', 'in', ids), ('state', 'not in', ('cancelled', 'done'))])
task_obj.case_close(cr, uid, task_ids, context=context)
return self.write(cr, uid, ids, {'state':'close'}, context=context)
def set_cancel(self, cr, uid, ids, context=None):
task_obj = self.pool.get('project.task')
task_ids = task_obj.search(cr, uid, [('project_id', 'in', ids), ('state', '!=', 'done')])
task_obj.case_cancel(cr, uid, task_ids, context=context)
return self.write(cr, uid, ids, {'state':'cancelled'}, context=context)
def set_pending(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state':'pending'}, context=context)
def set_open(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state':'open'}, context=context)
def reset_project(self, cr, uid, ids, context=None):
return self.setActive(cr, uid, ids, value=True, context=context)
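# Copying a project duplicates its tasks one by one, then duplicate_task()
# remaps parent/child links so that copies reference each other instead of
# the original tasks.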
def map_tasks(self, cr, uid, old_project_id, new_project_id, context=None):
""" copy and map tasks from old to new project """
if context is None:
context = {}
map_task_id = {}
task_obj = self.pool.get('project.task')
proj = self.browse(cr, uid, old_project_id, context=context)
for task in proj.tasks:
map_task_id[task.id] = task_obj.copy(cr, uid, task.id, {}, context=context)
self.write(cr, uid, [new_project_id], {'tasks':[(6,0, map_task_id.values())]})
task_obj.duplicate_task(cr, uid, map_task_id, context=context)
return True
def copy(self, cr, uid, id, default=None, context=None):
if context is None:
context = {}
if default is None:
default = {}
context['active_test'] = False
default['state'] = 'open'
default['line_ids'] = []
default['tasks'] = []
default.pop('alias_name', None)
default.pop('alias_id', None)
proj = self.browse(cr, uid, id, context=context)
if not default.get('name', False):
default.update(name=_("%s (copy)") % (proj.name))
res = super(project, self).copy(cr, uid, id, default, context)
self.map_tasks(cr,uid,id,res,context)
return res
def duplicate_template(self, cr, uid, ids, context=None):
if context is None:
context = {}
data_obj = self.pool.get('ir.model.data')
result = []
for proj in self.browse(cr, uid, ids, context=context):
parent_id = context.get('parent_id', False)
context.update({'analytic_project_copy': True})
new_date_start = time.strftime('%Y-%m-%d')
new_date_end = False
if proj.date_start and proj.date:
start_date = date(*time.strptime(proj.date_start,'%Y-%m-%d')[:3])
end_date = date(*time.strptime(proj.date,'%Y-%m-%d')[:3])
new_date_end = (datetime(*time.strptime(new_date_start,'%Y-%m-%d')[:3])+(end_date-start_date)).strftime('%Y-%m-%d')
context.update({'copy':True})
new_id = self.copy(cr, uid, proj.id, default = {
'name':_("%s (copy)") % (proj.name),
'state':'open',
'date_start':new_date_start,
'date':new_date_end,
'parent_id':parent_id}, context=context)
result.append(new_id)
child_ids = self.search(cr, uid, [('parent_id','=', proj.analytic_account_id.id)], context=context)
parent_id = self.read(cr, uid, new_id, ['analytic_account_id'])['analytic_account_id'][0]
if child_ids:
self.duplicate_template(cr, uid, child_ids, context={'parent_id': parent_id})
if result and len(result):
res_id = result[0]
form_view_id = data_obj._get_id(cr, uid, 'project', 'edit_project')
form_view = data_obj.read(cr, uid, form_view_id, ['res_id'])
tree_view_id = data_obj._get_id(cr, uid, 'project', 'view_project')
tree_view = data_obj.read(cr, uid, tree_view_id, ['res_id'])
search_view_id = data_obj._get_id(cr, uid, 'project', 'view_project_project_filter')
search_view = data_obj.read(cr, uid, search_view_id, ['res_id'])
return {
'name': _('Projects'),
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'project.project',
'view_id': False,
'res_id': res_id,
'views': [(form_view['res_id'],'form'),(tree_view['res_id'],'tree')],
'type': 'ir.actions.act_window',
'search_view_id': search_view['res_id'],
'nodestroy': True
}
# set active value for a project, its sub projects and its tasks
def setActive(self, cr, uid, ids, value=True, context=None):
task_obj = self.pool.get('project.task')
for proj in self.browse(cr, uid, ids, context=None):
self.write(cr, uid, [proj.id], {'state': value and 'open' or 'template'}, context)
cr.execute('select id from project_task where project_id=%s', (proj.id,))
tasks_id = [x[0] for x in cr.fetchall()]
if tasks_id:
task_obj.write(cr, uid, tasks_id, {'active': value}, context=context)
child_ids = self.search(cr, uid, [('parent_id','=', proj.analytic_account_id.id)])
if child_ids:
self.setActive(cr, uid, child_ids, value, context=None)
return True
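# Scheduling works by generating Python source for the 'faces' scheduling
# library (one Resource class per user, then a Project definition per
# project) which schedule_tasks() exec's and reads back as a Gantt plan.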
def _schedule_header(self, cr, uid, ids, force_members=True, context=None):
context = context or {}
if isinstance(ids, (int, long)):
ids = [ids]
projects = self.browse(cr, uid, ids, context=context)
for project in projects:
if (not project.members) and force_members:
raise osv.except_osv(_('Warning!'),_("You must assign members on the project '%s'!") % (project.name,))
resource_pool = self.pool.get('resource.resource')
result = "from openerp.addons.resource.faces import *\n"
result += "import datetime\n"
for project in self.browse(cr, uid, ids, context=context):
u_ids = [i.id for i in project.members]
if project.user_id and (project.user_id.id not in u_ids):
u_ids.append(project.user_id.id)
for task in project.tasks:
if task.state in ('done','cancelled'):
continue
if task.user_id and (task.user_id.id not in u_ids):
u_ids.append(task.user_id.id)
calendar_id = project.resource_calendar_id and project.resource_calendar_id.id or False
resource_objs = resource_pool.generate_resources(cr, uid, u_ids, calendar_id, context=context)
for key, vals in resource_objs.items():
result +='''
class User_%s(Resource):
efficiency = %s
''' % (key, vals.get('efficiency', False))
result += '''
def Project():
'''
return result
def _schedule_project(self, cr, uid, project, context=None):
resource_pool = self.pool.get('resource.resource')
calendar_id = project.resource_calendar_id and project.resource_calendar_id.id or False
working_days = resource_pool.compute_working_calendar(cr, uid, calendar_id, context=context)
# TODO: check if we need working_..., default values are ok.
puids = [x.id for x in project.members]
if project.user_id:
puids.append(project.user_id.id)
result = """
def Project_%d():
start = \'%s\'
working_days = %s
resource = %s
""" % (
project.id,
project.date_start or time.strftime('%Y-%m-%d'), working_days,
'|'.join(['User_'+str(x) for x in puids])
)
vacation = calendar_id and tuple(resource_pool.compute_vacation(cr, uid, calendar_id, context=context)) or False
if vacation:
result+= """
vacation = %s
""" % ( vacation, )
return result
#TODO: DO Resource allocation and compute availability
def compute_allocation(self, rc, uid, ids, start_date, end_date, context=None):
if context is None:
context = {}
allocation = {}
return allocation
def schedule_tasks(self, cr, uid, ids, context=None):
context = context or {}
if isinstance(ids, (int, long)):
ids = [ids]
projects = self.browse(cr, uid, ids, context=context)
result = self._schedule_header(cr, uid, ids, False, context=context)
for project in projects:
result += self._schedule_project(cr, uid, project, context=context)
result += self.pool.get('project.task')._generate_task(cr, uid, project.tasks, ident=4, context=context)
local_dict = {}
exec result in local_dict
projects_gantt = Task.BalancedProject(local_dict['Project'])
for project in projects:
project_gantt = getattr(projects_gantt, 'Project_%d' % (project.id,))
for task in project.tasks:
if task.state in ('done','cancelled'):
continue
p = getattr(project_gantt, 'Task_%d' % (task.id,))
self.pool.get('project.task').write(cr, uid, [task.id], {
'date_start': p.start.strftime('%Y-%m-%d %H:%M:%S'),
'date_end': p.end.strftime('%Y-%m-%d %H:%M:%S')
}, context=context)
if (not task.user_id) and (p.booked_resource):
self.pool.get('project.task').write(cr, uid, [task.id], {
'user_id': int(p.booked_resource[0].name[5:]),
}, context=context)
return True
# ------------------------------------------------
# OpenChatter methods and notifications
# ------------------------------------------------
def create(self, cr, uid, vals, context=None):
if context is None: context = {}
# Prevent double project creation when 'use_tasks' is checked!
context = dict(context, project_creation_in_progress=True)
mail_alias = self.pool.get('mail.alias')
if not vals.get('alias_id') and vals.get('name', False):
vals.pop('alias_name', None) # prevent errors during copy()
alias_id = mail_alias.create_unique_alias(cr, uid,
# Using '+' allows using subaddressing for those who don't
# have a catchall domain setup.
{'alias_name': "project+"+short_name(vals['name'])},
model_name=vals.get('alias_model', 'project.task'),
context=context)
vals['alias_id'] = alias_id
if vals.get('type', False) not in ('template','contract'):
vals['type'] = 'contract'
project_id = super(project, self).create(cr, uid, vals, context)
mail_alias.write(cr, uid, [vals['alias_id']], {'alias_defaults': {'project_id': project_id} }, context)
return project_id
def write(self, cr, uid, ids, vals, context=None):
# if alias_model has been changed, update alias_model_id accordingly
if vals.get('alias_model'):
model_ids = self.pool.get('ir.model').search(cr, uid, [('model', '=', vals.get('alias_model', 'project.task'))])
vals.update(alias_model_id=model_ids[0])
return super(project, self).write(cr, uid, ids, vals, context=context)
class task(base_stage, osv.osv):
_name = "project.task"
_description = "Task"
_date_name = "date_start"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_track = {
'state': {
'project.mt_task_new': lambda self, cr, uid, obj, ctx=None: obj['state'] in ['new', 'draft'],
'project.mt_task_started': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'open',
'project.mt_task_closed': lambda self, cr, uid, obj, ctx=None: obj['state'] == 'done',
},
'stage_id': {
'project.mt_task_stage': lambda self, cr, uid, obj, ctx=None: obj['state'] not in ['new', 'draft', 'done', 'open'],
},
'kanban_state': { # kanban state: tracked, but only block subtype
'project.mt_task_blocked': lambda self, cr, uid, obj, ctx=None: obj['kanban_state'] == 'blocked',
},
}
def _get_default_partner(self, cr, uid, context=None):
""" Override of base_stage to add project specific behavior """
project_id = self._get_default_project_id(cr, uid, context)
if project_id:
project = self.pool.get('project.project').browse(cr, uid, project_id, context=context)
if project and project.partner_id:
return project.partner_id.id
return super(task, self)._get_default_partner(cr, uid, context=context)
def _get_default_project_id(self, cr, uid, context=None):
""" Gives default section by checking if present in the context """
return (self._resolve_project_id_from_context(cr, uid, context=context) or False)
def _get_default_stage_id(self, cr, uid, context=None):
""" Gives default stage_id """
project_id = self._get_default_project_id(cr, uid, context=context)
return self.stage_find(cr, uid, [], project_id, [('state', '=', 'draft')], context=context)
def _resolve_project_id_from_context(self, cr, uid, context=None):
""" Returns ID of project based on the value of 'default_project_id'
context key, or None if it cannot be resolved to a single
project.
"""
if context is None:
context = {}
if type(context.get('default_project_id')) in (int, long):
return context['default_project_id']
if isinstance(context.get('default_project_id'), basestring):
project_name = context['default_project_id']
project_ids = self.pool.get('project.project').name_search(cr, uid, name=project_name, context=context)
if len(project_ids) == 1:
return project_ids[0][0]
return None
def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None):
stage_obj = self.pool.get('project.task.type')
order = stage_obj._order
access_rights_uid = access_rights_uid or uid
if read_group_order == 'stage_id desc':
order = '%s desc' % order
search_domain = []
project_id = self._resolve_project_id_from_context(cr, uid, context=context)
if project_id:
search_domain += ['|', ('project_ids', '=', project_id)]
search_domain += [('id', 'in', ids)]
stage_ids = stage_obj._search(cr, uid, search_domain, order=order, access_rights_uid=access_rights_uid, context=context)
result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context)
# restore order of the search
result.sort(lambda x,y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
fold = {}
for stage in stage_obj.browse(cr, access_rights_uid, stage_ids, context=context):
fold[stage.id] = stage.fold or False
return result, fold
def _read_group_user_id(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None):
res_users = self.pool.get('res.users')
project_id = self._resolve_project_id_from_context(cr, uid, context=context)
access_rights_uid = access_rights_uid or uid
if project_id:
ids += self.pool.get('project.project').read(cr, access_rights_uid, project_id, ['members'], context=context)['members']
order = res_users._order
# lame way to allow reverting search, should just work in the trivial case
if read_group_order == 'user_id desc':
order = '%s desc' % order
# de-duplicate and apply search order
ids = res_users._search(cr, uid, [('id','in',ids)], order=order, access_rights_uid=access_rights_uid, context=context)
result = res_users.name_get(cr, access_rights_uid, ids, context=context)
# restore order of the search
result.sort(lambda x,y: cmp(ids.index(x[0]), ids.index(y[0])))
return result, {}
_group_by_full = {
'stage_id': _read_group_stage_ids,
'user_id': _read_group_user_id,
}
def _str_get(self, task, level=0, border='***', context=None):
return border+' '+(task.user_id and task.user_id.name.upper() or '')+(level and (': L'+str(level)) or '')+(' - %.1fh / %.1fh'%(task.effective_hours or 0.0,task.planned_hours))+' '+border+'\n'+ \
border[0]+' '+(task.name or '')+'\n'+ \
(task.description or '')+'\n\n'
# Compute: effective_hours, total_hours, progress
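# effective_hours is the sum of the work lines; total_hours = remaining +
# effective; delay_hours = total - planned; progress is capped at 99.99%
# until the task is done or cancelled, where it is forced to 100%.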
def _hours_get(self, cr, uid, ids, field_names, args, context=None):
res = {}
cr.execute("SELECT task_id, COALESCE(SUM(hours),0) FROM project_task_work WHERE task_id IN %s GROUP BY task_id",(tuple(ids),))
hours = dict(cr.fetchall())
for task in self.browse(cr, uid, ids, context=context):
res[task.id] = {'effective_hours': hours.get(task.id, 0.0), 'total_hours': (task.remaining_hours or 0.0) + hours.get(task.id, 0.0)}
res[task.id]['delay_hours'] = res[task.id]['total_hours'] - task.planned_hours
res[task.id]['progress'] = 0.0
if (task.remaining_hours + hours.get(task.id, 0.0)):
res[task.id]['progress'] = round(min(100.0 * hours.get(task.id, 0.0) / res[task.id]['total_hours'], 99.99),2)
if task.state in ('done','cancelled'):
res[task.id]['progress'] = 100.0
return res
def onchange_remaining(self, cr, uid, ids, remaining=0.0, planned=0.0):
if remaining and not planned:
return {'value':{'planned_hours': remaining}}
return {}
def onchange_planned(self, cr, uid, ids, planned=0.0, effective=0.0):
return {'value':{'remaining_hours': planned - effective}}
def onchange_project(self, cr, uid, id, project_id, context=None):
if project_id:
project = self.pool.get('project.project').browse(cr, uid, project_id, context=context)
if project and project.partner_id:
return {'value': {'partner_id': project.partner_id.id}}
return {}
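# After a project copy, remap each duplicated task's parent/child relations:
# links to tasks that were copied as well are redirected to their copies,
# links to tasks outside the copy are kept unchanged.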
def duplicate_task(self, cr, uid, map_ids, context=None):
for new in map_ids.values():
task = self.browse(cr, uid, new, context)
child_ids = [ ch.id for ch in task.child_ids]
if task.child_ids:
for child in task.child_ids:
if child.id in map_ids.keys():
child_ids.remove(child.id)
child_ids.append(map_ids[child.id])
parent_ids = [ ch.id for ch in task.parent_ids]
if task.parent_ids:
for parent in task.parent_ids:
if parent.id in map_ids.keys():
parent_ids.remove(parent.id)
parent_ids.append(map_ids[parent.id])
# FIXME: why can both the copy and the original appear in these relations?
self.write(cr, uid, new, {'parent_ids':[(6,0,set(parent_ids))], 'child_ids':[(6,0, set(child_ids))]})
def copy_data(self, cr, uid, id, default=None, context=None):
if context is None:
context = {}
if default is None:
default = {}
default.update({'work_ids':[], 'date_start': False, 'date_end': False, 'date_deadline': False})
if not default.get('remaining_hours', False):
default['remaining_hours'] = float(self.read(cr, uid, id, ['planned_hours'])['planned_hours'])
default['active'] = True
if not default.get('name', False):
default['name'] = self.browse(cr, uid, id, context=context).name or ''
if not context.get('copy',False):
new_name = _("%s (copy)") % (default.get('name', ''))
default.update({'name':new_name})
return super(task, self).copy_data(cr, uid, id, default, context)
def _is_template(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for task in self.browse(cr, uid, ids, context=context):
res[task.id] = True
if task.project_id:
if task.project_id.active == False or task.project_id.state == 'template':
res[task.id] = False
return res
def _get_task(self, cr, uid, ids, context=None):
result = {}
for work in self.pool.get('project.task.work').browse(cr, uid, ids, context=context):
if work.task_id: result[work.task_id.id] = True
return result.keys()
_columns = {
'active': fields.function(_is_template, store=True, string='Not a Template Task', type='boolean', help="This field is computed automatically and has the same behavior as the boolean 'active' field: if the task is linked to a template or deactivated project, it will be hidden unless specifically requested."),
'name': fields.char('Task Summary', size=128, required=True, select=True),
'description': fields.text('Description'),
'priority': fields.selection([('4','Very Low'), ('3','Low'), ('2','Medium'), ('1','Important'), ('0','Very important')], 'Priority', select=True),
'sequence': fields.integer('Sequence', select=True, help="Gives the sequence order when displaying a list of tasks."),
'stage_id': fields.many2one('project.task.type', 'Stage', track_visibility='onchange',
domain="['&', ('fold', '=', False), ('project_ids', '=', project_id)]"),
'state': fields.related('stage_id', 'state', type="selection", store=True,
selection=_TASK_STATE, string="Status", readonly=True,
help="The status is set to 'Draft' when a case is created. "
"If the case is in progress, the status is set to 'Open'. "
"When the case is over, the status is set to 'Done'. "
"If the case needs to be reviewed, the status is set to 'Pending'."),
'categ_ids': fields.many2many('project.category', string='Tags'),
'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready for next stage')], 'Kanban State',
track_visibility='onchange',
help="A task's kanban state indicates special situations affecting it:\n"
" * Normal is the default situation\n"
" * Blocked indicates something is preventing the progress of this task\n"
" * Ready for next stage indicates the task is ready to be pulled to the next stage",
readonly=True, required=False),
'create_date': fields.datetime('Create Date', readonly=True, select=True),
'write_date': fields.datetime('Last Modification Date', readonly=True, select=True), #not displayed in the view but it might be useful with base_action_rule module (and it needs to be defined first for that)
'date_start': fields.datetime('Starting Date',select=True),
'date_end': fields.datetime('Ending Date',select=True),
'date_deadline': fields.date('Deadline',select=True),
'project_id': fields.many2one('project.project', 'Project', ondelete='set null', select="1", track_visibility='onchange'),
'parent_ids': fields.many2many('project.task', 'project_task_parent_rel', 'task_id', 'parent_id', 'Parent Tasks'),
'child_ids': fields.many2many('project.task', 'project_task_parent_rel', 'parent_id', 'task_id', 'Delegated Tasks'),
'notes': fields.text('Notes'),
'planned_hours': fields.float('Initially Planned Hours', help='Estimated time to do the task, usually set by the project manager when the task is in draft state.'),
'effective_hours': fields.function(_hours_get, string='Hours Spent', multi='hours', help="Computed using the sum of the task work done.",
store = {
'project.task': (lambda self, cr, uid, ids, c={}: ids, ['work_ids', 'remaining_hours', 'planned_hours'], 10),
'project.task.work': (_get_task, ['hours'], 10),
}),
'remaining_hours': fields.float('Remaining Hours', digits=(16,2), help="Total remaining time, can be re-estimated periodically by the assignee of the task."),
'total_hours': fields.function(_hours_get, string='Total', multi='hours', help="Computed as: Time Spent + Remaining Time.",
store = {
'project.task': (lambda self, cr, uid, ids, c={}: ids, ['work_ids', 'remaining_hours', 'planned_hours'], 10),
'project.task.work': (_get_task, ['hours'], 10),
}),
'progress': fields.function(_hours_get, string='Progress (%)', multi='hours', group_operator="avg", help="If the task reaches a progress of 99.99%, close it if it is finished or re-estimate the remaining time.",
store = {
'project.task': (lambda self, cr, uid, ids, c={}: ids, ['work_ids', 'remaining_hours', 'planned_hours','state'], 10),
'project.task.work': (_get_task, ['hours'], 10),
}),
'delay_hours': fields.function(_hours_get, string='Delay Hours', multi='hours', help="Computed as the difference between the total hours of the task and the hours initially planned by the project manager.",
store = {
'project.task': (lambda self, cr, uid, ids, c={}: ids, ['work_ids', 'remaining_hours', 'planned_hours'], 10),
'project.task.work': (_get_task, ['hours'], 10),
}),
'user_id': fields.many2one('res.users', 'Assigned to', track_visibility='onchange'),
'delegated_user_id': fields.related('child_ids', 'user_id', type='many2one', relation='res.users', string='Delegated To'),
'partner_id': fields.many2one('res.partner', 'Customer'),
'work_ids': fields.one2many('project.task.work', 'task_id', 'Work done'),
'manager_id': fields.related('project_id', 'analytic_account_id', 'user_id', type='many2one', relation='res.users', string='Project Manager'),
'company_id': fields.many2one('res.company', 'Company'),
'id': fields.integer('ID', readonly=True),
'color': fields.integer('Color Index'),
'user_email': fields.related('user_id', 'email', type='char', string='User Email', readonly=True),
}
_defaults = {
'stage_id': _get_default_stage_id,
'project_id': _get_default_project_id,
'kanban_state': 'normal',
'priority': '2',
'progress': 0,
'sequence': 10,
'active': True,
'user_id': lambda obj, cr, uid, ctx=None: uid,
'company_id': lambda self, cr, uid, ctx=None: self.pool.get('res.company')._company_default_get(cr, uid, 'project.task', context=ctx),
'partner_id': lambda self, cr, uid, ctx=None: self._get_default_partner(cr, uid, context=ctx),
}
_order = "priority, sequence, date_start, name, id"
def set_high_priority(self, cr, uid, ids, *args):
"""Set task priority to high
"""
return self.write(cr, uid, ids, {'priority' : '0'})
def set_normal_priority(self, cr, uid, ids, *args):
"""Set task priority to normal
"""
return self.write(cr, uid, ids, {'priority' : '2'})
def _check_recursion(self, cr, uid, ids, context=None):
for id in ids:
visited_branch = set()
visited_node = set()
res = self._check_cycle(cr, uid, id, visited_branch, visited_node, context=context)
if not res:
return False
return True
def _check_cycle(self, cr, uid, id, visited_branch, visited_node, context=None):
if id in visited_branch: #Cycle
return False
if id in visited_node: # already checked; no need to test it again
return True
visited_branch.add(id)
visited_node.add(id)
#visit child using DFS
task = self.browse(cr, uid, id, context=context)
for child in task.child_ids:
res = self._check_cycle(cr, uid, child.id, visited_branch, visited_node, context=context)
if not res:
return False
visited_branch.remove(id)
return True
def _check_dates(self, cr, uid, ids, context=None):
if context is None:
context = {}
obj_task = self.browse(cr, uid, ids[0], context=context)
start = obj_task.date_start or False
end = obj_task.date_end or False
if start and end :
if start > end:
return False
return True
_constraints = [
(_check_recursion, 'Error! You cannot create recursive tasks.', ['parent_ids']),
(_check_dates, 'Error! Task end date must be later than task start date.', ['date_start','date_end'])
]
# Override view according to the company definition
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
users_obj = self.pool.get('res.users')
if context is None: context = {}
# read uom as admin to avoid access rights issues, e.g. for portal/share users,
# this should be safe (no context passed to avoid side-effects)
obj_tm = users_obj.browse(cr, SUPERUSER_ID, uid, context=context).company_id.project_time_mode_id
tm = obj_tm and obj_tm.name or 'Hours'
res = super(task, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar, submenu=submenu)
if tm in ['Hours','Hour']:
return res
eview = etree.fromstring(res['arch'])
def _check_rec(eview):
if eview.attrib.get('widget','') == 'float_time':
eview.set('widget','float')
for child in eview:
_check_rec(child)
return True
_check_rec(eview)
res['arch'] = etree.tostring(eview)
for f in res['fields']:
if 'Hours' in res['fields'][f]['string']:
res['fields'][f]['string'] = res['fields'][f]['string'].replace('Hours',tm)
return res
# ----------------------------------------
# Case management
# ----------------------------------------
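# stage_find builds an OR domain over all candidate project ids (plus the
# extra domain) and returns the first matching stage, ordered by sequence.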
def stage_find(self, cr, uid, cases, section_id, domain=[], order='sequence', context=None):
""" Override of the base.stage method
Parameter of the stage search taken from the lead:
- section_id: if set, stages must belong to this section or
be a default stage; if not set, stages must be default
stages
"""
if isinstance(cases, (int, long)):
cases = self.browse(cr, uid, cases, context=context)
# collect all section_ids
section_ids = []
if section_id:
section_ids.append(section_id)
for task in cases:
if task.project_id:
section_ids.append(task.project_id.id)
search_domain = []
if section_ids:
search_domain = [('|')] * (len(section_ids)-1)
for section_id in section_ids:
search_domain.append(('project_ids', '=', section_id))
search_domain += list(domain)
# perform search, return the first found
stage_ids = self.pool.get('project.task.type').search(cr, uid, search_domain, order=order, context=context)
if stage_ids:
return stage_ids[0]
return False
def _check_child_task(self, cr, uid, ids, context=None):
if context is None:
context = {}
tasks = self.browse(cr, uid, ids, context=context)
for task in tasks:
if task.child_ids:
for child in task.child_ids:
if child.state in ['draft', 'open', 'pending']:
raise osv.except_osv(_("Warning!"), _("Child task still open.\nPlease cancel or complete child task first."))
return True
def action_close(self, cr, uid, ids, context=None):
""" This action closes the task
"""
task_id = len(ids) and ids[0] or False
self._check_child_task(cr, uid, ids, context=context)
if not task_id: return False
return self.do_close(cr, uid, [task_id], context=context)
def do_close(self, cr, uid, ids, context=None):
""" Compatibility when changing to case_close. """
return self.case_close(cr, uid, ids, context=context)
def case_close(self, cr, uid, ids, context=None):
""" Closes Task """
if not isinstance(ids, list): ids = [ids]
for task in self.browse(cr, uid, ids, context=context):
vals = {}
project = task.project_id
for parent_id in task.parent_ids:
if parent_id.state in ('pending','draft'):
reopen = True
for child in parent_id.child_ids:
if child.id != task.id and child.state not in ('done','cancelled'):
reopen = False
if reopen:
self.do_reopen(cr, uid, [parent_id.id], context=context)
# close task
vals['remaining_hours'] = 0.0
if not task.date_end:
vals['date_end'] = fields.datetime.now()
self.case_set(cr, uid, [task.id], 'done', vals, context=context)
return True
def do_reopen(self, cr, uid, ids, context=None):
for task in self.browse(cr, uid, ids, context=context):
project = task.project_id
self.case_set(cr, uid, [task.id], 'open', {}, context=context)
return True
def do_cancel(self, cr, uid, ids, context=None):
""" Compatibility when changing to case_cancel. """
return self.case_cancel(cr, uid, ids, context=context)
def case_cancel(self, cr, uid, ids, context=None):
tasks = self.browse(cr, uid, ids, context=context)
self._check_child_task(cr, uid, ids, context=context)
for task in tasks:
self.case_set(cr, uid, [task.id], 'cancelled', {'remaining_hours': 0.0}, context=context)
return True
def do_open(self, cr, uid, ids, context=None):
""" Compatibility when changing to case_open. """
return self.case_open(cr, uid, ids, context=context)
def case_open(self, cr, uid, ids, context=None):
if not isinstance(ids,list): ids = [ids]
return self.case_set(cr, uid, ids, 'open', {'date_start': fields.datetime.now()}, context=context)
def do_draft(self, cr, uid, ids, context=None):
""" Compatibility when changing to case_draft. """
return self.case_draft(cr, uid, ids, context=context)
def case_draft(self, cr, uid, ids, context=None):
return self.case_set(cr, uid, ids, 'draft', {}, context=context)
def do_pending(self, cr, uid, ids, context=None):
""" Compatibility when changing to case_pending. """
return self.case_pending(cr, uid, ids, context=context)
def case_pending(self, cr, uid, ids, context=None):
return self.case_set(cr, uid, ids, 'pending', {}, context=context)
def _delegate_task_attachments(self, cr, uid, task_id, delegated_task_id, context=None):
attachment = self.pool.get('ir.attachment')
attachment_ids = attachment.search(cr, uid, [('res_model', '=', self._name), ('res_id', '=', task_id)], context=context)
new_attachment_ids = []
for attachment_id in attachment_ids:
new_attachment_ids.append(attachment.copy(cr, uid, attachment_id, default={'res_id': delegated_task_id}, context=context))
return new_attachment_ids
def do_delegate(self, cr, uid, ids, delegate_data=None, context=None):
"""
Delegate tasks to other users.
"""
if delegate_data is None:
delegate_data = {}
assert delegate_data['user_id'], _("Delegated User should be specified")
delegated_tasks = {}
for task in self.browse(cr, uid, ids, context=context):
delegated_task_id = self.copy(cr, uid, task.id, {
'name': delegate_data['name'],
'project_id': delegate_data['project_id'] and delegate_data['project_id'][0] or False,
'user_id': delegate_data['user_id'] and delegate_data['user_id'][0] or False,
'planned_hours': delegate_data['planned_hours'] or 0.0,
'parent_ids': [(6, 0, [task.id])],
'description': delegate_data['new_task_description'] or '',
'child_ids': [],
'work_ids': []
}, context=context)
self._delegate_task_attachments(cr, uid, task.id, delegated_task_id, context=context)
newname = delegate_data['prefix'] or ''
task.write({
'remaining_hours': delegate_data['planned_hours_me'],
'planned_hours': delegate_data['planned_hours_me'] + (task.effective_hours or 0.0),
'name': newname,
}, context=context)
if delegate_data['state'] == 'pending':
self.do_pending(cr, uid, [task.id], context=context)
elif delegate_data['state'] == 'done':
self.do_close(cr, uid, [task.id], context=context)
delegated_tasks[task.id] = delegated_task_id
return delegated_tasks
def set_remaining_time(self, cr, uid, ids, remaining_time=1.0, context=None):
for task in self.browse(cr, uid, ids, context=context):
if (task.state=='draft') or (task.planned_hours==0.0):
self.write(cr, uid, [task.id], {'planned_hours': remaining_time}, context=context)
self.write(cr, uid, ids, {'remaining_hours': remaining_time}, context=context)
return True
def set_remaining_time_1(self, cr, uid, ids, context=None):
return self.set_remaining_time(cr, uid, ids, 1.0, context)
def set_remaining_time_2(self, cr, uid, ids, context=None):
return self.set_remaining_time(cr, uid, ids, 2.0, context)
def set_remaining_time_5(self, cr, uid, ids, context=None):
return self.set_remaining_time(cr, uid, ids, 5.0, context)
def set_remaining_time_10(self, cr, uid, ids, context=None):
return self.set_remaining_time(cr, uid, ids, 10.0, context)
def set_kanban_state_blocked(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'kanban_state': 'blocked'}, context=context)
def set_kanban_state_normal(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'kanban_state': 'normal'}, context=context)
def set_kanban_state_done(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'kanban_state': 'done'}, context=context)
return False
def _store_history(self, cr, uid, ids, context=None):
for task in self.browse(cr, uid, ids, context=context):
self.pool.get('project.task.history').create(cr, uid, {
'task_id': task.id,
'remaining_hours': task.remaining_hours,
'planned_hours': task.planned_hours,
'kanban_state': task.kanban_state,
'type_id': task.stage_id.id,
'state': task.state,
'user_id': task.user_id.id
}, context=context)
return True
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
if vals.get('project_id') and not context.get('default_project_id'):
context['default_project_id'] = vals.get('project_id')
# context: no_log, because the subtype already handles this
create_context = dict(context, mail_create_nolog=True)
task_id = super(task, self).create(cr, uid, vals, context=create_context)
self._store_history(cr, uid, [task_id], context=context)
return task_id
# Overridden to reset the kanban_state to normal whenever
# the stage (stage_id) of the task changes.
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
if vals and not 'kanban_state' in vals and 'stage_id' in vals:
new_stage = vals.get('stage_id')
vals_reset_kstate = dict(vals, kanban_state='normal')
for t in self.browse(cr, uid, ids, context=context):
#TO FIX:Kanban view doesn't raise warning
#stages = [stage.id for stage in t.project_id.type_ids]
#if new_stage not in stages:
#raise osv.except_osv(_('Warning!'), _('Stage is not defined in the project.'))
write_vals = vals_reset_kstate if t.stage_id != new_stage else vals
super(task, self).write(cr, uid, [t.id], write_vals, context=context)
result = True
else:
result = super(task, self).write(cr, uid, ids, vals, context=context)
if ('stage_id' in vals) or ('remaining_hours' in vals) or ('user_id' in vals) or ('state' in vals) or ('kanban_state' in vals):
self._store_history(cr, uid, ids, context=context)
return result
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
self._check_child_task(cr, uid, ids, context=context)
res = super(task, self).unlink(cr, uid, ids, context)
return res
def _generate_task(self, cr, uid, tasks, ident=4, context=None):
context = context or {}
result = ""
ident = ' '*ident
for task in tasks:
if task.state in ('done','cancelled'):
continue
result += '''
%sdef Task_%s():
%s todo = \"%.2fH\"
%s effort = \"%.2fH\"''' % (ident,task.id, ident,task.remaining_hours, ident,task.total_hours)
start = []
for t2 in task.parent_ids:
start.append("up.Task_%s.end" % (t2.id,))
if start:
result += '''
%s start = max(%s)
''' % (ident,','.join(start))
if task.user_id:
result += '''
%s resource = %s
''' % (ident, 'User_'+str(task.user_id.id))
result += "\n"
return result
# ---------------------------------------------------
# Mail gateway
# ---------------------------------------------------
def message_get_reply_to(self, cr, uid, ids, context=None):
""" Override to get the reply_to of the parent project. """
return [task.project_id.message_get_reply_to()[0] if task.project_id else False
for task in self.browse(cr, uid, ids, context=context)]
def message_new(self, cr, uid, msg, custom_values=None, context=None):
""" Override to updates the document according to the email. """
if custom_values is None: custom_values = {}
defaults = {
'name': msg.get('subject'),
'planned_hours': 0.0,
}
defaults.update(custom_values)
return super(task,self).message_new(cr, uid, msg, custom_values=defaults, context=context)
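# Incoming replies may carry simple commands matched by tools.command_re:
# a 'cost' command updates planned_hours, and a 'state' command with one of
# cancel/close/draft/open/pending triggers the corresponding do_* method.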
def message_update(self, cr, uid, ids, msg, update_vals=None, context=None):
""" Override to update the task according to the email. """
if update_vals is None: update_vals = {}
act = False
maps = {
'cost':'planned_hours',
}
for line in msg['body'].split('\n'):
line = line.strip()
res = tools.command_re.match(line)
if res:
match = res.group(1).lower()
field = maps.get(match)
if field:
try:
update_vals[field] = float(res.group(2).lower())
except (ValueError, TypeError):
pass
elif match.lower() == 'state' \
and res.group(2).lower() in ['cancel','close','draft','open','pending']:
act = 'do_%s' % res.group(2).lower()
if act:
getattr(self,act)(cr, uid, ids, context=context)
return super(task,self).message_update(cr, uid, ids, msg, update_vals=update_vals, context=context)
def project_task_reevaluate(self, cr, uid, ids, context=None):
if self.pool.get('res.users').has_group(cr, uid, 'project.group_time_work_estimation_tasks'):
return {
'view_type': 'form',
"view_mode": 'form',
'res_model': 'project.task.reevaluate',
'type': 'ir.actions.act_window',
'target': 'new',
}
return self.do_reopen(cr, uid, ids, context=context)
class project_work(osv.osv):
_name = "project.task.work"
_description = "Project Task Work"
_columns = {
'name': fields.char('Work summary', size=128),
'date': fields.datetime('Date', select="1"),
'task_id': fields.many2one('project.task', 'Task', ondelete='cascade', required=True, select="1"),
'hours': fields.float('Time Spent'),
'user_id': fields.many2one('res.users', 'Done by', required=True, select="1"),
'company_id': fields.related('task_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True)
}
_defaults = {
'user_id': lambda obj, cr, uid, context: uid,
'date': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S')
}
_order = "date desc"
def create(self, cr, uid, vals, *args, **kwargs):
if 'hours' in vals and (not vals['hours']):
vals['hours'] = 0.00
if 'task_id' in vals:
cr.execute('update project_task set remaining_hours=remaining_hours - %s where id=%s', (vals.get('hours',0.0), vals['task_id']))
return super(project_work,self).create(cr, uid, vals, *args, **kwargs)
def write(self, cr, uid, ids, vals, context=None):
if 'hours' in vals and (not vals['hours']):
vals['hours'] = 0.00
if 'hours' in vals:
for work in self.browse(cr, uid, ids, context=context):
cr.execute('update project_task set remaining_hours=remaining_hours - %s + (%s) where id=%s', (vals.get('hours',0.0), work.hours, work.task_id.id))
return super(project_work,self).write(cr, uid, ids, vals, context)
def unlink(self, cr, uid, ids, *args, **kwargs):
for work in self.browse(cr, uid, ids):
cr.execute('update project_task set remaining_hours=remaining_hours + %s where id=%s', (work.hours, work.task_id.id))
return super(project_work,self).unlink(cr, uid, ids,*args, **kwargs)
class account_analytic_account(osv.osv):
_inherit = 'account.analytic.account'
_description = 'Analytic Account'
_columns = {
'use_tasks': fields.boolean('Tasks',help="If checked, this contract will be available in the project menu and you will be able to manage tasks or track issues"),
'company_uom_id': fields.related('company_id', 'project_time_mode_id', type='many2one', relation='product.uom'),
}
def on_change_template(self, cr, uid, ids, template_id, context=None):
res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, context=context)
if template_id and 'value' in res:
template = self.browse(cr, uid, template_id, context=context)
res['value']['use_tasks'] = template.use_tasks
return res
def _trigger_project_creation(self, cr, uid, vals, context=None):
'''
Decide whether a project should be created automatically when an
analytic account is created. Returns True if so, False otherwise.
'''
if context is None: context = {}
return vals.get('use_tasks') and 'project_creation_in_progress' not in context
def project_create(self, cr, uid, analytic_account_id, vals, context=None):
'''
Called at analytic account creation time; creates a project
automatically linked to the account if the conditions are met.
'''
project_pool = self.pool.get('project.project')
project_id = project_pool.search(cr, uid, [('analytic_account_id','=', analytic_account_id)])
if not project_id and self._trigger_project_creation(cr, uid, vals, context=context):
project_values = {
'name': vals.get('name'),
'analytic_account_id': analytic_account_id,
'type': vals.get('type','contract'),
}
return project_pool.create(cr, uid, project_values, context=context)
return False
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
if vals.get('child_ids', False) and context.get('analytic_project_copy', False):
vals['child_ids'] = []
analytic_account_id = super(account_analytic_account, self).create(cr, uid, vals, context=context)
self.project_create(cr, uid, analytic_account_id, vals, context=context)
return analytic_account_id
def write(self, cr, uid, ids, vals, context=None):
vals_for_project = vals.copy()
for account in self.browse(cr, uid, ids, context=context):
if not vals.get('name'):
vals_for_project['name'] = account.name
if not vals.get('type'):
vals_for_project['type'] = account.type
self.project_create(cr, uid, account.id, vals_for_project, context=context)
return super(account_analytic_account, self).write(cr, uid, ids, vals, context=context)
def unlink(self, cr, uid, ids, *args, **kwargs):
project_obj = self.pool.get('project.project')
analytic_ids = project_obj.search(cr, uid, [('analytic_account_id','in',ids)])
if analytic_ids:
raise osv.except_osv(_('Warning!'), _('Please delete the project linked with this account first.'))
return super(account_analytic_account, self).unlink(cr, uid, ids, *args, **kwargs)
class project_project(osv.osv):
_inherit = 'project.project'
_defaults = {
'use_tasks': True
}
class project_task_history(osv.osv):
"""
Tasks History, used for cumulative flow charts (Lean/Agile)
"""
_name = 'project.task.history'
_description = 'History of Tasks'
_rec_name = 'task_id'
_log_access = False
def _get_date(self, cr, uid, ids, name, arg, context=None):
result = {}
for history in self.browse(cr, uid, ids, context=context):
if history.state in ('done','cancelled'):
result[history.id] = history.date
continue
cr.execute('''select
date
from
project_task_history
where
task_id=%s and
id>%s
order by id limit 1''', (history.task_id.id, history.id))
res = cr.fetchone()
result[history.id] = res and res[0] or False
return result
def _get_related_date(self, cr, uid, ids, context=None):
result = []
for history in self.browse(cr, uid, ids, context=context):
cr.execute('''select
id
from
project_task_history
where
task_id=%s and
id<%s
order by id desc limit 1''', (history.task_id.id, history.id))
res = cr.fetchone()
if res:
result.append(res[0])
return result
_columns = {
'task_id': fields.many2one('project.task', 'Task', ondelete='cascade', required=True, select=True),
'type_id': fields.many2one('project.task.type', 'Stage'),
'state': fields.selection([('draft', 'New'), ('cancelled', 'Cancelled'),('open', 'In Progress'),('pending', 'Pending'), ('done', 'Done')], 'Status'),
'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready for next stage')], 'Kanban State', required=False),
'date': fields.date('Date', select=True),
'end_date': fields.function(_get_date, string='End Date', type="date", store={
'project.task.history': (_get_related_date, None, 20)
}),
'remaining_hours': fields.float('Remaining Time', digits=(16,2)),
'planned_hours': fields.float('Planned Time', digits=(16,2)),
'user_id': fields.many2one('res.users', 'Responsible'),
}
_defaults = {
'date': fields.date.context_today,
}
class project_task_history_cumulative(osv.osv):
_name = 'project.task.history.cumulative'
_table = 'project_task_history_cumulative'
_inherit = 'project.task.history'
_auto = False
_columns = {
'end_date': fields.date('End Date'),
'project_id': fields.many2one('project.project', 'Project'),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'project_task_history_cumulative')
cr.execute(""" CREATE VIEW project_task_history_cumulative AS (
SELECT
history.date::varchar||'-'||history.history_id::varchar AS id,
history.date AS end_date,
*
FROM (
SELECT
h.id AS history_id,
h.date+generate_series(0, CAST((coalesce(h.end_date, DATE 'tomorrow')::date - h.date) AS integer)-1) AS date,
h.task_id, h.type_id, h.user_id, h.kanban_state, h.state,
greatest(h.remaining_hours, 1) AS remaining_hours, greatest(h.planned_hours, 1) AS planned_hours,
t.project_id
FROM
project_task_history AS h
JOIN project_task AS t ON (h.task_id = t.id)
) AS history
)
""")
class project_category(osv.osv):
""" Category of project's task (or issue) """
_name = "project.category"
_description = "Category of project's task, issue, ..."
_columns = {
'name': fields.char('Name', size=64, required=True, translate=True),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
will-hart/twitter_sentiment
|
refs/heads/master
|
classifier/run_test_classifier.py
|
1
|
from tweet_classifier import TweetClassifier as TC
def run_test(val, expected):
print "{0} (exp {1}) >> {2}".format(t.classify(val), expected, val)
# create the classifier
t = TC()
# Start by gathering some data.
print "Gathering data..."
t.fetch_data()
# train the classifier
print "Training the classifier..."
t.train("train_data.txt")
# test the classifier
print "Testing the classifier..."
tested = 0
correct = 0
with open('test_data.txt', 'r') as f:
for line in f.readlines():
tested += 1
line = line[:-1]
if t.classify(line[:-1]) == int(line[-1]):
correct += 1
print "Tested {0} tweets, got {1} correct ({2:.0%})".format(tested, correct, correct/float(tested))
|
jumpstarter-io/nova
|
refs/heads/master
|
nova/db/sqlalchemy/migrate_repo/manage.py
|
131
|
#!/usr/bin/env python
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate.versioning.shell import main
if __name__ == '__main__':
main(debug='False', repository='.')
|
jackie6chang/aits
|
refs/heads/master
|
bs2grproxy/bs2grpadmin.py
|
1
|
# BS2 GAE Reverse Proxy
# Russell <yufeiwu@gmail.com>
# Please use wisely
from bs2grpfile import *
from bs2grpconfig import *
from google.appengine.api import users
import webapp2
from google.appengine.api import memcache
import datetime
def redirect(response, url):
response.set_status(307)
response.headers['Location'] = url
def user_check(response):
user = users.get_current_user()
if user and users.is_current_user_admin():
return True
else:
redirect(response, users.create_login_url(BS2GRPAdmin.BASE_URL))
return False
class BS2GRPAdmin(webapp2.RequestHandler):
BASE_URL = r'/_bs2admin/'
def get(self):
if not user_check(self.response):
return
config = BS2GRPConfig.get_config()
f = [users.create_logout_url("/bs2grpabout/")]
f.append(BS2GRPAdminAction.BASE_URL)
f.append(config.target_host)
st_stat = []
st_stat.append(BS2GRPFile.all().filter('status_code >=', 100).filter('status_code <', 200).count())
st_stat.append(BS2GRPFile.all().filter('status_code =', 200).count())
st_stat.append(BS2GRPFile.all().filter('status_code >=', 300).filter('status_code <', 400).count())
st_stat.append(BS2GRPFile.all().filter('status_code >=', 400).filter('status_code <', 500).count())
st_stat.append(BS2GRPFile.all().filter('status_code >=', 500).count())
st_stat.append(reduce(lambda x,y: x+y, st_stat, 0))
f.extend(st_stat)
f.append(BS2GRPAdminAction.BASE_URL)
f.append(BS2GRPAdminAction.BASE_URL)
f.append(BS2GRPAdminAction.BASE_URL)
f = tuple(f)
self.response.set_status(200)
self.response.headers['Content-Type'] = 'text/html'
self.response.out.write(
"""
<html>
<head>
<style>
h1{color:#fefe5c;}
body {font-family: Arial, "Microsoft Yahei", simsun; background-color:#000;color:#ddd;font-size:16px;}
a, a:visited {font-size:12px;color:#fff;padding-left:15px;}
a:hover {color:red;text-decoration:none;}
</style>
</head>
<body>
<center>
<table>
<tr><td width='550px'>
<h1>BS2 GAE Reverse Proxy Admin</h1>
</td><td width='100px'><a href='%s'>Log out</a></td></tr>
</table>
<form action='%s' method='get'>
<table>
<tr><td>Target Host: <input type='text' name='th' value='%s' /></td><td><input type='submit' value='Update' /></td></tr>
</table>
</form>
<p>Cached files:
<table>
<tr><td width='200px'>1xx Status</td><td width='100px'>%d</td></tr>
<tr><td>2xx Status</td><td>%d</td></tr>
<tr><td>3xx Status</td><td>%d</td></tr>
<tr><td>4xx Status</td><td>%d</td></tr>
<tr><td>>=5xx Status</td><td>%d</td></tr>
<tr><td>Total</td><td>%d</td></tr>
</table>
</p>
<table>
<tr><td nowrap width='100px'><a href='%s?fr=1'>Force Check</a></td><td>Force check cache on next client request regardless of CACHE_CHECK configuration</td></tr>
<tr><td nowrap><a href='%s?clear=1'>Clear All</a></td><td>Clear all cached files</td></tr>
<tr><td colspan=2><hr></td></tr>
<tr><td nowrap><a href='%s?rc=1'>Refresh Config</a></td><td>Clear the entity in memcache to refresh the config</td></tr>
</table>
</center>
</body>
</html>
""" % f)
class BS2GRPAdminAction(webapp2.RequestHandler):
BASE_URL = r'/_bs2adminaction/'
def get(self):
if not user_check(self.response):
return
body = ""
force_check = self.request.get('fr')
force_clear = self.request.get('clear')
refresh_config = self.request.get('rc')
target_host = self.request.get('th')
if force_check:
md = datetime.datetime.min
ret = BS2GRPFile.all().filter('last_check >', md).fetch(1000)
count = 0
while ret:
for i in ret:
i.last_check = md
i.put()
count += len(ret)
ret = BS2GRPFile.all().filter('last_check >', md).fetch(1000)
body += "%d files are processed." % count
if force_clear:
ret = BS2GRPFile.all().fetch(1000)
count = 0
while ret:
for i in ret:
i.delete()
count += len(ret)
ret = BS2GRPFile.all().fetch(1000)
body += "%d files are deleted." % count
if target_host:
config = BS2GRPConfig.get_config()
config.target_host = target_host
config.put()
refresh_config = True
body += "Target host is set to %s." % target_host
if refresh_config:
try:
memcache.delete('www')
body += "Config is refreshed."
except Exception, e:
body += "Config is not refreshed. Error happened: " + str(e)
self.response.set_status(200)
self.response.headers['Content-Type'] = 'text/html'
self.response.out.write(
"""
<html>
<head>
<style>
h1{color:#fefe5c;}
body {font-family: Arial, "Microsoft Yahei", simsun; background-color:#000;color:#ddd;font-size:16px;}
a, a:visited {font-size:12px;color:#fff;padding-left:15px;}
a:hover {color:red;text-decoration:none;}
</style>
</head>
<body>
<center>
<h1>BS2 GAE Reverse Proxy Admin</h1>
<p><b>Action Result:</b></p>
<p>%s</p>
<p><a href="%s"><< Back</a></p>
</body>
</html>
""" % (body, BS2GRPAdmin.BASE_URL))
|
YUPlayGodDev/platform_external_skia
|
refs/heads/android-6.0.0
|
tools/skp/page_sets/skia_css3gradients_desktop.py
|
33
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path='data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_css3gradients_desktop.json'
def RunSmoothness(self, action_runner):
action_runner.ScrollElement()
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(15)
class SkiaCss3gradientsDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaCss3gradientsDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_css3gradients_desktop.json')
urls_list = [
# Why: http://code.google.com/p/chromium/issues/detail?id=168448
'https://www.webkit.org/blog/1424/css3-gradients/',
]
for url in urls_list:
self.AddUserStory(SkiaBuildbotDesktopPage(url, self))
|
marcoitur/Freecad_test
|
refs/heads/master
|
src/Mod/TemplatePyMod/PythonQt.py
|
56
|
"""
Examples for customizing the FreeCAD application with PySide facilities.
(c) 2007 Werner Mayer LGPL
"""
__author__ = "Werner Mayer <werner.wm.mayer@gmx.de>"
from PySide import QtCore,QtGui
import FreeCAD,FreeCADGui, __main__
class MainWindow:
def __init__(self):
self.app = QtGui.qApp
self.mw = FreeCADGui.getMainWindow()
self.dock = {}
def setWindowTitle(self, name):
self.mw.setWindowTitle(name)
def addCalendar(self):
d = QtGui.QDockWidget()
d.setWindowTitle("Calendar")
c = QtGui.QCalendarWidget()
d.setWidget(c)
self.mw.addDockWidget(QtCore.Qt.RightDockWidgetArea,d)
self.dock[d] = c
def information(self, title, text):
QtGui.QMessageBox.information(self.mw, title, text)
def warning(self, title, text):
QtGui.QMessageBox.warning(self.mw, title, text)
def critical(self, title, text):
QtGui.QMessageBox.critical(self.mw, title, text)
def question(self, title, text):
QtGui.QMessageBox.question(self.mw, title, text)
def aboutQt(self):
QtGui.QMessageBox.aboutQt(self.mw, self.mw.tr("About Qt"))
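# Illustrative interactive use from the FreeCAD Python console (titles and
# texts are made up for the example):
#   mw = MainWindow()
#   mw.setWindowTitle("FreeCAD -- PythonQt demo")
#   mw.addCalendar()
#   mw.information("Hello", "Calendar docked on the right")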
class PythonQtWorkbench (__main__.Workbench):
"Python Qt workbench object"
Icon = "python"
MenuText = "PySide sandbox"
ToolTip = "Python Qt workbench"
def __init__(self):
self.mw = FreeCADGui.getMainWindow()
self.dock = {}
self.item = []
def information(self):
QtGui.QMessageBox.information(self.mw, "Info", "This is an information")
def warning(self):
QtGui.QMessageBox.warning(self.mw, "Warning", "This is a warning")
def critical(self):
QtGui.QMessageBox.critical(self.mw, "Error", "This is an error")
def Initialize(self):
self.menu = QtGui.QMenu()
self.menu.setTitle("Python Qt")
self.item.append(self.menu.addAction("Test 1"))
self.item.append(self.menu.addAction("Test 2"))
self.item.append(self.menu.addAction("Test 3"))
QtCore.QObject.connect(self.item[0], QtCore.SIGNAL("triggered()"), self.information)
QtCore.QObject.connect(self.item[1], QtCore.SIGNAL("triggered()"), self.warning)
QtCore.QObject.connect(self.item[2], QtCore.SIGNAL("triggered()"), self.critical)
def Activated(self):
self.__title__ = self.mw.windowTitle()
self.mw.setWindowTitle("FreeCAD -- PythonQt")
d = QtGui.QDockWidget()
d.setWindowTitle("Calendar")
c = QtGui.QCalendarWidget()
d.setWidget(c)
self.mw.addDockWidget(QtCore.Qt.RightDockWidgetArea,d)
self.dock[d] = c
bar = self.mw.menuBar()
a=bar.actions()
for i in a:
if i.objectName() == "&Windows":
break
bar.insertMenu(i, self.menu)
self.menu.setTitle("Python Qt")
self.menu.menuAction().setVisible(True)
def Deactivated(self):
self.mw.setWindowTitle(self.__title__)
self.dock.clear()
FreeCADGui.addWorkbench(PythonQtWorkbench)
|
timothsp/where2ate
|
refs/heads/master
|
venv/lib/python3.3/site-packages/pip/req/req_file.py
|
239
|
"""
Requirements file parsing
"""
from __future__ import absolute_import
import os
import re
import shlex
import optparse
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves import filterfalse
import pip
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.exceptions import (RequirementsFileParseError)
from pip.utils import normalize_name
from pip import cmdoptions
__all__ = ['parse_requirements']
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
COMMENT_RE = re.compile(r'(^|\s)+#.*$')
SUPPORTED_OPTIONS = [
cmdoptions.constraints,
cmdoptions.editable,
cmdoptions.requirements,
cmdoptions.no_index,
cmdoptions.index_url,
cmdoptions.find_links,
cmdoptions.extra_index_url,
cmdoptions.allow_external,
cmdoptions.allow_all_external,
cmdoptions.no_allow_external,
cmdoptions.allow_unsafe,
cmdoptions.no_allow_unsafe,
cmdoptions.use_wheel,
cmdoptions.no_use_wheel,
cmdoptions.always_unzip,
cmdoptions.no_binary,
cmdoptions.only_binary,
]
# options to be passed to requirements
SUPPORTED_OPTIONS_REQ = [
cmdoptions.install_options,
cmdoptions.global_options
]
# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
def parse_requirements(filename, finder=None, comes_from=None, options=None,
session=None, constraint=False, wheel_cache=None):
"""Parse a requirements file and yield InstallRequirement instances.
:param filename: Path or url of requirements file.
:param finder: Instance of pip.index.PackageFinder.
:param comes_from: Origin description of requirements.
:param options: Global options.
:param session: Instance of pip.download.PipSession.
:param constraint: If true, parsing a constraint file rather than
requirements file.
:param wheel_cache: Instance of pip.wheel.WheelCache
"""
if session is None:
raise TypeError(
"parse_requirements() missing 1 required keyword argument: "
"'session'"
)
_, content = get_file_content(
filename, comes_from=comes_from, session=session
)
lines = content.splitlines()
lines = ignore_comments(lines)
lines = join_lines(lines)
lines = skip_regex(lines, options)
for line_number, line in enumerate(lines, 1):
req_iter = process_line(line, filename, line_number, finder,
comes_from, options, session, wheel_cache,
constraint=constraint)
for req in req_iter:
yield req
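# Illustrative usage sketch (the session argument is mandatory; the file
# name below is an assumption for the example):
#   from pip.download import PipSession
#   for req in parse_requirements('requirements.txt', session=PipSession()):
#       print(req.name)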
def process_line(line, filename, line_number, finder=None, comes_from=None,
options=None, session=None, wheel_cache=None,
constraint=False):
"""Process a single requirements line; This can result in creating/yielding
requirements, or updating the finder.
For lines that contain requirements, the only options that have an effect
are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
requirement. Other options from SUPPORTED_OPTIONS may be present, but are
ignored.
For lines that do not contain requirements, the only options that have an
effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
be present, but are ignored. These lines may contain multiple options
(although our docs imply only one is supported), and all are parsed and
affect the finder.
:param constraint: If True, parsing a constraints file.
"""
parser = build_parser()
defaults = parser.get_default_values()
defaults.index_url = None
if finder:
# `finder.format_control` will be updated during parsing
defaults.format_control = finder.format_control
args_str, options_str = break_args_options(line)
opts, _ = parser.parse_args(shlex.split(options_str), defaults)
# preserve for the nested code path
line_comes_from = '%s %s (line %s)' % (
'-c' if constraint else '-r', filename, line_number)
# yield a line requirement
if args_str:
isolated = options.isolated_mode if options else False
if options:
cmdoptions.check_install_build_global(options, opts)
# get the options that apply to requirements
req_options = {}
for dest in SUPPORTED_OPTIONS_REQ_DEST:
if dest in opts.__dict__ and opts.__dict__[dest]:
req_options[dest] = opts.__dict__[dest]
yield InstallRequirement.from_line(
args_str, line_comes_from, constraint=constraint,
isolated=isolated, options=req_options, wheel_cache=wheel_cache
)
# yield an editable requirement
elif opts.editables:
isolated = options.isolated_mode if options else False
default_vcs = options.default_vcs if options else None
yield InstallRequirement.from_editable(
opts.editables[0], comes_from=line_comes_from,
constraint=constraint, default_vcs=default_vcs, isolated=isolated,
wheel_cache=wheel_cache
)
# parse a nested requirements file
elif opts.requirements or opts.constraints:
if opts.requirements:
req_path = opts.requirements[0]
nested_constraint = False
else:
req_path = opts.constraints[0]
nested_constraint = True
# original file is over http
if SCHEME_RE.search(filename):
# do a url join so relative paths work
req_path = urllib_parse.urljoin(filename, req_path)
# original file and nested file are paths
elif not SCHEME_RE.search(req_path):
# do a join so relative paths work
req_dir = os.path.dirname(filename)
req_path = os.path.join(req_dir, req_path)
# TODO: Why not use `comes_from='-r {} (line {})'` here as well?
parser = parse_requirements(
req_path, finder, comes_from, options, session,
constraint=nested_constraint, wheel_cache=wheel_cache
)
for req in parser:
yield req
# set finder options
elif finder:
if opts.index_url:
finder.index_urls = [opts.index_url]
if opts.use_wheel is False:
finder.use_wheel = False
pip.index.fmt_ctl_no_use_wheel(finder.format_control)
if opts.no_index is True:
finder.index_urls = []
if opts.allow_all_external:
finder.allow_all_external = opts.allow_all_external
if opts.extra_index_urls:
finder.index_urls.extend(opts.extra_index_urls)
if opts.allow_external:
finder.allow_external |= set(
[normalize_name(v).lower() for v in opts.allow_external])
if opts.allow_unverified:
# Remove after 7.0
finder.allow_unverified |= set(
[normalize_name(v).lower() for v in opts.allow_unverified])
if opts.find_links:
# FIXME: it would be nice to keep track of the source
# of the find_links: support a find-links local path
# relative to a requirements file.
value = opts.find_links[0]
req_dir = os.path.dirname(os.path.abspath(filename))
relative_to_reqs_file = os.path.join(req_dir, value)
if os.path.exists(relative_to_reqs_file):
value = relative_to_reqs_file
finder.find_links.append(value)
def break_args_options(line):
"""Break up the line into an args and options string. We only want to shlex
(and then optparse) the options, not the args. args can contain markers
which are corrupted by shlex.
"""
tokens = line.split(' ')
args = []
options = tokens[:]
for token in tokens:
if token.startswith('-'):
break
else:
args.append(token)
options.pop(0)
return ' '.join(args), ' '.join(options)
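# Example of the split on an assumed requirements line:
#   break_args_options('SomeProject>=1.2 --install-option="--prefix=/opt"')
#   returns ('SomeProject>=1.2', '--install-option="--prefix=/opt"')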
def build_parser():
"""
Return a parser for parsing requirement lines
"""
parser = optparse.OptionParser(add_help_option=False)
option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
for option_factory in option_factories:
option = option_factory()
parser.add_option(option)
# By default optparse sys.exits on parsing errors. We want to wrap
# that in our own exception.
def parser_exit(self, msg):
raise RequirementsFileParseError(msg)
parser.exit = parser_exit
return parser
def join_lines(iterator):
"""
Joins a line ending in '\' with the previous line.
"""
lines = []
for line in iterator:
if not line.endswith('\\'):
if lines:
lines.append(line)
yield ''.join(lines)
lines = []
else:
yield line
else:
lines.append(line.strip('\\'))
# TODO: handle space after '\'.
# TODO: handle '\' on last line.
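# Example, assuming newlines were already stripped from the iterator:
#   list(join_lines(['foo \\', 'bar', 'baz'])) == ['foo bar', 'baz']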
def ignore_comments(iterator):
"""
Strips and filters empty or commented lines.
"""
for line in iterator:
line = COMMENT_RE.sub('', line)
line = line.strip()
if line:
yield line
def skip_regex(lines, options):
"""
Optionally exclude lines that match '--skip-requirements-regex'
"""
skip_regex = options.skip_requirements_regex if options else None
if skip_regex:
lines = filterfalse(re.compile(skip_regex).search, lines)
return lines
|
mitchellrj/touchdown
|
refs/heads/master
|
touchdown/tests/__init__.py
|
2
|
# Copyright 2014 Isotoma Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
acsone/account-financial-tools
|
refs/heads/8.0
|
account_move_line_search_extension/models/res_partner.py
|
4
|
# -*- coding: utf-8 -*-
# Copyright 2009-2016 Noviat.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import models
class ResPartner(models.Model):
_inherit = 'res.partner'
def search(self, cr, uid, args,
offset=0, limit=None, order=None, context=None, count=False):
if context and 'account_move_line_search_extension' in context:
args.extend(['|', ('active', '=', False), ('active', '=', True)])
return super(ResPartner, self).search(
cr, uid, args, offset=offset, limit=limit, order=order,
context=context, count=count)
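# Sketch of the intended effect (the handle and domain below are
# illustrative): with the context key set, inactive partners are included
# in the results:
#   partner_obj.search(cr, uid, [('name', 'ilike', 'acme')],
#       context={'account_move_line_search_extension': 1})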
|
rubyinhell/brython
|
refs/heads/master
|
www/src/Lib/test/test_timeit.py
|
107
|
import timeit
import unittest
import sys
import io
import time
from textwrap import dedent
from test.support import run_unittest
from test.support import captured_stdout
from test.support import captured_stderr
# timeit's default number of iterations.
DEFAULT_NUMBER = 1000000
# timeit's default number of repetitions.
DEFAULT_REPEAT = 3
# XXX: some tests are commented out that would improve the coverage but take a
# long time to run because they test the default number of loops, which is
# large. The tests could be enabled if there was a way to override the default
# number of loops during testing, but this would require changing the signature
# of some functions that use the default as a default argument.
class FakeTimer:
BASE_TIME = 42.0
def __init__(self, seconds_per_increment=1.0):
self.count = 0
self.setup_calls = 0
self.seconds_per_increment=seconds_per_increment
timeit._fake_timer = self
def __call__(self):
return self.BASE_TIME + self.count * self.seconds_per_increment
def inc(self):
self.count += 1
def setup(self):
self.setup_calls += 1
def wrap_timer(self, timer):
"""Records 'timer' and returns self as callable timer."""
self.saved_timer = timer
return self
class TestTimeit(unittest.TestCase):
def tearDown(self):
try:
del timeit._fake_timer
except AttributeError:
pass
def test_reindent_empty(self):
self.assertEqual(timeit.reindent("", 0), "")
self.assertEqual(timeit.reindent("", 4), "")
def test_reindent_single(self):
self.assertEqual(timeit.reindent("pass", 0), "pass")
self.assertEqual(timeit.reindent("pass", 4), "pass")
def test_reindent_multi_empty(self):
self.assertEqual(timeit.reindent("\n\n", 0), "\n\n")
self.assertEqual(timeit.reindent("\n\n", 4), "\n \n ")
def test_reindent_multi(self):
self.assertEqual(timeit.reindent(
"print()\npass\nbreak", 0),
"print()\npass\nbreak")
self.assertEqual(timeit.reindent(
"print()\npass\nbreak", 4),
"print()\n pass\n break")
def test_timer_invalid_stmt(self):
self.assertRaises(ValueError, timeit.Timer, stmt=None)
def test_timer_invalid_setup(self):
self.assertRaises(ValueError, timeit.Timer, setup=None)
fake_setup = "import timeit; timeit._fake_timer.setup()"
fake_stmt = "import timeit; timeit._fake_timer.inc()"
def fake_callable_setup(self):
self.fake_timer.setup()
def fake_callable_stmt(self):
self.fake_timer.inc()
def timeit(self, stmt, setup, number=None):
self.fake_timer = FakeTimer()
t = timeit.Timer(stmt=stmt, setup=setup, timer=self.fake_timer)
kwargs = {}
if number is None:
number = DEFAULT_NUMBER
else:
kwargs['number'] = number
delta_time = t.timeit(**kwargs)
self.assertEqual(self.fake_timer.setup_calls, 1)
self.assertEqual(self.fake_timer.count, number)
self.assertEqual(delta_time, number)
# Takes too long to run in debug build.
#def test_timeit_default_iters(self):
# self.timeit(self.fake_stmt, self.fake_setup)
def test_timeit_zero_iters(self):
self.timeit(self.fake_stmt, self.fake_setup, number=0)
def test_timeit_few_iters(self):
self.timeit(self.fake_stmt, self.fake_setup, number=3)
def test_timeit_callable_stmt(self):
self.timeit(self.fake_callable_stmt, self.fake_setup, number=3)
def test_timeit_callable_stmt_and_setup(self):
self.timeit(self.fake_callable_stmt,
self.fake_callable_setup, number=3)
# Takes too long to run in debug build.
#def test_timeit_function(self):
# delta_time = timeit.timeit(self.fake_stmt, self.fake_setup,
# timer=FakeTimer())
# self.assertEqual(delta_time, DEFAULT_NUMBER)
def test_timeit_function_zero_iters(self):
delta_time = timeit.timeit(self.fake_stmt, self.fake_setup, number=0,
timer=FakeTimer())
self.assertEqual(delta_time, 0)
def repeat(self, stmt, setup, repeat=None, number=None):
self.fake_timer = FakeTimer()
t = timeit.Timer(stmt=stmt, setup=setup, timer=self.fake_timer)
kwargs = {}
if repeat is None:
repeat = DEFAULT_REPEAT
else:
kwargs['repeat'] = repeat
if number is None:
number = DEFAULT_NUMBER
else:
kwargs['number'] = number
delta_times = t.repeat(**kwargs)
self.assertEqual(self.fake_timer.setup_calls, repeat)
self.assertEqual(self.fake_timer.count, repeat * number)
self.assertEqual(delta_times, repeat * [float(number)])
# Takes too long to run in debug build.
#def test_repeat_default(self):
# self.repeat(self.fake_stmt, self.fake_setup)
def test_repeat_zero_reps(self):
self.repeat(self.fake_stmt, self.fake_setup, repeat=0)
def test_repeat_zero_iters(self):
self.repeat(self.fake_stmt, self.fake_setup, number=0)
def test_repeat_few_reps_and_iters(self):
self.repeat(self.fake_stmt, self.fake_setup, repeat=3, number=5)
def test_repeat_callable_stmt(self):
self.repeat(self.fake_callable_stmt, self.fake_setup,
repeat=3, number=5)
def test_repeat_callable_stmt_and_setup(self):
self.repeat(self.fake_callable_stmt, self.fake_callable_setup,
repeat=3, number=5)
# Takes too long to run in debug build.
#def test_repeat_function(self):
# delta_times = timeit.repeat(self.fake_stmt, self.fake_setup,
# timer=FakeTimer())
# self.assertEqual(delta_times, DEFAULT_REPEAT * [float(DEFAULT_NUMBER)])
def test_repeat_function_zero_reps(self):
delta_times = timeit.repeat(self.fake_stmt, self.fake_setup, repeat=0,
timer=FakeTimer())
self.assertEqual(delta_times, [])
def test_repeat_function_zero_iters(self):
delta_times = timeit.repeat(self.fake_stmt, self.fake_setup, number=0,
timer=FakeTimer())
self.assertEqual(delta_times, DEFAULT_REPEAT * [0.0])
def assert_exc_string(self, exc_string, expected_exc_name):
exc_lines = exc_string.splitlines()
self.assertGreater(len(exc_lines), 2)
self.assertTrue(exc_lines[0].startswith('Traceback'))
self.assertTrue(exc_lines[-1].startswith(expected_exc_name))
def test_print_exc(self):
s = io.StringIO()
t = timeit.Timer("1/0")
try:
t.timeit()
except:
t.print_exc(s)
self.assert_exc_string(s.getvalue(), 'ZeroDivisionError')
MAIN_DEFAULT_OUTPUT = "10 loops, best of 3: 1 sec per loop\n"
def run_main(self, seconds_per_increment=1.0, switches=None, timer=None):
if timer is None:
timer = FakeTimer(seconds_per_increment=seconds_per_increment)
if switches is None:
args = []
else:
args = switches[:]
args.append(self.fake_stmt)
# timeit.main() modifies sys.path, so save and restore it.
orig_sys_path = sys.path[:]
with captured_stdout() as s:
timeit.main(args=args, _wrap_timer=timer.wrap_timer)
sys.path[:] = orig_sys_path[:]
return s.getvalue()
def test_main_bad_switch(self):
s = self.run_main(switches=['--bad-switch'])
self.assertEqual(s, dedent("""\
option --bad-switch not recognized
use -h/--help for command line help
"""))
def test_main_seconds(self):
s = self.run_main(seconds_per_increment=5.5)
self.assertEqual(s, "10 loops, best of 3: 5.5 sec per loop\n")
def test_main_milliseconds(self):
s = self.run_main(seconds_per_increment=0.0055)
self.assertEqual(s, "100 loops, best of 3: 5.5 msec per loop\n")
def test_main_microseconds(self):
s = self.run_main(seconds_per_increment=0.0000025, switches=['-n100'])
self.assertEqual(s, "100 loops, best of 3: 2.5 usec per loop\n")
def test_main_fixed_iters(self):
s = self.run_main(seconds_per_increment=2.0, switches=['-n35'])
self.assertEqual(s, "35 loops, best of 3: 2 sec per loop\n")
def test_main_setup(self):
s = self.run_main(seconds_per_increment=2.0,
switches=['-n35', '-s', 'print("CustomSetup")'])
self.assertEqual(s, "CustomSetup\n" * 3 +
"35 loops, best of 3: 2 sec per loop\n")
def test_main_fixed_reps(self):
s = self.run_main(seconds_per_increment=60.0, switches=['-r9'])
self.assertEqual(s, "10 loops, best of 9: 60 sec per loop\n")
def test_main_negative_reps(self):
s = self.run_main(seconds_per_increment=60.0, switches=['-r-5'])
self.assertEqual(s, "10 loops, best of 1: 60 sec per loop\n")
@unittest.skipIf(sys.flags.optimize >= 2, "need __doc__")
def test_main_help(self):
s = self.run_main(switches=['-h'])
# Note: It's not clear that the trailing space was intended as part of
# the help text, but since it's there, check for it.
self.assertEqual(s, timeit.__doc__ + ' ')
def test_main_using_time(self):
fake_timer = FakeTimer()
s = self.run_main(switches=['-t'], timer=fake_timer)
self.assertEqual(s, self.MAIN_DEFAULT_OUTPUT)
self.assertIs(fake_timer.saved_timer, time.time)
def test_main_using_clock(self):
fake_timer = FakeTimer()
s = self.run_main(switches=['-c'], timer=fake_timer)
self.assertEqual(s, self.MAIN_DEFAULT_OUTPUT)
self.assertIs(fake_timer.saved_timer, time.clock)
def test_main_verbose(self):
s = self.run_main(switches=['-v'])
self.assertEqual(s, dedent("""\
10 loops -> 10 secs
raw times: 10 10 10
10 loops, best of 3: 1 sec per loop
"""))
def test_main_very_verbose(self):
s = self.run_main(seconds_per_increment=0.000050, switches=['-vv'])
self.assertEqual(s, dedent("""\
10 loops -> 0.0005 secs
100 loops -> 0.005 secs
1000 loops -> 0.05 secs
10000 loops -> 0.5 secs
raw times: 0.5 0.5 0.5
10000 loops, best of 3: 50 usec per loop
"""))
def test_main_exception(self):
with captured_stderr() as error_stringio:
s = self.run_main(switches=['1/0'])
self.assert_exc_string(error_stringio.getvalue(), 'ZeroDivisionError')
def test_main_exception_fixed_reps(self):
with captured_stderr() as error_stringio:
s = self.run_main(switches=['-n1', '1/0'])
self.assert_exc_string(error_stringio.getvalue(), 'ZeroDivisionError')
def test_main():
run_unittest(TestTimeit)
if __name__ == '__main__':
test_main()
|
ubgarbage/gae-blog
|
refs/heads/master
|
openid/test/test_accept.py
|
75
|
import unittest
import os.path
from openid.yadis import accept
def getTestData():
"""Read the test data off of disk
() -> [(int, str)]
"""
filename = os.path.join(os.path.dirname(__file__), 'data', 'accept.txt')
i = 1
lines = []
for line in file(filename):
lines.append((i, line))
i += 1
return lines
def chunk(lines):
"""Return groups of lines separated by whitespace or comments
[(int, str)] -> [[(int, str)]]
"""
chunks = []
chunk = []
for lineno, line in lines:
stripped = line.strip()
if not stripped or stripped[0] == '#':
if chunk:
chunks.append(chunk)
chunk = []
else:
chunk.append((lineno, stripped))
if chunk:
chunks.append(chunk)
return chunks
def parseLines(chunk):
"""Take the given chunk of lines and turn it into a test data dictionary
[(int, str)] -> {str:(int, str)}
"""
items = {}
for (lineno, line) in chunk:
header, data = line.split(':', 1)
header = header.lower()
items[header] = (lineno, data.strip())
return items
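# Example, assuming a chunk parsed out of accept.txt:
#   parseLines([(3, 'Accept: text/html'), (4, 'Available: text/html')])
#   returns {'accept': (3, 'text/html'), 'available': (4, 'text/html')}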
def parseAvailable(available_text):
"""Parse an Available: line's data
str -> [str]
"""
return [s.strip() for s in available_text.split(',')]
def parseExpected(expected_text):
"""Parse an Expected: line's data
str -> [(str, float)]
"""
expected = []
if expected_text:
for chunk in expected_text.split(','):
chunk = chunk.strip()
mtype, qstuff = chunk.split(';')
mtype = mtype.strip()
assert '/' in mtype
qstuff = qstuff.strip()
q, qstr = qstuff.split('=')
assert q == 'q'
qval = float(qstr)
expected.append((mtype, qval))
return expected
class MatchAcceptTest(unittest.TestCase):
def __init__(self, descr, accept_header, available, expected):
unittest.TestCase.__init__(self)
self.accept_header = accept_header
self.available = available
self.expected = expected
self.descr = descr
def shortDescription(self):
return self.descr
def runTest(self):
accepted = accept.parseAcceptHeader(self.accept_header)
actual = accept.matchTypes(accepted, self.available)
self.failUnlessEqual(self.expected, actual)
def pyUnitTests():
lines = getTestData()
chunks = chunk(lines)
data_sets = map(parseLines, chunks)
cases = []
for data in data_sets:
lnos = []
lno, header = data['accept']
lnos.append(lno)
lno, avail_data = data['available']
lnos.append(lno)
try:
available = parseAvailable(avail_data)
except:
print 'On line', lno
raise
lno, exp_data = data['expected']
lnos.append(lno)
try:
expected = parseExpected(exp_data)
except:
print 'On line', lno
raise
descr = 'MatchAcceptTest for lines %r' % (lnos,)
case = MatchAcceptTest(descr, header, available, expected)
cases.append(case)
return unittest.TestSuite(cases)
if __name__ == '__main__':
runner = unittest.TextTestRunner()
runner.run(pyUnitTests())
|
SUSE/azure-sdk-for-python
|
refs/heads/master
|
azure-mgmt-devtestlabs/azure/mgmt/devtestlabs/models/subnet.py
|
2
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Subnet(Model):
"""Subnet information.
:param resource_id: The resource ID of the subnet.
:type resource_id: str
:param lab_subnet_name: The name of the subnet as seen in the lab.
:type lab_subnet_name: str
:param allow_public_ip: The permission policy of the subnet for allowing
public IP addresses (i.e. Allow, Deny). Possible values include:
'Default', 'Deny', 'Allow'
:type allow_public_ip: str or :class:`UsagePermissionType
<azure.mgmt.devtestlabs.models.UsagePermissionType>`
"""
_attribute_map = {
'resource_id': {'key': 'resourceId', 'type': 'str'},
'lab_subnet_name': {'key': 'labSubnetName', 'type': 'str'},
'allow_public_ip': {'key': 'allowPublicIp', 'type': 'str'},
}
def __init__(self, resource_id=None, lab_subnet_name=None, allow_public_ip=None):
self.resource_id = resource_id
self.lab_subnet_name = lab_subnet_name
self.allow_public_ip = allow_public_ip
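# Minimal usage sketch (all field values are illustrative):
#   subnet = Subnet(
#       resource_id='/subscriptions/xxx/.../subnets/default',
#       lab_subnet_name='labSubnet',
#       allow_public_ip='Allow')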
|
notwin/WeRoBot
|
refs/heads/develop
|
docs/conf.py
|
13
|
# -*- coding: utf-8 -*-
#
# WeRoBot documentation build configuration file, created by
# sphinx-quickstart on Mon Feb 4 16:51:10 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.append(os.path.abspath('_themes'))
import werobot
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'WeRoBot'
copyright = u'2014, whtsky'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
version = release = werobot.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'flask_theme_support.FlaskyStyle'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes']
html_theme = 'flask'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'index': ['sourcelink.html', 'sidebarintro.html', 'searchbox.html'],
'**': ['localtoc.html', 'relations.html', 'sidebarintro.html',
'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'WeRoBotdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'WeRoBot.tex', u'WeRoBot Documentation',
u'whtsky', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'werobot', u'WeRoBot Documentation',
[u'whtsky'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'WeRoBot', u'WeRoBot Documentation',
u'whtsky', 'WeRoBot', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
MarcoForte/PyDAQmx_Helper
|
refs/heads/master
|
pydaqmx_helper/examples/AtoD_MultiChannelSample.py
|
1
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
""" Example program showing how to read multiple
samples from multiple AtoD channels at a given sample rate.
"""
from __future__ import print_function
from pydaqmx_helper.atod import AtoD
myAtoD = AtoD()
myAtoD.addChannels([0, 1, 2])
sample = myAtoD.sampleVoltages(10, 10, [2, 1])
print(sample)
print('Printing just values ')
print(list(sample.values()))
|
hosaka/micropython
|
refs/heads/master
|
tests/basics/bytearray_slice_assign.py
|
18
|
try:
bytearray()[:] = bytearray()
except TypeError:
print("SKIP")
import sys
sys.exit()
# test slices; only the 2-argument version is supported by MicroPython at the moment
x = bytearray(range(10))
# Assignment
l = bytearray(x)
l[1:3] = bytearray([10, 20])
print(l)
l = bytearray(x)
l[1:3] = bytearray([10])
print(l)
l = bytearray(x)
l[1:3] = bytearray()
print(l)
l = bytearray(x)
#del l[1:3]
print(l)
l = bytearray(x)
l[:3] = bytearray([10, 20])
print(l)
l = bytearray(x)
l[:3] = bytearray()
print(l)
l = bytearray(x)
#del l[:3]
print(l)
l = bytearray(x)
l[:-3] = bytearray([10, 20])
print(l)
l = bytearray(x)
l[:-3] = bytearray()
print(l)
l = bytearray(x)
#del l[:-3]
print(l)
# slice assignment that extends the array
b = bytearray(2)
b[2:] = bytearray(10)
print(b)
b = bytearray(10)
b[:-1] = bytearray(500)
print(len(b), b[0], b[-1])
# Assignment of bytes to array slice
b = bytearray(2)
b[1:1] = b"12345"
print(b)
|
maxfischer2781/chainlet
|
refs/heads/master
|
chainlet/primitives/chain.py
|
1
|
from .. import signals
from ..chainsend import lazy_send
from .link import ChainLink
from .compound import CompoundLink
class Chain(CompoundLink):
"""
A group of chainlets that sequentially process each :term:`data chunk`
:param elements: the chainlets making up this chain
:type elements: iterable[:py:class:`ChainLink`]
:note: If ``elements`` contains a :py:class:`~.Chain`, this is flattened
and any sub-elements are directly included in the new :py:class:`~.Chain`.
Slicing a chain guarantees consistency of the sum of parts and the chain.
Linking an ordered, complete sequence of subslices recreates an equivalent chain.
.. code:: python
chain == chain[:i] >> chain[i:]
Also, splitting a chain allows passing values along the parts with equal results.
This is useful if you want to inspect a chain at a specific position.
.. code:: python
chain_result = chain.send(value)
temp_value = chain[:i].send(value)
split_result = chain[i:].send(temp_value)
chain_result == temp_value
:note: Some optimised chainlets may assimilate subsequent chainlets during linking.
The rules for splitting chains still apply, though the actual chain elements
may differ from the provided ones.
"""
__slots__ = ('chain_join', 'chain_fork')
def __new__(cls, elements):
if not any(element.chain_fork or element.chain_join for element in cls._flatten(elements)):
return super(Chain, cls).__new__(cls.chain_types.flat_chain_type)
return super(Chain, cls).__new__(cls.chain_types.base_chain_type)
def __init__(self, elements):
super(Chain, self).__init__(self._flatten(elements))
if elements:
self.chain_fork = self._chain_forks(elements)
self.chain_join = elements[0].chain_join
else:
self.chain_fork = False
self.chain_join = False
@classmethod
def _flatten(cls, elements):
for element in elements:
if not element:
continue
elif isinstance(element, Chain) and not element.chain_types.supersedes(cls.chain_types):
for sub_element in element.elements:
yield sub_element
else:
yield element
@staticmethod
def _chain_forks(elements):
"""Detect whether a sequence of elements leads to a fork of streams"""
# we are only interested in the result, so unwind from the end
for element in reversed(elements):
if element.chain_fork:
return True
elif element.chain_join:
return False
return False
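# Sketch of the rule: unwinding from the end, the nearest fork or join
# decides the result, e.g. with hypothetical elements
#   [fork, plain]       -> True   (the stream is still forked at the end)
#   [fork, join, plain] -> False  (the join collapsed the fork again)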
# extract-link for first element
# When linking to a chain, the chain as a single element shadows
# the link behaviour of the head/tail.
def __rshift__(self, child):
"""
self >> child
:param child: following link to bind
:type child: ChainLink or iterable[ChainLink]
:returns: link between self and child
:rtype: ChainLink, FlatChain, Bundle or Chain
"""
child = self.chain_types.convert(child)
if self and type(self.elements[-1]).__rshift__ not in (
self.chain_types.base_link_type.__rshift__, self.chain_types.base_chain_type.__rshift__
):
return self._link(self[:-1], self.elements[-1] >> child)
return self._link(self, child)
def __lshift__(self, parent):
"""
self << parents
:param parent: preceding link to bind
:type parent: ChainLink or iterable[ChainLink]
:returns: link between self and children
:rtype: ChainLink, FlatChain, Bundle or Chain
"""
parent = self.chain_types.convert(parent)
if self and type(self.elements[0]).__lshift__ not in (
self.chain_types.base_link_type.__lshift__, self.chain_types.base_chain_type.__lshift__
):
return self._link(self.elements[0] << parent, self[1:])
return self._link(parent, self)
def chainlet_send(self, value=None):
# traverse breadth first to allow for synchronized forking and joining
if self.chain_join:
values = value
else:
values = [value]
try:
for element in self.elements:
values = lazy_send(element, values)
if not values:
break
if self.chain_fork:
return list(values)
else:
try:
return next(iter(values))
except IndexError:
raise signals.StopTraversal
# An element in the chain is exhausted permanently
except signals.ChainExit:
raise StopIteration
def __repr__(self):
return ' >> '.join(repr(elem) for elem in self.elements)
class FlatChain(Chain):
"""
A specialised :py:class:`Chain` which never forks or joins internally
"""
chain_join = False
chain_fork = False
# short circuit to the flat iter/send, since this is all we ever need
__iter__ = ChainLink._iter_flat # pylint:disable=protected-access
send = ChainLink._send_flat # pylint:disable=protected-access
def chainlet_send(self, value=None):
for element in self.elements:
# a StopTraversal may be raised here
# we do NOT catch it, but let it bubble up instead
# whoever catches it can extract a potential early return value
value = element.chainlet_send(value)
return value
ChainLink.chain_types.base_chain_type = Chain
ChainLink.chain_types.flat_chain_type = FlatChain
|
daphnei/nn_chatbot
|
refs/heads/master
|
seq2seq/seq2seq/global_vars.py
|
6
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Collection of global variables.
"""
SYNC_REPLICAS_OPTIMIZER = None
|
omnirom/android_kernel_htc_flounder
|
refs/heads/android-7.1
|
scripts/tracing/draw_functrace.py
|
14679
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulting trace is processed into a tree to produce a more
human-readable view of the call stack, drawn as a textual but
hierarchical tree of calls. Only the functions' names and the call
time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some time, but not too much: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
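# Example of a line the regex accepts (values are illustrative):
#   'bash-4251  [002]  2803.017187: do_softirq <-do_IRQ'
#   parses to ('2803.017187', 'do_softirq', 'do_IRQ')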
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
eayunstack/fuel-web
|
refs/heads/master
|
nailgun/nailgun/api/v1/validators/json_schema/assignment.py
|
6
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
assignment_format_schema = {
'$schema': 'http://json-schema.org/draft-04/schema#',
'title': 'assignment',
'description': 'assignment map, node ids to arrays of roles',
'type': 'array',
'items': {
'type': 'object',
'properties': {
'id': {
'description': 'The unique identifier for id',
'type': 'integer'
},
'roles': {
'type': 'array',
'items': {'type': 'string'}
}
},
'required': ['id', 'roles'],
}
}
unassignment_format_schema = {
'$schema': 'http://json-schema.org/draft-04/schema#',
'title': 'unassignment',
'description': 'List with node ids for unassignment',
'type': 'array',
'items': {
'type': 'object',
'properties': {
'id': {
'description': 'The unique identifier for id',
'type': 'integer'
}
}
}
}
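# Illustrative payloads that validate against the schemas above:
#   assignment:   [{'id': 1, 'roles': ['controller', 'cinder']}]
#   unassignment: [{'id': 1}, {'id': 2}]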
|
totemtang/hadoop-RHJoin
|
refs/heads/master
|
src/examples/python/WordCount.py
|
123
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from org.apache.hadoop.fs import Path
from org.apache.hadoop.io import *
from org.apache.hadoop.mapred import *
import sys
import getopt
class WordCountMap(Mapper, MapReduceBase):
one = IntWritable(1)
def map(self, key, value, output, reporter):
for w in value.toString().split():
output.collect(Text(w), self.one)
class Summer(Reducer, MapReduceBase):
def reduce(self, key, values, output, reporter):
sum = 0
while values.hasNext():
sum += values.next().get()
output.collect(key, IntWritable(sum))
def printUsage(code):
print "wordcount [-m <maps>] [-r <reduces>] <input> <output>"
sys.exit(code)
def main(args):
    conf = JobConf(WordCountMap)
    conf.setJobName("wordcount")
    conf.setOutputKeyClass(Text)
    conf.setOutputValueClass(IntWritable)
    conf.setMapperClass(WordCountMap)
    conf.setCombinerClass(Summer)
    conf.setReducerClass(Summer)
try:
flags, other_args = getopt.getopt(args[1:], "m:r:")
except getopt.GetoptError:
printUsage(1)
if len(other_args) != 2:
printUsage(1)
for f,v in flags:
if f == "-m":
conf.setNumMapTasks(int(v))
elif f == "-r":
conf.setNumReduceTasks(int(v))
conf.setInputPath(Path(other_args[0]))
conf.setOutputPath(Path(other_args[1]))
    JobClient.runJob(conf)
if __name__ == "__main__":
main(sys.argv)
|
krzysztof-magosa/ansible-modules-extras
|
refs/heads/devel
|
cloud/cloudstack/cs_zone_facts.py
|
42
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: cs_zone_facts
short_description: Gathering facts of zones from Apache CloudStack based clouds.
description:
- Gathering facts from the API of a zone.
version_added: "2.1"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the zone.
required: true
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- local_action:
module: cs_zone_facts
name: ch-gva-1
- debug: var=cloudstack_zone
'''
RETURN = '''
---
cloudstack_zone.id:
description: UUID of the zone.
returned: success
type: string
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
cloudstack_zone.name:
description: Name of the zone.
returned: success
type: string
sample: zone01
cloudstack_zone.dns1:
description: First DNS for the zone.
returned: success
type: string
sample: 8.8.8.8
cloudstack_zone.dns2:
description: Second DNS for the zone.
returned: success
type: string
sample: 8.8.4.4
cloudstack_zone.internal_dns1:
description: First internal DNS for the zone.
returned: success
type: string
sample: 8.8.8.8
cloudstack_zone.internal_dns2:
description: Second internal DNS for the zone.
returned: success
type: string
sample: 8.8.4.4
cloudstack_zone.dns1_ipv6:
description: First IPv6 DNS for the zone.
returned: success
type: string
sample: "2001:4860:4860::8888"
cloudstack_zone.dns2_ipv6:
description: Second IPv6 DNS for the zone.
returned: success
type: string
sample: "2001:4860:4860::8844"
cloudstack_zone.allocation_state:
description: State of the zone.
returned: success
type: string
sample: Enabled
cloudstack_zone.domain:
description: Domain the zone is related to.
returned: success
type: string
sample: ROOT
cloudstack_zone.network_domain:
description: Network domain for the zone.
returned: success
type: string
sample: example.com
cloudstack_zone.network_type:
description: Network type for the zone.
returned: success
type: string
sample: basic
cloudstack_zone.local_storage_enabled:
description: Local storage offering enabled.
returned: success
type: bool
sample: false
cloudstack_zone.securitygroups_enabled:
description: Security groups support is enabled.
returned: success
type: bool
sample: false
cloudstack_zone.guest_cidr_address:
description: Guest CIDR address for the zone
returned: success
type: string
sample: 10.1.1.0/24
cloudstack_zone.dhcp_provider:
description: DHCP provider for the zone
returned: success
type: string
sample: VirtualRouter
cloudstack_zone.zone_token:
description: Zone token
returned: success
type: string
sample: ccb0a60c-79c8-3230-ab8b-8bdbe8c45bb7
cloudstack_zone.tags:
description: List of resource tags associated with the zone.
returned: success
type: dict
sample: [ { "key": "foo", "value": "bar" } ]
'''
import base64
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackZoneFacts(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackZoneFacts, self).__init__(module)
self.returns = {
'dns1': 'dns1',
'dns2': 'dns2',
'internaldns1': 'internal_dns1',
'internaldns2': 'internal_dns2',
'ipv6dns1': 'dns1_ipv6',
'ipv6dns2': 'dns2_ipv6',
'domain': 'network_domain',
'networktype': 'network_type',
'securitygroupsenabled': 'securitygroups_enabled',
'localstorageenabled': 'local_storage_enabled',
'guestcidraddress': 'guest_cidr_address',
'dhcpprovider': 'dhcp_provider',
'allocationstate': 'allocation_state',
'zonetoken': 'zone_token',
}
self.facts = {
'cloudstack_zone': None,
}
def get_zone(self):
if not self.zone:
# TODO: add param key signature in get_zone()
self.module.params['zone'] = self.module.params.get('name')
super(AnsibleCloudStackZoneFacts, self).get_zone()
return self.zone
def run(self):
zone = self.get_zone()
self.facts['cloudstack_zone'] = self.get_result(zone)
return self.facts
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name = dict(required=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=False,
)
cs_zone_facts = AnsibleCloudStackZoneFacts(module=module).run()
cs_facts_result = dict(changed=False, ansible_facts=cs_zone_facts)
module.exit_json(**cs_facts_result)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
jt6562/XX-Net
|
refs/heads/master
|
python27/1.0/lib/encodings/latin_1.py
|
853
|
""" Python 'latin-1' Codec
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.latin_1_encode
decode = codecs.latin_1_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.latin_1_encode(input,self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.latin_1_decode(input,self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
class StreamConverter(StreamWriter,StreamReader):
encode = codecs.latin_1_decode
decode = codecs.latin_1_encode
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-1',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
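# Self-check sketch (editor's addition): because the codec functions are
# bound as C functions, they are not converted to methods and can be called
# directly on the class; a Latin-1 round trip exercises both directions.
if __name__ == '__main__':
    encoded, consumed = Codec.encode(u'caf\xe9')
    assert (u'caf\xe9', 4) == Codec.decode(encoded)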
|
sasukeh/neutron
|
refs/heads/master
|
neutron/plugins/common/constants.py
|
19
|
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Neutron well-known service type constants:
CORE = "CORE"
DUMMY = "DUMMY"
LOADBALANCER = "LOADBALANCER"
LOADBALANCERV2 = "LOADBALANCERV2"
FIREWALL = "FIREWALL"
VPN = "VPN"
METERING = "METERING"
L3_ROUTER_NAT = "L3_ROUTER_NAT"
FLAVORS = "FLAVORS"
QOS = "QOS"
# Maps extension alias to service type
EXT_TO_SERVICE_MAPPING = {
'dummy': DUMMY,
'lbaas': LOADBALANCER,
'lbaasv2': LOADBALANCERV2,
'fwaas': FIREWALL,
'vpnaas': VPN,
'metering': METERING,
'router': L3_ROUTER_NAT,
'flavors': FLAVORS,
'qos': QOS,
}
# Service operation status constants
ACTIVE = "ACTIVE"
DOWN = "DOWN"
CREATED = "CREATED"
PENDING_CREATE = "PENDING_CREATE"
PENDING_UPDATE = "PENDING_UPDATE"
PENDING_DELETE = "PENDING_DELETE"
INACTIVE = "INACTIVE"
ERROR = "ERROR"
ACTIVE_PENDING_STATUSES = (
ACTIVE,
PENDING_CREATE,
PENDING_UPDATE
)
# Network Type constants
TYPE_FLAT = 'flat'
TYPE_GENEVE = 'geneve'
TYPE_GRE = 'gre'
TYPE_LOCAL = 'local'
TYPE_VXLAN = 'vxlan'
TYPE_VLAN = 'vlan'
TYPE_NONE = 'none'
# Values for network_type
# For VLAN Network
MIN_VLAN_TAG = 1
MAX_VLAN_TAG = 4094
# For Geneve Tunnel
MIN_GENEVE_VNI = 1
MAX_GENEVE_VNI = 2 ** 24 - 1
# For GRE Tunnel
MIN_GRE_ID = 1
MAX_GRE_ID = 2 ** 32 - 1
# For VXLAN Tunnel
MIN_VXLAN_VNI = 1
MAX_VXLAN_VNI = 2 ** 24 - 1
VXLAN_UDP_PORT = 4789
# Network Type MTU overhead
GENEVE_ENCAP_MIN_OVERHEAD = 50
GRE_ENCAP_OVERHEAD = 42
VXLAN_ENCAP_OVERHEAD = 50
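# Illustrative sketch (editor's addition): the overhead constants above are
# intended to be subtracted from the physical MTU to obtain the MTU a tenant
# network can expose, e.g. for VXLAN:
#
#     def tunnel_payload_mtu(physical_mtu, overhead=VXLAN_ENCAP_OVERHEAD):
#         return physical_mtu - overhead
#
#     tunnel_payload_mtu(1500)  # == 1450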
|
prasannav7/ggrc-core
|
refs/heads/develop
|
src/ggrc/models/clause.py
|
3
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: dan@reciprocitylabs.com
# Maintained By: urban@reciprocitylabs.com
from ggrc import db
from ggrc.models.mixins import CustomAttributable
from ggrc.models.mixins import deferred
from ggrc.models.mixins import Described
from ggrc.models.mixins import Hierarchical
from ggrc.models.mixins import Hyperlinked
from ggrc.models.mixins import Noted
from ggrc.models.mixins import Slugged
from ggrc.models.mixins import Stateful
from ggrc.models.mixins import Timeboxed
from ggrc.models.mixins import Titled
from ggrc.models.mixins import WithContact
from ggrc.models.object_document import Documentable
from ggrc.models.object_owner import Ownable
from ggrc.models.object_person import Personable
from ggrc.models.relationship import Relatable
from ggrc.models.track_object_state import HasObjectState
from ggrc.models.track_object_state import track_state_for_class
class Clause(HasObjectState, Hierarchical, Noted, Described, Hyperlinked,
WithContact, Titled, Slugged, Stateful,
db.Model, CustomAttributable, Documentable,
Personable, Ownable, Timeboxed, Relatable):
VALID_STATES = [
'Draft',
'Final',
'Effective',
'Ineffective',
'Launched',
'Not Launched',
'In Scope',
'Not in Scope',
'Deprecated',
]
__tablename__ = 'clauses'
_table_plural = 'clauses'
_title_uniqueness = True
_aliases = {
"url": "Clause URL",
"description": "Text of Clause",
"directive": None,
}
na = deferred(db.Column(db.Boolean, default=False, nullable=False),
'Clause')
notes = deferred(db.Column(db.Text), 'Clause')
_publish_attrs = [
'na',
'notes',
]
_sanitize_html = ['notes']
_include_links = []
track_state_for_class(Clause)
|
dirn/ansible
|
refs/heads/devel
|
lib/ansible/plugins/connections/funcd.py
|
140
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# Based on chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
# (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# ---
# The func transport permits using ansible over func. For people who have already set up
# func and wish to try ansible, this permits moving gradually to ansible
# without having to completely redo the network setup.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
HAVE_FUNC=False
try:
import func.overlord.client as fc
HAVE_FUNC=True
except ImportError:
pass
import os
from ansible.callbacks import vvv
from ansible import errors
import tempfile
import shutil
class Connection(object):
''' Func-based connections '''
def __init__(self, runner, host, port, *args, **kwargs):
self.runner = runner
self.host = host
self.has_pipelining = False
        # port is unused; connections go through func
self.port = port
def connect(self, port=None):
if not HAVE_FUNC:
raise errors.AnsibleError("func is not installed")
self.client = fc.Client(self.host)
return self
def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False,
executable='/bin/sh', in_data=None):
''' run a command on the remote minion '''
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
        # totally ignores privilege escalation
vvv("EXEC %s" % (cmd), host=self.host)
p = self.client.command.run(cmd)[self.host]
return (p[0], '', p[1], p[2])
def _normalize_path(self, path, prefix):
if not path.startswith(os.path.sep):
path = os.path.join(os.path.sep, path)
normpath = os.path.normpath(path)
return os.path.join(prefix, normpath[1:])
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
out_path = self._normalize_path(out_path, '/')
vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
self.client.local.copyfile.send(in_path, out_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
in_path = self._normalize_path(in_path, '/')
vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
        # need to use a tmp dir due to the different semantics of getfile
        # (which takes a directory as destination) and fetch_file, which
        # takes a file directly
tmpdir = tempfile.mkdtemp(prefix="func_ansible")
self.client.local.getfile.get(in_path, tmpdir)
shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)),
out_path)
shutil.rmtree(tmpdir)
def close(self):
''' terminate the connection; nothing to do here '''
pass
|
renegelinas/mi-instrument
|
refs/heads/master
|
mi/core/versioning.py
|
8
|
from functools import wraps
def version(version_number):
"""Wrapper to add version name to parser.
Placing the version number on the parse function seems to make
more sense than adding __version__ to the code and relying on that.
"""
def put_version(to_wrap):
@wraps(to_wrap)
def inner(*args, **kwargs):
return to_wrap(*args, **kwargs)
inner.version = version_number
return inner
return put_version
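# Minimal usage sketch (editor's addition; names are hypothetical):
if __name__ == '__main__':
    @version('0.0.1')
    def parse_record(record):
        return record
    assert parse_record.version == '0.0.1'
    assert parse_record.__name__ == 'parse_record'  # preserved by @wraps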
|
atruberg/django-custom
|
refs/heads/master
|
django/contrib/contenttypes/views.py
|
115
|
from __future__ import unicode_literals
from django import http
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site, get_current_site
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
def shortcut(request, content_type_id, object_id):
"""
Redirect to an object's page based on a content-type ID and an object ID.
"""
# Look up the object, making sure it's got a get_absolute_url() function.
try:
content_type = ContentType.objects.get(pk=content_type_id)
if not content_type.model_class():
raise http.Http404(_("Content type %(ct_id)s object has no associated model") %
{'ct_id': content_type_id})
obj = content_type.get_object_for_this_type(pk=object_id)
except (ObjectDoesNotExist, ValueError):
raise http.Http404(_("Content type %(ct_id)s object %(obj_id)s doesn't exist") %
{'ct_id': content_type_id, 'obj_id': object_id})
try:
get_absolute_url = obj.get_absolute_url
except AttributeError:
raise http.Http404(_("%(ct_name)s objects don't have a get_absolute_url() method") %
{'ct_name': content_type.name})
absurl = get_absolute_url()
# Try to figure out the object's domain, so we can do a cross-site redirect
# if necessary.
# If the object actually defines a domain, we're done.
if absurl.startswith('http://') or absurl.startswith('https://'):
return http.HttpResponseRedirect(absurl)
# Otherwise, we need to introspect the object's relationships for a
# relation to the Site object
object_domain = None
if Site._meta.installed:
opts = obj._meta
        # First, look for a many-to-many relationship to Site.
for field in opts.many_to_many:
if field.rel.to is Site:
try:
# Caveat: In the case of multiple related Sites, this just
# selects the *first* one, which is arbitrary.
object_domain = getattr(obj, field.name).all()[0].domain
except IndexError:
pass
if object_domain is not None:
break
# Next, look for a many-to-one relationship to Site.
if object_domain is None:
for field in obj._meta.fields:
if field.rel and field.rel.to is Site:
try:
object_domain = getattr(obj, field.name).domain
except Site.DoesNotExist:
pass
if object_domain is not None:
break
# Fall back to the current site (if possible).
if object_domain is None:
try:
object_domain = get_current_site(request).domain
except Site.DoesNotExist:
pass
# If all that malarkey found an object domain, use it. Otherwise, fall back
# to whatever get_absolute_url() returned.
if object_domain is not None:
protocol = 'https' if request.is_secure() else 'http'
return http.HttpResponseRedirect('%s://%s%s'
% (protocol, object_domain, absurl))
else:
return http.HttpResponseRedirect(absurl)
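# Hedged wiring sketch (editor's addition): this view is conventionally
# exposed through a URLconf so that /r/<content_type_id>/<object_id>/
# redirects to the object. The pattern below is illustrative only:
#
#     from django.conf.urls import url
#     from django.contrib.contenttypes.views import shortcut
#
#     urlpatterns = [
#         url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', shortcut),
#     ]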
|
gnuhub/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyProtectedMemberInspection/truePositive.py
|
75
|
__author__ = 'ktisha'
class A:
def __init__(self):
self._a = 1
def foo(self):
self.b= 1
print <weak_warning descr="Access to a protected member _a of a class">A()._a</weak_warning>
|
blueboxgroup/cinder
|
refs/heads/master
|
cinder/tests/targets/test_base_iscsi_driver.py
|
2
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_utils import timeutils
from cinder import exception
from cinder import test
from cinder import utils
from cinder.volume import configuration as conf
from cinder.volume.targets import iscsi
class FakeDriver(iscsi.ISCSITarget):
def __init__(self, *args, **kwargs):
super(FakeDriver, self).__init__(*args, **kwargs)
def create_export(self, context, vref):
pass
def ensure_export(self, context, vref, vol_path):
pass
def remove_export(self, context, vref):
pass
def terminate_connection(self, vref, **kwargs):
pass
class FakeIncompleteDriver(iscsi.ISCSITarget):
def null_method():
pass
class TestBaseISCSITargetDriver(test.TestCase):
def setUp(self):
super(TestBaseISCSITargetDriver, self).setUp()
self.configuration = conf.Configuration(None)
self.fake_id_1 = 'ed2c1fd4-5fc0-11e4-aa15-123b93f75cba'
self.fake_id_2 = 'ed2c2222-5fc0-11e4-aa15-123b93f75cba'
self.target = FakeDriver(root_helper=utils.get_root_helper(),
configuration=self.configuration)
self.testvol_1 =\
{'project_id': self.fake_id_1,
'name': 'testvol',
'size': 1,
'id': self.fake_id_2,
'volume_type_id': None,
'provider_location': '10.10.7.1:3260 '
'iqn.2010-10.org.openstack:'
'volume-%s 0' % self.fake_id_2,
'provider_auth': 'CHAP stack-1-a60e2611875f40199931f2'
'c76370d66b 2FE0CQ8J196R',
'provider_geometry': '512 512',
'created_at': timeutils.utcnow(),
'host': 'fake_host@lvm#lvm'}
self.expected_iscsi_properties = \
{'auth_method': 'CHAP',
'auth_password': '2FE0CQ8J196R',
'auth_username': 'stack-1-a60e2611875f40199931f2c76370d66b',
'encrypted': False,
'logical_block_size': '512',
'physical_block_size': '512',
'target_discovered': False,
'target_iqn': 'iqn.2010-10.org.openstack:volume-%s' %
self.fake_id_2,
'target_lun': 0,
'target_portal': '10.10.7.1:3260',
'volume_id': self.fake_id_2}
def test_abc_methods_not_present_fails(self):
configuration = conf.Configuration(cfg.StrOpt('iscsi_target_prefix',
default='foo',
help='you wish'))
self.assertRaises(TypeError,
FakeIncompleteDriver,
configuration=configuration)
def test_get_iscsi_properties(self):
self.assertEqual(self.expected_iscsi_properties,
self.target._get_iscsi_properties(self.testvol_1))
def test_build_iscsi_auth_string(self):
auth_string = 'chap chap-user chap-password'
self.assertEqual(auth_string,
self.target._iscsi_authentication('chap',
'chap-user',
'chap-password'))
def test_do_iscsi_discovery(self):
target_string = '127.0.0.1:3260,1 '\
'iqn.2010-10.org.openstack:'\
'volume-%s' % self.testvol_1['id']
def _fake_execute(*args, **kwargs):
return target_string, None
def _fake_safe_get(val):
return '127.0.0.1'
self.stubs.Set(self.configuration,
'safe_get',
_fake_safe_get)
self.stubs.Set(utils,
'execute',
_fake_execute)
self.assertEqual(target_string,
self.target._do_iscsi_discovery(self.testvol_1))
def test_initialize_connection(self):
expected = {'driver_volume_type': 'iscsi',
'data': self.expected_iscsi_properties}
self.assertEqual(expected,
self.target.initialize_connection(self.testvol_1, {}))
def test_validate_connector(self):
bad_connector = {'no_initiator': 'nada'}
self.assertRaises(exception.InvalidConnectorException,
self.target.validate_connector,
bad_connector)
connector = {'initiator': 'fake_init'}
self.assertTrue(self.target.validate_connector,
connector)
|
willthames/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/ovirt/ovirt_quotas.py
|
45
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_quotas
short_description: Module to manage datacenter quotas in oVirt/RHV
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "Module to manage datacenter quotas in oVirt/RHV"
options:
name:
description:
- "Name of the quota to manage."
required: true
state:
description:
- "Should the quota be present/absent."
choices: ['present', 'absent']
default: present
data_center:
description:
- "Name of the datacenter where quota should be managed."
required: true
description:
description:
- "Description of the quota to manage."
cluster_threshold:
description:
- "Cluster threshold(soft limit) defined in percentage (0-100)."
cluster_grace:
description:
- "Cluster grace(hard limit) defined in percentage (1-100)."
storage_threshold:
description:
- "Storage threshold(soft limit) defined in percentage (0-100)."
storage_grace:
description:
- "Storage grace(hard limit) defined in percentage (1-100)."
clusters:
description:
- "List of dictionary of cluster limits, which is valid to specific cluster."
- "If cluster isn't spefied it's valid to all clusters in system:"
- "C(cluster) - Name of the cluster."
- "C(memory) - Memory limit (in GiB)."
- "C(cpu) - CPU limit."
storages:
description:
- "List of dictionary of storage limits, which is valid to specific storage."
- "If storage isn't spefied it's valid to all storages in system:"
- "C(storage) - Name of the storage."
- "C(size) - Size limit (in GiB)."
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Add cluster quota to cluster cluster1 with memory limit 20GiB and CPU limit to 10:
ovirt_quotas:
name: quota1
data_center: dcX
clusters:
- name: cluster1
memory: 20
cpu: 10
# Add cluster quota to all clusters with memory limit 30GiB and CPU limit to 15:
ovirt_quotas:
name: quota2
data_center: dcX
clusters:
- memory: 30
cpu: 15
# Add storage quota to storage data1 with size limit to 100GiB
ovirt_quotas:
name: quota3
data_center: dcX
storage_grace: 40
storage_threshold: 60
storages:
- name: data1
size: 100
# Remove quota quota1 (Note the quota must not be assigned to any VM/disk):
ovirt_quotas:
state: absent
data_center: dcX
name: quota1
'''
RETURN = '''
id:
description: ID of the quota which is managed
returned: On success if quota is found.
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
quota:
description: "Dictionary of all the quota attributes. Quota attributes can be found on your oVirt/RHV instance
at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/quota."
returned: On success if quota is found.
type: dict
'''
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
create_connection,
equal,
get_link_name,
ovirt_full_argument_spec,
search_by_name,
)
class QuotasModule(BaseModule):
def build_entity(self):
return otypes.Quota(
description=self._module.params['description'],
name=self._module.params['name'],
storage_hard_limit_pct=self._module.params.get('storage_grace'),
storage_soft_limit_pct=self._module.params.get('storage_threshold'),
cluster_hard_limit_pct=self._module.params.get('cluster_grace'),
cluster_soft_limit_pct=self._module.params.get('cluster_threshold'),
)
def update_storage_limits(self, entity):
new_limits = {}
for storage in self._module.params.get('storages'):
new_limits[storage.get('name', '')] = {
'size': storage.get('size'),
}
old_limits = {}
sd_limit_service = self._service.service(entity.id).quota_storage_limits_service()
for limit in sd_limit_service.list():
storage = get_link_name(self._connection, limit.storage_domain) if limit.storage_domain else ''
old_limits[storage] = {
'size': limit.limit,
}
sd_limit_service.service(limit.id).remove()
return new_limits == old_limits
def update_cluster_limits(self, entity):
new_limits = {}
for cluster in self._module.params.get('clusters'):
new_limits[cluster.get('name', '')] = {
'cpu': cluster.get('cpu'),
'memory': float(cluster.get('memory')),
}
old_limits = {}
cl_limit_service = self._service.service(entity.id).quota_cluster_limits_service()
for limit in cl_limit_service.list():
cluster = get_link_name(self._connection, limit.cluster) if limit.cluster else ''
old_limits[cluster] = {
'cpu': limit.vcpu_limit,
'memory': limit.memory_limit,
}
cl_limit_service.service(limit.id).remove()
return new_limits == old_limits
def update_check(self, entity):
        # -- FIXME --
        # Note that here we always remove all cluster/storage limits and then
        # re-create them, because it's not currently possible to update them
        # appropriately. This shouldn't have any side effects, but it's not
        # considered a correct approach.
# This feature is tracked here: https://bugzilla.redhat.com/show_bug.cgi?id=1398576
#
return (
self.update_storage_limits(entity) and
self.update_cluster_limits(entity) and
equal(self._module.params.get('description'), entity.description) and
equal(self._module.params.get('storage_grace'), entity.storage_hard_limit_pct) and
equal(self._module.params.get('storage_threshold'), entity.storage_soft_limit_pct) and
equal(self._module.params.get('cluster_grace'), entity.cluster_hard_limit_pct) and
equal(self._module.params.get('cluster_threshold'), entity.cluster_soft_limit_pct)
)
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(
choices=['present', 'absent'],
default='present',
),
name=dict(required=True),
data_center=dict(required=True),
description=dict(default=None),
cluster_threshold=dict(default=None, type='int', aliases=['cluster_soft_limit']),
cluster_grace=dict(default=None, type='int', aliases=['cluster_hard_limit']),
storage_threshold=dict(default=None, type='int', aliases=['storage_soft_limit']),
storage_grace=dict(default=None, type='int', aliases=['storage_hard_limit']),
clusters=dict(default=[], type='list'),
storages=dict(default=[], type='list'),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
check_sdk(module)
try:
auth = module.params.pop('auth')
connection = create_connection(auth)
datacenters_service = connection.system_service().data_centers_service()
dc_name = module.params['data_center']
dc_id = getattr(search_by_name(datacenters_service, dc_name), 'id', None)
if dc_id is None:
raise Exception("Datacenter '%s' was not found." % dc_name)
quotas_service = datacenters_service.service(dc_id).quotas_service()
quotas_module = QuotasModule(
connection=connection,
module=module,
service=quotas_service,
)
state = module.params['state']
if state == 'present':
ret = quotas_module.create()
# Manage cluster limits:
cl_limit_service = quotas_service.service(ret['id']).quota_cluster_limits_service()
for cluster in module.params.get('clusters'):
cl_limit_service.add(
limit=otypes.QuotaClusterLimit(
memory_limit=float(cluster.get('memory')),
vcpu_limit=cluster.get('cpu'),
cluster=search_by_name(
connection.system_service().clusters_service(),
cluster.get('name')
),
),
)
# Manage storage limits:
sd_limit_service = quotas_service.service(ret['id']).quota_storage_limits_service()
for storage in module.params.get('storages'):
sd_limit_service.add(
limit=otypes.QuotaStorageLimit(
limit=storage.get('size'),
storage_domain=search_by_name(
connection.system_service().storage_domains_service(),
storage.get('name')
),
)
)
elif state == 'absent':
ret = quotas_module.remove()
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=auth.get('token') is None)
if __name__ == "__main__":
main()
|
frc1418/2015-robot
|
refs/heads/master
|
practice_robot/robot.py
|
1
|
#!/usr/bin/env python3
import wpilib
import distance
class MyRobot(wpilib.SampleRobot):
def robotInit(self):
        self.servo = wpilib.Servo(6)
##INITIALIZE JOYSTICKS##
self.joystick1 = wpilib.Joystick(0)
self.joystick2 = wpilib.Joystick(1)
##INITIALIZE MOTORS##
self.lf_motor = wpilib.CANTalon(6)
self.lr_motor = wpilib.CANTalon(2)
self.rf_motor = wpilib.CANTalon(8)
self.rr_motor = wpilib.CANTalon(4)
self.accelerometer = wpilib.BuiltInAccelerometer()
self.distance = distance.Distance(self.accelerometer)
##ROBOT DRIVE##
self.robot_drive = wpilib.RobotDrive(self.lf_motor, self.lr_motor, self.rf_motor, self.rr_motor)
self.robot_drive.setInvertedMotor(wpilib.RobotDrive.MotorType.kFrontRight, True)
self.robot_drive.setInvertedMotor(wpilib.RobotDrive.MotorType.kRearRight, True)
def disabled(self):
wpilib.Timer.delay(.01)
def operatorControl(self):
while self.isOperatorControl() and self.isEnabled():
if self.joystick2.getRawButton(3):
self.servo.set(1)
if self.joystick2.getRawButton(2):
self.servo.set(0)
self.x = self.joystick1.getX()
self.y = self.joystick1.getY()
            # Forward and backward, for testing
            if self.joystick1.getRawButton(2):
                self.y = 0.5
            if self.joystick1.getRawButton(3):
                self.y = -0.5
self.rotation = (self.joystick2.getX() / 2)
self.robot_drive.mecanumDrive_Cartesian(self.x, self.y, self.rotation, 0)
self.logger.info(self.accelerometer.getX())
self.distance.get_dist_x()
wpilib.Timer.delay(0.025)
if __name__ == '__main__':
wpilib.run(MyRobot)
|
pombredanne/django-markupwiki
|
refs/heads/master
|
example/settings.py
|
1
|
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'example',
}
}
ADMIN_MEDIA_PREFIX = '/media/'
SECRET_KEY = 'h%+o+&fe3r4j0z=9ghk=!divcta%zh%&=k8d^r08$cgr@3k3-&'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'example.urls'
TEMPLATE_DIRS = ( 'templates', )
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'markupwiki',
)
SITE_ID = 1
|
javiplx/cobbler-2.x
|
refs/heads/master
|
cobbler/action_status.py
|
15
|
"""
Reports on kickstart activity by examining the logs in
/var/log/cobbler.
Copyright 2007-2009, Red Hat, Inc
Michael DeHaan <mdehaan@redhat.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import os
import os.path
import glob
import time
import api as cobbler_api
import clogger
import utils
#from utils import _
# ARRAY INDEXES
MOST_RECENT_START = 0
MOST_RECENT_STOP = 1
MOST_RECENT_TARGET = 2
SEEN_START = 3
SEEN_STOP = 4
STATE = 5
class BootStatusReport:
def __init__(self,config,mode,logger=None):
"""
Constructor
"""
self.config = config
self.settings = config.settings()
self.ip_data = {}
self.mode = mode
if logger is None:
logger = clogger.Logger()
self.logger = logger
# -------------------------------------------------------
def scan_logfiles(self):
#profile foosball ? 127.0.0.1 start 1208294043.58
#system neo ? 127.0.0.1 start 1208295122.86
files = glob.glob("/var/log/cobbler/install.log*")
for fname in files:
fd = open(fname)
data = fd.read()
for line in data.split("\n"):
tokens = line.split()
if len(tokens) == 0:
continue
(profile_or_system, name, ip, start_or_stop, ts) = tokens
self.catalog(profile_or_system,name,ip,start_or_stop,ts)
fd.close()
# ------------------------------------------------------
def catalog(self,profile_or_system,name,ip,start_or_stop,ts):
ip_data = self.ip_data
if not ip_data.has_key(ip):
ip_data[ip] = [ -1, -1, "?", 0, 0, "?" ]
elem = ip_data[ip]
ts = float(ts)
mrstart = elem[MOST_RECENT_START]
mrstop = elem[MOST_RECENT_STOP]
mrtarg = elem[MOST_RECENT_TARGET]
snstart = elem[SEEN_START]
snstop = elem[SEEN_STOP]
if start_or_stop == "start":
if mrstart < ts:
mrstart = ts
mrtarg = "%s:%s" % (profile_or_system, name)
elem[SEEN_START] = elem[SEEN_START] + 1
if start_or_stop == "stop":
if mrstop < ts:
mrstop = ts
mrtarg = "%s:%s" % (profile_or_system, name)
elem[SEEN_STOP] = elem[SEEN_STOP] + 1
elem[MOST_RECENT_START] = mrstart
elem[MOST_RECENT_STOP] = mrstop
elem[MOST_RECENT_TARGET] = mrtarg
# -------------------------------------------------------
def process_results(self):
# FIXME: this should update the times here
tnow = int(time.time())
for ip in self.ip_data.keys():
elem = self.ip_data[ip]
start = int(elem[MOST_RECENT_START])
stop = int(elem[MOST_RECENT_STOP])
if (stop > start):
elem[STATE] = "finished"
else:
delta = tnow - start
min = delta / 60
sec = delta % 60
if min > 100:
elem[STATE] = "unknown/stalled"
else:
elem[STATE] = "installing (%sm %ss)" % (min,sec)
return self.ip_data
def get_printable_results(self):
format = "%-15s|%-20s|%-17s|%-17s"
ip_data = self.ip_data
ips = ip_data.keys()
ips.sort()
line = (
"ip",
"target",
"start",
"state",
)
buf = format % line
for ip in ips:
elem = ip_data[ip]
line = (
ip,
elem[MOST_RECENT_TARGET],
time.ctime(elem[MOST_RECENT_START]),
elem[STATE]
)
buf = buf + "\n" + format % line
return buf
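    # Illustrative report sketch (editor's addition; all rows hypothetical):
    #   ip             |target              |start                    |state
    #   127.0.0.1      |system:neo          |Tue Apr 15 16:14:10 2008 |finished
    #   10.0.0.5       |profile:foosball    |Tue Apr 15 16:20:01 2008 |installing (3m 12s)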
# -------------------------------------------------------
def run(self):
"""
Calculate and print a kickstart-status report.
"""
self.scan_logfiles()
results = self.process_results()
if self.mode == "text":
return self.get_printable_results()
else:
return results
|
luster/oldnyc
|
refs/heads/master
|
viewer/simplejson/tests/test_speedups.py
|
129
|
from unittest import TestCase
from simplejson import encoder, scanner
def has_speedups():
return encoder.c_make_encoder is not None
class TestDecode(TestCase):
def test_make_scanner(self):
if not has_speedups():
return
self.assertRaises(AttributeError, scanner.c_make_scanner, 1)
def test_make_encoder(self):
if not has_speedups():
return
self.assertRaises(TypeError, encoder.c_make_encoder,
None,
"\xCD\x7D\x3D\x4E\x12\x4C\xF9\x79\xD7\x52\xBA\x82\xF2\x27\x4A\x7D\xA0\xCA\x75",
None)
|
JeroenZegers/Nabu-MSSS
|
refs/heads/master
|
nabu/postprocessing/speaker_verification_handlers/__init__.py
|
1
|
"""@package speaker_verification_handlers"""
from . import speaker_verification_handler, speaker_verification_handler_factory, averager
|
Nexenta/cinder
|
refs/heads/master
|
cinder/tests/unit/volume/drivers/netapp/eseries/test_host_mapper.py
|
8
|
# Copyright (c) 2015 Alex Meade. All rights reserved.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Mock unit tests for the NetApp E-series iscsi driver."""
import copy
import mock
import six
from cinder import exception
from cinder import test
from cinder.tests.unit.volume.drivers.netapp.eseries \
import fakes as eseries_fakes
from cinder.volume.drivers.netapp.eseries import host_mapper
from cinder.volume.drivers.netapp.eseries import utils
def get_fake_volume():
return {
'id': '114774fb-e15a-4fae-8ee2-c9723e3645ef', 'size': 1,
'volume_name': 'lun1', 'host': 'hostname@backend#DDP',
'os_type': 'linux', 'provider_location': 'lun1',
'name_id': '114774fb-e15a-4fae-8ee2-c9723e3645ef',
'provider_auth': 'provider a b', 'project_id': 'project',
'display_name': None, 'display_description': 'lun1',
'volume_type_id': None, 'migration_status': None, 'attach_status':
"detached", "status": "available"
}
FAKE_MAPPINGS = [{u'lun': 1}]
FAKE_USED_UP_MAPPINGS = [{u'lun': n} for n in range(256)]
FAKE_USED_UP_LUN_ID_DICT = {n: 1 for n in range(256)}
FAKE_UNUSED_LUN_ID = set([])
FAKE_USED_LUN_ID_DICT = ({0: 1, 1: 1})
FAKE_USED_LUN_IDS = [1, 2]
FAKE_SINGLE_USED_LUN_ID = 1
FAKE_USED_UP_LUN_IDS = range(256)
class NetAppEseriesHostMapperTestCase(test.TestCase):
def setUp(self):
super(NetAppEseriesHostMapperTestCase, self).setUp()
self.client = eseries_fakes.FakeEseriesClient()
def test_unmap_volume_from_host_volume_mapped_to_host(self):
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_eseries_volume['listOfMappings'] = [
eseries_fakes.VOLUME_MAPPING
]
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
self.mock_object(self.client, 'delete_volume_mapping')
host_mapper.unmap_volume_from_host(self.client, get_fake_volume(),
eseries_fakes.HOST,
eseries_fakes.VOLUME_MAPPING)
self.assertTrue(self.client.delete_volume_mapping.called)
def test_unmap_volume_from_host_volume_mapped_to_different_host(self):
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
# Mapped to host 1
fake_eseries_volume['listOfMappings'] = [
eseries_fakes.VOLUME_MAPPING
]
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
self.mock_object(self.client, 'delete_volume_mapping')
self.mock_object(self.client, 'get_host_group',
mock.Mock(
side_effect=exception.NotFound))
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.unmap_volume_from_host,
self.client, get_fake_volume(),
eseries_fakes.HOST_2,
eseries_fakes.VOLUME_MAPPING)
self.assertIn("not currently mapped to host", six.text_type(err))
def test_unmap_volume_from_host_volume_mapped_to_host_group_but_not_host(
self):
"""Test volume mapped to host not in specified host group.
Ensure an error is raised if the specified host is not in the
host group the volume is mapped to.
"""
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_volume_mapping = copy.deepcopy(eseries_fakes.VOLUME_MAPPING)
fake_volume_mapping['mapRef'] = eseries_fakes.MULTIATTACH_HOST_GROUP[
'clusterRef']
fake_eseries_volume['listOfMappings'] = [fake_volume_mapping]
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
fake_host = copy.deepcopy(eseries_fakes.HOST)
fake_host['clusterRef'] = utils.NULL_REF
self.mock_object(self.client, 'list_hosts',
mock.Mock(return_value=[fake_host]))
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.unmap_volume_from_host,
self.client, get_fake_volume(),
fake_host,
fake_volume_mapping)
self.assertIn("not currently mapped to host", six.text_type(err))
def test_unmap_volume_from_host_volume_mapped_to_multiattach_host_group(
self):
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_volume_mapping = copy.deepcopy(eseries_fakes.VOLUME_MAPPING)
fake_volume_mapping['mapRef'] = eseries_fakes.MULTIATTACH_HOST_GROUP[
'clusterRef']
fake_eseries_volume['listOfMappings'] = [fake_volume_mapping]
self.mock_object(self.client, 'delete_volume_mapping')
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
fake_volume = get_fake_volume()
fake_volume['status'] = 'detaching'
host_mapper.unmap_volume_from_host(self.client, fake_volume,
eseries_fakes.HOST,
fake_volume_mapping)
self.assertTrue(self.client.delete_volume_mapping.called)
def test_unmap_volume_from_host_volume_mapped_to_multiattach_host_group_and_migrating( # noqa
self):
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_volume_mapping = copy.deepcopy(eseries_fakes.VOLUME_MAPPING)
fake_volume_mapping['mapRef'] = eseries_fakes.MULTIATTACH_HOST_GROUP[
'clusterRef']
fake_eseries_volume['listOfMappings'] = [fake_volume_mapping]
self.mock_object(self.client, 'delete_volume_mapping')
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
fake_volume = get_fake_volume()
fake_volume['status'] = 'in-use'
host_mapper.unmap_volume_from_host(self.client, fake_volume,
eseries_fakes.HOST,
fake_volume_mapping)
self.assertFalse(self.client.delete_volume_mapping.called)
def test_unmap_volume_from_host_volume_mapped_to_outside_host_group(self):
"""Test volume mapped to host group without host.
Ensure we raise error when we find a volume is mapped to an unknown
host group that does not have the host.
"""
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_volume_mapping = copy.deepcopy(eseries_fakes.VOLUME_MAPPING)
fake_ref = "8500000060080E500023C7340036035F515B78FD"
fake_volume_mapping['mapRef'] = fake_ref
fake_eseries_volume['listOfMappings'] = [fake_volume_mapping]
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
fake_host = copy.deepcopy(eseries_fakes.HOST)
fake_host['clusterRef'] = utils.NULL_REF
self.mock_object(self.client, 'list_hosts',
mock.Mock(return_value=[fake_host]))
self.mock_object(self.client, 'get_host_group',
mock.Mock(return_value=
eseries_fakes.FOREIGN_HOST_GROUP))
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.unmap_volume_from_host,
self.client, get_fake_volume(),
eseries_fakes.HOST,
fake_volume_mapping)
self.assertIn("unsupported host group", six.text_type(err))
def test_unmap_volume_from_host_volume_mapped_to_outside_host_group_w_host(
self):
"""Test volume mapped to host in unknown host group.
Ensure we raise error when we find a volume is mapped to an unknown
host group that has the host.
"""
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_volume_mapping = copy.deepcopy(eseries_fakes.VOLUME_MAPPING)
fake_ref = "8500000060080E500023C7340036035F515B78FD"
fake_volume_mapping['mapRef'] = fake_ref
fake_eseries_volume['clusterRef'] = fake_ref
fake_eseries_volume['listOfMappings'] = [fake_volume_mapping]
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
fake_host = copy.deepcopy(eseries_fakes.HOST)
fake_host['clusterRef'] = utils.NULL_REF
self.mock_object(self.client, 'list_hosts',
mock.Mock(return_value=[fake_host]))
self.mock_object(self.client, 'get_host_group',
mock.Mock(return_value=
eseries_fakes.FOREIGN_HOST_GROUP))
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.unmap_volume_from_host,
self.client, get_fake_volume(),
eseries_fakes.HOST,
fake_volume_mapping)
self.assertIn("unsupported host group", six.text_type(err))
def test_map_volume_to_single_host_volume_not_mapped(self):
self.mock_object(self.client, 'create_volume_mapping',
mock.Mock(
return_value=eseries_fakes.VOLUME_MAPPING))
host_mapper.map_volume_to_single_host(self.client, get_fake_volume(),
eseries_fakes.VOLUME,
eseries_fakes.HOST,
None,
False)
self.assertTrue(self.client.create_volume_mapping.called)
def test_map_volume_to_single_host_volume_already_mapped_to_target_host(
self):
"""Should be a no-op"""
self.mock_object(self.client, 'create_volume_mapping',
mock.Mock())
host_mapper.map_volume_to_single_host(self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
eseries_fakes.HOST,
eseries_fakes.VOLUME_MAPPING,
False)
self.assertFalse(self.client.create_volume_mapping.called)
def test_map_volume_to_single_host_volume_mapped_to_multiattach_host_group(
self):
"""Test map volume to a single host.
Should move mapping to target host if volume is not migrating or
attached(in-use). If volume is not in use then it should not require a
mapping making it ok to sever the mapping to the host group.
"""
fake_mapping_to_other_host = copy.deepcopy(
eseries_fakes.VOLUME_MAPPING)
fake_mapping_to_other_host['mapRef'] = \
eseries_fakes.MULTIATTACH_HOST_GROUP['clusterRef']
self.mock_object(self.client, 'move_volume_mapping_via_symbol',
mock.Mock(return_value={'lun': 5}))
host_mapper.map_volume_to_single_host(self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
eseries_fakes.HOST,
fake_mapping_to_other_host,
False)
self.assertTrue(self.client.move_volume_mapping_via_symbol.called)
def test_map_volume_to_single_host_volume_mapped_to_multiattach_host_group_and_migrating( # noqa
self):
"""Should raise error saying multiattach not enabled"""
fake_mapping_to_other_host = copy.deepcopy(
eseries_fakes.VOLUME_MAPPING)
fake_mapping_to_other_host['mapRef'] = \
eseries_fakes.MULTIATTACH_HOST_GROUP['clusterRef']
fake_volume = get_fake_volume()
fake_volume['attach_status'] = "attached"
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_single_host,
self.client, fake_volume,
eseries_fakes.VOLUME,
eseries_fakes.HOST,
fake_mapping_to_other_host,
False)
self.assertIn('multiattach is disabled', six.text_type(err))
def test_map_volume_to_single_host_volume_mapped_to_multiattach_host_group_and_attached( # noqa
self):
"""Should raise error saying multiattach not enabled"""
fake_mapping_to_other_host = copy.deepcopy(
eseries_fakes.VOLUME_MAPPING)
fake_mapping_to_other_host['mapRef'] = \
eseries_fakes.MULTIATTACH_HOST_GROUP['clusterRef']
fake_volume = get_fake_volume()
fake_volume['attach_status'] = "attached"
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_single_host,
self.client, fake_volume,
eseries_fakes.VOLUME,
eseries_fakes.HOST,
fake_mapping_to_other_host,
False)
self.assertIn('multiattach is disabled', six.text_type(err))
def test_map_volume_to_single_host_volume_mapped_to_another_host(self):
"""Should raise error saying multiattach not enabled"""
fake_mapping_to_other_host = copy.deepcopy(
eseries_fakes.VOLUME_MAPPING)
fake_mapping_to_other_host['mapRef'] = eseries_fakes.HOST_2[
'hostRef']
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_single_host,
self.client, get_fake_volume(),
eseries_fakes.VOLUME,
eseries_fakes.HOST,
fake_mapping_to_other_host,
False)
self.assertIn('multiattach is disabled', six.text_type(err))
def test_map_volume_to_multiple_hosts_volume_already_mapped_to_target_host(
self):
"""Should be a no-op."""
self.mock_object(self.client, 'create_volume_mapping',
mock.Mock())
host_mapper.map_volume_to_multiple_hosts(self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
eseries_fakes.HOST,
eseries_fakes.VOLUME_MAPPING)
self.assertFalse(self.client.create_volume_mapping.called)
def test_map_volume_to_multiple_hosts_volume_mapped_to_multiattach_host_group( # noqa
self):
"""Should ensure target host is in the multiattach host group."""
fake_host = copy.deepcopy(eseries_fakes.HOST_2)
fake_host['clusterRef'] = utils.NULL_REF
fake_mapping_to_host_group = copy.deepcopy(
eseries_fakes.VOLUME_MAPPING)
fake_mapping_to_host_group['mapRef'] = \
eseries_fakes.MULTIATTACH_HOST_GROUP['clusterRef']
self.mock_object(self.client, 'set_host_group_for_host')
self.mock_object(self.client, 'get_host_group',
mock.Mock(
return_value=eseries_fakes.MULTIATTACH_HOST_GROUP)
)
host_mapper.map_volume_to_multiple_hosts(self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
fake_host,
fake_mapping_to_host_group)
self.assertEqual(
1, self.client.set_host_group_for_host.call_count)
def test_map_volume_to_multiple_hosts_volume_mapped_to_multiattach_host_group_with_lun_collision( # noqa
self):
"""Should ensure target host is in the multiattach host group."""
fake_host = copy.deepcopy(eseries_fakes.HOST_2)
fake_host['clusterRef'] = utils.NULL_REF
fake_mapping_to_host_group = copy.deepcopy(
eseries_fakes.VOLUME_MAPPING)
fake_mapping_to_host_group['mapRef'] = \
eseries_fakes.MULTIATTACH_HOST_GROUP['clusterRef']
self.mock_object(self.client, 'set_host_group_for_host',
mock.Mock(side_effect=exception.NetAppDriverException)
)
self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_multiple_hosts,
self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
fake_host,
fake_mapping_to_host_group)
def test_map_volume_to_multiple_hosts_volume_mapped_to_another_host(self):
"""Test that mapping moves to another host group.
Should ensure both existing host and destination host are in
multiattach host group and move the mapping to the host group.
"""
existing_host = copy.deepcopy(eseries_fakes.HOST)
existing_host['clusterRef'] = utils.NULL_REF
target_host = copy.deepcopy(eseries_fakes.HOST_2)
target_host['clusterRef'] = utils.NULL_REF
self.mock_object(self.client, 'get_host',
mock.Mock(return_value=existing_host))
self.mock_object(self.client, 'set_host_group_for_host')
self.mock_object(self.client, 'get_host_group',
mock.Mock(side_effect=exception.NotFound))
mock_move_mapping = mock.Mock(
return_value=eseries_fakes.VOLUME_MAPPING_TO_MULTIATTACH_GROUP)
self.mock_object(self.client,
'move_volume_mapping_via_symbol',
mock_move_mapping)
host_mapper.map_volume_to_multiple_hosts(self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
target_host,
eseries_fakes.VOLUME_MAPPING)
self.assertEqual(
2, self.client.set_host_group_for_host.call_count)
self.assertTrue(self.client.move_volume_mapping_via_symbol
.called)
def test_map_volume_to_multiple_hosts_volume_mapped_to_another_host_with_lun_collision_with_source_host( # noqa
self):
"""Test moving source host to multiattach host group.
Should fail attempting to move source host to multiattach host
group and raise an error.
"""
existing_host = copy.deepcopy(eseries_fakes.HOST)
existing_host['clusterRef'] = utils.NULL_REF
target_host = copy.deepcopy(eseries_fakes.HOST_2)
target_host['clusterRef'] = utils.NULL_REF
self.mock_object(self.client, 'get_host',
mock.Mock(return_value=existing_host))
self.mock_object(self.client, 'set_host_group_for_host',
mock.Mock(side_effect=[
None,
exception.NetAppDriverException
]))
self.mock_object(self.client, 'get_host_group',
mock.Mock(side_effect=exception.NotFound))
mock_move_mapping = mock.Mock(
return_value=eseries_fakes.VOLUME_MAPPING_TO_MULTIATTACH_GROUP)
self.mock_object(self.client,
'move_volume_mapping_via_symbol',
mock_move_mapping)
self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_multiple_hosts,
self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
target_host,
eseries_fakes.VOLUME_MAPPING)
def test_map_volume_to_multiple_hosts_volume_mapped_to_another_host_with_lun_collision_with_dest_host( # noqa
self):
"""Test moving destination host to multiattach host group.
Should fail attempting to move destination host to multiattach host
group and raise an error.
"""
existing_host = copy.deepcopy(eseries_fakes.HOST)
existing_host['clusterRef'] = utils.NULL_REF
target_host = copy.deepcopy(eseries_fakes.HOST_2)
target_host['clusterRef'] = utils.NULL_REF
self.mock_object(self.client, 'get_host',
mock.Mock(return_value=existing_host))
self.mock_object(self.client, 'set_host_group_for_host',
mock.Mock(side_effect=[
exception.NetAppDriverException,
None
]))
self.mock_object(self.client, 'get_host_group',
mock.Mock(side_effect=exception.NotFound))
mock_move_mapping = mock.Mock(
return_value=eseries_fakes.VOLUME_MAPPING_TO_MULTIATTACH_GROUP)
self.mock_object(self.client,
'move_volume_mapping_via_symbol',
mock_move_mapping)
self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_multiple_hosts,
self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
target_host,
eseries_fakes.VOLUME_MAPPING)
def test_map_volume_to_multiple_hosts_volume_mapped_to_foreign_host_group(
self):
"""Test a target when the host is in a foreign host group.
Should raise an error stating the volume is mapped to an
unsupported host group.
"""
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_volume_mapping = copy.deepcopy(eseries_fakes.VOLUME_MAPPING)
fake_ref = "8500000060080E500023C7340036035F515B78FD"
fake_volume_mapping['mapRef'] = fake_ref
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
fake_host = copy.deepcopy(eseries_fakes.HOST)
fake_host['clusterRef'] = utils.NULL_REF
self.mock_object(self.client, 'get_host_group',
mock.Mock(return_value=
eseries_fakes.FOREIGN_HOST_GROUP))
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_multiple_hosts,
self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
fake_host,
fake_volume_mapping)
self.assertIn("unsupported host group", six.text_type(err))
def test_map_volume_to_multiple_hosts_volume_mapped_to_host_in_foreign_host_group( # noqa
self):
"""Test a target when the host is in a foreign host group.
Should raise an error stating the volume is mapped to a
host that is in an unsupported host group.
"""
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_volume_mapping = copy.deepcopy(eseries_fakes.VOLUME_MAPPING)
fake_host = copy.deepcopy(eseries_fakes.HOST_2)
fake_host['clusterRef'] = eseries_fakes.FOREIGN_HOST_GROUP[
'clusterRef']
fake_volume_mapping['mapRef'] = fake_host['hostRef']
fake_eseries_volume['listOfMappings'] = [fake_volume_mapping]
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
self.mock_object(self.client, 'get_host',
mock.Mock(return_value=fake_host))
self.mock_object(self.client, 'get_host_group',
mock.Mock(side_effect=[
eseries_fakes.FOREIGN_HOST_GROUP]))
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_multiple_hosts,
self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
eseries_fakes.HOST,
fake_volume_mapping)
self.assertIn("unsupported host group", six.text_type(err))
def test_map_volume_to_multiple_hosts_volume_target_host_in_foreign_host_group( # noqa
self):
"""Test a target when the host is in a foreign host group.
Should raise an error stating the target host is in an
unsupported host group.
"""
fake_eseries_volume = copy.deepcopy(eseries_fakes.VOLUME)
fake_volume_mapping = copy.deepcopy(eseries_fakes.VOLUME_MAPPING)
fake_host = copy.deepcopy(eseries_fakes.HOST_2)
fake_host['clusterRef'] = eseries_fakes.FOREIGN_HOST_GROUP[
'clusterRef']
self.mock_object(self.client, 'list_volumes',
mock.Mock(return_value=[fake_eseries_volume]))
self.mock_object(self.client, 'get_host',
mock.Mock(return_value=eseries_fakes.HOST))
self.mock_object(self.client, 'get_host_group',
mock.Mock(side_effect=[
eseries_fakes.FOREIGN_HOST_GROUP]))
err = self.assertRaises(exception.NetAppDriverException,
host_mapper.map_volume_to_multiple_hosts,
self.client,
get_fake_volume(),
eseries_fakes.VOLUME,
fake_host,
fake_volume_mapping)
self.assertIn("unsupported host group", six.text_type(err))
def test_get_unused_lun_ids(self):
unused_lun_ids = host_mapper._get_unused_lun_ids(FAKE_MAPPINGS)
self.assertEqual(set(range(2, 256)), unused_lun_ids)
def test_get_unused_lun_id_counter(self):
used_lun_id_count = host_mapper._get_used_lun_id_counter(
FAKE_MAPPINGS)
self.assertEqual(FAKE_USED_LUN_ID_DICT, used_lun_id_count)
def test_get_unused_lun_ids_used_up_luns(self):
unused_lun_ids = host_mapper._get_unused_lun_ids(
FAKE_USED_UP_MAPPINGS)
self.assertEqual(FAKE_UNUSED_LUN_ID, unused_lun_ids)
def test_get_lun_id_counter_used_up_luns(self):
used_lun_ids = host_mapper._get_used_lun_id_counter(
FAKE_USED_UP_MAPPINGS)
self.assertEqual(FAKE_USED_UP_LUN_ID_DICT, used_lun_ids)
def test_host_not_full(self):
fake_host = copy.deepcopy(eseries_fakes.HOST)
self.assertFalse(host_mapper._is_host_full(self.client, fake_host))
def test_host_full(self):
fake_host = copy.deepcopy(eseries_fakes.HOST)
self.mock_object(self.client, 'get_volume_mappings_for_host',
mock.Mock(return_value=FAKE_USED_UP_MAPPINGS))
self.assertTrue(host_mapper._is_host_full(self.client, fake_host))
def test_get_free_lun(self):
fake_host = copy.deepcopy(eseries_fakes.HOST)
with mock.patch('random.sample') as mock_random:
mock_random.return_value = [3]
lun = host_mapper._get_free_lun(self.client, fake_host, False,
[])
self.assertEqual(3, lun)
def test_get_free_lun_host_full(self):
fake_host = copy.deepcopy(eseries_fakes.HOST)
self.mock_object(host_mapper, '_is_host_full',
mock.Mock(return_value=True))
self.assertRaises(
exception.NetAppDriverException,
host_mapper._get_free_lun,
self.client, fake_host, False, FAKE_USED_UP_MAPPINGS)
def test_get_free_lun_no_unused_luns(self):
fake_host = copy.deepcopy(eseries_fakes.HOST)
lun = host_mapper._get_free_lun(self.client, fake_host, False,
FAKE_USED_UP_MAPPINGS)
self.assertEqual(255, lun)
def test_get_free_lun_no_unused_luns_host_not_full(self):
fake_host = copy.deepcopy(eseries_fakes.HOST)
self.mock_object(host_mapper, '_is_host_full',
mock.Mock(return_value=False))
lun = host_mapper._get_free_lun(self.client, fake_host, False,
FAKE_USED_UP_MAPPINGS)
self.assertEqual(255, lun)
def test_get_free_lun_no_lun_available(self):
fake_host = copy.deepcopy(eseries_fakes.HOST_3)
self.mock_object(self.client, 'get_volume_mappings_for_host',
mock.Mock(return_value=FAKE_USED_UP_MAPPINGS))
self.assertRaises(exception.NetAppDriverException,
host_mapper._get_free_lun,
self.client, fake_host, False,
FAKE_USED_UP_MAPPINGS)
def test_get_free_lun_multiattach_enabled_no_unused_ids(self):
fake_host = copy.deepcopy(eseries_fakes.HOST_3)
self.mock_object(self.client, 'get_volume_mappings',
mock.Mock(return_value=FAKE_USED_UP_MAPPINGS))
self.assertRaises(exception.NetAppDriverException,
host_mapper._get_free_lun,
self.client, fake_host, True,
FAKE_USED_UP_MAPPINGS)
def test_get_lun_by_mapping(self):
used_luns = host_mapper._get_used_lun_ids_for_mappings(FAKE_MAPPINGS)
self.assertEqual(set([0, 1]), used_luns)
def test_get_lun_by_mapping_no_mapping(self):
used_luns = host_mapper._get_used_lun_ids_for_mappings([])
self.assertEqual(set([0]), used_luns)
def test_lun_id_available_on_host(self):
fake_host = copy.deepcopy(eseries_fakes.HOST)
self.assertTrue(host_mapper._is_lun_id_available_on_host(
self.client, fake_host, FAKE_UNUSED_LUN_ID))
def test_no_lun_id_available_on_host(self):
fake_host = copy.deepcopy(eseries_fakes.HOST_3)
self.mock_object(self.client, 'get_volume_mappings_for_host',
mock.Mock(return_value=FAKE_USED_UP_MAPPINGS))
self.assertFalse(host_mapper._is_lun_id_available_on_host(
self.client, fake_host, FAKE_SINGLE_USED_LUN_ID))
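# A standalone sketch, not the driver's actual helper: the set-difference
# approach that the _get_unused_lun_ids tests above exercise. It assumes each
# mapping is a dict with a 'lun' key, as the eseries fakes suggest, and
# treats LUN 0 as always reserved (see test_get_lun_by_mapping_no_mapping).
def _sketch_unused_lun_ids(mappings):
    used = {0} | set(mapping['lun'] for mapping in mappings)
    return set(range(256)) - used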
|
binoculars/osf.io
|
refs/heads/develop
|
api_tests/base/test_utils.py
|
6
|
# -*- coding: utf-8 -*-
from nose.tools import * # flake8: noqa
import mock # noqa
import unittest
from rest_framework import fields
from rest_framework.exceptions import ValidationError
from api.base import utils as api_utils
from framework.status import push_status_message
class TestTruthyFalsy:
"""Check that our copy/pasted representation of
    TRUTHY and FALSY matches the DRF BooleanField's versions
"""
def test_truthy(self):
assert_equal(api_utils.TRUTHY, fields.BooleanField.TRUE_VALUES)
def test_falsy(self):
assert_equal(api_utils.FALSY, fields.BooleanField.FALSE_VALUES)
class TestIsDeprecated(unittest.TestCase):
def setUp(self):
super(TestIsDeprecated, self).setUp()
self.min_version = '2.0'
self.max_version = '2.5'
def test_is_deprecated(self):
request_version = '2.6'
is_deprecated = api_utils.is_deprecated(
request_version, self.min_version, self.max_version)
assert_equal(is_deprecated, True)
def test_is_not_deprecated(self):
request_version = '2.5'
is_deprecated = api_utils.is_deprecated(
request_version, self.min_version, self.max_version)
assert_equal(is_deprecated, False)
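# The two cases above pin down the boundary semantics (assuming
# api_utils.is_deprecated simply compares dotted version strings):
#   is_deprecated('2.6', '2.0', '2.5') -> True   (past max_version)
#   is_deprecated('2.5', '2.0', '2.5') -> False  (max_version itself is
#                                                 still supported)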
class TestFlaskDjangoIntegration:
def test_push_status_message_no_response(self):
status_message = 'This is a message'
statuses = ['info', 'warning', 'warn', 'success', 'danger', 'default']
for status in statuses:
try:
push_status_message(status_message, kind=status)
except BaseException:
assert_true(
False,
'Exception from push_status_message via API v2 with type "{}".'.format(status)
)
def test_push_status_message_expected_error(self):
status_message = 'This is a message'
try:
push_status_message(status_message, kind='error')
assert_true(
False,
'push_status_message() should have generated a ValidationError exception.'
)
except ValidationError as e:
assert_equal(
e.detail[0],
status_message,
'push_status_message() should have passed along the message with the Exception.'
)
except RuntimeError:
assert_true(
False,
'push_status_message() should have caught the runtime error and replaced it.'
)
except BaseException:
assert_true(
False,
'Exception from push_status_message when called from the v2 API with type "error"'
)
@mock.patch('framework.status.session')
def test_push_status_message_unexpected_error(self, mock_sesh):
status_message = 'This is a message'
exception_message = 'this is some very unexpected problem'
mock_get = mock.Mock(side_effect=RuntimeError(exception_message))
mock_data = mock.Mock()
mock_data.attach_mock(mock_get, 'get')
mock_sesh.attach_mock(mock_data, 'data')
try:
push_status_message(status_message, kind='error')
assert_true(
False,
'push_status_message() should have generated a RuntimeError exception.'
)
except ValidationError as e:
assert_true(
False,
                'push_status_message() should have re-raised the RuntimeError, not raised a ValidationError.'
)
except RuntimeError as e:
assert_equal(getattr(e, 'message', None),
exception_message,
'push_status_message() should have re-raised the '
'original RuntimeError with the original message.')
except BaseException:
assert_true(
False, 'Unexpected Exception from push_status_message when called '
'from the v2 API with type "error"')
|
qtile/qtile
|
refs/heads/master
|
libqtile/scripts/run_cmd.py
|
2
|
# Copyright (c) 2014, Roger Duran
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Command-line wrapper to run commands and add rules to new windows
"""
import argparse
import atexit
import subprocess
from libqtile import ipc
from libqtile.command import graph
def run_cmd(opts) -> None:
if opts.socket is None:
socket = ipc.find_sockfile()
else:
socket = opts.socket
client = ipc.Client(socket)
root = graph.CommandGraphRoot()
cmd = [opts.cmd]
if opts.args:
cmd.extend(opts.args)
proc = subprocess.Popen(cmd)
match_args = {"net_wm_pid": proc.pid}
rule_args = {"float": opts.float, "intrusive": opts.intrusive,
"group": opts.group, "break_on_match": not opts.dont_break}
cmd = root.call("add_rule")
_, rule_id = client.send((root.selectors, cmd.name, (match_args, rule_args), {}))
def remove_rule() -> None:
cmd = root.call("remove_rule")
client.send((root.selectors, cmd.name, (rule_id,), {}))
atexit.register(remove_rule)
proc.wait()
def add_subcommand(subparsers, parents):
parser = subparsers.add_parser(
"run-cmd",
parents=parents,
help="A wrapper around the command graph"
)
parser.add_argument(
'-s',
'--socket',
help='Use specified communication socket.')
parser.add_argument(
'-i',
'--intrusive',
action='store_true',
help='If the new window should be intrusive.')
parser.add_argument(
'-f',
'--float',
action='store_true',
help='If the new window should be float.')
parser.add_argument(
'-b',
'--dont-break',
action='store_true',
help='Do not break on match (keep applying rules).')
parser.add_argument(
'-g',
'--group',
help='Set the window group.')
parser.add_argument(
'cmd',
        help='Command to execute.')
parser.add_argument(
'args',
nargs=argparse.REMAINDER,
metavar='[args ...]',
help='Optional arguments to pass to command.'
)
parser.set_defaults(func=run_cmd)
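# A minimal usage sketch; the real CLI wiring lives elsewhere in
# libqtile.scripts, so the argv below is illustrative only.
if __name__ == "__main__":
    main_parser = argparse.ArgumentParser(prog="qtile")
    subparsers = main_parser.add_subparsers()
    add_subcommand(subparsers, parents=[])
    opts = main_parser.parse_args(["run-cmd", "--float", "xterm"])
    opts.func(opts)  # spawns xterm and floats its window via the added rule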
|
nickvandewiele/RMG-Py
|
refs/heads/master
|
rmgpy/cantherm/pdep.py
|
5
|
#!/usr/bin/env python
# encoding: utf-8
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2009 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This module provides the :class:`PressureDependenceJob` class, which represents
a job for computing the pressure-dependent rate coefficients of a unimolecular
reaction network.
"""
import os.path
import math
import numpy
import logging
import rmgpy.constants as constants
import rmgpy.quantity as quantity
from rmgpy.kinetics import Chebyshev, PDepArrhenius
from rmgpy.reaction import Reaction
from rmgpy.kinetics.tunneling import Wigner, Eckart
from rmgpy.cantherm.output import prettify
################################################################################
class PressureDependenceJob(object):
"""
A representation of a pressure dependence job. The attributes are:
======================= ====================================================
Attribute Description
======================= ====================================================
`Tmin` The minimum temperature at which to compute :math:`k(T,P)` values
`Tmax` The maximum temperature at which to compute :math:`k(T,P)` values
`Tcount` The number of temperatures at which to compute :math:`k(T,P)` values
`Pmin` The minimum pressure at which to compute :math:`k(T,P)` values
`Pmax` The maximum pressure at which to compute :math:`k(T,P)` values
`Pcount` The number of pressures at which to compute :math:`k(T,P)` values
`Emin` The minimum energy to use to compute :math:`k(T,P)` values
`Emax` The maximum energy to use to compute :math:`k(T,P)` values
`maximumGrainSize` The maximum energy grain size to use to compute :math:`k(T,P)` values
`minimumGrainCount` The minimum number of energy grains to use to compute :math:`k(T,P)` values
`method` The method to use to reduce the master equation to :math:`k(T,P)` values
`interpolationModel` The interpolation model to fit to the computed :math:`k(T,P)` values
`maximumAtoms` The maximum number of atoms to apply pressure dependence to (in RMG jobs)
`activeKRotor` A flag indicating whether to treat the K-rotor as active or adiabatic
`activeJRotor` A flag indicating whether to treat the J-rotor as active or adiabatic
`rmgmode` A flag that toggles "RMG mode", described below
----------------------- ----------------------------------------------------
`network` The unimolecular reaction network
`Tlist` An array of temperatures at which to compute :math:`k(T,P)` values
`Plist` An array of pressures at which to compute :math:`k(T,P)` values
`Elist` An array of energies to use to compute :math:`k(T,P)` values
======================= ====================================================
In RMG mode, several alterations to the k(T,P) algorithm are made both for
speed and due to the nature of the approximations used:
* Densities of states are not computed for product channels
* Arbitrary rigid rotor moments of inertia are included in the active modes;
these cancel in the ILT and equilibrium expressions
* k(E) for each path reaction is computed in the direction A -> products,
where A is always an explored isomer; the high-P kinetics are reversed
if necessary for this purpose
* Thermodynamic parameters are always used to compute the reverse k(E)
from the forward k(E) for each path reaction
RMG mode should be turned off by default except in RMG jobs.
"""
def __init__(self, network,
Tmin=None, Tmax=None, Tcount=0, Tlist=None,
Pmin=None, Pmax=None, Pcount=0, Plist=None,
maximumGrainSize=None, minimumGrainCount=0,
method=None, interpolationModel=None, maximumAtoms=None,
activeKRotor=True, activeJRotor=True, rmgmode=False):
self.network = network
self.Tmin = Tmin
self.Tmax = Tmax
self.Tcount = Tcount
if Tlist is not None:
self.Tlist = Tlist
self.Tmin = (numpy.min(self.Tlist.value_si),"K")
self.Tmax = (numpy.max(self.Tlist.value_si),"K")
self.Tcount = len(self.Tlist.value_si)
else:
self.Tlist = None
self.Pmin = Pmin
self.Pmax = Pmax
self.Pcount = Pcount
if Plist is not None:
self.Plist = Plist
self.Pmin = (numpy.min(self.Plist.value_si)*1e-5,"bar")
self.Pmax = (numpy.max(self.Plist.value_si)*1e-5,"bar")
self.Pcount = len(self.Plist.value_si)
else:
self.Plist = None
self.maximumGrainSize = maximumGrainSize
self.minimumGrainCount = minimumGrainCount
self.Emin = None
self.Emax = None
self.Elist = None
self.method = method
self.interpolationModel = interpolationModel
self.maximumAtoms = maximumAtoms
self.activeKRotor = activeKRotor
self.activeJRotor = activeJRotor
self.rmgmode = rmgmode
@property
def Tmin(self):
"""The minimum temperature at which the computed k(T,P) values are valid, or ``None`` if not defined."""
return self._Tmin
@Tmin.setter
def Tmin(self, value):
self._Tmin = quantity.Temperature(value)
@property
def Tmax(self):
"""The maximum temperature at which the computed k(T,P) values are valid, or ``None`` if not defined."""
return self._Tmax
@Tmax.setter
def Tmax(self, value):
self._Tmax = quantity.Temperature(value)
@property
def Tlist(self):
"""The temperatures at which the k(T,P) values are computed."""
return self._Tlist
@Tlist.setter
def Tlist(self, value):
self._Tlist = quantity.Temperature(value)
@property
def Pmin(self):
"""The minimum pressure at which the computed k(T,P) values are valid, or ``None`` if not defined."""
return self._Pmin
@Pmin.setter
def Pmin(self, value):
self._Pmin = quantity.Pressure(value)
@property
def Pmax(self):
"""The maximum pressure at which the computed k(T,P) values are valid, or ``None`` if not defined."""
return self._Pmax
@Pmax.setter
def Pmax(self, value):
self._Pmax = quantity.Pressure(value)
@property
def Plist(self):
"""The pressures at which the k(T,P) values are computed."""
return self._Plist
@Plist.setter
def Plist(self, value):
self._Plist = quantity.Pressure(value)
@property
def maximumGrainSize(self):
"""The maximum allowed energy grain size, or ``None`` if not defined."""
return self._maximumGrainSize
@maximumGrainSize.setter
def maximumGrainSize(self, value):
self._maximumGrainSize = quantity.Energy(value)
def copy(self):
"""
Return a copy of the pressure dependence job.
"""
return PressureDependenceJob(
network = self.network,
            Tmin = self.Tmin,
Tmax = self.Tmax,
Tcount = self.Tcount,
Tlist = self.Tlist,
Pmin = self.Pmin,
Pmax = self.Pmax,
Pcount = self.Pcount,
Plist = self.Plist,
maximumGrainSize = self.maximumGrainSize,
minimumGrainCount = self.minimumGrainCount,
method = self.method,
            interpolationModel = self.interpolationModel,
            maximumAtoms = self.maximumAtoms,
activeKRotor = self.activeKRotor,
activeJRotor = self.activeJRotor,
rmgmode = self.rmgmode,
)
def execute(self, outputFile, plot, format='pdf'):
self.network.printSummary()
if outputFile is not None:
self.draw(os.path.dirname(outputFile), format)
self.initialize()
self.K = self.network.calculateRateCoefficients(self.Tlist.value_si, self.Plist.value_si, self.method)
self.fitInterpolationModels()
if outputFile is not None:
self.save(outputFile)
if plot:
self.plot(os.path.dirname(outputFile))
def generateTemperatureList(self):
"""
Returns an array of temperatures based on the interpolation `model`,
minimum and maximum temperatures `Tmin` and `Tmax` in K, and the number of
temperatures `Tcount`. For Chebyshev polynomials a Gauss-Chebyshev
distribution is used; for all others a linear distribution on an inverse
temperature domain is used. Note that the Gauss-Chebyshev grid does *not*
place `Tmin` and `Tmax` at the endpoints, yet the interpolation is still
valid up to these values.
"""
Tmin = self.Tmin.value_si
Tmax = self.Tmax.value_si
Tcount = self.Tcount
if self.Tlist is not None:
pass
elif self.interpolationModel[0].lower() == 'chebyshev':
# Distribute temperatures on a Gauss-Chebyshev grid
Tlist = numpy.zeros(Tcount, numpy.float64)
for i in range(Tcount):
T = -math.cos((2*i+1) * math.pi / (2*self.Tcount))
T = 2.0 / ((1.0/Tmax - 1.0/Tmin) * T + 1.0/Tmax + 1.0/Tmin)
Tlist[i] = T
self.Tlist = (Tlist,"K")
else:
# Distribute temperatures evenly on a T^-1 domain
Tlist = 1.0/numpy.linspace(1.0/Tmax, 1.0/Tmin, Tcount)
self.Tlist = (Tlist,"K")
return self.Tlist.value_si
def initialize(self):
for reaction in self.network.pathReactions:
tunneling = reaction.transitionState.tunneling
if isinstance(tunneling, Wigner) and tunneling.frequency is None:
tunneling.frequency = (reaction.transitionState.frequency.value_si,"cm^-1")
elif isinstance(tunneling, Eckart) and tunneling.frequency is None:
tunneling.frequency = (reaction.transitionState.frequency.value_si,"cm^-1")
tunneling.E0_reac = (sum([reactant.conformer.E0.value_si for reactant in reaction.reactants])*0.001,"kJ/mol")
tunneling.E0_TS = (reaction.transitionState.conformer.E0.value_si*0.001,"kJ/mol")
tunneling.E0_prod = (sum([product.conformer.E0.value_si for product in reaction.products])*0.001,"kJ/mol")
elif tunneling is not None:
if tunneling.frequency is not None:
# Frequency was given by the user
pass
else:
raise ValueError('Unknown tunneling model {0!r} for path reaction {1}.'.format(tunneling, reaction))
maximumGrainSize = self.maximumGrainSize.value_si if self.maximumGrainSize is not None else 0.0
self.network.initialize(
Tmin = self.Tmin.value_si,
Tmax = self.Tmax.value_si,
Pmin = self.Pmin.value_si,
Pmax = self.Pmax.value_si,
maximumGrainSize = maximumGrainSize,
minimumGrainCount = self.minimumGrainCount,
activeJRotor = self.activeJRotor,
activeKRotor = self.activeKRotor,
rmgmode = self.rmgmode,
)
self.generateTemperatureList()
self.generatePressureList()
def generatePressureList(self):
"""
Returns an array of pressures based on the interpolation `model`,
minimum and maximum pressures `Pmin` and `Pmax` in Pa, and the number of
pressures `Pcount`. For Chebyshev polynomials a Gauss-Chebyshev
        distribution is used; for all others a linear distribution on a logarithmic
pressure domain is used. Note that the Gauss-Chebyshev grid does *not*
place `Pmin` and `Pmax` at the endpoints, yet the interpolation is still
valid up to these values.
"""
Pmin = self.Pmin.value_si
Pmax = self.Pmax.value_si
Pcount = self.Pcount
if self.Plist is not None:
pass
        elif self.interpolationModel[0].lower() == 'chebyshev':
# Distribute pressures on a Gauss-Chebyshev grid
Plist = numpy.zeros(Pcount, numpy.float64)
for i in range(Pcount):
P = -math.cos((2*i+1) * math.pi / (2*self.Pcount))
P = 10**(0.5 * ((math.log10(Pmax) - math.log10(Pmin)) * P + math.log10(Pmax) + math.log10(Pmin)))
Plist[i] = P
self.Plist = (Plist*1e-5,"bar")
else:
# Distribute pressures evenly on a log domain
Plist = 10.0 ** numpy.linspace(math.log10(Pmin), math.log10(Pmax), Pcount)
self.Plist = (Plist*1e-5,"bar")
return self.Plist.value_si
def fitInterpolationModels(self):
configurations = []
configurations.extend(self.network.isomers)
configurations.extend(self.network.reactants)
configurations.extend(self.network.products)
self.network.netReactions = []
Nreac = self.network.Nisom + self.network.Nreac
Nprod = Nreac + self.network.Nprod
Tmin = self.Tmin.value_si
Tmax = self.Tmax.value_si
Tdata = self.Tlist.value_si
Pmin = self.Pmin.value_si
Pmax = self.Pmax.value_si
Pdata = self.Plist.value_si
for prod in range(Nprod):
for reac in range(Nreac):
if reac == prod: continue
reaction = Reaction(
reactants = configurations[reac].species,
products = configurations[prod].species,
)
kdata = self.K[:,:,prod,reac].copy()
order = len(reaction.reactants)
kdata *= 1e6 ** (order-1)
kunits = {1: 's^-1', 2: 'cm^3/(mol*s)', 3: 'cm^6/(mol^2*s)'}[order]
reaction.kinetics = self.fitInterpolationModel(Tdata, Pdata, kdata, kunits)
self.network.netReactions.append(reaction)
def fitInterpolationModel(self, Tdata, Pdata, kdata, kunits):
Tmin = self.Tmin.value_si
Tmax = self.Tmax.value_si
Pmin = self.Pmin.value_si
Pmax = self.Pmax.value_si
model = self.interpolationModel[0].lower()
if model == 'chebyshev':
kinetics = Chebyshev().fitToData(Tdata, Pdata, kdata, kunits,
self.interpolationModel[1], self.interpolationModel[2],
Tmin, Tmax, Pmin, Pmax,
)
elif model == 'pdeparrhenius':
kinetics = PDepArrhenius().fitToData(Tdata, Pdata, kdata, kunits)
else:
raise Exception('Invalid interpolation model {0!r}.'.format(self.interpolationModel[0]))
return kinetics
def save(self, outputFile):
logging.info('Saving pressure dependence results for {0} network...'.format(self.network.label))
f = open(outputFile, 'a')
Nreac = self.network.Nisom + self.network.Nreac
Nprod = Nreac + self.network.Nprod
Tlist = self.Tlist.value_si
Plist = self.Plist.value_si
Tcount = Tlist.shape[0]
Pcount = Plist.shape[0]
count = 0
for prod in range(Nprod):
for reac in range(Nreac):
if reac == prod: continue
reaction = self.network.netReactions[count]
count += 1
kdata = self.K[:,:,prod,reac].copy()
order = len(reaction.reactants)
kdata *= 1e6 ** (order-1)
kunits = {1: 's^-1', 2: 'cm^3/(mol*s)', 3: 'cm^6/(mol^2*s)'}[order]
f.write('# =========== ')
f.write('=========== ' * Pcount)
f.write('\n')
                f.write('# T \\ P ')
f.write(' '.join(['{0:11.3e}'.format(P*1e-5) for P in Plist]))
f.write('\n')
f.write('# =========== ')
f.write('=========== ' * Pcount)
f.write('\n')
for t in range(Tcount):
f.write('# {0:11g}'.format(Tlist[t]))
for p in range(Pcount):
f.write(' {0:11.3e}'.format(kdata[t,p]))
f.write('\n')
f.write('# =========== ')
f.write('=========== ' * Pcount)
f.write('\n')
string = 'pdepreaction(reactants={0!r}, products={1!r}, kinetics={2!r})'.format(
[reactant.label for reactant in reaction.reactants],
[product.label for product in reaction.products],
reaction.kinetics,
)
f.write('{0}\n\n'.format(prettify(string)))
f.close()
f = open(os.path.join(os.path.dirname(outputFile), 'chem.inp'), 'a')
count = 0
for prod in range(Nprod):
for reac in range(Nreac):
if reac == prod: continue
reaction = self.network.netReactions[count]
kinetics = reaction.kinetics
count += 1
string = '{0!s:51} 1.0 0.0 0.0\n'.format(reaction)
if isinstance(kinetics, PDepArrhenius):
for P, arrhenius in zip(kinetics.pressures.value_si, kinetics.arrhenius):
string += 'PLOG/ {0:<9.3f} {1:<11.3e} {2:<8.2f} {3:<8.2f}/\n'.format(P / 101325.,
arrhenius.A.value_si / (arrhenius.T0.value_si ** arrhenius.n.value_si) * 1e6 ** (len(reaction.reactants) - 1),
arrhenius.n.value_si,
arrhenius.Ea.value_si / 4184.
)
elif isinstance(kinetics, Chebyshev):
coeffs = kinetics.coeffs.value_si.copy()
coeffs[0,0] += 6 * (len(reaction.reactants) - 1)
string += 'TCHEB/ {0:<9.3f} {1:<9.3f}/\n'.format(kinetics.Tmin.value_si, kinetics.Tmax.value_si)
string += 'PCHEB/ {0:<9.3f} {1:<9.3f}/\n'.format(kinetics.Pmin.value_si / 101325., kinetics.Pmax.value_si / 101325.)
string += 'CHEB/ {0:d} {1:d}/\n'.format(kinetics.degreeT, kinetics.degreeP)
if kinetics.degreeP < 6:
for i in range(kinetics.degreeT):
string += 'CHEB/'
for j in range(kinetics.degreeP):
string += ' {0:<12.3e}'.format(coeffs[i,j])
string += '/\n'
else:
coeffs_list = []
for i in range(kinetics.degreeT):
for j in range(kinetics.degreeP):
coeffs_list.append(coeffs[i,j])
                        # the 6*(order - 1) offset was already applied to coeffs[0,0] above
for i in range(len(coeffs_list)):
if i % 5 == 0: string += ' CHEB/'
string += ' {0:<12.3e}'.format(coeffs_list[i])
if i % 5 == 4: string += '/\n'
f.write('{0}\n'.format(string))
f.close()
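    # For reference: the Chemkin records written above follow the standard
    # conventions. PLOG lines carry (pressure [atm], A, n, Ea [kcal/mol])
    # per pressure; TCHEB/PCHEB give the valid T and P ranges, CHEB gives
    # the polynomial degrees and then the coefficient rows. The
    # 6*(order - 1) offset on the leading Chebyshev coefficient converts
    # the rate units from SI (m^3, mol, s) to cm^3-based units in log10
    # space, since log10(1e6**(order - 1)) == 6*(order - 1).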
def plot(self, outputDirectory):
# Skip this step if matplotlib is not installed
try:
import pylab
except ImportError:
return
import matplotlib.cm
cm = matplotlib.cm.jet
Nreac = self.network.Nisom + self.network.Nreac
Nprod = Nreac + self.network.Nprod
Tlist = self.Tlist.value_si
Plist = self.Plist.value_si
Tcount = Tlist.shape[0]
Pcount = Plist.shape[0]
K = self.K
count = 0
for prod in range(Nprod):
for reac in range(Nreac):
if reac == prod: continue
reaction = self.network.netReactions[count]
count += 1
reaction_str = '{0} {1} {2}'.format(
' + '.join([reactant.label for reactant in reaction.reactants]),
'<=>' if prod < Nreac else '-->',
' + '.join([product.label for product in reaction.products]),
)
fig = pylab.figure(figsize=(10,6))
K2 = numpy.zeros((Tcount, Pcount))
if reaction.kinetics is not None:
for t in range(Tcount):
for p in range(Pcount):
K2[t,p] = reaction.kinetics.getRateCoefficient(Tlist[t], Plist[p])
K = self.K[:,:,prod,reac].copy()
order = len(reaction.reactants)
K *= 1e6 ** (order-1)
K2 *= 1e6 ** (order-1)
kunits = {1: 's^-1', 2: 'cm^3/(mol*s)', 3: 'cm^6/(mol^2*s)'}[order]
pylab.subplot(1,2,1)
for p in range(Pcount):
pylab.semilogy(1000.0 / Tlist, K[:,p], color=cm(1.*p/(Pcount-1)), marker='o', linestyle='')
if reaction.kinetics is not None:
pylab.semilogy(1000.0 / Tlist, K2[:,p], color=cm(1.*p/(Pcount-1)), marker='', linestyle='-')
pylab.xlabel('1000 / Temperature (1000/K)')
pylab.ylabel('Rate coefficient ({0})'.format(kunits))
pylab.title(reaction_str)
pylab.subplot(1,2,2)
for t in range(Tcount):
pylab.loglog(Plist*1e-5, K[t,:], color=cm(1.*t/(Tcount-1)), marker='o', linestyle='')
pylab.loglog(Plist*1e-5, K2[t,:], color=cm(1.*t/(Tcount-1)), marker='', linestyle='-')
pylab.xlabel('Pressure (bar)')
pylab.ylabel('Rate coefficient ({0})'.format(kunits))
pylab.title(reaction_str)
fig.subplots_adjust(left=0.10, bottom=0.13, right=0.95, top=0.92, wspace=0.3, hspace=0.3)
pylab.savefig(os.path.join(outputDirectory, 'kinetics_{0:d}.pdf'.format(count)))
pylab.close()
def draw(self, outputDirectory, format='pdf'):
"""
Generate a PDF drawing of the pressure-dependent reaction network.
This requires that Cairo and its Python wrapper be available; if not,
the drawing is not generated.
        You can also generate other formats of drawing by changing `format`
        to one of the following: `pdf`, `svg`, `png`.
"""
# Skip this step if cairo is not installed
try:
import cairocffi as cairo
except ImportError:
try:
import cairo
except ImportError:
return
from rmgpy.pdep.draw import NetworkDrawer
path = os.path.join(outputDirectory, 'network.' + format)
NetworkDrawer().draw(self.network, format=format, path=path)
def saveInputFile(self, path):
"""
Save a CanTherm input file for the pressure dependence job to `path`
on disk.
"""
speciesList = self.network.getAllSpecies()
# Add labels for species, reactions, transition states that don't have them
for i, spec in enumerate(speciesList):
if not spec.label:
spec.label = 'species{0:d}'.format(i+1)
for i, rxn in enumerate(self.network.pathReactions):
if not rxn.label:
rxn.label = 'reaction{0:d}'.format(i+1)
if not rxn.transitionState.label:
rxn.transitionState.label = 'TS{0:d}'.format(i+1)
if not self.network.label:
self.network.label = 'network'
with open(path, 'w') as f:
# Write species
for spec in speciesList:
f.write('species(\n')
f.write(' label = {0!r},\n'.format(str(spec)))
if len(spec.molecule) > 0:
f.write(' structure = SMILES({0!r}),\n'.format(spec.molecule[0].toSMILES()))
if spec.conformer is not None:
if spec.conformer.E0 is not None:
f.write(' E0 = {0!r},\n'.format(spec.conformer.E0))
if len(spec.conformer.modes) > 0:
f.write(' modes = [\n')
for mode in spec.conformer.modes:
f.write(' {0!r},\n'.format(mode))
f.write(' ],\n')
f.write(' spinMultiplicity = {0:d},\n'.format(spec.conformer.spinMultiplicity))
f.write(' opticalIsomers = {0:d},\n'.format(spec.conformer.opticalIsomers))
if spec.molecularWeight is not None:
f.write(' molecularWeight = {0!r},\n'.format(spec.molecularWeight))
if spec.transportData is not None:
f.write(' collisionModel = {0!r},\n'.format(spec.transportData))
if spec.energyTransferModel is not None:
f.write(' energyTransferModel = {0!r},\n'.format(spec.energyTransferModel))
if spec.thermo is not None:
f.write(' thermo = {0!r},\n'.format(spec.thermo))
f.write(')\n\n')
# Write transition states
for rxn in self.network.pathReactions:
ts = rxn.transitionState
f.write('transitionState(\n')
f.write(' label = {0!r},\n'.format(ts.label))
if ts.conformer is not None:
if ts.conformer.E0 is not None:
f.write(' E0 = {0!r},\n'.format(ts.conformer.E0))
if len(ts.conformer.modes) > 0:
f.write(' modes = [\n')
for mode in ts.conformer.modes:
f.write(' {0!r},\n'.format(mode))
f.write(' ],\n')
f.write(' spinMultiplicity = {0:d},\n'.format(ts.conformer.spinMultiplicity))
f.write(' opticalIsomers = {0:d},\n'.format(ts.conformer.opticalIsomers))
if ts.frequency is not None:
f.write(' frequency = {0!r},\n'.format(ts.frequency))
f.write(')\n\n')
# Write reactions
for rxn in self.network.pathReactions:
ts = rxn.transitionState
f.write('reaction(\n')
f.write(' label = {0!r},\n'.format(rxn.label))
f.write(' reactants = [{0}],\n'.format(', '.join([repr(str(spec)) for spec in rxn.reactants])))
f.write(' products = [{0}],\n'.format(', '.join([repr(str(spec)) for spec in rxn.products])))
f.write(' transitionState = {0!r},\n'.format(rxn.transitionState.label))
if rxn.kinetics is not None:
f.write(' kinetics = {0!r},\n'.format(rxn.kinetics))
if ts.tunneling is not None:
f.write(' tunneling = {0!r},\n'.format(ts.tunneling.__class__.__name__))
f.write(')\n\n')
# Write network
f.write('network(\n')
f.write(' label = {0!r},\n'.format(self.network.label))
f.write(' isomers = [\n')
for isomer in self.network.isomers:
f.write(' {0!r},\n'.format(str(isomer.species[0])))
f.write(' ],\n')
f.write(' reactants = [\n')
for reactants in self.network.reactants:
f.write(' ({0}),\n'.format(', '.join([repr(str(spec)) for spec in reactants.species])))
f.write(' ],\n')
f.write(' bathGas = {\n')
for spec, frac in self.network.bathGas.items():
f.write(' {0!r}: {1:g},\n'.format(str(spec), frac))
f.write(' },\n')
f.write(')\n\n')
# Write pressure dependence
f.write('pressureDependence(\n')
f.write(' label = {0!r},\n'.format(self.network.label))
f.write(' Tmin = {0!r},\n'.format(self.Tmin))
f.write(' Tmax = {0!r},\n'.format(self.Tmax))
f.write(' Tcount = {0:d},\n'.format(self.Tcount))
f.write(' Tlist = {0!r},\n'.format(self.Tlist))
f.write(' Pmin = {0!r},\n'.format(self.Pmin))
f.write(' Pmax = {0!r},\n'.format(self.Pmax))
f.write(' Pcount = {0:d},\n'.format(self.Pcount))
f.write(' Plist = {0!r},\n'.format(self.Plist))
if self.maximumGrainSize is not None:
f.write(' maximumGrainSize = {0!r},\n'.format(self.maximumGrainSize))
if self.minimumGrainCount != 0:
f.write(' minimumGrainCount = {0:d},\n'.format(self.minimumGrainCount))
f.write(' method = {0!r},\n'.format(self.method))
if self.interpolationModel is not None:
f.write(' interpolationModel = {0!r},\n'.format(self.interpolationModel))
f.write(' activeKRotor = {0!r},\n'.format(self.activeKRotor))
f.write(' activeJRotor = {0!r},\n'.format(self.activeJRotor))
if self.rmgmode:
f.write(' rmgmode = {0!r},\n'.format(self.rmgmode))
f.write(')\n\n')
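# A standalone sketch (hypothetical helper) of the Gauss-Chebyshev node
# placement used by generateTemperatureList and generatePressureList above:
# nodes are Chebyshev points on a transformed domain (inverse temperature
# for T, log10 for P), then mapped back to physical units.
def _chebyshev_grid(xmin, xmax, count, transform, inverse):
    nodes = []
    for i in range(count):
        x = -math.cos((2 * i + 1) * math.pi / (2 * count))
        u = 0.5 * ((transform(xmax) - transform(xmin)) * x
                   + transform(xmax) + transform(xmin))
        nodes.append(inverse(u))
    return nodes
# Temperatures, linear in 1/T:
#     _chebyshev_grid(300.0, 2000.0, 7, lambda T: 1.0 / T, lambda u: 1.0 / u)
# Pressures, linear in log10(P):
#     _chebyshev_grid(1e3, 1e7, 5, math.log10, lambda u: 10.0 ** u)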
|
texastribune/tx_salaries
|
refs/heads/master
|
tx_salaries/utils/transformers/university_of_north_texas_system.py
|
1
|
from . import base
from . import mixins
from datetime import date
from .. import cleaver
class TransformedRecord(
mixins.GenericCompensationMixin, mixins.GenericDepartmentMixin,
mixins.GenericIdentifierMixin, mixins.GenericJobTitleMixin,
mixins.GenericPersonMixin, mixins.MembershipMixin,
mixins.OrganizationMixin, mixins.PostMixin, mixins.RaceMixin,
mixins.LinkMixin, base.BaseTransformedRecord):
MAP = {
'full_name': 'NAME',
# 'suffix': '', if needed
'department': 'DEPTNAME',
'job_title': 'JOBTITLE',
'hire_date': 'HIRE_DATE',
'compensation': 'TOTAL SALARY',
'gender': 'GENDER',
'race': 'ETHNICITY',
'status': 'FULL/PART TIME',
'organization_name': 'COMPANY',
}
# The order of the name fields to build a full name.
# If `full_name` is in MAP, you don't need this at all.
# NAME_FIELDS = ('first_name', 'last_name', )
    # The name of the organization. This WILL SHOW UP ON THE SITE, so double check it!
# ORGANIZATION_NAME = 'The University of North Texas '
organization_name_map = {
'DAL': 'University of North Texas at Dallas',
'HSC': 'University of North Texas Health Science Center',
'SYS': 'University of North Texas System',
'UNT': 'University of North Texas',
}
# What type of organization is this? This MUST match what we use on the site, double check against salaries.texastribune.org
# ORGANIZATION_CLASSIFICATION = 'University'
# ???
compensation_type = 'FT'
# How would you describe the compensation field? We try to respect how they use their system.
description = 'Total yearly salary'
# When did you receive the data? NOT when we added it to the site.
DATE_PROVIDED = date(2016, 6, 21)
# The URL to find the raw data in our S3 bucket.
URL = ('http://raw.texastribune.org.s3.amazonaws.com/'
'university_north_texas_system/salaries/'
'2016-06/unt_system.xlsx')
# How do they track gender? We need to map what they use to `F` and `M`.
gender_map = {'F': 'F', 'M': 'M'}
# This is how the loader checks for valid people. Defaults to checking to see if `last_name` is empty.
@property
def is_valid(self):
# Adjust to return False on invalid fields. For example:
return self.full_name.strip() != ''
@property
def organization_name(self):
return self.organization_name_map[self.get_mapped_value(
'organization_name').strip()]
@property
def organization(self):
classification = ('University Hospital'
if self.organization_name ==
'University of North Texas Health Science Center'
else 'University')
return {
'name': self.organization_name,
'children': self.department_as_child,
'classification': classification
}
@property
def person(self):
name = self.get_name()
r = {
'family_name': name.last,
'given_name': name.first,
# 'additional_name': name.middle,
'name': unicode(name),
'gender': self.gender_map[self.gender.strip()],
}
return r
@property
def compensation_type(self):
status = self.get_mapped_value('status')
if float(status) >= 1:
return 'FT'
return 'PT'
@property
def description(self):
status = self.get_mapped_value('status')
        if float(status) < 1:
return 'Total yearly salary, part-time'
else:
return 'Total yearly salary'
def get_raw_name(self):
split_name = self.full_name.split(',')
last_name = split_name[0]
split_firstname = split_name[1].split(' ')
first_name = split_firstname[0]
if len(split_firstname) == 2 and len(split_firstname[1]) == 1:
middle_name = split_firstname[1]
else:
first_name = split_name[1]
middle_name = ''
return u' '.join([first_name, middle_name, last_name])
transform = base.transform_factory(TransformedRecord)
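# Illustration only (hypothetical helper, not used by the loader) of the
# "LAST,FIRST M" handling in get_raw_name above. Note the quirk that a
# name with no single-letter middle initial joins an empty middle name,
# yielding a double space:
def _split_comma_name(full_name):
    last_name, rest = full_name.split(',', 1)
    parts = rest.split(' ')
    if len(parts) == 2 and len(parts[1]) == 1:
        return u' '.join([parts[0], parts[1], last_name])
    return u' '.join([rest, '', last_name])
# _split_comma_name('DOE,JANE A')     -> u'JANE A DOE'
# _split_comma_name('DOE,JANE MARIE') -> u'JANE MARIE  DOE'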
|
arbn/pysaml2
|
refs/heads/master
|
tests/test_63_ecp.py
|
3
|
from saml2.authn_context import INTERNETPROTOCOLPASSWORD
from saml2.httpbase import set_list2dict
from saml2.profile.ecp import RelayState
from saml2.profile.paos import Request
from saml2.server import Server
from saml2.samlp import Response
from saml2.samlp import STATUS_SUCCESS
from saml2.samlp import AuthnRequest
from saml2 import ecp_client
from saml2 import BINDING_SOAP
from saml2 import BINDING_PAOS
from saml2 import create_class_from_xml_string
from saml2.profile import ecp as ecp_prof
from saml2.client import Saml2Client
from pathutils import dotname, full_path
__author__ = 'rolandh'
AUTHN = {
"class_ref": INTERNETPROTOCOLPASSWORD,
"authn_auth": "http://www.example.com/login"
}
def _eq(l1, l2):
    if len(l1) == len(l2):
        return set(l1) == set(l2)
    # lists of different lengths can never contain the same elements
    return False
class DummyResponse(object):
def __init__(self, headers):
self.headers = headers
def test_complete_flow():
client = ecp_client.Client("user", "password",
metadata_file=full_path("idp_all.xml"))
sp = Saml2Client(config_file=dotname("servera_conf"))
idp = Server(config_file=dotname("idp_all_conf"))
IDP_ENTITY_ID = idp.config.entityid
#SP_ENTITY_ID = sp.config.entityid
# ------------ @Client -----------------------------
headers = client.add_paos_headers([])
assert len(headers) == 2
# ------------ @SP -----------------------------
response = DummyResponse(set_list2dict(headers))
assert sp.can_handle_ecp_response(response)
sid, message = sp.create_ecp_authn_request(IDP_ENTITY_ID, relay_state="XYZ")
# ------------ @Client -----------------------------
respdict = client.parse_soap_message(message)
cargs = client.parse_sp_ecp_response(respdict)
assert isinstance(respdict["body"], AuthnRequest)
assert len(respdict["header"]) == 2
item0 = respdict["header"][0]
assert isinstance(item0, Request) or isinstance(item0, RelayState)
destination = respdict["body"].destination
ht_args = client.apply_binding(BINDING_SOAP, respdict["body"], destination)
# Time to send to the IDP
# ----------- @IDP -------------------------------
req = idp.parse_authn_request(ht_args["data"], BINDING_SOAP)
assert isinstance(req.message, AuthnRequest)
# create Response and return in the SOAP response
sp_entity_id = req.sender()
    name_id = idp.ident.transient_nameid("id12", sp.config.entityid)
binding, destination = idp.pick_binding("assertion_consumer_service",
[BINDING_PAOS],
entity_id=sp_entity_id)
resp = idp.create_ecp_authn_request_response(
destination, {"eduPersonEntitlement": "Short stop",
"surName": "Jeter",
"givenName": "Derek",
"mail": "derek.jeter@nyy.mlb.com",
"title": "The man"
},
req.message.id, destination, sp_entity_id,
name_id=name_id, authn=AUTHN)
# ------------ @Client -----------------------------
    # The client got the response from the IDP; repackage it and send it to the SP
respdict = client.parse_soap_message(resp)
idp_response = respdict["body"]
assert isinstance(idp_response, Response)
assert len(respdict["header"]) == 1
_ecp_response = None
for item in respdict["header"]:
if item.c_tag == "Response" and item.c_namespace == ecp_prof.NAMESPACE:
_ecp_response = item
#_acs_url = _ecp_response.assertion_consumer_service_url
# done phase2 at the client
ht_args = client.use_soap(idp_response, cargs["rc_url"],
[cargs["relay_state"]])
print ht_args
# ------------ @SP -----------------------------
respdict = sp.unpack_soap_message(ht_args["data"])
# verify the relay_state
for header in respdict["header"]:
inst = create_class_from_xml_string(RelayState, header)
if isinstance(inst, RelayState):
assert inst.text == "XYZ"
# parse the response
resp = sp.parse_authn_request_response(respdict["body"], None, {sid: "/"})
print resp.response
assert resp.response.destination == "http://lingon.catalogix.se:8087/paos"
assert resp.response.status.status_code.value == STATUS_SUCCESS
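# The round trip above, in order: the client advertises PAOS support in its
# HTTP headers; the SP answers with an AuthnRequest wrapped in a SOAP
# envelope carrying an ECP RelayState header; the client relays the request
# to the IdP over SOAP; the IdP responds with an assertion bound for the
# SP's PAOS assertion consumer service; and the client forwards it, after
# which the SP restores the relay state and checks for STATUS_SUCCESS.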
|
vanhonit/xmario_center
|
refs/heads/master
|
softwarecenter/ui/gtk3/panes/installedpane.py
|
3
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2011 Canonical
#
# Authors:
# Michael Vogt
# Didier Roche
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import Gtk
import logging
import xapian
from gi.repository import GObject
from gettext import gettext as _
from gettext import ngettext
import platform
from softwarecenter.enums import (NonAppVisibility,
SortMethods)
from softwarecenter.utils import (
wait_for_apt_cache_ready, utf8, ExecutionTime)
from softwarecenter.db.categories import (CategoriesParser,
categories_sorted_by_name)
from softwarecenter.ui.gtk3.models.appstore2 import (
AppTreeStore, CategoryRowReference)
from softwarecenter.ui.gtk3.widgets.menubutton import MenuButton
from softwarecenter.ui.gtk3.widgets.oneconfviews import OneConfViews
from softwarecenter.ui.gtk3.widgets.spinner import SpinnerNotebook
from softwarecenter.ui.gtk3.views.appview import AppView
from softwarepane import SoftwarePane
from softwarecenter.backend.oneconfhandler import get_oneconf_handler
from softwarecenter.db.appfilter import AppFilter
from softwarecenter.paths import APP_INSTALL_PATH
LOG = logging.getLogger(__name__)
def interrupt_build_and_wait(f):
""" decorator that ensures that a build of the categorised installed apps
is interrupted before a new build commences.
expects self._build_in_progress and self._halt_build as properties
"""
def wrapper(*args, **kwargs):
self = args[0]
if self._build_in_progress:
LOG.debug('Waiting for build to exit...')
self._halt_build = True
GObject.timeout_add(200, lambda: wrapper(*args, **kwargs))
return False
# ready now
self._halt_build = False
f(*args, **kwargs)
return False
return wrapper
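# Shape of the decorator above in isolation, with a hypothetical host
# class: the wrapped rebuild re-schedules itself via GObject.timeout_add
# until the in-flight build observes self._halt_build and exits, so only
# the two flags are required:
#
#     class Host(object):
#         _build_in_progress = False
#         _halt_build = False
#
#         @interrupt_build_and_wait
#         def rebuild(self):
#             ...  # long-running loop that checks self._halt_build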
class InstalledPane(SoftwarePane, CategoriesParser):
"""Widget that represents the installed panel in software-center
It contains a search entry and navigation buttons
"""
class Pages():
# page names, useful for debugging
NAMES = ('list', 'details')
# the actual page id's
(LIST,
DETAILS) = range(2)
# the default page
HOME = LIST
# pages for the installed view spinner notebook
(PAGE_SPINNER,
PAGE_INSTALLED) = range(2)
__gsignals__ = {'installed-pane-created': (GObject.SignalFlags.RUN_FIRST,
None,
())}
def __init__(self, cache, db, distro, icons, datadir):
# parent
SoftwarePane.__init__(self, cache, db, distro, icons, datadir,
show_ratings=False)
CategoriesParser.__init__(self, db)
self.current_appview_selection = None
self.icons = icons
self.loaded = False
self.pane_name = _("Installed Software")
self.installed_apps = 0
# None is local
self.current_hostid = None
self.current_hostname = None
self.oneconf_additional_pkg = set()
self.oneconf_missing_pkg = set()
# switches to terminate build in progress
self._build_in_progress = False
self._halt_build = False
self.nonapps_visible = NonAppVisibility.NEVER_VISIBLE
self.visible_docids = None
self.visible_cats = {}
self.installed_spinner_notebook = None
def init_view(self):
if self.view_initialized:
return
SoftwarePane.init_view(self)
# show a busy cursor and display the main spinner while we build the
# view
window = self.get_window()
if window:
window.set_cursor(self.busy_cursor)
self.show_appview_spinner()
self.oneconf_viewpickler = OneConfViews(self.icons)
self.oneconf_viewpickler.register_computer(None,
_("This computer (%s)") % platform.node())
self.oneconf_viewpickler.select_first()
self.oneconf_viewpickler.connect('computer-changed',
self._selected_computer_changed)
self.oneconf_viewpickler.connect('current-inventory-refreshed',
self._current_inventory_need_refresh)
# Start OneConf
self.oneconf_handler = get_oneconf_handler(self.oneconf_viewpickler)
if self.oneconf_handler:
self.oneconf_handler.connect('show-oneconf-changed',
self._show_oneconf_changed)
self.oneconf_handler.connect('last-time-sync-changed',
self._last_time_sync_oneconf_changed)
# OneConf pane
self.computerpane = Gtk.Paned.new(Gtk.Orientation.HORIZONTAL)
self.oneconfcontrol = Gtk.Box()
self.oneconfcontrol.set_orientation(Gtk.Orientation.VERTICAL)
self.computerpane.pack1(self.oneconfcontrol, False, False)
# size negotiation takes everything for the first one
self.oneconfcontrol.set_property('width-request', 200)
self.box_app_list.pack_start(self.computerpane, True, True, 0)
scroll = Gtk.ScrolledWindow()
scroll.set_shadow_type(Gtk.ShadowType.IN)
scroll.add(self.oneconf_viewpickler)
self.oneconfcontrol.pack_start(scroll, True, True, 0)
oneconftoolbar = Gtk.Box()
oneconftoolbar.set_orientation(Gtk.Orientation.HORIZONTAL)
oneconfpropertymenu = Gtk.Menu()
self.oneconfproperty = MenuButton(oneconfpropertymenu,
Gtk.Image.new_from_stock(Gtk.STOCK_PROPERTIES,
Gtk.IconSize.BUTTON))
self.stopsync_label = _(u"Stop Syncing “%s”")
stop_oneconf_share_menuitem = Gtk.MenuItem(
label=self.stopsync_label % platform.node())
stop_oneconf_share_menuitem.connect("activate",
self._on_stop_oneconf_hostshare_clicked)
stop_oneconf_share_menuitem.show()
oneconfpropertymenu.append(stop_oneconf_share_menuitem)
self.oneconfcontrol.pack_start(oneconftoolbar, False, False, 1)
self.oneconf_last_sync = Gtk.Label()
self.oneconf_last_sync.set_line_wrap(True)
oneconftoolbar.pack_start(self.oneconfproperty, False, False, 0)
oneconftoolbar.pack_start(self.oneconf_last_sync, True, True, 1)
self.notebook.append_page(self.box_app_list, Gtk.Label(label="list"))
# details
self.notebook.append_page(self.scroll_details,
Gtk.Label(label="details"))
# initial refresh
self.state.search_term = ""
# build models and filters
self.base_model = AppTreeStore(self.db, self.cache, self.icons)
self.treefilter = self.base_model.filter_new(None)
self.treefilter.set_visible_func(self._row_visibility_func,
AppTreeStore.COL_ROW_DATA)
self.app_view.set_model(self.treefilter)
self.app_view.tree_view.connect("row-collapsed",
self._on_row_collapsed)
self._all_cats = self.parse_applications_menu(APP_INSTALL_PATH)
self._all_cats = categories_sorted_by_name(self._all_cats)
# we do not support the search aid feature in the installedview
self.box_app_list.remove(self.search_aid)
# remove here
self.box_app_list.remove(self.app_view)
# create a local spinner notebook for the installed view
self.installed_spinner_notebook = SpinnerNotebook(self.app_view)
self.computerpane.pack2(self.installed_spinner_notebook, True, True)
self.show_installed_view_spinner()
self.show_all()
# initialize view to hide the oneconf computer selector
self.oneconf_viewpickler.select_first()
self.oneconfcontrol.hide()
# hacky, hide the header
self.app_view.header_hbox.hide()
self.hide_appview_spinner()
# keep track of the current view by tracking its origin
self.current_displayed_origin = None
# now we are initialized
self.emit("installed-pane-created")
self.view_initialized = True
return False
def show_installed_view_spinner(self):
""" display the local spinner for the installed view panel """
if self.installed_spinner_notebook:
self.installed_spinner_notebook.show_spinner()
def hide_installed_view_spinner(self):
""" hide the local spinner for the installed view panel """
if self.installed_spinner_notebook:
self.installed_spinner_notebook.hide_spinner()
def _selected_computer_changed(self, oneconf_pickler, hostid, hostname):
if self.current_hostid == hostid:
return
LOG.debug("Selected computer changed to %s (%s)" % (hostid, hostname))
self.current_hostid = hostid
self.current_hostname = hostname
menuitem = self.oneconfproperty.get_menu().get_children()[0]
if self.current_hostid:
diff = self.oneconf_handler.oneconf.diff(self.current_hostid, '')
self.oneconf_additional_pkg, self.oneconf_missing_pkg = diff
stopsync_hostname = self.current_hostname
# FIXME for P: oneconf views don't support search
if self.state.search_term:
self._search()
else:
stopsync_hostname = platform.node()
self.searchentry.show()
menuitem.set_label(self.stopsync_label %
stopsync_hostname.encode('utf-8'))
self.refresh_apps()
def _last_time_sync_oneconf_changed(self, oneconf_handler, msg):
LOG.debug("refresh latest sync date")
self.oneconf_last_sync.set_label(msg)
def _show_oneconf_changed(self, oneconf_handler, oneconf_inventory_shown):
LOG.debug('Share inventory status changed')
if oneconf_inventory_shown:
self.oneconfcontrol.show()
else:
self.oneconf_viewpickler.select_first()
self.oneconfcontrol.hide()
def _on_stop_oneconf_hostshare_clicked(self, widget):
LOG.debug("Stop sharing inventory for %s" % self.current_hostname)
self.oneconf_handler.sync_between_computers(False, self.current_hostid)
# stop sharing another host than the local one.
if self.current_hostid:
self.oneconf_viewpickler.remove_computer(self.current_hostid)
self.oneconf_viewpickler.select_first()
def _current_inventory_need_refresh(self, oneconfviews):
if self.current_hostid:
diff = self.oneconf_handler.oneconf.diff(self.current_hostid, '')
self.oneconf_additional_pkg, self.oneconf_missing_pkg = diff
self.refresh_apps()
def _on_row_collapsed(self, view, it, path):
pass
def _row_visibility_func(self, model, it, col):
row = model.get_value(it, col)
if self.visible_docids is None:
if isinstance(row, CategoryRowReference):
row.vis_count = row.pkg_count
return True
elif isinstance(row, CategoryRowReference):
return row.untranslated_name in self.visible_cats.keys()
elif row is None:
return False
return row.get_docid() in self.visible_docids
def _use_category(self, cat):
# System cat is large and slow to search, filter it in default mode
if ('carousel-only' in cat.flags or
((self.nonapps_visible == NonAppVisibility.NEVER_VISIBLE)
and cat.untranslated_name == 'System')):
return False
return True
# override its SoftwarePane._hide_nonapp_pkgs...
def _hide_nonapp_pkgs(self):
self.nonapps_visible = NonAppVisibility.NEVER_VISIBLE
self.refresh_apps()
return True
def _save_treeview_state(self):
# store the state
expanded_rows = []
self.app_view.tree_view.map_expanded_rows(
lambda view, path, data: expanded_rows.append(path.to_string()),
None)
va = self.app_view.tree_view_scroll.get_vadjustment()
if va:
vadj = va.get_value()
else:
vadj = 0
return expanded_rows, vadj
def _restore_treeview_state(self, state):
expanded_rows, vadj = state
for ind in expanded_rows:
path = Gtk.TreePath.new_from_string(ind)
self.app_view.tree_view.expand_row(path, False)
va = self.app_view.tree_view_scroll.get_vadjustment()
if va:
va.set_lower(vadj)
va.set_value(vadj)
#~ @interrupt_build_and_wait
def _build_categorised_installedview(self, keep_state=False):
LOG.debug('Rebuilding categorised installedview...')
# display the busy cursor and a local spinner while we build the view
window = self.get_window()
if window:
window.set_cursor(self.busy_cursor)
self.show_installed_view_spinner()
if keep_state:
treeview_state = self._save_treeview_state()
# disconnect the model to avoid e.g. updates of "cursor-changed"
# AppTreeView.expand_path while the model is in rebuild-flux
self.app_view.set_model(None)
model = self.base_model # base model not treefilter
model.clear()
def profiled_rebuild_categorised_view():
with ExecutionTime("rebuild_categorized_view"):
rebuild_categorised_view()
def rebuild_categorised_view():
self.cat_docid_map = {}
enq = self.enquirer
i = 0
while Gtk.events_pending():
Gtk.main_iteration()
xfilter = AppFilter(self.db, self.cache)
xfilter.set_installed_only(True)
for cat in self._all_cats:
# for each category do category query and append as a new
# node to tree_view
if not self._use_category(cat):
continue
query = self.get_query_for_cat(cat)
LOG.debug("xfilter.installed_only: %s" %
xfilter.installed_only)
enq.set_query(query,
sortmode=SortMethods.BY_ALPHABET,
nonapps_visible=self.nonapps_visible,
filter=xfilter,
nonblocking_load=False,
persistent_duplicate_filter=(i > 0))
L = len(enq.matches)
if L:
i += L
docs = enq.get_documents()
self.cat_docid_map[cat.untranslated_name] = \
set([doc.get_docid() for doc in docs])
model.set_category_documents(cat, docs)
while Gtk.events_pending():
Gtk.main_iteration()
# check for uncategorised pkgs
if self.state.channel:
self._run_channel_enquirer(persistent_duplicate_filter=(i > 0))
L = len(enq.matches)
if L:
# some foo for channels
# if no categorised results but in channel, then use
# the channel name for the category
channel_name = None
if not i and self.state.channel:
channel_name = self.state.channel.display_name
docs = enq.get_documents()
tag = channel_name or 'Uncategorized'
self.cat_docid_map[tag] = set(
[doc.get_docid() for doc in docs])
model.set_nocategory_documents(docs, untranslated_name=tag,
display_name=channel_name)
i += L
if i:
self.app_view.tree_view.set_cursor(Gtk.TreePath(),
None, False)
if i <= 10:
self.app_view.tree_view.expand_all()
# cache the installed app count
self.installed_count = i
self.app_view._append_appcount(self.installed_count,
mode=AppView.INSTALLED_MODE)
self.app_view.set_model(self.treefilter)
if keep_state:
self._restore_treeview_state(treeview_state)
# hide the local spinner
self.hide_installed_view_spinner()
if window:
window.set_cursor(None)
# reapply search if needed
if self.state.search_term:
self._do_search(self.state.search_term)
self.emit("app-list-changed", i)
return
GObject.idle_add(profiled_rebuild_categorised_view)
def _build_oneconfview(self, keep_state=False):
LOG.debug('Rebuilding oneconfview for %s...' % self.current_hostid)
# display the busy cursor and the local spinner while we build the view
window = self.get_window()
if window:
window.set_cursor(self.busy_cursor)
self.show_installed_view_spinner()
if keep_state:
treeview_state = self._save_treeview_state()
# disconnect the model to avoid e.g. updates of "cursor-changed"
# AppTreeView.expand_path while the model is in rebuild-flux
self.app_view.set_model(None)
model = self.base_model # base model not treefilter
model.clear()
def profiled_rebuild_oneconfview():
with ExecutionTime("rebuild_oneconfview"):
rebuild_oneconfview()
def rebuild_oneconfview():
# FIXME for P: hide the search entry
self.searchentry.hide()
self.cat_docid_map = {}
enq = self.enquirer
query = xapian.Query("")
if self.state.channel and self.state.channel.query:
query = xapian.Query(xapian.Query.OP_AND,
query,
self.state.channel.query)
i = 0
# First search: missing apps only
xfilter = AppFilter(self.db, self.cache)
xfilter.set_restricted_list(self.oneconf_additional_pkg)
xfilter.set_not_installed_only(True)
enq.set_query(query,
sortmode=SortMethods.BY_ALPHABET,
nonapps_visible=self.nonapps_visible,
filter=xfilter,
nonblocking_load=True, # we don't block this one for
# better oneconf responsiveness
persistent_duplicate_filter=(i > 0))
L = len(enq.matches)
if L:
cat_title = utf8(ngettext(
u'%(amount)s item on “%(machine)s” not on this computer',
u'%(amount)s items on “%(machine)s” not on this computer',
L)) % {'amount': L, 'machine': utf8(self.current_hostname)}
i += L
docs = enq.get_documents()
self.cat_docid_map["missingpkg"] = set(
[doc.get_docid() for doc in docs])
model.set_nocategory_documents(docs,
untranslated_name="additionalpkg", display_name=cat_title)
# Second search: additional apps
xfilter.set_restricted_list(self.oneconf_missing_pkg)
xfilter.set_not_installed_only(False)
xfilter.set_installed_only(True)
enq.set_query(query,
sortmode=SortMethods.BY_ALPHABET,
nonapps_visible=self.nonapps_visible,
filter=xfilter,
nonblocking_load=False,
persistent_duplicate_filter=(i > 0))
L = len(enq.matches)
if L:
cat_title = utf8(ngettext(
u'%(amount)s item on this computer not on “%(machine)s”',
                    u'%(amount)s items on this computer not on “%(machine)s”',
L)) % {'amount': L, 'machine': utf8(self.current_hostname)}
i += L
docs = enq.get_documents()
self.cat_docid_map["additionalpkg"] = set(
[doc.get_docid() for doc in docs])
model.set_nocategory_documents(docs,
untranslated_name="additionalpkg", display_name=cat_title)
if i:
self.app_view.tree_view.set_cursor(Gtk.TreePath(),
None, False)
if i <= 10:
self.app_view.tree_view.expand_all()
# cache the installed app count
self.installed_count = i
self.app_view._append_appcount(self.installed_count,
mode=AppView.DIFF_MODE)
self.app_view.set_model(self.treefilter)
if keep_state:
self._restore_treeview_state(treeview_state)
# hide the local spinner
self.hide_installed_view_spinner()
if window:
window.set_cursor(None)
self.emit("app-list-changed", i)
return
GObject.idle_add(profiled_rebuild_oneconfview)
def _check_expand(self):
it = self.treefilter.get_iter_first()
while it:
path = self.treefilter.get_path(it)
if self.state.search_term: # or path in self._user_expanded_paths:
self.app_view.tree_view.expand_row(path, False)
else:
self.app_view.tree_view.collapse_row(path)
it = self.treefilter.iter_next(it)
def _do_search(self, terms):
self.state.search_term = terms
xfilter = AppFilter(self.db, self.cache)
xfilter.set_installed_only(True)
self.enquirer.set_query(self.get_query(),
nonapps_visible=self.nonapps_visible,
filter=xfilter,
nonblocking_load=True)
self.visible_docids = self.enquirer.get_docids()
self.visible_cats = self._get_vis_cats(self.visible_docids)
self.treefilter.refilter()
self.app_view.tree_view.expand_all()
def _run_channel_enquirer(self, persistent_duplicate_filter=True):
xfilter = AppFilter(self.db, self.cache)
xfilter.set_installed_only(True)
if self.state.channel:
self.enquirer.set_query(
self.state.channel.query,
sortmode=SortMethods.BY_ALPHABET,
nonapps_visible=NonAppVisibility.MAYBE_VISIBLE,
filter=xfilter,
nonblocking_load=False,
persistent_duplicate_filter=persistent_duplicate_filter)
def _search(self, terms=None):
if not terms:
self.visible_docids = None
self.state.search_term = ""
self._clear_search()
self.treefilter.refilter()
self._check_expand()
# run channel enquirer to ensure that the channel specific
# info for show/hide nonapps is actually correct
self._run_channel_enquirer()
# trigger update of the show/hide
self.emit("app-list-changed", 0)
elif self.state.search_term != terms:
self._do_search(terms)
def get_query(self):
# search terms
return self.db.get_query_list_from_search_entry(
self.state.search_term)
def get_query_for_cat(self, cat):
LOG.debug("self.state.channel: %s" % self.state.channel)
if self.state.channel and self.state.channel.query:
query = xapian.Query(xapian.Query.OP_AND,
cat.query,
self.state.channel.query)
return query
return cat.query
@wait_for_apt_cache_ready
def refresh_apps(self, *args, **kwargs):
"""refresh the applist and update the navigation bar """
logging.debug("installedpane refresh_apps")
keep_state = kwargs.get("keep_state", False)
if self.current_hostid:
self._build_oneconfview(keep_state)
else:
self._build_categorised_installedview(keep_state)
def _clear_search(self):
# remove the details and clear the search
self.searchentry.clear_with_no_signal()
def on_search_terms_changed(self, searchentry, terms):
"""callback when the search entry widget changes"""
logging.debug("on_search_terms_changed: '%s'" % terms)
self._search(terms.strip())
self.state.search_term = terms
self.notebook.set_current_page(InstalledPane.Pages.LIST)
self.hide_installed_view_spinner()
def _get_vis_cats(self, visids):
vis_cats = {}
appcount = 0
visids = set(visids)
for cat_uname, docids in self.cat_docid_map.iteritems():
children = len(docids & visids)
if children:
appcount += children
vis_cats[cat_uname] = children
self.app_view._append_appcount(appcount, mode=AppView.DIFF_MODE)
return vis_cats
def _refresh_on_cache_or_db_change(self):
self.refresh_apps(keep_state=True)
self.app_details_view.refresh_app()
def on_db_reopen(self, db):
super(InstalledPane, self).on_db_reopen(db)
self._refresh_on_cache_or_db_change()
def on_cache_ready(self, cache):
LOG.debug("on_cache_ready")
self._refresh_on_cache_or_db_change()
def on_application_selected(self, appview, app):
"""callback when an app is selected"""
logging.debug("on_application_selected: '%s'" % app)
self.current_appview_selection = app
def get_callback_for_page(self, page, state):
if page == InstalledPane.Pages.LIST:
return self.display_overview_page
return self.display_details_page
def display_search(self):
model = self.app_view.get_model()
if model:
self.emit("app-list-changed", len(model))
self.searchentry.show()
@wait_for_apt_cache_ready
def display_overview_page(self, page, view_state):
LOG.debug("view_state: %s" % view_state)
if self.current_hostid:
# FIXME for P: oneconf views don't support search
            # this one ensures that even when switching between panes, we
# don't have the search item
if self.state.search_term:
self._search()
self._build_oneconfview()
elif (view_state and
view_state.channel and
              view_state.channel.origin != self.current_displayed_origin):
            # we don't need to refresh the full installed view every time it
            # is displayed, so we check whether we are viewing the same
            # channel and skip the refresh if so; note that the view *is*
            # refreshed whenever the contents change, and this is
            # sufficient (see LP: #828887)
self._build_categorised_installedview()
self.current_displayed_origin = view_state.channel.origin
if self.state.search_term:
self._search(self.state.search_term)
return True
def get_current_app(self):
"""return the current active application object applicable
to the context"""
return self.current_appview_selection
def is_category_view_showing(self):
# there is no category view in the installed pane
return False
def is_applist_view_showing(self):
"""Return True if we are in the applist view """
return (self.notebook.get_current_page() ==
InstalledPane.Pages.LIST)
def is_app_details_view_showing(self):
"""Return True if we are in the app_details view """
return self.notebook.get_current_page() == InstalledPane.Pages.DETAILS
def get_test_window():
from softwarecenter.testutils import (get_test_db,
get_test_datadir,
get_test_gtk3_viewmanager,
get_test_pkg_info,
get_test_gtk3_icon_cache,
)
# needed because available pane will try to get it
vm = get_test_gtk3_viewmanager()
vm # make pyflakes happy
db = get_test_db()
cache = get_test_pkg_info()
datadir = get_test_datadir()
icons = get_test_gtk3_icon_cache()
w = InstalledPane(cache, db, 'Ubuntu', icons, datadir)
w.show()
win = Gtk.Window()
win.set_data("pane", w)
win.add(w)
win.set_size_request(400, 600)
win.connect("destroy", lambda x: Gtk.main_quit())
# init the view
w.init_view()
from softwarecenter.backend.channel import AllInstalledChannel
from softwarecenter.ui.gtk3.panes.softwarepane import DisplayState
w.state.channel = AllInstalledChannel()
view_state = DisplayState()
view_state.channel = AllInstalledChannel()
w.display_overview_page(None, view_state)
win.show()
return win
if __name__ == "__main__":
win = get_test_window()
Gtk.main()
|
dsprenkels/servo
|
refs/heads/master
|
tests/wpt/harness/wptrunner/tests/__init__.py
|
621
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
|
dnozay/lettuce
|
refs/heads/master
|
tests/integration/lib/Django-1.2.5/django/utils/hashcompat.py
|
319
|
"""
The md5 and sha modules are deprecated since Python 2.5, replaced by the
hashlib module containing both hash algorithms. Here, we provide a common
interface to the md5 and sha constructors, depending on system version.
"""
import sys
if sys.version_info >= (2, 5):
import hashlib
md5_constructor = hashlib.md5
md5_hmac = md5_constructor
sha_constructor = hashlib.sha1
sha_hmac = sha_constructor
else:
import md5
md5_constructor = md5.new
md5_hmac = md5
import sha
sha_constructor = sha.new
sha_hmac = sha
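
# Illustrative sketch (not part of the original module): because both branches
# bind the same constructor names, calling code can hash without version
# checks.
def _demo_digests(data=b'payload'):
    # returns (md5 hexdigest, sha1 hexdigest) on either code path above
    return (md5_constructor(data).hexdigest(),
            sha_constructor(data).hexdigest())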
|
AndreyPopovNew/asuswrt-merlin-rt-n
|
refs/heads/master
|
release/src/router/libxml2/python/tests/xpathns.py
|
59
|
#!/usr/bin/python -u
#
import libxml2
expect=' xmlns:a="urn:whatevar"'
# Memory debug specific
libxml2.debugMemory(1)
d = libxml2.parseDoc("<a:a xmlns:a='urn:whatevar'/>")
res=""
for n in d.xpathEval("//namespace::*"):
res = res + n.serialize()
d.freeDoc()
if res != expect:
print "test5 failed: unexpected output"
print res
del res
del d
del n
# Memory debug specific
libxml2.cleanupParser()
if libxml2.debugMemory(1) == 0:
print "OK"
else:
print "Memory leak %d bytes" % (libxml2.debugMemory(1))
libxml2.dumpMemory()
|
m0gliE/fastcoin-8.7.4rc1
|
refs/heads/fastcoin-8.7.4rc1
|
contrib/wallettools/walletunlock.py
|
782
|
from jsonrpc import ServiceProxy
access = ServiceProxy("http://127.0.0.1:9332")
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
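# Note (not part of the original script): walletpassphrase unlocks the wallet
# for the given number of seconds (60 here) so later RPC calls can sign.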
|
pyg3t/pyg3t
|
refs/heads/master
|
pyg3t/gtcheckargs.py
|
1
|
from __future__ import print_function, unicode_literals
import sys
from optparse import OptionParser
from pyg3t.gtparse import parse
from pyg3t.util import NullDevice, pyg3tmain, get_encoded_output, regex
description = """Check translations of command-line options in po-files."""
def build_parser():
p = OptionParser(usage='%prog [OPTION] [FILE...]',
description=description)
p.add_option('--quiet', action='store_true',
help='suppress normal output; print only count')
p.add_option('--diagnostics', action='store_true',
help='print diagnostics')
#p.add_option('--longlines', action='store_true',
# help='check for lines longer than 79 characters')
return p
# e.g. HELLO
metavar = r'\w*'
# -p
single_short_noarg = r'-[a-zA-Z0-9\?]'
# -p or -p HELLO
single_short = r'(%s)( %s)?' % (single_short_noarg, metavar)
# -p, -q, -r
some_short = r'%s(, %s)*' % (single_short, single_short)
# --hello
single_long_noarg = r'--[a-z0-9][a-z0-9\-_]*'
# --hello or --hello=HELLO or --hello[=HELLO]
single_long = r'%s(=%s|\[=%s\])?' % (single_long_noarg, metavar, metavar)
# -p, -q, --hello, --hello-world=HELLO
any_short_some_long = r'(%s, )*(%s, )*%s' % (single_short, single_long,
single_long)
# White space first (perhaps), then business, then either more whitespace
# (before a description) *or* end of line
full_short = r'^\s*%s(\s+|$)' % some_short
full_long = r'^\s*%s(\s+|$)' % any_short_some_long
# matches all of the above
option_pattern = r'(%s)|(%s)' % (full_long, full_short)
METAVAR = regex(metavar)
OPTION = regex(option_pattern)
leading_whitespace = regex(r'^\s+')
separators = regex(r'^\s+|, |\b \b|=|\s+$')
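
# Illustrative sketch (assumption, not part of the original file): the
# composed OPTION pattern is meant to accept --help style definition lines
# and reject plain description text.
def _demo_option_pattern():
    assert OPTION.match('  -q, --quiet           suppress normal output')
    assert OPTION.match('  --diagnostics         print diagnostics')
    assert not OPTION.match('  a plain description line')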
class Option:
def __init__(self, match, group, lines):
self.match = match
self.group = group
self.lines = lines
self.firstindent = len(self.group)
        self.groups = [g for g in match.groups()[1:]
                       if g is not None]
if len(lines) > 1:
wmatch = leading_whitespace.match(lines[1])
if wmatch:
wspace = len(wmatch.group())
else:
wspace = 0
self.nextindent = wspace
else:
self.nextindent = None
class BadOption(ValueError):
pass
class OptionChecker:
def __init__(self, longlines=True, debugfile=None):
self.longlines = longlines
if debugfile is None:
debugfile = NullDevice()
self.debug = debugfile
def diagnostics(self, msgidline, text):
#n = msg.meta['lineno']
strlen = 25
msgidline = msgidline.lstrip()
if len(msgidline) > strlen:
msgidline = '%s...' % msgidline[:strlen - 2]
print('"%26s": %s' % (msgidline, text), file=self.debug)
def get_options(self, string):
# For each option we make a *group*. The group consists of
# all lines pertaining to that option (definition and description).
#
# One msgid may specify several options, so there'll be a list
# of groups.
lines = []
matches = []
groups = []
options = []
# Upper limit of description indentation level
indent_guess = 75
for line in string.split('\\n'):
if not line:
# Throw out empty lines at the end as well as
# descriptions that are not indented
continue
match = OPTION.match(line)
#print line
leadingspace_match = leading_whitespace.match(line)
if leadingspace_match:
leadingspace = len(leadingspace_match.group())
else:
leadingspace = 0
# use heuristic to avoid "false" options when a line in the
# description starts with dash
if match and leadingspace < indent_guess // 2:
self.diagnostics(line.lstrip(), 'Option found')
lines = []
group = match.group()
#print group
matches.append(match)
groups.append(group)
options.append(lines)
if len(group) + 1 < len(line): # i.e. if there IS a description
indent_guess = min(indent_guess, len(group))
else:
self.diagnostics(line.lstrip(), 'Not an option')
lines.append(line)
options1 = []
for match, group, lines in zip(matches, groups, options):
option = Option(match, group, lines)
options1.append(option)
return options1
def checkoptions(self, msg):
#msg = msg.decode()
if not msg.istranslated:
return
msgid_options = self.get_options(msg.msgid)
msgstr_options = self.get_options(msg.msgstr)
if len(msgid_options) == 0:
return
if len(msgid_options) != len(msgstr_options):
raise BadOption('Unequal number of options: %d vs %d'
% (len(msgid_options), len(msgstr_options)))
for opt1, opt2 in zip(msgid_options, msgstr_options):
tokens1 = separators.split(opt1.group)
tokens2 = separators.split(opt2.group)
if len(tokens1) != len(tokens2):
raise BadOption('Different number of options/vars or bad'
' separators')
for g1, g2 in zip(tokens1, tokens2):
if g1 == g2:
continue
if g1.isupper() and g2.isupper():
if opt1.group.count(g1) != opt2.group.count(g2):
raise BadOption('Metavar %s not matched in translation'
% g1)
continue
if g1.startswith('--') and g1 != g2:
raise BadOption('Long option %s not found in translation'
% g1)
elif g1.startswith('-') and len(g1) == 2 and g1 != g2:
raise BadOption('Short option %s not found in translation'
% g1)
if opt1.firstindent != opt2.firstindent:
# It's OK if there wasn't enough space
# Only complain if there's more than 2 chars of space
# Also, perhaps it is the English one which is squeezed
msgstr_bad = (len(opt2.group.rstrip()) + 2 < opt2.firstindent)
if msgstr_bad:
raise BadOption('Bad indentation of option line')
if self.longlines:
msgid_fits = all(len(line) < 80 for line in opt1.lines)
msgstr_fits = all(len(line) < 80 for line in opt2.lines)
if msgid_fits and not msgstr_fits:
raise BadOption('Lines longer than 80 characters')
# XXX check subsequent indent
print(('OK : Line %d. ' % msg.meta['lineno']).ljust(78, '-'),
file=self.debug)
@pyg3tmain(build_parser)
def main(parser):
opts, args = parser.parse_args()
errcount = 0
debug = None
out = get_encoded_output('utf8')
if opts.diagnostics:
debug = out
checker = OptionChecker(debugfile=debug)
if len(args) == 1:
template = 'Line {line}: {msg}'
else:
template = '{file} L{line}: {msg}'
for arg in args:
fd = open(arg, 'rb')
cat = parse(fd)
for msg in cat:
            # we ignore plurals. Who would write command-line
            # arguments with multiple plural versions?
try:
checker.checkoptions(msg)
except BadOption as e:
errcount += 1
if not opts.quiet:
string = template.format(file=arg,
line=msg.meta['lineno'],
msg=e.args[0])
if opts.diagnostics:
print(('ERR: %s '
% string).ljust(78, '-'), file=checker.debug)
else:
print(string, file=out)
print('-' * len(string), file=out)
print(msg.tostring(), file=out)
if errcount == 1:
print('Found 1 error.', file=out)
else:
print('Found %d errors.' % errcount, file=out)
exitcode = int(errcount > 0)
sys.exit(exitcode)
|
tonysyu/mpltools
|
refs/heads/master
|
mpltools/widgets/slider.py
|
2
|
import matplotlib.widgets as mwidgets
class Slider(mwidgets.Slider):
"""Slider widget to select a value from a floating point range.
Parameters
----------
ax : :class:`~matplotlib.axes.Axes` instance
The parent axes for the widget
value_range : (float, float)
(min, max) value allowed for value.
label : str
The slider label.
value : float
Initial value. If None, set to value in middle of value range.
on_slide : function
Callback function for slide event. Function should expect slider value.
value_fmt : str
Format string for formatting the slider text.
slidermin, slidermax : float
        Used to constrain the value of this slider to the values
of other sliders.
dragging : bool
If True, slider is responsive to mouse.
pad : float
Padding (in axes coordinates) between `label`/`value_fmt` and slider.
Attributes
----------
value : float
Current slider value.
"""
def __init__(self, ax, value_range, label='', value=None, on_slide=None,
value_fmt='%1.2f', slidermin=None, slidermax=None,
dragging=True, pad=0.02):
mwidgets.AxesWidget.__init__(self, ax)
self.valmin, self.valmax = value_range
if value is None:
value = 0.5 * (self.valmin + self.valmax)
self.val = value
self.valinit = value
self.valfmt = value_fmt
y0 = 0.5
x_low = [self.valmin, value]
x_high = [value, self.valmax]
self.line_low, = ax.plot(x_low, [y0, y0], color='0.5', lw=2)
self.line_high, = ax.plot(x_high, [y0, y0], color='0.7', lw=2)
self.val_handle, = ax.plot(value, y0, 'o',
mec='0.4', mfc='0.6', markersize=8)
ax.set_xlim(value_range)
ax.set_navigate(False)
ax.set_axis_off()
self.connect_event('button_press_event', self._update)
self.connect_event('button_release_event', self._update)
if dragging:
self.connect_event('motion_notify_event', self._update)
self.label = ax.text(-pad, y0, label, transform=ax.transAxes,
verticalalignment='center',
horizontalalignment='right')
        self.show_value = value_fmt is not None
if self.show_value:
self.valtext = ax.text(1 + pad, y0, value_fmt % value,
transform=ax.transAxes,
verticalalignment='center',
horizontalalignment='left')
self.slidermin = slidermin
self.slidermax = slidermax
self.drag_active = False
self.cnt = 0
self.observers = {}
if on_slide is not None:
self.on_changed(on_slide)
# Attributes for matplotlib.widgets.Slider compatibility
self.closedmin = self.closedmax = True
@property
def value(self):
return self.val
@value.setter
def value(self, value):
self.val = value
self.line_low.set_xdata([self.valmin, value])
self.line_high.set_xdata([value, self.valmax])
self.val_handle.set_xdata([value])
if self.show_value:
self.valtext.set_text(self.valfmt % value)
def set_val(self, value):
"""Set value of slider."""
# Override matplotlib.widgets.Slider to update graphics objects.
self.value = value
if self.drawon:
self.ax.figure.canvas.draw()
if not self.eventson:
return
for cid, func in self.observers.items():
func(value)
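
# Illustrative usage (assumption, not part of the original file): observers
# registered via on_changed (or the on_slide argument) are called with the
# new value whenever set_val runs:
#
#     s = Slider(ax, (0.0, 1.0), label='gain', value=0.5,
#                on_slide=lambda v: print('gain ->', v))
#     s.set_val(0.25)   # updates the graphics and notifies the callback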
if __name__ == '__main__':
import numpy as np
import matplotlib.pyplot as plt
ax = plt.subplot2grid((10, 1), (0, 0), rowspan=8)
ax_slider = plt.subplot2grid((10, 1), (9, 0))
a0 = 5
x = np.arange(0.0, 1.0, 0.001)
y = np.sin(6 * np.pi * x)
line, = ax.plot(x, a0 * y, lw=2, color='red')
ax.axis([x.min(), x.max(), -10, 10])
def update(val):
amp = samp.value
line.set_ydata(amp * y)
samp = Slider(ax_slider, (0.1, 10.0), on_slide=update,
label='Amplitude:', value=a0)
plt.show()
|
atul-bhouraskar/django
|
refs/heads/master
|
django/conf/locale/mk/formats.py
|
504
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'd F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y'
SHORT_DATETIME_FORMAT = 'j.m.Y H:i'
FIRST_DAY_OF_WEEK = 1
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%d. %m. %Y', '%d. %m. %y', # '25. 10. 2006', '25. 10. 06'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
'%d.%m.%y %H:%M:%S', # '25.10.06 14:30:59'
'%d.%m.%y %H:%M:%S.%f', # '25.10.06 14:30:59.000200'
'%d.%m.%y %H:%M', # '25.10.06 14:30'
'%d.%m.%y', # '25.10.06'
'%d. %m. %Y %H:%M:%S', # '25. 10. 2006 14:30:59'
'%d. %m. %Y %H:%M:%S.%f', # '25. 10. 2006 14:30:59.000200'
'%d. %m. %Y %H:%M', # '25. 10. 2006 14:30'
'%d. %m. %Y', # '25. 10. 2006'
'%d. %m. %y %H:%M:%S', # '25. 10. 06 14:30:59'
'%d. %m. %y %H:%M:%S.%f', # '25. 10. 06 14:30:59.000200'
'%d. %m. %y %H:%M', # '25. 10. 06 14:30'
'%d. %m. %y', # '25. 10. 06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
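# Illustrative note (not part of the original file): with these settings,
# datetime(2006, 10, 25, 14, 30) renders via SHORT_DATETIME_FORMAT
# ('j.m.Y H:i') as '25.10.2006 14:30', and 1234567.89 is grouped as
# '1.234.567,89' when USE_THOUSAND_SEPARATOR is enabled.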
|
googleads/google-ads-python
|
refs/heads/master
|
google/ads/googleads/v6/services/services/user_data_service/transports/__init__.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import UserDataServiceTransport
from .grpc import UserDataServiceGrpcTransport
# Compile a registry of transports.
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[UserDataServiceTransport]]
_transport_registry["grpc"] = UserDataServiceGrpcTransport
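# Illustrative lookup (assumption, not part of the original file):
# _transport_registry["grpc"] resolves to UserDataServiceGrpcTransport.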
__all__ = (
"UserDataServiceTransport",
"UserDataServiceGrpcTransport",
)
|
1tush/sentry
|
refs/heads/master
|
src/sentry/migrations/0091_auto__add_alert.py
|
36
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Alert'
db.create_table('sentry_alert', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'])),
('group', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Group'], null=True)),
('datetime', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('message', self.gf('django.db.models.fields.TextField')()),
('data', self.gf('django.db.models.fields.TextField')(null=True)),
))
db.send_create_signal('sentry', ['Alert'])
def backwards(self, orm):
# Deleting model 'Alert'
db.delete_table('sentry_alert')
models = {
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.affecteduserbygroup': {
'Meta': {'unique_together': "(('project', 'tuser', 'group'),)", 'object_name': 'AffectedUserByGroup'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'tuser': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.TrackedUser']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'sentry.filterkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'FilterKey'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.filtervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'FilterValue'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'users_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupcountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'GroupCountByMinute', 'db_table': "'sentry_messagecountbyminute'"},
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouptag': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTag', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.messageindex': {
'Meta': {'unique_together': "(('column', 'value', 'object_id'),)", 'object_name': 'MessageIndex'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']", 'null': 'True'})
},
'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'token_set'", 'to': "orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.TeamMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': "orm['sentry.User']"})
},
'sentry.trackeduser': {
'Meta': {'unique_together': "(('project', 'ident'),)", 'object_name': 'TrackedUser'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Group']", 'through': "orm['sentry.AffectedUserByGroup']", 'symmetrical': 'False'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'num_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
}
}
complete_apps = ['sentry']
|
QijunPan/ansible
|
refs/heads/devel
|
lib/ansible/modules/storage/netapp/netapp_e_host.py
|
45
|
#!/usr/bin/python
# (c) 2016, NetApp, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = """
---
module: netapp_e_host
short_description: manage eseries hosts
description:
- Create, update, remove hosts on NetApp E-series storage arrays
version_added: '2.2'
author: Kevin Hulquest (@hulquest)
options:
api_username:
required: true
description:
- The username to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_password:
required: true
description:
- The password to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_url:
required: true
description:
- The url to the SANtricity WebServices Proxy or embedded REST API.
example:
- https://prod-1.wahoo.acme.com/devmgr/v2
validate_certs:
required: false
default: true
description:
- Should https certificates be validated?
ssid:
description:
- the id of the storage array you wish to act against
required: True
name:
description:
      - If the host doesn't yet exist, the label to assign at creation time.
      - If the host already exists, this is what is used to identify the host when applying any desired changes.
required: True
host_type_index:
description:
- The index that maps to host type you wish to create. It is recommended to use the M(netapp_e_facts) module to gather this information. Alternatively you can use the WSP portal to retrieve the information.
required: True
ports:
description:
- a list of of dictionaries of host ports you wish to associate with the newly created host
required: False
group:
description:
- the group you want the host to be a member of
required: False
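  state:
    description:
      - Whether the specified host should exist or not.
    required: True
    choices: ['absent', 'present']
  force_port:
    description:
      - Allow ports that are already assigned to be re-assigned to this host.
    required: False
    default: False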
"""
EXAMPLES = """
    - name: Set Host Info
      netapp_e_host:
        ssid: "{{ ssid }}"
        state: present
        api_url: "{{ netapp_api_url }}"
        api_username: "{{ netapp_api_username }}"
        api_password: "{{ netapp_api_password }}"
        name: "{{ host_name }}"
        host_type_index: "{{ host_type_index }}"
"""
RETURN = """
msg:
description: Success message
returned: success
type: string
sample: The host has been created.
"""
import json
from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.urls import open_url
from ansible.module_utils.six.moves.urllib.error import HTTPError
HEADERS = {
"Content-Type": "application/json",
"Accept": "application/json",
}
def request(url, data=None, headers=None, method='GET', use_proxy=True,
force=False, last_mod_time=None, timeout=10, validate_certs=True,
url_username=None, url_password=None, http_agent=None, force_basic_auth=True, ignore_errors=False):
try:
r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy,
force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
url_username=url_username, url_password=url_password, http_agent=http_agent,
force_basic_auth=force_basic_auth)
except HTTPError:
err = get_exception()
r = err.fp
try:
raw_data = r.read()
if raw_data:
data = json.loads(raw_data)
else:
            data = None
except:
if ignore_errors:
pass
else:
raise Exception(raw_data)
resp_code = r.getcode()
if resp_code >= 400 and not ignore_errors:
raise Exception(resp_code, data)
else:
return resp_code, data
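# Illustrative call (assumption, mirroring how request() is used below):
#   rc, data = request(url + 'storage-systems/%s/hosts' % ssid, headers=HEADERS,
#                      url_username=user, url_password=pwd)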
class Host(object):
def __init__(self):
argument_spec = basic_auth_argument_spec()
argument_spec.update(dict(
api_username=dict(type='str', required=True),
api_password=dict(type='str', required=True, no_log=True),
api_url=dict(type='str', required=True),
ssid=dict(type='str', required=True),
state=dict(type='str', required=True, choices=['absent', 'present']),
group=dict(type='str', required=False),
ports=dict(type='list', required=False),
force_port=dict(type='bool', default=False),
name=dict(type='str', required=True),
host_type_index=dict(type='int', required=True)
))
self.module = AnsibleModule(argument_spec=argument_spec)
args = self.module.params
self.group = args['group']
self.ports = args['ports']
self.force_port = args['force_port']
self.name = args['name']
self.host_type_index = args['host_type_index']
self.state = args['state']
self.ssid = args['ssid']
self.url = args['api_url']
self.user = args['api_username']
self.pwd = args['api_password']
self.certs = args['validate_certs']
self.ports = args['ports']
self.post_body = dict()
if not self.url.endswith('/'):
self.url += '/'
@property
def valid_host_type(self):
try:
(rc, host_types) = request(self.url + 'storage-systems/%s/host-types' % self.ssid, url_password=self.pwd,
url_username=self.user, validate_certs=self.certs, headers=HEADERS)
except Exception:
err = get_exception()
self.module.fail_json(
msg="Failed to get host types. Array Id [%s]. Error [%s]." % (self.ssid, str(err)))
try:
match = filter(lambda host_type: host_type['index'] == self.host_type_index, host_types)[0]
return True
except IndexError:
self.module.fail_json(msg="There is no host type with index %s" % self.host_type_index)
@property
def hostports_available(self):
used_ids = list()
try:
(rc, self.available_ports) = request(self.url + 'storage-systems/%s/unassociated-host-ports' % self.ssid,
url_password=self.pwd, url_username=self.user,
validate_certs=self.certs,
headers=HEADERS)
except:
err = get_exception()
self.module.fail_json(
msg="Failed to get unassociated host ports. Array Id [%s]. Error [%s]." % (self.ssid, str(err)))
if len(self.available_ports) > 0 and len(self.ports) <= len(self.available_ports):
for port in self.ports:
for free_port in self.available_ports:
                    # make sure we haven't already used this port ID
if not free_port['id'] in used_ids:
# update the port arg to have an id attribute
used_ids.append(free_port['id'])
break
if len(used_ids) != len(self.ports) and not self.force_port:
self.module.fail_json(
msg="There are not enough free host ports with the specified port types to proceed")
else:
return True
else:
self.module.fail_json(msg="There are no host ports available OR there are not enough unassigned host ports")
@property
def group_id(self):
if self.group:
try:
(rc, all_groups) = request(self.url + 'storage-systems/%s/host-groups' % self.ssid,
url_password=self.pwd,
url_username=self.user, validate_certs=self.certs, headers=HEADERS)
except:
err = get_exception()
self.module.fail_json(
msg="Failed to get host groups. Array Id [%s]. Error [%s]." % (self.ssid, str(err)))
try:
group_obj = filter(lambda group: group['name'] == self.group, all_groups)[0]
return group_obj['id']
except IndexError:
self.module.fail_json(msg="No group with the name: %s exists" % self.group)
else:
# Return the value equivalent of no group
return "0000000000000000000000000000000000000000"
@property
def host_exists(self):
try:
(rc, all_hosts) = request(self.url + 'storage-systems/%s/hosts' % self.ssid, url_password=self.pwd,
url_username=self.user, validate_certs=self.certs, headers=HEADERS)
except:
err = get_exception()
self.module.fail_json(
msg="Failed to determine host existence. Array Id [%s]. Error [%s]." % (self.ssid, str(err)))
self.all_hosts = all_hosts
try: # Try to grab the host object
self.host_obj = filter(lambda host: host['label'] == self.name, all_hosts)[0]
return True
except IndexError:
# Host with the name passed in does not exist
return False
@property
def needs_update(self):
needs_update = False
self.force_port_update = False
if self.host_obj['clusterRef'] != self.group_id or \
self.host_obj['hostTypeIndex'] != self.host_type_index:
needs_update = True
if self.ports:
if not self.host_obj['ports']:
needs_update = True
for arg_port in self.ports:
# First a quick check to see if the port is mapped to a different host
if not self.port_on_diff_host(arg_port):
for obj_port in self.host_obj['ports']:
if arg_port['label'] == obj_port['label']:
# Confirmed that port arg passed in exists on the host
# port_id = self.get_port_id(obj_port['label'])
if arg_port['type'] != obj_port['portId']['ioInterfaceType']:
needs_update = True
if 'iscsiChapSecret' in arg_port:
# No way to know the current secret attr, so always return True just in case
needs_update = True
else:
# If the user wants the ports to be reassigned, do it
if self.force_port:
self.force_port_update = True
needs_update = True
else:
self.module.fail_json(
msg="The port you specified:\n%s\n is associated with a different host. Specify force_port as True or try a different port spec" % arg_port)
return needs_update
def port_on_diff_host(self, arg_port):
""" Checks to see if a passed in port arg is present on a different host """
for host in self.all_hosts:
# Only check 'other' hosts
            if host['label'] != self.name:
for port in host['ports']:
# Check if the port label is found in the port dict list of each host
if arg_port['label'] == port['label']:
self.other_host = host
return True
return False
def reassign_ports(self, apply=True):
if not self.post_body:
self.post_body = dict(
portsToUpdate=dict()
)
for port in self.ports:
if self.port_on_diff_host(port):
self.post_body['portsToUpdate'].update(dict(
portRef=self.other_host['hostPortRef'],
hostRef=self.host_obj['id'],
                    # Doesn't yet address port identifier or chap secret
))
if apply:
try:
(rc, self.host_obj) = request(
self.url + 'storage-systems/%s/hosts/%s' % (self.ssid, self.host_obj['id']),
url_username=self.user, url_password=self.pwd, headers=HEADERS,
validate_certs=self.certs, method='POST', data=json.dumps(self.post_body))
except:
err = get_exception()
self.module.fail_json(
msg="Failed to reassign host port. Host Id [%s]. Array Id [%s]. Error [%s]." % (
self.host_obj['id'], self.ssid, str(err)))
def update_host(self):
if self.ports:
if self.hostports_available:
if self.force_port_update is True:
self.reassign_ports(apply=False)
                    # Make sure that only ports that aren't being reassigned are passed into the ports attr
self.ports = [port for port in self.ports if not self.port_on_diff_host(port)]
self.post_body['ports'] = self.ports
if self.group:
self.post_body['groupId'] = self.group_id
self.post_body['hostType'] = dict(index=self.host_type_index)
try:
(rc, self.host_obj) = request(self.url + 'storage-systems/%s/hosts/%s' % (self.ssid, self.host_obj['id']),
url_username=self.user, url_password=self.pwd, headers=HEADERS,
validate_certs=self.certs, method='POST', data=json.dumps(self.post_body))
except:
err = get_exception()
self.module.fail_json(msg="Failed to update host. Array Id [%s]. Error [%s]." % (self.ssid, str(err)))
self.module.exit_json(changed=True, **self.host_obj)
def create_host(self):
post_body = dict(
name=self.name,
host_type=dict(index=self.host_type_index),
groupId=self.group_id,
ports=self.ports
)
if self.ports:
# Check that all supplied port args are valid
if self.hostports_available:
post_body.update(ports=self.ports)
elif not self.force_port:
self.module.fail_json(
msg="You supplied ports that are already in use. Supply force_port to True if you wish to reassign the ports")
if not self.host_exists:
try:
(rc, create_resp) = request(self.url + "storage-systems/%s/hosts" % self.ssid, method='POST',
url_username=self.user, url_password=self.pwd, validate_certs=self.certs,
data=json.dumps(post_body), headers=HEADERS)
except:
err = get_exception()
self.module.fail_json(
msg="Failed to create host. Array Id [%s]. Error [%s]." % (self.ssid, str(err)))
else:
self.module.exit_json(changed=False,
msg="Host already exists. Id [%s]. Host [%s]." % (self.ssid, self.name))
self.host_obj = create_resp
if self.ports and self.force_port:
self.reassign_ports()
self.module.exit_json(changed=True, **self.host_obj)
def remove_host(self):
try:
(rc, resp) = request(self.url + "storage-systems/%s/hosts/%s" % (self.ssid, self.host_obj['id']),
method='DELETE',
url_username=self.user, url_password=self.pwd, validate_certs=self.certs)
except:
err = get_exception()
self.module.fail_json(
msg="Failed to remote host. Host[%s]. Array Id [%s]. Error [%s]." % (self.host_obj['id'],
self.ssid,
str(err)))
def apply(self):
if self.state == 'present':
if self.host_exists:
if self.needs_update and self.valid_host_type:
self.update_host()
else:
self.module.exit_json(changed=False, msg="Host already present.", id=self.ssid, label=self.name)
elif self.valid_host_type:
self.create_host()
else:
if self.host_exists:
self.remove_host()
self.module.exit_json(changed=True, msg="Host removed.")
else:
self.module.exit_json(changed=False, msg="Host already absent.", id=self.ssid, label=self.name)
def main():
host = Host()
host.apply()
if __name__ == '__main__':
main()
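# --- Illustrative sketch (not part of the module above) ---
# The update path hinges on two checks: needs_update compares the host's
# group, type and ports against the requested state, and port_on_diff_host
# flags a requested port whose label is already owned by another host (in
# which case force_port must be set). A minimal, self-contained rendering
# of that conflict check, using hypothetical plain-dict hosts:
def _port_conflict_sketch():
    all_hosts = [
        {'label': 'db01', 'ports': [{'label': 'iqn.1998-01.com.example:a'}]},
        {'label': 'db02', 'ports': [{'label': 'iqn.1998-01.com.example:b'}]},
    ]
    requested = {'label': 'iqn.1998-01.com.example:b'}
    # the port conflicts if any *other* host (here, anything but db01)
    # already owns a port with the same label
    return any(requested['label'] == port['label']
               for host in all_hosts if host['label'] != 'db01'
               for port in host['ports'])  # -> True, so force_port is needed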
|
guildenstern70/pyfab
|
refs/heads/master
|
src/reportlab/graphics/widgets/table.py
|
32
|
#!/usr/bin/env python
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/widgets/grids.py
__version__=''' $Id$ '''
from reportlab.graphics.widgetbase import Widget
from reportlab.graphics.charts.textlabels import Label
from reportlab.graphics import shapes
from reportlab.lib import colors
from reportlab.lib.validators import *
from reportlab.lib.attrmap import *
from reportlab.graphics.shapes import Drawing
class TableWidget(Widget):
"""A two dimensions table of labels
"""
_attrMap = AttrMap(
x = AttrMapValue(isNumber, desc="x position of left edge of table"),
y = AttrMapValue(isNumber, desc="y position of bottom edge of table"),
width = AttrMapValue(isNumber, desc="table width"),
height = AttrMapValue(isNumber, desc="table height"),
borderStrokeColor = AttrMapValue(isColorOrNone, desc="table border color"),
fillColor = AttrMapValue(isColorOrNone, desc="table fill color"),
borderStrokeWidth = AttrMapValue(isNumber, desc="border line width"),
horizontalDividerStrokeColor = AttrMapValue(isColorOrNone, desc="table inner horizontal lines color"),
verticalDividerStrokeColor = AttrMapValue(isColorOrNone, desc="table inner vertical lines color"),
horizontalDividerStrokeWidth = AttrMapValue(isNumber, desc="table inner horizontal lines width"),
verticalDividerStrokeWidth = AttrMapValue(isNumber, desc="table inner vertical lines width"),
dividerDashArray = AttrMapValue(isListOfNumbersOrNone, desc='Dash array for dividerLines.'),
data = AttrMapValue(None, desc="a list of list of strings to be displayed in the cells"),
boxAnchor = AttrMapValue(isBoxAnchor, desc="location of the table anchoring point"),
fontName = AttrMapValue(isString, desc="text font in the table"),
fontSize = AttrMapValue(isNumber, desc="font size of the table"),
fontColor = AttrMapValue(isColorOrNone, desc="font color"),
alignment = AttrMapValue(OneOf("left", "right"), desc="Alignment of text within cells"),
        textAnchor = AttrMapValue(OneOf('start','middle','end','numeric'), desc="anchoring position of the text within cells"),
)
def __init__(self, x=10, y=10, **kw):
self.x = x
self.y = y
self.width = 200
self.height = 100
self.borderStrokeColor = colors.black
self.fillColor = None
self.borderStrokeWidth = 0.5
self.horizontalDividerStrokeColor = colors.black
self.verticalDividerStrokeColor = colors.black
self.horizontalDividerStrokeWidth = 0.5
self.verticalDividerStrokeWidth = 0.25
self.dividerDashArray = None
        self.data = [['North','South','East','West'],[100,110,120,130],['A','B','C','D']] # list of rows; each row is a list of cell values
self.boxAnchor = 'nw'
        self.fontName = 'Times-Roman' # match the shapes.String default
self.fontSize = 8
self.fontColor = colors.black
self.alignment = 'right'
self.textAnchor = 'start'
for k, v in kw.items():
            if k in self.__class__._attrMap:
setattr(self, k, v)
else:
                raise ValueError('invalid argument %r supplied for class %s' % (k, self.__class__))
def demo(self):
""" returns a sample of this widget with data
"""
d = Drawing(400, 200)
t = TableWidget()
d.add(t, name='table')
d.table.dividerDashArray = (1, 3, 2)
d.table.verticalDividerStrokeColor = None
d.table.borderStrokeWidth = 0
d.table.borderStrokeColor = colors.red
return d
def draw(self):
""" returns a group of shapes
"""
g = shapes.Group()
#overall border and fill
if self.borderStrokeColor or self.fillColor: # adds border and filling color
rect = shapes.Rect(self.x, self.y, self.width, self.height)
rect.fillColor = self.fillColor
rect.strokeColor = self.borderStrokeColor
rect.strokeWidth = self.borderStrokeWidth
g.add(rect)
#special case - for an empty table we want to avoid divide-by-zero
data = self.preProcessData(self.data)
        rows = len(data)
        cols = len(data[0])
#print "(rows,cols)=(%s, %s)"%(rows,cols)
row_step = self.height / float(rows)
col_step = self.width / float(cols)
#print "(row_step,col_step)=(%s, %s)"%(row_step,col_step)
# draw the grid
if self.horizontalDividerStrokeColor:
for i in range(rows): # make horizontal lines
x1 = self.x
x2 = self.x + self.width
y = self.y + row_step*i
#print 'line (%s, %s), (%s, %s)'%(x1, y, x2, y)
line = shapes.Line(x1, y, x2, y)
line.strokeDashArray = self.dividerDashArray
line.strokeWidth = self.horizontalDividerStrokeWidth
line.strokeColor = self.horizontalDividerStrokeColor
g.add(line)
if self.verticalDividerStrokeColor:
for i in range(cols): # make vertical lines
x = self.x+col_step*i
y1 = self.y
y2 = self.y + self.height
#print 'line (%s, %s), (%s, %s)'%(x, y1, x, y2)
line = shapes.Line(x, y1, x, y2)
line.strokeDashArray = self.dividerDashArray
line.strokeWidth = self.verticalDividerStrokeWidth
line.strokeColor = self.verticalDividerStrokeColor
g.add(line)
        # since we plot data from the bottom up, iterate the rows in reverse
        for (j, row) in enumerate(reversed(data)):
y = self.y + j*row_step + 0.5*row_step - 0.5 * self.fontSize
for (i, datum) in enumerate(row):
if datum:
x = self.x + i*col_step + 0.5*col_step
s = shapes.String(x, y, str(datum), textAnchor=self.textAnchor)
s.fontName = self.fontName
s.fontSize = self.fontSize
s.fillColor = self.fontColor
g.add(s)
return g
def preProcessData(self, data):
"""preprocess and return a new array with at least one row
and column (use a None) if needed, and all rows the same
length (adding Nones if needed)
"""
if not data:
return [[None]]
#make all rows have similar number of cells, append None when needed
max_row = max( [len(x) for x in data] )
for rowNo, row in enumerate(data):
if len(row) < max_row:
row.extend([None]*(max_row-len(row)))
return data
#test
if __name__ == '__main__':
d = TableWidget().demo()
import os
d.save(formats=['pdf'],outDir=os.getcwd(),fnRoot=None)
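# --- Illustrative usage sketch (assumes a working reportlab install) ---
# TableWidget is an ordinary Widget: place it in a Drawing, override any
# attribute declared in _attrMap, then render the Drawing. The file name
# and data below are hypothetical.
def _table_usage_sketch():
    from reportlab.graphics import renderPDF
    d = Drawing(400, 200)
    t = TableWidget(20, 20, data=[['City', 'Population'], ['Rome', 2870500]])
    t.fontSize = 10
    d.add(t, name='table')
    renderPDF.drawToFile(d, 'table_demo.pdf')  # writes a one-page PDF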
|
USGCRP/gcis-py-client
|
refs/heads/master
|
bin/states.py
|
1
|
from collections import OrderedDict
sync_metadata_tree = {
'noaa-led-state-summaries-2017': OrderedDict([
# ('alabama', [
# ('/metadata/figures/3819', 'al-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3817', 'al-observed-number-of-very-hot-days', '2a'),
# ('/metadata/figures/3818', 'al-observed-number-of-very-warm-nights', '2b'),
# ('/metadata/figures/3815', 'al-observed-annual-precipitation', '2c'),
# ('/metadata/figures/4036', 'al-total-hurricane-events-in-alabama-1900-2013', '2d'),
# ('/metadata/figures/3816', 'al-observed-number-of-extreme-precipitation-events', '3'),
# ('/metadata/figures/3820', 'al-projected-change-in-spring-precipitation', '4'),
# ('/metadata/figures/3821', 'al-past-and-projected-changes-in-global-sea-level', '5')
# ]),
# ('alaska', [
# ('', 'ak-observed-and-projected-temperature-change', '1'),
# ('', 'ak-observed-summer-temperature', '2'),
# ('', 'ak-observed-winter-temperature', '3a'),
# ('', 'ak-observed-number-of-warm-days', '3b'),
# ('', 'ak-observed-summer-precipitation', '3c'),
# ('', 'ak-observed-number-of-extreme-precipitation-events', '3d'),
# ('', 'ak-observed-number-of-very-cold-nights', '4'),
# ('', 'ak-observed-annual-precipitation', '5'),
# ('', 'march-and-september-arctic-sea-ice-extent', '6'),
# ('', 'ak-past-and-projected-changes-in-global-sea-level', '7'),
# ('', 'ak-projected-change-in-annual-precipitation', '8')
# ]),
# ('arizona', [
# ('/metadata/figures/3676', 'az-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3672', 'az-observed-number-of-extremely-hot-days', '2a'),
# ('/metadata/figures/3670', 'az-observed-maximum-summer-temperature', '2b-1'),
# ('/metadata/figures/4083', 'az-observed-minimum-summer-temperature', '2b-2'),
# ('/metadata/figures/3668', 'az-observed-annual-precipitation', '2c'),
# ('/metadata/figures/3673', 'az-observed-number-of-extreme-precipitation-events', '2d'),
# ('/metadata/figures/3674', 'az-observed-number-of-extremely-warm-nights', '3'),
# ('/metadata/figures/3675', 'az-observed-number-of-very-cold-nights', '4'),
# ('/metadata/figures/3671', 'az-observed-monsoon-season-precipitation', '5'),
# ('/metadata/figures/3903', 'arizona-palmer-drought-severity-index', '6'),
# ('/metadata/figures/3890', 'az-lake-mead-water-level-at-hoover-dam', '7'),
# ('/metadata/figures/3677', 'az-projected-change-in-spring-precipitation', '8')
# ]),
# ('arkansas', [
# ('/metadata/figures/3954', 'ar-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3840', 'ar-observed-number-of-extremely-hot-days', '2a'),
# ('/metadata/figures/3842', 'ar-observed-number-of-very-cold-nights', '2b'),
# ('/metadata/figures/3839', 'ar-observed-annual-precipitation', '2c'),
# ('/metadata/figures/3844', 'ar-observed-summer-precipitation', '2d'),
# ('/metadata/figures/3843', 'ar-observed-number-of-very-warm-nights', '3'),
# ('/metadata/figures/3845', 'ar-observed-summer-temperature', '4'),
# ('/metadata/figures/3841', 'ar-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/3846', 'ar-projected-change-in-winter-precipitation', '6')
# ]),
# ('california', [
# ('/metadata/figures/4086', 'ca-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/4090', 'ca-observed-number-of-extremely-hot-days', '2a'),
# ('/metadata/figures/4094', 'ca-observed-annual-precipitation', '2b'),
# ('/metadata/figures/4089', 'ca-observed-number-of-extreme-precipitation-events', '2c'),
# ('/metadata/figures/4095', 'ca-observed-winter-precipitation', '2d'),
# ('/metadata/figures/4092', 'ca-observed-number-of-very-warm-nights', '3'),
# # ('', 'ca-observed-number-of-cold-nights', '4'),
# # ('', 'april-1-snow-water-equivalent-at-donner-summit', '5'),
# ('/metadata/figures/4088', 'ca-storage-levels-in-the-shasta-dam-reservoir', '6'),
# ('/metadata/figures/4096', 'california-palmer-drought-severity-index', '7'),
# ('/metadata/figures/4093', 'ca-projected-change-in-winter-precipitation', '8'),
# ('/metadata/figures/4084', 'ca-past-and-projected-changes-in-global-sea-level', '9'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-for-la-jolla-and-san-francisco-ca', '10')
# ]),
# ('colorado', [
# ('/metadata/figures/3695', 'co-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3697', 'co-observed-spring-temperature', '2a'),
# ('/metadata/figures/3698', 'co-observed-summer-temperature', '2b'),
# ('/metadata/figures/3693', 'co-observed-number-of-very-hot-days', '3'),
# ('/metadata/figures/3691', 'co-observed-number-of-very-cold-nights', '4'),
# ('/metadata/figures/3694', 'co-observed-number-of-warm-nights', '5a'),
# ('/metadata/figures/3689', 'co-observed-annual-precipitation', '5b'),
# ('/metadata/figures/3690', 'co-observed-fall-precipitation', '5c-1'),
# ('/metadata/figures/3696', 'co-observed-spring-precipitation', '5c-2'),
# ('/metadata/figures/3692', 'co-observed-number-of-extreme-precipitation-events', '5d'),
# ('/metadata/figures/3893', 'co-end-of-season-snow-water-equivalent-depth-at-tower', '6'),
# ('/metadata/figures/3699', 'co-projected-change-in-winter-precipitation', '7'),
# ('/metadata/figures/3902', 'colorado-palmer-drought-severity-index', '8')
# ]),
# ('connecticut', [
# ('/metadata/figures/4060', 'ct-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/4082', 'ct-observed-number-of-very-hot-days', '2a'),
# ('/metadata/figures/3848', 'ct-observed-number-of-extreme-precipitation-events', '2b'),
# ('/metadata/figures/3847', 'ct-observed-annual-precipitation', '2c'),
# ('/metadata/figures/3852', 'ct-observed-summer-precipitation', '2d'),
# ('/metadata/figures/3851', 'ct-observed-number-of-warm-nights', '3'),
# ('/metadata/figures/3849', 'ct-observed-number-of-very-cold-nights', '4'),
# ('/metadata/figures/3853', 'ct-projected-change-in-spring-precipitation', '5'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-new-london-ct', '6'),
# ('/metadata/figures/3876', 'ct-past-and-projected-changes-in-global-sea-level', '7')
# ]),
# ('delaware', [
# ('/metadata/figures/3761', 'de-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3759', 'de-observed-number-of-very-hot-days', '2a'),
# ('/metadata/figures/3757', 'de-observed-number-of-days-below-freezing', '2b'),
# ('/metadata/figures/3756', 'de-observed-annual-precipitation', '2c'),
# ('/metadata/figures/3758', 'de-observed-number-of-extreme-precipitation-events', '2d'),
# ('/metadata/figures/3760', 'de-observed-number-of-very-warm-nights', '3'),
# # ('', 'de-change-in-annual-precipitation', '4'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-lewes-de', '5'),
# ('/metadata/figures/3830', 'de-past-and-projected-changes-in-global-sea-level', '6')
# ]),
# ('florida', [
# ('/metadata/figures/3767', 'fl-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3765', 'fl-observed-number-of-very-hot-days', '2a'),
# ('/metadata/figures/3951', 'fl-observed-number-of-nights-below-freezing', '2b'),
# ('/metadata/figures/3763', 'fl-observed-annual-precipitation', '2c'),
# # ('', 'total-hurricane-events-in-florida', '2d'),
# ('/metadata/figures/3766', 'fl-observed-number-of-very-warm-nights', '3'),
# # ('', 'average-annual-number-of-days-with-a-minimum-temperature-less-than-or-equal-to-32', '4'),
# ('/metadata/figures/3764', 'fl-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/3768', 'fl-projected-change-in-summer-precipitation', '6'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-key-west-fl', '7'),
# # ('', 'fl-past-and-projected-changes-in-global-sea-level', '8')
# ]),
# ('georgia', [
# ('/metadata/figures/3774', 'ga-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3771', 'ga-observed-number-of-extremely-hot-days', '2a'),
# ('/metadata/figures/3770', 'ga-observed-number-of-days-below-freezing', '2b'),
# ('/metadata/figures/3775', 'ga-observed-summer-precipitation', '2c'),
# ('/metadata/figures/3772', 'ga-observed-number-of-extreme-precipitation-events', '2d'),
# ('/metadata/figures/3773', 'ga-observed-number-of-very-warm-nights', '3'),
# ('/metadata/figures/3769', 'ga-observed-annual-precipitation', '4'),
# # ('', 'ga-projected-change-in-annual-precipitation', '5'),
# ('/metadata/figures/3825', 'ga-past-and-projected-changes-in-global-sea-level', '6'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-fort-pulaski-ga', '7')
# ]),
# ('hawaii', [
# # ('', 'hi-observed-and-projected-temperature-change', '1'),
# # ('', 'hi-observed-temperature-change', '2'),
# # ('', 'hi-observed-number-of-hot-days', '3'),
# # ('', 'hi-observed-number-of-very-warm-nights', '4'),
# # ('', 'time-series-of-hri-anomalies', '5'),
# # ('', 'hi-observed-precipitation-change', '6'),
# # ('', 'hi-observed-number-of-extreme-precipitation-events', '7'),
# # ('', 'hi-projected-change-in-annual-precipitation', '8'),
# ('/metadata/figures/4108', 'hi-past-and-projected-changes-in-global-sea-level', '9'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-honolulu-hi', '10')
# ]),
# ('idaho', [
# ('/metadata/figures/3645', 'id-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3647', 'id-observed-number-of-very-hot-days', '2'),
# ('/metadata/figures/3649', 'id-observed-number-of-warm-nights', '3'),
# ('/metadata/figures/3648', 'id-observed-number-of-very-cold-nights', '4'),
# ('/metadata/figures/3646', 'id-observed-annual-precipitation', '5'),
# ('/metadata/figures/3650', 'id-observed-number-of-extreme-precipitation-events', '6'),
# # ('', 'april-1-snow-water-equivalent-at-camp-creek-id', '7'),
# ('/metadata/figures/3651', 'id-projected-change-in-spring-precipitation', '8')
# # ('/metadata/figures/3906', 'end-of-season-snow-water-equivalent-depth-at-bear-mountain', '9') #Not used?
# ]),
# ('illinois', [
# ('/metadata/figures/3663', 'il-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3664', 'il-observed-spring-temperature', '2a'),
# ('/metadata/figures/3666', 'il-observed-summer-temperature', '2b'),
# ('/metadata/figures/3661', 'il-observed-number-of-very-hot-days', '3a'),
# ('/metadata/figures/3662', 'il-observed-number-of-very-warm-nights', '3b'),
# ('/metadata/figures/3660', 'il-observed-number-of-very-cold-nights', '3c'),
# ('/metadata/figures/3832', 'il-observed-annual-precipitation', '3d'),
# ('/metadata/figures/3946', 'il-observed-spring-precipitation', '4a'),
# ('/metadata/figures/3665', 'il-observed-summer-precipitation', '4b'),
# ('/metadata/figures/3659', 'il-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/3947', 'il-annual-lake-wide-average-water-levels-for-lake-michigan-huron', '6'),
# # ('', 'il-hours-of-heat-index-over-threshold-values', '7'),
# ('/metadata/figures/3667', 'il-projected-change-in-spring-precipitation', '8')
# ]),
# ('indiana', [
# ('/metadata/figures/3683', 'in-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3685', 'in-observed-spring-temperature', '2a-1'),
# ('/metadata/figures/3687', 'in-observed-summer-temperature', '2a-2'),
# ('/metadata/figures/3681', 'in-observed-number-of-very-hot-days', '2b'),
# ('/metadata/figures/3682', 'in-observed-number-of-very-warm-nights', '2c'),
# ('/metadata/figures/3684', 'in-observed-spring-precipitation', '2d-1'),
# ('/metadata/figures/3686', 'in-observed-summer-precipitation', '2d-2'),
# ('/metadata/figures/3680', 'in-observed-number-of-very-cold-nights', '3'),
# ('/metadata/figures/3678', 'in-observed-annual-precipitation', '4'),
# ('/metadata/figures/3679', 'in-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/4142', 'in-annual-lake-wide-average-water-levels-for-lake-michigan-huron', '6'),
# ('/metadata/figures/3688', 'in-projected-change-in-spring-precipitation', '7')
# ]),
# ('iowa', [
# ('/metadata/figures/3700', 'ia-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3706', 'ia-observed-summer-temperature', '2a'),
# ('/metadata/figures/3703', 'ia-observed-number-of-very-hot-days', '2b'),
# ('/metadata/figures/3704', 'ia-observed-number-of-very-warm-nights', '2c'),
# ('/metadata/figures/3702', 'ia-observed-number-of-very-cold-nights', '2d'),
# ('/metadata/figures/3705', 'ia-observed-spring-precipitation', '3a'),
# ('/metadata/figures/3814', 'ia-observed-summer-precipitation', '3b'),
# ('/metadata/figures/3708', 'ia-observed-annual-precipitation', '4'),
# ('/metadata/figures/3701', 'ia-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/3707', 'ia-projected-change-in-spring-precipitation', '6')
# ]),
# ('kansas', [
# ('/metadata/figures/3601', 'ks-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3607', 'ks-observed-spring-temperature', '2a'),
# ('/metadata/figures/3609', 'ks-observed-summer-temperature', '2b'),
# ('/metadata/figures/3604', 'ks-observed-number-of-very-warm-nights', '3a'),
# ('/metadata/figures/3603', 'ks-observed-number-of-extremely-hot-days', '3b'),
# ('/metadata/figures/3627', 'ks-observed-annual-precipitation', '3c'),
# ('/metadata/figures/3606', 'ks-observed-spring-precipitation', '3d-1'),
# ('/metadata/figures/3608', 'ks-observed-summer-precipitation', '3d-2'),
# ('/metadata/figures/3602', 'ks-observed-number-of-very-cold-nights', '4'),
# ('/metadata/figures/3605', 'ks-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/3904', 'kansas-palmer-drought-severity-index', '6'),
# ('/metadata/figures/3610', 'ks-projected-change-in-summer-precipitation', '7')
# ]),
# ('kentucky', [
# ('/metadata/figures/3782', 'ky-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3778', 'ky-observed-number-of-extremely-hot-days', '2a'),
# ('/metadata/figures/3785', 'ky-observed-winter-temperature', '2b-a'),
# ('/metadata/figures/3784', 'ky-observed-summer-temperature', '2b-b'),
# ('/metadata/figures/3777', 'ky-observed-annual-precipitation', '2c'),
# ('/metadata/figures/3783', 'ky-observed-summer-precipitation', '2d'),
# ('/metadata/figures/3780', 'ky-observed-number-of-very-cold-nights', '3'),
# ('/metadata/figures/3781', 'ky-observed-number-of-very-warm-nights', '4'),
# ('/metadata/figures/3779', 'ky-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/3786', 'ky-projected-change-in-spring-precipitation', '6')
# ]),
# ('louisiana', [
# ('/metadata/figures/3793', 'la-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3791', 'la-observed-number-of-very-hot-days', '2a'),
# ('/metadata/figures/3790', 'la-observed-number-of-days-below-freezing', '2b'),
# ('/metadata/figures/3794', 'la-observed-spring-precipitation', '2c-1'),
# ('/metadata/figures/3788', 'la-observed-fall-precipitation', '2c-2'),
# ('/metadata/figures/3789', 'la-observed-number-of-extreme-precipitation-events', '2d'),
# ('/metadata/figures/3792', 'la-observed-number-of-very-warm-nights', '3'),
# ('/metadata/figures/3787', 'la-observed-annual-precipitation', '4'),
# # ('', 'total-hurricane-events-in-louisiana', '5'),
# # ('', 'storm-surge-heights-in-louisiana', '6'),
# ('/metadata/figures/3795', 'la-projected-change-in-summer-precipitation', '7'),
# ('/metadata/figures/3833', 'la-past-and-projected-changes-in-global-sea-level', '8')
# ]),
# ('maine', [
# ('/metadata/figures/3961', 'me-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3969', 'me-observed-winter-temperature', '2a-1'),
# ('/metadata/figures/3968', 'me-observed-summer-temperature', '2a-2'),
# ('/metadata/figures/3966', 'me-observed-number-of-hot-days', '2b'),
# ('/metadata/figures/3964', 'me-observed-annual-precipitation', '2c'),
# # ('', 'me-change-in-precipitation-by-county', '2d'),
# ('/metadata/figures/3967', 'me-observed-number-of-very-cold-nights', '3'),
# ('/metadata/figures/3965', 'me-observed-number-of-extreme-precipitation-events', '4'),
# ('/metadata/figures/3962', 'me-projected-change-in-winter-precipitation', '5'),
# ('/metadata/figures/4112', 'me-past-and-projected-changes-in-global-sea-level', '6')
# ]),
# ('maryland', [
# ('/metadata/figures/3801', 'md-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3799', 'md-observed-number-of-very-hot-days', '2a'),
# ('/metadata/figures/3800', 'md-observed-number-of-very-warm-nights', '2b'),
# ('/metadata/figures/3798', 'md-observed-number-of-very-cold-nights', '2c'),
# ('/metadata/figures/3796', 'md-observed-annual-precipitation', '2d'),
# # ('', 'very-warm-and-extremely-hot-days-in-washington-dc', '3'),
# ('/metadata/figures/3797', 'md-observed-number-of-extreme-precipitation-events', '4'),
# # ('', 'md-projected-change-in-annual-precipitation', '5'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-baltimore-md', '6'),
# ('/metadata/figures/4059', 'md-past-and-projected-changes-in-global-sea-level', '7')
# ]),
# ('massachusetts', [
# ('/metadata/figures/4010', 'ma-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/4051', 'ma-observed-number-of-hot-days', '2a'),
# ('/metadata/figures/4054', 'ma-observed-number-of-extreme-precipitation-events', '2b'),
# ('/metadata/figures/4056', 'ma-observed-annual-precipitation', '2c'),
# ('/metadata/figures/4057', 'ma-observed-summer-precipitation', '2d'),
# ('/metadata/figures/4052', 'ma-observed-number-of-warm-nights', '3'),
# ('/metadata/figures/4055', 'ma-observed-number-of-very-cold-nights', '4'),
# ('/metadata/figures/4011', 'ma-projected-change-in-spring-precipitation', '5'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-boston-ma', '6'),
# ('/metadata/figures/4058', 'ma-past-and-projected-changes-in-global-sea-level', '7')
# ]),
# ('michigan', [
# ('/metadata/figures/3709', 'mi-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3712', 'mi-observed-number-of-hot-days', '2a'),
# ('/metadata/figures/3714', 'mi-observed-number-of-warm-nights', '2b'),
# ('/metadata/figures/3713', 'mi-observed-number-of-very-cold-nights', '2c'),
# ('/metadata/figures/3710', 'mi-observed-annual-precipitation', '2d'),
# ('/metadata/figures/3711', 'mi-observed-number-of-extreme-precipitation-events', '3'),
# ('/metadata/figures/3955', 'mi-annual-lake-wide-average-water-levels-for-lake-michigan-huron', '4'),
# ('/metadata/figures/3715', 'mi-projected-change-in-winter-precipitation', '5')
# ]),
# ('minnesota', [
# ('/metadata/figures/3611', 'mn-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3613', 'mn-observed-number-of-hot-days', '2a'),
# ('/metadata/figures/3615', 'mn-observed-number-of-very-warm-nights', '2b'),
# ('/metadata/figures/3952', 'mn-observed-number-of-very-cold-days', '2c'),
# ('/metadata/figures/3616', 'mn-observed-number-of-extreme-precipitation-events', '2d'),
# # ('', 'ice-out-dates-on-lake-osakis', '3'),
# ('/metadata/figures/3612', 'mn-observed-annual-precipitation', '4'),#DUPE
# # ('/metadata/figures/3614', 'mn-observed-annual-precipitation', '4'),#DUPE
# # ('', 'number-of-extreme-heat-events-by-county', '5'),
# ('/metadata/figures/3617', 'mn-projected-change-in-spring-precipitation', '6')
# ]),
# ('mississippi', [
# ('/metadata/figures/3948', 'ms-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3856', 'ms-observed-number-of-extremely-hot-days', '2a'),
# ('/metadata/figures/3858', 'ms-observed-number-of-days-below-freezing', '2b'),
# ('/metadata/figures/3855', 'ms-observed-annual-precipitation', '2c'),
# ('/metadata/figures/3860', 'ms-observed-summer-precipitation', '2d'),
# ('/metadata/figures/3859', 'ms-observed-number-of-very-warm-nights', '3'),
# ('/metadata/figures/3861', 'ms-observed-summer-temperature', '4'),
# ('/metadata/figures/3857', 'ms-observed-number-of-extreme-precipitation-events', '5'),
# # ('', 'storm-surge-heights-at-pass-christian', '6'),
# ('/metadata/figures/3862', 'ms-projected-change-in-summer-precipitation', '7'),
# ('/metadata/figures/3877', 'ms-past-and-projected-changes-in-global-sea-level', '8')
# ]),
# ('missouri', [
# ('/metadata/figures/3810', 'mo-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3812', 'mo-observed-summer-temperature', '2a'),
# ('/metadata/figures/3804', 'mo-observed-maximum-summer-temperature', '2b'),
# ('/metadata/figures/3805', 'mo-observed-minimum-summer-temperature', '2c'),
# ('/metadata/figures/3806', 'mo-observed-number-of-extremely-hot-days', '3a'),
# ('/metadata/figures/3808', 'mo-observed-number-of-very-cold-nights', '3b'),
# ('/metadata/figures/3803', 'mo-observed-annual-precipitation', '3c'),
# ('/metadata/figures/3811', 'mo-observed-summer-precipitation', '3d'),
# ('/metadata/figures/3809', 'mo-observed-number-of-very-warm-nights', '4'),
# ('/metadata/figures/3807', 'mo-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/3813', 'mo-projected-change-in-spring-precipitation', '6')
# ]),
# ('montana', [
# ('/metadata/figures/3723', 'mt-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3726', 'mt-observed-number-of-very-hot-days', '2a'),
# ('/metadata/figures/3727', 'mt-observed-number-of-warm-nights', '2b'),
# ('/metadata/figures/3724', 'mt-observed-annual-precipitation', '2c'),
# ('/metadata/figures/3731', 'mt-observed-number-of-extreme-precipitation-events', '2d'),
# ('/metadata/figures/3725', 'mt-observed-number-of-very-cold-days', '3'),
# ('/metadata/figures/3729', 'mt-observed-winter-temperature', '4a'),
# ('/metadata/figures/3728', 'mt-observed-summer-temperature', '4b'),
# ('/metadata/figures/3907', 'end-of-season-snow-water-equivalent-depth-at-noisy-basin', '5'),
# ('/metadata/figures/3730', 'mt-projected-change-in-spring-precipitation', '6'),
# ]),
# ('nebraska', [
# ('/metadata/figures/4024', 'ne-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3870', 'ne-observed-winter-temperature', '2a'),
# ('/metadata/figures/3869', 'ne-observed-summer-temperature', '2b'),
# ('/metadata/figures/3864', 'ne-observed-number-of-extremely-hot-days', '3a'),
# ('/metadata/figures/3867', 'ne-observed-number-or-warm-nights', '3b'),
# ('/metadata/figures/3863', 'ne-observed-annual-precipitation', '3c'),
# ('/metadata/figures/3868', 'ne-observed-summer-precipitation', '3d'),
# ('/metadata/figures/3866', 'ne-observed-number-of-very-cold-nights', '4'),
# ('/metadata/figures/3865', 'ne-observed-number-of-extreme-precipitation-events', '5'),
# ('/metadata/figures/3871', 'ne-projected-change-in-winter-precipitation', '6'),
# ]),
# ('nevada', [
# ('/metadata/figures/3949', 'nv-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3873', 'nv-observed-number-of-very-hot-days', '2'),
# ('/metadata/figures/3874', 'nv-observed-number-of-warm-nights', '3'),
# ('/metadata/figures/3872', 'nv-observed-annual-precipitation', '4'),
# ('/metadata/figures/3905', 'nevada-palmer-drought-severity-index', '5'),
# ('/metadata/figures/3891', 'nv-lake-mead-elevation-at-hoover-dam', '6'),
# # ('', 'april-1-snow-water-equivalent-at-mt-rose-nv', '7'),
# # ('', 'lake-tahoe-water-levels', '8'),
# # ('', 'nv-projected-change-in-annual-precipitation', '9')
# # ('/metadata/figures/3892', 'nv-end-of-season-snow-water-equivalent-depth-at-big-creek-summit', '7'), #Not used?
# ]),
# ('new-hampshire', [
# ('/metadata/figures/4062', 'nh-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/4066', 'nh-observed-number-of-hot-days', '2a'),
# ('/metadata/figures/4069', 'nh-observed-number-of-warm-nights', '2b'),
# ('/metadata/figures/4064', 'nh-observed-annual-precipitation', '2c'),
# ('/metadata/figures/4067', 'nh-observed-summer-precipitation', '2d'),
# ('/metadata/figures/4068', 'nh-observed-number-of-very-cold-nights', '3'),
# ('/metadata/figures/4065', 'nh-observed-number-of-extreme-precipitation-events', '4'),
# ('/metadata/figures/4063', 'nh-projected-change-in-winter-precipitation', '5'),
# ('/metadata/figures/4078', 'nh-past-and-projected-changes-in-global-sea-level', '6')
# ]),
# ('new-jersey', [
# ('/metadata/figures/3976', 'nj-observed-and-projected-temperature-change', '1'),
# ('/metadata/figures/3974', 'nj-observed-number-of-very-hot-days', '2a'),
# ('/metadata/figures/3973', 'nj-observed-number-of-very-cold-nights', '2b'),
# ('/metadata/figures/3971', 'nj-observed-annual-precipitation', '2c'),
# ('/metadata/figures/3977', 'nj-observed-summer-precipitation', '2d'),
# ('/metadata/figures/3975', 'nj-observed-number-of-warm-nights', '3'),
# ('/metadata/figures/3972', 'nj-observed-number-of-extreme-precipitation-events', '4'),
# ('/metadata/figures/3978', 'nj-projected-change-in-spring-precipitation', '5'),
# # ('', 'observed-and-projected-annual-number-of-tidal-floods-for-atlantic-city-nj', '6'),
# ('/metadata/figures/3979', 'nj-past-and-projected-changes-in-global-sea-level', '7')
# ]),
('new-mexico', [
('/metadata/figures/3620', 'nm-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3622', 'nm-observed-number-of-extremely-hot-days', '2'),
('/metadata/figures/3624', 'nm-observed-number-of-warm-nights', '3'),
('/metadata/figures/3628', 'nm-observed-number-of-very-cold-nights', '4a'),
('/metadata/figures/3621', 'nm-observed-annual-precipitation', '4b'),
('/metadata/figures/3623', 'nm-observed-number-of-extreme-precipitation-events', '4c'),
('/metadata/figures/3619', 'nm-observed-monsoon-season-precipitation', '4d'),
('/metadata/figures/3618', 'nm-storage-levels-in-the-elephant-butte-reservoir', '5'),
('/metadata/figures/3626', 'nm-projected-change-in-spring-precipitation', '6'),
('/metadata/figures/3625', 'new-mexico-palmer-drought-severity-index', '7')
]),
('new-york', [
('/metadata/figures/3958', 'ny-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3886', 'ny-observed-winter-temperature', '2a-1'),
('/metadata/figures/3885', 'ny-observed-summer-temperature', '2a-2'),
('/metadata/figures/3883', 'ny-observed-number-of-very-hot-days', '2b'),
('/metadata/figures/3884', 'ny-observed-number-of-warm-nights', '2c'),
('/metadata/figures/3880', 'ny-observed-annual-precipitation', '2d'),
('/metadata/figures/3882', 'ny-observed-number-of-very-cold-nights', '3'),
# ('', 'annual-maximum-ice-cover-for-lake-erie-and-lake-ontario', '4'),
('/metadata/figures/3881', 'ny-observed-number-of-extreme-precipitation-events', '5'),
('/metadata/figures/3889', 'ny-past-and-projected-changes-in-global-sea-level', '6'),
# ('', 'observed-and-projected-annual-number-of-tidal-floods-for-battery-park-ny', '7'),
('/metadata/figures/3887', 'ny-projected-change-in-winter-precipitation', '8')
]),
('north-carolina', [
('/metadata/figures/3752', 'nc-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3754', 'nc-observed-winter-temperature', '2a'),
('/metadata/figures/3753', 'nc-observed-summer-temperature', '2b'),
('/metadata/figures/3750', 'nc-observed-number-of-very-hot-days', '3a'),
('/metadata/figures/3748', 'nc-observed-annual-precipitation', '3b'),
('/metadata/figures/3749', 'nc-observed-number-of-extreme-precipitation-events', '3c'),
# ('', 'total-hurricane-events-in-nc', '4'),
('/metadata/figures/3751', 'nc-observed-number-of-very-warm-nights', '4'),
# ('', 'nc-projected-change-in-annual-precipitation', '5'),
('/metadata/figures/3826', 'nc-past-and-projected-changes-in-global-sea-level', '6'),
# ('', 'observed-and-projected-annual-number-of-tidal-floods-for-wilmington-nc', '7')
]),
('north-dakota', [
('/metadata/figures/3652', 'nd-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3654', 'nd-observed-number-of-very-hot-days', '2'),
('/metadata/figures/3656', 'nd-observed-number-of-warm-nights', '3'),
('/metadata/figures/3655', 'nd-observed-number-of-very-cold-days', '4'),
('/metadata/figures/3653', 'nd-observed-annual-precipitation', '5'),
('/metadata/figures/3657', 'nd-observed-number-of-extreme-precipitation-events', '6'),
# ('', 'devils-lake-water-levels', '7'),
('/metadata/figures/3658', 'nd-projected-change-in-winter-precipitation', '8')
]),
('ohio', [
('/metadata/figures/3737', 'oh-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3735', 'oh-observed-number-of-very-hot-days', '2a'),
('/metadata/figures/3734', 'oh-observed-number-of-very-cold-nights', '2b'),
('/metadata/figures/3732', 'oh-observed-annual-precipitation', '2c'),
('/metadata/figures/3739', 'oh-observed-winter-precipitation', '2d-1'),
('/metadata/figures/3738', 'oh-observed-summer-precipitation', '2d-2'),
('/metadata/figures/3736', 'oh-observed-number-of-warm-nights', '3'),
('/metadata/figures/3733', 'oh-observed-number-of-extreme-precipitation-events', '4'),
('/metadata/figures/3740', 'oh-projected-change-in-spring-precipitation', '5')
]),
('oklahoma', [
('/metadata/figures/3959', 'ok-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3900', 'ok-observed-summer-temperature', '2'),
('/metadata/figures/3895', 'ok-observed-number-of-extremely-hot-days', '3a'),
('/metadata/figures/3897', 'ok-observed-number-of-extremely-warm-nights', '3b'),
('/metadata/figures/3894', 'ok-observed-annual-precipitation', '3c'),
('/metadata/figures/3899', 'ok-observed-summer-precipitation', '3d'),
('/metadata/figures/3898', 'ok-observed-number-of-very-cold-nights', '4'),
('/metadata/figures/3896', 'ok-observed-number-of-extreme-precipitation-events', '5'),
('/metadata/figures/3928', 'oklahoma-palmer-drought-severity-index', '6'),
('/metadata/figures/3901', 'ok-projected-change-in-summer-precipitation', '7')
]),
('oregon', [
('/metadata/figures/3746', 'or-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3743', 'or-observed-number-of-extremely-hot-days', '2'),
('/metadata/figures/3745', 'or-observed-number-of-very-cold-nights', '3'),
('/metadata/figures/3742', 'or-observed-number-of-days-below-freezing', '4a'),
('/metadata/figures/3828', 'or-observed-number-of-warm-nights', '4b'),
('/metadata/figures/3741', 'or-observed-annual-precipitation', '4c'),
('/metadata/figures/3744', 'or-observed-number-of-extreme-precipitation-events', '4d'),
# ('', 'april-1-snow-water-equivalent-at-tangent', '5'),
('/metadata/figures/3837', 'end-of-season-snow-water-equivalent-depth-at-mt-hood', '5'),
('/metadata/figures/3747', 'or-projected-change-in-winter-precipitation', '6'),
('/metadata/figures/3838', 'or-past-and-projected-changes-in-global-sea-level', '7')
]),
('pennsylvania', [
('/metadata/figures/3960', 'pa-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3931', 'pa-observed-number-of-hot-days', '2a'),
('/metadata/figures/3932', 'pa-observed-number-of-very-cold-nights', '2b'),
('/metadata/figures/3929', 'pa-observed-annual-precipitation', '2c'),
('/metadata/figures/3930', 'pa-observed-number-of-extreme-precipitation-events', '2d'),
('/metadata/figures/3933', 'pa-observed-number-of-warm-nights', '3'),
('/metadata/figures/3945', 'pa-past-and-projected-changes-in-global-sea-level', '4'),
# ('', 'observed-and-projected-annual-number-of-tidal-floods-for-philadelphia-pa', '5'),
('/metadata/figures/3935', 'pa-projected-change-in-winter-precipitation', '6')
]),
('rhode-island', [
('/metadata/figures/4070', 'ri-observed-and-projected-temperature-change', '1'),
('/metadata/figures/4073', 'ri-observed-number-of-hot-days', '2'),
('/metadata/figures/4075', 'ri-observed-number-of-warm-nights', '3a'),
('/metadata/figures/4072', 'ri-observed-number-of-extreme-precipitation-events', '3b'),
('/metadata/figures/4079', 'ri-observed-annual-precipitation', '3c'),
('/metadata/figures/4080', 'ri-observed-summer-precipitation', '3d'),
('/metadata/figures/4074', 'ri-observed-number-of-very-cold-nights', '4'),
# ('', 'ri-projected-change-in-annual-precipitation', '5'),
# ('', 'observed-and-projected-annual-number-of-tidal-floods-providence-ri', '6'),
('/metadata/figures/4076', 'ri-past-and-projected-changes-in-global-sea-level', '7')
]),
('south-carolina', [
('/metadata/figures/4027', 'sc-observed-and-projected-temperature-change', '1'),
('/metadata/figures/4032', 'sc-observed-number-of-extremely-hot-days', '2a'),
('/metadata/figures/4031', 'sc-observed-number-of-days-below-freezing', '2b'),
('/metadata/figures/4030', 'sc-observed-annual-precipitation', '2c'),
('/metadata/figures/4033', 'sc-observed-number-of-extreme-precipitation-events', '2d'),
('/metadata/figures/4034', 'sc-observed-number-of-very-warm-nights', '3'),
('/metadata/figures/4037', 'sc-past-and-projected-changes-in-global-sea-level', '4'),
# ('', 'sc-projected-change-in-annual-precipitation', '5'),
# ('', 'observed-and-projected-annual-number-of-tidal-floods-charleston-sc', '6')
]),
('south-dakota', [
('/metadata/figures/4013', 'sd-observed-and-projected-temperature-change', '1'),
('/metadata/figures/4023', 'sd-observed-winter-temperature', '2a'),
('/metadata/figures/4022', 'sd-observed-summer-temperature', '2b'),
('/metadata/figures/4017', 'sd-observed-number-of-extremely-hot-days', '3a'),
('/metadata/figures/4020', 'sd-observed-number-or-warm-nights', '3b'),
('/metadata/figures/4016', 'sd-observed-annual-precipitation', '3c'),
('/metadata/figures/4021', 'sd-observed-summer-precipitation', '3d'),
('/metadata/figures/4019', 'sd-observed-number-of-very-cold-days', '4'),
('/metadata/figures/4015', 'sd-annual-snowfall-totals-at-menno', '5'),
('/metadata/figures/4018', 'sd-observed-number-of-extreme-precipitation-events', '6'),
('/metadata/figures/4014', 'sd-projected-change-in-winter-precipitation', '7')
]),
('tennessee', [
('/metadata/figures/3638', 'tn-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3640', 'tn-observed-number-of-extremely-hot-days', '2'),
('/metadata/figures/3641', 'tn-observed-number-of-very-warm-nights', '3'),
('/metadata/figures/3639', 'tn-observed-annual-precipitation', '4'),
('/metadata/figures/3642', 'tn-observed-number-of-extreme-precipitation-events', '5'),
('/metadata/figures/3643', 'tn-observed-summer-precipitation', '6'),
('/metadata/figures/3644', 'tn-projected-change-in-spring-precipitation', '7')
]),
('texas', [
('/metadata/figures/4039', 'tx-observed-and-projected-temperature-change', '1'),
('/metadata/figures/4045', 'tx-observed-number-of-extremely-hot-days', '2'),
('/metadata/figures/4048', 'tx-observed-number-of-very-warm-nights', '3'),
('/metadata/figures/4047', 'tx-observed-number-of-days-below-freezing', '4a'),
('/metadata/figures/4044', 'tx-observed-annual-precipitation', '4b'),
('/metadata/figures/4046', 'tx-observed-number-of-extreme-precipitation-events', '4c'),
# ('', 'total-hurricane-events-in-texas', '4d'),
# ('', 'total-rainfall-amounts-in-may-2015-texas', '5'),
('/metadata/figures/4043', 'texas-palmer-drought-severity-index', '6'),
# ('', 'storm-surge-heights-at-galveston-bay', '7'),
# ('', 'tx-change-in-annual-precipitation', '8'),
('/metadata/figures/4042', 'tx-past-and-projected-changes-in-global-sea-level', '9')
]),
('utah', [
('/metadata/figures/3716', 'ut-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3718', 'ut-observed-number-of-extremely-hot-days', '2'),
('/metadata/figures/3721', 'ut-observed-number-of-very-warm-nights', '3'),
('/metadata/figures/3720', 'ut-observed-number-of-very-cold-nights', '4a'),
('/metadata/figures/3717', 'ut-observed-annual-precipitation', '4b'),
('/metadata/figures/3719', 'ut-observed-number-of-extreme-precipitation-events', '4c'),
('/metadata/figures/3908', 'ut-end-of-season-snow-water-equivalent-depth-at-ben-lomond-peak', '4d'),
('/metadata/figures/3909', 'utah-palmer-drought-severity-index', '5'),
('/metadata/figures/3927', 'ut-water-levels-in-the-great-salt-lake-at-saltair-boat-harbor', '6'),
('/metadata/figures/3722', 'ut-projected-change-in-winter-precipitation', '7'),
]),
('vermont', [
('/metadata/figures/3629', 'vt-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3632', 'vt-observed-number-of-hot-days', '2a'),
('/metadata/figures/3634', 'vt-observed-number-of-very-cold-nights', '2b'),
('/metadata/figures/3630', 'vt-observed-summer-temperature', '2c-a'),
('/metadata/figures/3631', 'vt-observed-winter-temperature', '2c-b'),
('/metadata/figures/3635', 'vt-observed-number-of-extreme-precipitation-events', '2d'),
('/metadata/figures/3633', 'vt-observed-number-of-warm-nights', '3'),
('/metadata/figures/3636', 'vt-observed-annual-precipitation', '4'),
('/metadata/figures/3637', 'vt-projected-change-in-winter-precipitation', '5')
]),
('virginia', [
('/metadata/figures/3980', 'va-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3985', 'va-observed-number-of-very-hot-days', '2a'),
('/metadata/figures/3986', 'va-observed-number-of-very-warm-nights', '2b'),
('/metadata/figures/3982', 'va-observed-annual-precipitation', '2c'),
('/metadata/figures/3987', 'va-observed-summer-precipitation', '2d'),
('/metadata/figures/3984', 'va-observed-number-of-very-cold-nights', '3'),
('/metadata/figures/3988', 'va-observed-summer-temperature', '4'),
('/metadata/figures/3983', 'va-observed-number-of-extreme-precipitation-events', '5'),
# ('', 'va-projected-change-in-annual-precipitation', '6'),
('/metadata/figures/3989', 'va-past-and-projected-changes-in-global-sea-level', '7')
]),
('washington', [
# ('/metadata/figures/3917', 'end-of-season-water-equivalent-depth-at-paradise', 'None'),
('/metadata/figures/4025', 'wa-observed-and-projected-temperature-change', '1'),
# ('', 'wa-observed-number-of-very-hot-days', '2'), #Deprecated
# ('', 'wa-observed-number-of-very-cold-nights', '3'), #Deprecated
# ('', 'wa-observed-number-of-nights-below-freezing', '4a'), #Deprecated
# ('', 'wa-observed-number-of-warm-nights', '4b'), #Deprecated
('/metadata/figures/3910', 'wa-observed-annual-precipitation', '2c'),
# ('', 'wa-observed-number-of-extreme-precipitation-events', '4d'), #Deprecated
('/metadata/figures/4026', 'wa-past-and-projected-changes-in-global-sea-level', '5'),
# ('', 'observed-and-projected-annual-number-of-tidal-floods-for-seattle-wa', '6'),
('/metadata/figures/3916', 'wa-projected-change-in-winter-precipitation', '7')
]),
('west-virginia', [
('/metadata/figures/3991', 'wv-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3996', 'wv-observed-number-of-very-hot-days', '2'),
('/metadata/figures/3997', 'wv-observed-number-of-warm-nights', '3'),
('/metadata/figures/3995', 'wv-observed-number-of-very-cold-nights', '4'),
('/metadata/figures/3993', 'wv-observed-annual-precipitation', '5'),
('/metadata/figures/3994', 'wv-observed-number-of-extreme-precipitation-events', '6'),
# ('', 'wv-projected-change-in-annual-precipitation', '7')
]),
('wisconsin', [
('/metadata/figures/3998', 'wi-observed-and-projected-temperature-change', '1'),
('/metadata/figures/4009', 'wi-observed-winter-temperature', '2a-1'),
('/metadata/figures/4007', 'wi-observed-summer-temperature', '2a-2'),
('/metadata/figures/4004', 'wi-observed-number-of-very-hot-days', '2b'),
('/metadata/figures/4005', 'wi-observed-number-of-warm-nights', '2c'),
('/metadata/figures/4008', 'wi-observed-winter-precipitation', '2d-1'),
('/metadata/figures/4006', 'wi-observed-summer-precipitation', '2d-2'),
('/metadata/figures/4003', 'wi-observed-number-of-very-cold-days', '3'),
('/metadata/figures/4000', 'wi-ice-cover-on-lake-mendota', '4'),
('/metadata/figures/4001', 'wi-observed-annual-precipitation', '5'),
('/metadata/figures/4002', 'wi-observed-number-of-extreme-precipitation-events', '6'),
('/metadata/figures/3956', 'wi-annual-lake-wide-average-water-levels-for-lake-michigan-huron', '7'),
('/metadata/figures/3999', 'wi-projected-change-in-spring-precipitation', '8')
]),
('wyoming', [
('/metadata/figures/3957', 'wy-observed-and-projected-temperature-change', '1'),
('/metadata/figures/3939', 'wy-observed-number-of-very-hot-days', '2'),
('/metadata/figures/3940', 'wy-observed-number-of-warm-nights', '3'),
('/metadata/figures/3942', 'wy-observed-winter-temperature', '4a-1'),
('/metadata/figures/3941', 'wy-observed-summer-temperature', '4a-2'),
('/metadata/figures/3938', 'wy-observed-number-of-very-cold-days', '4b'),
('/metadata/figures/3936', 'wy-observed-annual-precipitation', '4c'),
('/metadata/figures/3937', 'wy-observed-number-of-extreme-precipitation-events', '4d'),
('/metadata/figures/3926', 'end-of-season-snow-water-equivalent-depth-at-lewis-lake-divide', '5'),
('/metadata/figures/3925', 'wy-projected-change-in-spring-precipitation', '6')
])
])
}
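# --- Illustrative sketch (not part of the sync data) ---
# Each entry appears to map a report slug to states, and each state to
# (GCIS metadata URI, figure identifier, figure number) triples; the
# commented-out blocks look like states whose figures were not yet synced.
# A minimal walk over the active entries:
if __name__ == '__main__':
    for report, states in sync_metadata_tree.items():
        for state, figures in states.items():
            for uri, identifier, number in figures:
                print('%s / %s: figure %s -> %s (%s)'
                      % (report, state, number, identifier, uri))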
|
kenshay/ImageScript
|
refs/heads/master
|
ProgramData/SystemFiles/Python/Lib/site-packages/gitdb/test/test_example.py
|
6
|
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
#
# This module is part of GitDB and is released under
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
"""Module with examples from the tutorial section of the docs"""
import os
from gitdb.test.lib import TestBase
from gitdb import IStream
from gitdb.db import LooseObjectDB
from io import BytesIO
class TestExamples(TestBase):
def test_base(self):
ldb = LooseObjectDB(os.path.join(self.gitrepopath, 'objects'))
for sha1 in ldb.sha_iter():
oinfo = ldb.info(sha1)
ostream = ldb.stream(sha1)
assert oinfo[:3] == ostream[:3]
assert len(ostream.read()) == ostream.size
assert ldb.has_object(oinfo.binsha)
# END for each sha in database
# assure we close all files
try:
            del ostream
            del oinfo
except UnboundLocalError:
pass
# END ignore exception if there are no loose objects
data = "my data".encode("ascii")
istream = IStream("blob", len(data), BytesIO(data))
# the object does not yet have a sha
assert istream.binsha is None
ldb.store(istream)
# now the sha is set
assert len(istream.binsha) == 20
assert ldb.has_object(istream.binsha)
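# --- Illustrative sketch (not part of the test above) ---
# Reading a stored blob back mirrors the write path in test_base: once
# store() has filled in istream.binsha, stream() returns an object whose
# read() yields the original data.
def _read_back_sketch(ldb, binsha):
    ostream = ldb.stream(binsha)
    return ostream.read()  # b"my data" for the blob stored in test_base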
|
wenhuizhang/neutron
|
refs/heads/master
|
neutron/extensions/securitygroup.py
|
4
|
# Copyright (c) 2012 OpenStack Foundation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import netaddr
from oslo_config import cfg
from oslo_utils import uuidutils
import six
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import base
from neutron.common import constants as const
from neutron.common import exceptions as nexception
from neutron import manager
from neutron import quota
# Security group Exceptions
class SecurityGroupInvalidPortRange(nexception.InvalidInput):
message = _("For TCP/UDP protocols, port_range_min must be "
"<= port_range_max")
class SecurityGroupInvalidPortValue(nexception.InvalidInput):
message = _("Invalid value for port %(port)s")
class SecurityGroupInvalidIcmpValue(nexception.InvalidInput):
message = _("Invalid value for ICMP %(field)s (%(attr)s) "
"%(value)s. It must be 0 to 255.")
class SecurityGroupMissingIcmpType(nexception.InvalidInput):
message = _("ICMP code (port-range-max) %(value)s is provided"
" but ICMP type (port-range-min) is missing.")
class SecurityGroupInUse(nexception.InUse):
message = _("Security Group %(id)s %(reason)s.")
def __init__(self, **kwargs):
if 'reason' not in kwargs:
kwargs['reason'] = _("in use")
super(SecurityGroupInUse, self).__init__(**kwargs)
class SecurityGroupCannotRemoveDefault(nexception.InUse):
message = _("Insufficient rights for removing default security group.")
class SecurityGroupCannotUpdateDefault(nexception.InUse):
message = _("Updating default security group not allowed.")
class SecurityGroupDefaultAlreadyExists(nexception.InUse):
message = _("Default security group already exists.")
class SecurityGroupRuleInvalidProtocol(nexception.InvalidInput):
message = _("Security group rule protocol %(protocol)s not supported. "
"Only protocol values %(values)s and integer representations "
"[0 to 255] are supported.")
class SecurityGroupRulesNotSingleTenant(nexception.InvalidInput):
message = _("Multiple tenant_ids in bulk security group rule create"
" not allowed")
class SecurityGroupRemoteGroupAndRemoteIpPrefix(nexception.InvalidInput):
message = _("Only remote_ip_prefix or remote_group_id may "
"be provided.")
class SecurityGroupProtocolRequiredWithPorts(nexception.InvalidInput):
message = _("Must also specifiy protocol if port range is given.")
class SecurityGroupNotSingleGroupRules(nexception.InvalidInput):
message = _("Only allowed to update rules for "
"one security profile at a time")
class SecurityGroupNotFound(nexception.NotFound):
message = _("Security group %(id)s does not exist")
class SecurityGroupRuleNotFound(nexception.NotFound):
message = _("Security group rule %(id)s does not exist")
class DuplicateSecurityGroupRuleInPost(nexception.InUse):
message = _("Duplicate Security Group Rule in POST.")
class SecurityGroupRuleExists(nexception.InUse):
message = _("Security group rule already exists. Rule id is %(id)s.")
class SecurityGroupRuleInUse(nexception.InUse):
message = _("Security Group Rule %(id)s %(reason)s.")
def __init__(self, **kwargs):
if 'reason' not in kwargs:
kwargs['reason'] = _("in use")
super(SecurityGroupRuleInUse, self).__init__(**kwargs)
class SecurityGroupRuleParameterConflict(nexception.InvalidInput):
message = _("Conflicting value ethertype %(ethertype)s for CIDR %(cidr)s")
class SecurityGroupConflict(nexception.Conflict):
message = _("Error %(reason)s while attempting the operation.")
def convert_protocol(value):
if value is None:
return
try:
val = int(value)
        if 0 <= val <= 255:
# Set value of protocol number to string due to bug 1381379,
# PostgreSQL fails when it tries to compare integer with string,
# that exists in db.
return str(value)
raise SecurityGroupRuleInvalidProtocol(
protocol=value, values=sg_supported_protocols)
except (ValueError, TypeError):
if value.lower() in sg_supported_protocols:
return value.lower()
raise SecurityGroupRuleInvalidProtocol(
protocol=value, values=sg_supported_protocols)
except AttributeError:
raise SecurityGroupRuleInvalidProtocol(
protocol=value, values=sg_supported_protocols)
def convert_ethertype_to_case_insensitive(value):
if isinstance(value, six.string_types):
for ethertype in sg_supported_ethertypes:
if ethertype.lower() == value.lower():
return ethertype
def convert_validate_port_value(port):
if port is None:
return port
try:
val = int(port)
except (ValueError, TypeError):
raise SecurityGroupInvalidPortValue(port=port)
    if 0 <= val <= 65535:
return val
else:
raise SecurityGroupInvalidPortValue(port=port)
def convert_to_uuid_list_or_none(value_list):
if value_list is None:
return
for sg_id in value_list:
if not uuidutils.is_uuid_like(sg_id):
msg = _("'%s' is not an integer or uuid") % sg_id
raise nexception.InvalidInput(error_message=msg)
return value_list
def convert_ip_prefix_to_cidr(ip_prefix):
if not ip_prefix:
return
try:
cidr = netaddr.IPNetwork(ip_prefix)
return str(cidr)
except (ValueError, TypeError, netaddr.AddrFormatError):
raise nexception.InvalidCIDR(input=ip_prefix)
def _validate_name_not_default(data, valid_values=None):
if data.lower() == "default":
raise SecurityGroupDefaultAlreadyExists()
attr.validators['type:name_not_default'] = _validate_name_not_default
sg_supported_protocols = [None, const.PROTO_NAME_TCP, const.PROTO_NAME_UDP,
const.PROTO_NAME_ICMP, const.PROTO_NAME_ICMP_V6]
sg_supported_ethertypes = ['IPv4', 'IPv6']
# Attribute Map
RESOURCE_ATTRIBUTE_MAP = {
'security_groups': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'name': {'allow_post': True, 'allow_put': True,
'is_visible': True, 'default': '',
'validate': {'type:name_not_default': attr.NAME_MAX_LEN}},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': attr.DESCRIPTION_MAX_LEN},
'is_visible': True, 'default': ''},
'tenant_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'is_visible': True},
'security_group_rules': {'allow_post': False, 'allow_put': False,
'is_visible': True},
},
'security_group_rules': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'security_group_id': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'required_by_policy': True},
'remote_group_id': {'allow_post': True, 'allow_put': False,
'default': None, 'is_visible': True},
'direction': {'allow_post': True, 'allow_put': True,
'is_visible': True,
'validate': {'type:values': ['ingress', 'egress']}},
'protocol': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'default': None,
'convert_to': convert_protocol},
'port_range_min': {'allow_post': True, 'allow_put': False,
'convert_to': convert_validate_port_value,
'default': None, 'is_visible': True},
'port_range_max': {'allow_post': True, 'allow_put': False,
'convert_to': convert_validate_port_value,
'default': None, 'is_visible': True},
'ethertype': {'allow_post': True, 'allow_put': False,
'is_visible': True, 'default': 'IPv4',
'convert_to': convert_ethertype_to_case_insensitive,
'validate': {'type:values': sg_supported_ethertypes}},
'remote_ip_prefix': {'allow_post': True, 'allow_put': False,
'default': None, 'is_visible': True,
'convert_to': convert_ip_prefix_to_cidr},
'tenant_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'is_visible': True},
}
}
SECURITYGROUPS = 'security_groups'
EXTENDED_ATTRIBUTES_2_0 = {
'ports': {SECURITYGROUPS: {'allow_post': True,
'allow_put': True,
'is_visible': True,
'convert_to': convert_to_uuid_list_or_none,
'default': attr.ATTR_NOT_SPECIFIED}}}
security_group_quota_opts = [
cfg.IntOpt('quota_security_group',
default=10,
help=_('Number of security groups allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_security_group_rule',
default=100,
help=_('Number of security rules allowed per tenant. '
'A negative value means unlimited.')),
]
cfg.CONF.register_opts(security_group_quota_opts, 'QUOTAS')
class Securitygroup(extensions.ExtensionDescriptor):
"""Security group extension."""
@classmethod
def get_name(cls):
return "security-group"
@classmethod
def get_alias(cls):
return "security-group"
@classmethod
def get_description(cls):
return "The security groups extension."
@classmethod
def get_updated(cls):
return "2012-10-05T10:00:00-00:00"
@classmethod
def get_resources(cls):
"""Returns Ext Resources."""
my_plurals = [(key, key[:-1]) for key in RESOURCE_ATTRIBUTE_MAP.keys()]
attr.PLURALS.update(dict(my_plurals))
exts = []
plugin = manager.NeutronManager.get_plugin()
for resource_name in ['security_group', 'security_group_rule']:
collection_name = resource_name.replace('_', '-') + "s"
params = RESOURCE_ATTRIBUTE_MAP.get(resource_name + "s", dict())
quota.QUOTAS.register_resource_by_name(resource_name)
controller = base.create_resource(collection_name,
resource_name,
plugin, params, allow_bulk=True,
allow_pagination=True,
allow_sorting=True)
ex = extensions.ResourceExtension(collection_name,
controller,
attr_map=params)
exts.append(ex)
return exts
def get_extended_resources(self, version):
if version == "2.0":
return dict(list(EXTENDED_ATTRIBUTES_2_0.items()) +
list(RESOURCE_ATTRIBUTE_MAP.items()))
else:
return {}
@six.add_metaclass(abc.ABCMeta)
class SecurityGroupPluginBase(object):
@abc.abstractmethod
def create_security_group(self, context, security_group):
pass
@abc.abstractmethod
def update_security_group(self, context, id, security_group):
pass
@abc.abstractmethod
def delete_security_group(self, context, id):
pass
@abc.abstractmethod
def get_security_groups(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
pass
@abc.abstractmethod
def get_security_group(self, context, id, fields=None):
pass
@abc.abstractmethod
def create_security_group_rule(self, context, security_group_rule):
pass
@abc.abstractmethod
def delete_security_group_rule(self, context, id):
pass
@abc.abstractmethod
def get_security_group_rules(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
pass
@abc.abstractmethod
def get_security_group_rule(self, context, id, fields=None):
pass
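# --- Added illustrative note (not part of the upstream module) ---
# A hedged sketch of the conversions the helpers above perform; the values
# shown follow directly from the code:
#   convert_protocol(6)                      -> "6"   (kept as a string, bug 1381379)
#   convert_protocol("TCP")                  -> "tcp"
#   convert_validate_port_value("80")        -> 80
#   convert_ip_prefix_to_cidr("10.0.0.0/24") -> "10.0.0.0/24"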
|
ikaee/bfr-attendant
|
refs/heads/master
|
facerecognitionlibrary/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/classifier.py
|
4
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classifier class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import metrics as metrics_lib
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.session_bundle import exporter
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
def classification_signature_fn(examples, unused_features, predictions):
"""Creates classification signature from given examples and predictions.
Args:
examples: `Tensor`.
unused_features: `dict` of `Tensor`s.
predictions: `dict` of `Tensor`s.
Returns:
Tuple of default classification signature and empty named signatures.
"""
signature = exporter.classification_signature(
examples,
classes_tensor=predictions[Classifier.CLASS_OUTPUT],
scores_tensor=predictions[Classifier.PROBABILITY_OUTPUT])
return signature, {}
def _get_classifier_metrics(unused_n_classes):
return {
('accuracy', 'classes'): metrics_lib.streaming_accuracy
}
class Classifier(estimator.Estimator):
"""Classifier single output Estimator.
Given logits generating function, provides class / probabilities heads and
functions to work with them.
"""
CLASS_OUTPUT = 'classes'
PROBABILITY_OUTPUT = 'probabilities'
def __init__(self, model_fn, n_classes, model_dir=None, config=None,
params=None, feature_engineering_fn=None):
"""Constructor for Classifier.
Args:
      model_fn: (features, targets, mode) -> logits, loss, train_op
n_classes: Number of classes
      model_dir: Directory to save model parameters, graph, etc. This can
        also be used to load checkpoints from the directory into an estimator
        to continue training a previously saved model.
config: Configuration object (optional)
params: `dict` of hyper parameters that will be passed into `model_fn`.
feature_engineering_fn: Feature engineering function. Takes features and
targets which are the output of `input_fn` and
returns features and targets which will be fed
into the model.
"""
self._n_classes = n_classes
self._logits_fn = model_fn
if params:
model_fn = self._classifier_model_with_params
else:
model_fn = self._classifier_model
super(Classifier, self).__init__(
model_fn=model_fn, model_dir=model_dir, config=config, params=params,
feature_engineering_fn=feature_engineering_fn)
def evaluate(self,
x=None,
y=None,
input_fn=None,
feed_fn=None,
batch_size=None,
steps=None,
metrics=None,
name=None):
"""Evaluates given model with provided evaluation data.
See superclass Estimator for more details.
Args:
x: features.
y: targets.
input_fn: Input function.
feed_fn: Function creating a feed dict every time it is called.
batch_size: minibatch size to use on the input.
steps: Number of steps for which to evaluate model.
metrics: Dict of metric ops to run. If None, the default metrics are used.
name: Name of the evaluation.
Returns:
Returns `dict` with evaluation results.
"""
metrics = metrics or _get_classifier_metrics(self._n_classes)
    return super(Classifier, self).evaluate(x=x,
                                            y=y,
                                            input_fn=input_fn,
                                            feed_fn=feed_fn,
                                            batch_size=batch_size,
                                            steps=steps,
                                            metrics=metrics,
                                            name=name)
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=False):
"""Returns predicted classes for given features.
Args:
      x: Matrix of shape [n_samples, n_features...]. Can be an iterator that
returns arrays of features. The training input samples for fitting the
model. If set, `input_fn` must be `None`.
input_fn: Input function. If set, `x` and 'batch_size' must be `None`.
batch_size: Override default batch size. If set, 'input_fn' must be
'None'.
as_iterable: If True, return an iterable which keeps yielding predictions
for each example until inputs are exhausted. Note: The inputs must
terminate if you want the iterable to terminate (e.g. be sure to pass
num_epochs=1 if you are using something like read_batch_features).
Returns:
A numpy array of predicted classes (or an iterable of predicted classes if
as_iterable is True).
Raises:
ValueError: If x and input_fn are both provided or both `None`.
"""
predictions = super(Classifier, self).predict(
x=x, input_fn=input_fn, batch_size=batch_size, as_iterable=as_iterable,
outputs=[Classifier.CLASS_OUTPUT])
if as_iterable:
return (p[Classifier.CLASS_OUTPUT] for p in predictions)
else:
return predictions[Classifier.CLASS_OUTPUT]
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict_proba(
self, x=None, input_fn=None, batch_size=None, as_iterable=False):
"""Returns predicted probabilty distributions for given features.
Args:
      x: Matrix of shape [n_samples, n_features...]. Can be an iterator that
returns arrays of features. The training input samples for fitting the
model. If set, `input_fn` must be `None`.
input_fn: Input function. If set, `x` and 'batch_size' must be `None`.
batch_size: Override default batch size. If set, 'input_fn' must be
'None'.
as_iterable: If True, return an iterable which keeps yielding predictions
for each example until inputs are exhausted. Note: The inputs must
terminate if you want the iterable to terminate (e.g. be sure to pass
num_epochs=1 if you are using something like read_batch_features).
Returns:
A numpy array of predicted probability distributions (or an iterable of
predicted probability distributions if as_iterable is True).
Raises:
ValueError: If x and input_fn are both provided or both `None`.
"""
predictions = super(Classifier, self).predict(
x=x, input_fn=input_fn, batch_size=batch_size, as_iterable=as_iterable,
outputs=[Classifier.PROBABILITY_OUTPUT])
if as_iterable:
return (p[Classifier.PROBABILITY_OUTPUT] for p in predictions)
else:
return predictions[Classifier.PROBABILITY_OUTPUT]
def _classifier_model(self, features, targets, mode):
return self._convert_to_estimator_model_result(
self._logits_fn(features, targets, mode))
def _classifier_model_with_params(self, features, targets, mode, params):
return self._convert_to_estimator_model_result(
self._logits_fn(features, targets, mode, params))
def _convert_to_estimator_model_result(self, logits_fn_result):
logits, loss, train_op = logits_fn_result
return {
Classifier.CLASS_OUTPUT:
math_ops.argmax(logits, len(logits.get_shape()) - 1),
Classifier.PROBABILITY_OUTPUT: nn.softmax(logits)
}, loss, train_op
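# --- Added hedged usage sketch (not part of the upstream file) ---
# A minimal `model_fn` for this Classifier might look like the commented
# sketch below; the layer size, loss and optimizer are illustrative
# assumptions, not part of the original module:
#
#   import tensorflow as tf
#
#   def my_model_fn(features, targets, mode):  # hypothetical example
#       logits = tf.contrib.layers.fully_connected(features, 3,
#                                                  activation_fn=None)
#       loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
#           logits=logits, labels=targets))
#       train_op = tf.train.GradientDescentOptimizer(0.1).minimize(
#           loss, global_step=tf.contrib.framework.get_global_step())
#       return logits, loss, train_op
#
#   classifier = Classifier(model_fn=my_model_fn, n_classes=3)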
|
40223134/w16b_test
|
refs/heads/master
|
static/Brython3.1.3-20150514-095342/Lib/ui/__init__.py
|
606
|
from browser import html, document
from .dialog import *
from .progressbar import *
from .slider import *
def add_stylesheet():
    _link = html.LINK(Href='/src/Lib/ui/css/smoothness/jquery-ui-1.10.3.custom.min.css')
    _link.rel = 'stylesheet'
document <= _link
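# Added illustrative note (not part of the original package): a page script
# would typically attach the stylesheet once at startup, e.g.
#   from ui import add_stylesheet
#   add_stylesheet()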
|
gptech/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/aos/aos_logical_device_map.py
|
78
|
#!/usr/bin/python
#
# (c) 2017 Apstra Inc, <community@apstra.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aos_logical_device_map
author: Damien Garros (@dgarros)
version_added: "2.3"
short_description: Manage AOS Logical Device Map
description:
    - Apstra AOS Logical Device Map module lets you manage your Logical Device Maps easily.
      You can create and delete Logical Device Maps by name, ID, or by using a JSON file.
      This module is idempotent and supports the I(check) mode. It uses the AOS REST API.
requirements:
- "aos-pyez >= 0.6.0"
options:
session:
description:
- An existing AOS session as obtained by M(aos_login) module.
required: true
name:
description:
- Name of the Logical Device Map to manage.
Only one of I(name), I(id) or I(content) can be set.
id:
description:
- AOS Id of the Logical Device Map to manage (can't be used to create a new Logical Device Map),
Only one of I(name), I(id) or I(content) can be set.
content:
description:
      - Data structure of the Logical Device Map to manage. The data can be in YAML / JSON or
        directly a variable. It is the same data structure that is returned
        on success in I(value). Only one of I(name), I(id) or I(content) can be set.
state:
description:
      - Indicates the expected state of the Logical Device Map (present or absent).
default: present
choices: ['present', 'absent']
'''
EXAMPLES = '''
- name: "Create an Logical Device Map with one subnet"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: present
- name: "Create an Logical Device Map with multiple subnets"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-other-logical-device-map"
state: present
- name: "Check if an Logical Device Map exist with same subnets by ID"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "45ab26fc-c2ed-4307-b330-0870488fa13e"
state: present
- name: "Delete an Logical Device Map by name"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: absent
- name: "Delete an Logical Device Map by id"
aos_logical_device_map:
session: "{{ aos_session }}"
id: "45ab26fc-c2ed-4307-b330-0870488fa13e"
state: absent
# Save a Logical Device Map to a file
- name: "Access Logical Device Map 1/3"
aos_logical_device_map:
session: "{{ aos_session }}"
name: "my-logical-device-map"
state: present
register: logical_device_map
- name: "Save Logical Device Map into a file in JSON 2/3"
copy:
content: "{{ logical_device_map.value | to_nice_json }}"
dest: logical_device_map_saved.json
- name: "Save Logical Device Map into a file in YAML 3/3"
copy:
content: "{{ logical_device_map.value | to_nice_yaml }}"
dest: logical_device_map_saved.yaml
- name: "Load Logical Device Map from a JSON file"
aos_logical_device_map:
session: "{{ aos_session }}"
content: "{{ lookup('file', 'resources/logical_device_map_saved.json') }}"
state: present
- name: "Load Logical Device Map from a YAML file"
aos_logical_device_map:
session: "{{ aos_session }}"
content: "{{ lookup('file', 'resources/logical_device_map_saved.yaml') }}"
state: present
'''
RETURN = '''
name:
description: Name of the Logical Device Map
returned: always
type: str
sample: Server-IpAddrs
id:
description: AOS unique ID assigned to the Logical Device Map
returned: always
type: str
sample: fcc4ac1c-e249-4fe7-b458-2138bfb44c06
value:
description: Value of the object as returned by the AOS Server
returned: always
type: dict
sample: {'...'}
'''
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.aos import get_aos_session, find_collection_item, do_load_resource, check_aos_version, content_to_dict
#########################################################
# State Processing
#########################################################
def logical_device_map_absent(module, aos, my_log_dev_map):
margs = module.params
    # If the object does not exist, return directly
if my_log_dev_map.exists is False:
module.exit_json(changed=False, name=margs['name'], id='', value={})
# If not in check mode, delete Logical Device Map
if not module.check_mode:
try:
# Need to wait for 1sec before a delete to workaround a current
# limitation in AOS
time.sleep(1)
my_log_dev_map.delete()
        except Exception:
            module.fail_json(msg="An error occurred while trying to delete the Logical Device Map")
module.exit_json( changed=True,
name=my_log_dev_map.name,
id=my_log_dev_map.id,
value={} )
def logical_device_map_present(module, aos, my_log_dev_map):
margs = module.params
# if content is defined, create object from Content
if margs['content'] is not None:
if 'display_name' in module.params['content'].keys():
do_load_resource(module, aos.LogicalDeviceMaps, module.params['content']['display_name'])
else:
module.fail_json(msg="Unable to find display_name in 'content', Mandatory")
# if my_log_dev_map doesn't exist already, create a new one
if my_log_dev_map.exists is False and 'content' not in margs.keys():
module.fail_json(msg="'Content' is mandatory for module that don't exist currently")
module.exit_json( changed=False,
name=my_log_dev_map.name,
id=my_log_dev_map.id,
value=my_log_dev_map.value )
#########################################################
# Main Function
#########################################################
def logical_device_map(module):
margs = module.params
try:
aos = get_aos_session(module, margs['session'])
    except Exception:
module.fail_json(msg="Unable to login to the AOS server")
item_name = False
item_id = False
if margs['content'] is not None:
        content = content_to_dict(module, margs['content'])
if 'display_name' in content.keys():
item_name = content['display_name']
else:
module.fail_json(msg="Unable to extract 'display_name' from 'content'")
elif margs['name'] is not None:
item_name = margs['name']
elif margs['id'] is not None:
item_id = margs['id']
#----------------------------------------------------
# Find Object if available based on ID or Name
#----------------------------------------------------
try:
my_log_dev_map = find_collection_item(aos.LogicalDeviceMaps,
item_name=item_name,
item_id=item_id)
    except Exception:
module.fail_json(msg="Unable to find the Logical Device Map based on name or ID, something went wrong")
#----------------------------------------------------
# Proceed based on State value
#----------------------------------------------------
if margs['state'] == 'absent':
logical_device_map_absent(module, aos, my_log_dev_map)
elif margs['state'] == 'present':
logical_device_map_present(module, aos, my_log_dev_map)
def main():
module = AnsibleModule(
argument_spec=dict(
session=dict(required=True, type="dict"),
            name=dict(required=False),
            id=dict(required=False),
            content=dict(required=False, type="json"),
            state=dict(required=False,
                       choices=['present', 'absent'],
                       default="present")
        ),
        mutually_exclusive=[('name', 'id', 'content')],
required_one_of=[('name', 'id', 'content')],
supports_check_mode=True
)
# Check if aos-pyez is present and match the minimum version
check_aos_version(module, '0.6.0')
logical_device_map(module)
if __name__ == "__main__":
main()
|
etherkit/OpenBeacon2
|
refs/heads/master
|
client/linux-arm/venv/lib/python3.5/site-packages/PyInstaller/hooks/hook-PyQt5.QtSql.py
|
66
|
#-----------------------------------------------------------------------------
# Copyright (c) 2013-2019, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from PyInstaller.utils.hooks.qt import add_qt5_dependencies
hiddenimports, binaries, datas = add_qt5_dependencies(__file__)
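# Added note (hedged): add_qt5_dependencies derives the Qt module name from
# this hook's filename (hook-PyQt5.QtSql.py -> PyQt5.QtSql) and collects the
# hidden imports, shared libraries and data files that the module needs.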
|
dylan-reeves/Simple-Plant-MS
|
refs/heads/master
|
sites/views.py
|
1
|
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import user_passes_test
from django.views import generic
from .forms import siteForm
from .models import site
# This function is used by the user_passes_test decorator to verify that the
# user has permission to access the view
def is_in_multiple_groups(user):
return user.groups.filter(name__in=['superadmin']).exists()
#==============================================================================
#=========================SITE VIEWS===========================================
#==============================================================================
# Default landing page for sites app simply displays clickable list of sites
class IndexView(generic.ListView):
template_name = 'sites/index.html'
context_object_name = 'site_list'
def get_queryset(self):
return site.objects.all()
    # check the user is a member of the superadmin group and dispatch the
    # view
@method_decorator(user_passes_test(is_in_multiple_groups, login_url='/accounts/denied/'))
def dispatch(self, *args, **kwargs):
return super(IndexView, self).dispatch(*args, **kwargs)
#==============================================================================
#======================SITE DETAIL VIEW========================================
#==============================================================================
# Displays all the fields of a single site entry
class DetailView(generic.DetailView):
model = site
template_name = 'sites/details.html'
context_object_name = 'site_details'
    # check the user is a member of the superadmin group and dispatch the
    # view
@method_decorator(user_passes_test(is_in_multiple_groups, login_url='/accounts/denied/'))
def dispatch(self, *args, **kwargs):
return super(DetailView, self).dispatch(*args, **kwargs)
#==============================================================================
#==================CREATE NEW SITE=============================================
#==============================================================================
# Loads and handles the form to create a new site
class CreateView(generic.CreateView):
model = site
template_name = 'sites/create.html'
fields = ['name', 'manager', 'reportGroup']
success_url = '/sites/'
    # check the user is a member of the superadmin group and dispatch the
    # view
@method_decorator(user_passes_test(is_in_multiple_groups, login_url='/accounts/denied/'))
def dispatch(self, *args, **kwargs):
return super(CreateView, self).dispatch(*args, **kwargs)
#==============================================================================
#==========================VIEW TO UPDATE SITES================================
#==============================================================================
# loads and handles updating of sites
class UpdateView(generic.UpdateView):
model = site
fields = ['name', 'manager', 'reportGroup']
template_name = 'sites/update.html'
success_url = '/sites/'
    # check the user is a member of the superadmin group and dispatch the
    # view
@method_decorator(user_passes_test(is_in_multiple_groups, login_url='/accounts/denied/'))
def dispatch(self, *args, **kwargs):
return super(UpdateView, self).dispatch(*args, **kwargs)
#==============================================================================
#=========================VIEW TO DELETE SITES=================================
#==============================================================================
# Displays the site delete confirmation page
class DeleteView(generic.DeleteView):
model = site
success_url = '/sites/'
template_name = 'sites/delete.html'
context_object_name = 'site_details'
    # check the user is a member of the superadmin group and dispatch the
    # view
@method_decorator(user_passes_test(is_in_multiple_groups, login_url='/accounts/denied/'))
def dispatch(self, *args, **kwargs):
return super(DeleteView, self).dispatch(*args, **kwargs)
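# --- Added illustrative sketch (not part of the original file) ---
# Every view above repeats the same @method_decorator(...) dispatch override.
# A reusable mixin such as this hypothetical one would factor that out; each
# view could then inherit from it instead of redefining dispatch.
class SuperadminRequiredMixin(object):
    """Hypothetical mixin restricting a class-based view to 'superadmin'."""
    @method_decorator(user_passes_test(is_in_multiple_groups, login_url='/accounts/denied/'))
    def dispatch(self, *args, **kwargs):
        return super(SuperadminRequiredMixin, self).dispatch(*args, **kwargs)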
|
andrewcmyers/tensorflow
|
refs/heads/master
|
tensorflow/contrib/learn/python/learn/datasets/base.py
|
125
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Base utilities for loading datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import csv
import os
from os import path
import random
import tempfile
import time
import numpy as np
from six.moves import urllib
from tensorflow.contrib.framework import deprecated
from tensorflow.python.platform import gfile
Dataset = collections.namedtuple('Dataset', ['data', 'target'])
Datasets = collections.namedtuple('Datasets', ['train', 'validation', 'test'])
def load_csv_with_header(filename,
target_dtype,
features_dtype,
target_column=-1):
"""Load dataset from CSV file with a header row."""
with gfile.Open(filename) as csv_file:
data_file = csv.reader(csv_file)
header = next(data_file)
n_samples = int(header[0])
n_features = int(header[1])
data = np.zeros((n_samples, n_features), dtype=features_dtype)
target = np.zeros((n_samples,), dtype=target_dtype)
for i, row in enumerate(data_file):
target[i] = np.asarray(row.pop(target_column), dtype=target_dtype)
data[i] = np.asarray(row, dtype=features_dtype)
return Dataset(data=data, target=target)
def load_csv_without_header(filename,
target_dtype,
features_dtype,
target_column=-1):
"""Load dataset from CSV file without a header row."""
with gfile.Open(filename) as csv_file:
data_file = csv.reader(csv_file)
data, target = [], []
for row in data_file:
target.append(row.pop(target_column))
data.append(np.asarray(row, dtype=features_dtype))
target = np.array(target, dtype=target_dtype)
data = np.array(data)
return Dataset(data=data, target=target)
def shrink_csv(filename, ratio):
"""Create a smaller dataset of only 1/ratio of original data."""
filename_small = filename.replace('.', '_small.')
with gfile.Open(filename_small, 'w') as csv_file_small:
writer = csv.writer(csv_file_small)
with gfile.Open(filename) as csv_file:
reader = csv.reader(csv_file)
i = 0
for row in reader:
if i % ratio == 0:
writer.writerow(row)
i += 1
def load_iris(data_path=None):
"""Load Iris dataset.
Args:
data_path: string, path to iris dataset (optional)
Returns:
Dataset object containing data in-memory.
"""
if data_path is None:
module_path = path.dirname(__file__)
data_path = path.join(module_path, 'data', 'iris.csv')
return load_csv_with_header(
data_path,
target_dtype=np.int,
features_dtype=np.float)
def load_boston(data_path=None):
"""Load Boston housing dataset.
Args:
data_path: string, path to boston dataset (optional)
Returns:
Dataset object containing data in-memory.
"""
if data_path is None:
module_path = path.dirname(__file__)
data_path = path.join(module_path, 'data', 'boston_house_prices.csv')
return load_csv_with_header(
data_path,
target_dtype=np.float,
features_dtype=np.float)
def retry(initial_delay,
max_delay,
factor=2.0,
jitter=0.25,
is_retriable=None):
"""Simple decorator for wrapping retriable functions.
Args:
initial_delay: the initial delay.
factor: each subsequent retry, the delay is multiplied by this value.
(must be >= 1).
jitter: to avoid lockstep, the returned delay is multiplied by a random
number between (1-jitter) and (1+jitter). To add a 20% jitter, set
jitter = 0.2. Must be < 1.
    max_delay: the maximum delay allowed (actual max is
      max_delay * (1 + jitter)).
is_retriable: (optional) a function that takes an Exception as an argument
and returns true if retry should be applied.
"""
if factor < 1:
raise ValueError('factor must be >= 1; was %f' % (factor,))
if jitter >= 1:
raise ValueError('jitter must be < 1; was %f' % (jitter,))
# Generator to compute the individual delays
def delays():
delay = initial_delay
while delay <= max_delay:
yield delay * random.uniform(1 - jitter, 1 + jitter)
delay *= factor
def wrap(fn):
"""Wrapper function factory invoked by decorator magic."""
def wrapped_fn(*args, **kwargs):
"""The actual wrapper function that applies the retry logic."""
for delay in delays():
try:
return fn(*args, **kwargs)
        except Exception as e:  # pylint: disable=broad-except
if is_retriable is None:
continue
if is_retriable(e):
time.sleep(delay)
else:
raise
return fn(*args, **kwargs)
return wrapped_fn
return wrap
_RETRIABLE_ERRNOS = {
110, # Connection timed out [socket.py]
}
def _is_retriable(e):
return isinstance(e, IOError) and e.errno in _RETRIABLE_ERRNOS
@retry(initial_delay=1.0, max_delay=16.0, is_retriable=_is_retriable)
def urlretrieve_with_retry(url, filename=None):
return urllib.request.urlretrieve(url, filename)
def maybe_download(filename, work_directory, source_url):
"""Download the data from source url, unless it's already here.
Args:
filename: string, name of the file in the directory.
work_directory: string, path to working directory.
source_url: url to download from if file doesn't exist.
Returns:
Path to resulting file.
"""
if not gfile.Exists(work_directory):
gfile.MakeDirs(work_directory)
filepath = os.path.join(work_directory, filename)
if not gfile.Exists(filepath):
temp_file_name, _ = urlretrieve_with_retry(source_url)
gfile.Copy(temp_file_name, filepath)
with gfile.GFile(filepath) as f:
size = f.size()
print('Successfully downloaded', filename, size, 'bytes.')
return filepath
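# --- Added hedged usage sketch (not part of the upstream file) ---
# How the retry decorator above composes with a custom predicate; the URL and
# destination path below are illustrative assumptions only:
#
#   @retry(initial_delay=1.0, max_delay=16.0, is_retriable=_is_retriable)
#   def fetch(url, filename=None):
#       return urllib.request.urlretrieve(url, filename)
#
#   maybe_download('iris.csv', '/tmp/data',
#                  'https://example.com/iris.csv')  # hypothetical URL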
|
da1z/intellij-community
|
refs/heads/master
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_0/_pkg0_0_0/_pkg0_0_0_0/_pkg0_0_0_0_1/_mod0_0_0_0_1_4.py
|
30
|
name0_0_0_0_1_4_0 = None
name0_0_0_0_1_4_1 = None
name0_0_0_0_1_4_2 = None
name0_0_0_0_1_4_3 = None
name0_0_0_0_1_4_4 = None
|
olivernina/idropout
|
refs/heads/master
|
layer.py
|
1
|
# Copyright (c) 2011, Alex Krizhevsky (akrizhevsky@gmail.com)
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from math import exp
import sys
import ConfigParser as cfg
import os
import numpy as n
import numpy.random as nr
from math import ceil, floor
from ordereddict import OrderedDict
from os import linesep as NL
from options import OptionsParser
import re
class LayerParsingError(Exception):
pass
# A neuron that doesn't take parameters
class NeuronParser:
def __init__(self, type, func_str, uses_acts=True, uses_inputs=True):
self.type = type
self.func_str = func_str
self.uses_acts = uses_acts
self.uses_inputs = uses_inputs
def parse(self, type):
if type == self.type:
return {'type': self.type,
'params': {},
'usesActs': self.uses_acts,
'usesInputs': self.uses_inputs}
return None
# A neuron that takes parameters
class ParamNeuronParser(NeuronParser):
neuron_regex = re.compile(r'^\s*(\w+)\s*\[\s*(\w+(\s*,\w+)*)\s*\]\s*$')
def __init__(self, type, func_str, uses_acts=True, uses_inputs=True):
NeuronParser.__init__(self, type, func_str, uses_acts, uses_inputs)
m = self.neuron_regex.match(type)
self.base_type = m.group(1)
self.param_names = m.group(2).split(',')
assert len(set(self.param_names)) == len(self.param_names)
def parse(self, type):
m = re.match(r'^%s\s*\[([\d,\.\s\-e]*)\]\s*$' % self.base_type, type)
if m:
try:
param_vals = [float(v.strip()) for v in m.group(1).split(',')]
if len(param_vals) == len(self.param_names):
return {'type': self.base_type,
'params': dict(zip(self.param_names, param_vals)),
'usesActs': self.uses_acts,
'usesInputs': self.uses_inputs}
except TypeError:
pass
return None
class AbsTanhNeuronParser(ParamNeuronParser):
def __init__(self):
ParamNeuronParser.__init__(self, 'abstanh[a,b]', 'f(x) = a * |tanh(b * x)|')
def parse(self, type):
dic = ParamNeuronParser.parse(self, type)
# Make b positive, since abs(tanh(bx)) = abs(tanh(-bx)) and the C++ code
# assumes b is positive.
if dic:
dic['params']['b'] = abs(dic['params']['b'])
return dic
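# Added illustrative note: a parameterized neuron specification such as
# "abstanh[1.7159,0.6666]" is parsed by the classes above into
#   {'type': 'abstanh', 'params': {'a': 1.7159, 'b': 0.6666},
#    'usesActs': True, 'usesInputs': True}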
# Subclass that throws more convnet-specific exceptions than the default
class MyConfigParser(cfg.SafeConfigParser):
def safe_get(self, section, option, f=cfg.SafeConfigParser.get, typestr=None, default=None):
try:
return f(self, section, option)
except cfg.NoOptionError, e:
if default is not None:
return default
raise LayerParsingError("Layer '%s': required parameter '%s' missing" % (section, option))
except ValueError, e:
if typestr is None:
raise e
raise LayerParsingError("Layer '%s': parameter '%s' must be %s" % (section, option, typestr))
def safe_get_list(self, section, option, f=str, typestr='strings', default=None):
v = self.safe_get(section, option, default=default)
if type(v) == list:
return v
try:
return [f(x.strip()) for x in v.split(',')]
        except Exception:
raise LayerParsingError("Layer '%s': parameter '%s' must be ','-delimited list of %s" % (section, option, typestr))
def safe_get_int(self, section, option, default=None):
return self.safe_get(section, option, f=cfg.SafeConfigParser.getint, typestr='int', default=default)
def safe_get_float(self, section, option, default=None):
return self.safe_get(section, option, f=cfg.SafeConfigParser.getfloat, typestr='float', default=default)
def safe_get_bool(self, section, option, default=None):
return self.safe_get(section, option, f=cfg.SafeConfigParser.getboolean, typestr='bool', default=default)
def safe_get_float_list(self, section, option, default=None):
return self.safe_get_list(section, option, float, typestr='floats', default=default)
def safe_get_int_list(self, section, option, default=None):
return self.safe_get_list(section, option, int, typestr='ints', default=default)
def safe_get_bool_list(self, section, option, default=None):
return self.safe_get_list(section, option, lambda x: x.lower() in ('true', '1'), typestr='bools', default=default)
# A class that implements part of the interface of MyConfigParser
class FakeConfigParser(object):
def __init__(self, dic):
self.dic = dic
def safe_get(self, section, option, default=None):
return self.dic[option]
class LayerParser:
def __init__(self):
self.dic = {}
self.set_defaults()
# Post-processing step -- this is called after all layers have been initialized
def optimize(self, layers):
self.dic['actsTarget'] = -1
self.dic['actsGradTarget'] = -1
# Add parameters from layer parameter file
def add_params(self, mcp):
dic, name = self.dic, self.dic['name']
dic['dropout'] = 0.0
if name in mcp.sections():
dic['dropout'] = mcp.safe_get_float(name, 'dropout', default=0.0)
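    # Added illustrative note: add_params() reads per-layer sections from the
    # layer-parameter file; a hypothetical section enabling dropout might be:
    #   [fc10]
    #   dropout=0.5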
def init(self, dic):
self.dic = dic
return self
def set_defaults(self):
self.dic['outputs'] = 0
self.dic['parser'] = self
self.dic['requiresParams'] = False
# Does this layer use its own activity matrix
# for some purpose other than computing its output?
# Usually, this will only be true for layers that require their
# own activity matrix for gradient computations. For example, layers
# with logistic units must compute the gradient y * (1 - y), where y is
# the activity matrix.
#
        # Layers that do not use their own activity matrix should advertise
# this, since this will enable memory-saving matrix re-use optimizations.
#
# The default value of this property is True, for safety purposes.
# If a layer advertises that it does not use its own activity matrix when
# in fact it does, bad things will happen.
self.dic['usesActs'] = True
# Does this layer use the activity matrices of its input layers
# for some purpose other than computing its output?
#
# Again true by default for safety
self.dic['usesInputs'] = True
# Force this layer to use its own activity gradient matrix,
# instead of borrowing one from one of its inputs.
#
# This should be true for layers where the mapping from output
# gradient to input gradient is non-elementwise.
self.dic['forceOwnActs'] = True
# Does this layer need the gradient at all?
# Should only be true for layers with parameters (weights).
self.dic['gradConsumer'] = False
def parse(self, name, mcp, prev_layers, model=None):
self.prev_layers = prev_layers
self.dic['name'] = name
self.dic['type'] = mcp.safe_get(name, 'type')
return self.dic
def verify_float_range(self, v, param_name, _min, _max):
self.verify_num_range(v, param_name, _min, _max, strconv=lambda x: '%.3f' % x)
def verify_num_range(self, v, param_name, _min, _max, strconv=lambda x:'%d' % x):
if type(v) == list:
for i,vv in enumerate(v):
self._verify_num_range(vv, param_name, _min, _max, i, strconv=strconv)
else:
self._verify_num_range(v, param_name, _min, _max, strconv=strconv)
def _verify_num_range(self, v, param_name, _min, _max, input=-1, strconv=lambda x:'%d' % x):
layer_name = self.dic['name'] if input < 0 else '%s[%d]' % (self.dic['name'], input)
if _min is not None and _max is not None and (v < _min or v > _max):
raise LayerParsingError("Layer '%s': parameter '%s' must be in the range %s-%s" % (layer_name, param_name, strconv(_min), strconv(_max)))
elif _min is not None and v < _min:
raise LayerParsingError("Layer '%s': parameter '%s' must be greater than or equal to %s" % (layer_name, param_name, strconv(_min)))
elif _max is not None and v > _max:
raise LayerParsingError("Layer '%s': parameter '%s' must be smaller than or equal to %s" % (layer_name, param_name, strconv(_max)))
def verify_divisible(self, value, div, value_name, div_name=None, input_idx=0):
layer_name = self.dic['name'] if len(self.dic['inputs']) == 0 else '%s[%d]' % (self.dic['name'], input_idx)
if value % div != 0:
raise LayerParsingError("Layer '%s': parameter '%s' must be divisible by %s" % (layer_name, value_name, str(div) if div_name is None else "'%s'" % div_name))
def verify_str_in(self, value, lst):
if value not in lst:
raise LayerParsingError("Layer '%s': parameter '%s' must be one of %s" % (self.dic['name'], value, ", ".join("'%s'" % s for s in lst)))
def verify_int_in(self, value, lst):
if value not in lst:
raise LayerParsingError("Layer '%s': parameter '%s' must be one of %s" % (self.dic['name'], value, ", ".join("'%d'" % s for s in lst)))
# This looks for neuron=x arguments in various layers, and creates
# separate layer definitions for them.
@staticmethod
def detach_neuron_layers(layers):
layers_new = []
for i, l in enumerate(layers):
layers_new += [l]
if l['type'] != 'neuron' and 'neuron' in l and l['neuron']:
NeuronLayerParser().detach_neuron_layer(i, layers, layers_new)
return layers_new
@staticmethod
def parse_layers(layer_cfg_path, param_cfg_path, model, layers=[]):
try:
if not os.path.exists(layer_cfg_path):
raise LayerParsingError("Layer definition file '%s' does not exist" % layer_cfg_path)
if not os.path.exists(param_cfg_path):
raise LayerParsingError("Layer parameter file '%s' does not exist" % param_cfg_path)
if len(layers) == 0:
mcp = MyConfigParser(dict_type=OrderedDict)
mcp.read([layer_cfg_path])
for name in mcp.sections():
if not mcp.has_option(name, 'type'):
raise LayerParsingError("Layer '%s': no type given" % name)
ltype = mcp.safe_get(name, 'type')
if ltype not in layer_parsers:
raise LayerParsingError("Layer '%s': Unknown layer type: '%s'" % (name, ltype))
layers += [layer_parsers[ltype]().parse(name, mcp, layers, model)]
layers = LayerParser.detach_neuron_layers(layers)
for l in layers:
lp = layer_parsers[l['type']]()
l['parser'].optimize(layers)
del l['parser']
for l in layers:
if not l['type'].startswith('cost.'):
found = max(l['name'] in [layers[n]['name'] for n in l2['inputs']] for l2 in layers if 'inputs' in l2)
if not found:
raise LayerParsingError("Layer '%s' of type '%s' is unused" % (l['name'], l['type']))
mcp = MyConfigParser(dict_type=OrderedDict)
mcp.read([param_cfg_path])
for l in layers:
if not mcp.has_section(l['name']) and l['requiresParams']:
raise LayerParsingError("Layer '%s' of type '%s' requires extra parameters, but none given in file '%s'." % (l['name'], l['type'], param_cfg_path))
lp = layer_parsers[l['type']]().init(l)
lp.add_params(mcp)
lp.dic['conserveMem'] = model.op.get_value('conserve_mem')
except LayerParsingError, e:
print e
sys.exit(1)
return layers
@staticmethod
def register_layer_parser(ltype, cls):
if ltype in layer_parsers:
raise LayerParsingError("Layer type '%s' already registered" % ltype)
layer_parsers[ltype] = cls
# Any layer that takes an input (i.e. non-data layer)
class LayerWithInputParser(LayerParser):
def __init__(self, num_inputs=-1):
LayerParser.__init__(self)
self.num_inputs = num_inputs
def verify_num_params(self, params):
for param in params:
if len(self.dic[param]) != len(self.dic['inputs']):
raise LayerParsingError("Layer '%s': %s list length does not match number of inputs" % (self.dic['name'], param))
def optimize(self, layers):
LayerParser.optimize(self, layers)
dic = self.dic
# Check if I have an input that no one else uses.
if not dic['forceOwnActs']:
for i, inp in enumerate(dic['inputs']):
l = layers[inp]
if l['outputs'] == dic['outputs'] and sum('inputs' in ll and inp in ll['inputs'] for ll in layers) == 1:
# I can share my activity matrix with this layer
# if it does not use its activity matrix, and I
# do not need to remember my inputs.
if not l['usesActs'] and not dic['usesInputs']:
dic['actsTarget'] = i
# print "Layer '%s' sharing activity matrix with layer '%s'" % (dic['name'], l['name'])
# I can share my gradient matrix with this layer.
dic['actsGradTarget'] = i
# print "Layer '%s' sharing activity gradient matrix with layer '%s'" % (dic['name'], l['name'])
def parse(self, name, mcp, prev_layers, model=None):
dic = LayerParser.parse(self, name, mcp, prev_layers, model)
dic['inputs'] = [inp.strip() for inp in mcp.safe_get(name, 'inputs').split(',')]
prev_names = [p['name'] for p in prev_layers]
for inp in dic['inputs']:
if inp not in prev_names:
raise LayerParsingError("Layer '%s': input layer '%s' not defined" % (name, inp))
dic['inputs'] = [prev_names.index(inp) for inp in dic['inputs']]
dic['inputLayers'] = [prev_layers[inp] for inp in dic['inputs']]
for inp in dic['inputs']:
if prev_layers[inp]['outputs'] == 0:
raise LayerParsingError("Layer '%s': input layer '%s' does not produce any output" % (name, prev_names[inp]))
dic['numInputs'] = [prev_layers[i]['outputs'] for i in dic['inputs']]
# Layers can declare a neuron activation function to apply to their output, as a shortcut
# to avoid declaring a separate neuron layer above themselves.
dic['neuron'] = mcp.safe_get(name, 'neuron', default="")
if self.num_inputs > 0 and len(dic['numInputs']) != self.num_inputs:
raise LayerParsingError("Layer '%s': number of inputs must be %d", name, self.num_inputs)
# input_layers = [prev_layers[i] for i in dic['inputs']]
# dic['gradConsumer'] = any(l['gradConsumer'] for l in dic['inputLayers'])
# dic['usesActs'] = dic['gradConsumer'] # A conservative setting by default for layers with input
return dic
def verify_img_size(self):
dic = self.dic
if dic['numInputs'][0] % dic['imgPixels'] != 0 or dic['imgSize'] * dic['imgSize'] != dic['imgPixels']:
raise LayerParsingError("Layer '%s': has %-d dimensional input, not interpretable as %d-channel images" % (dic['name'], dic['numInputs'][0], dic['channels']))
@staticmethod
def grad_consumers_below(dic):
if dic['gradConsumer']:
return True
if 'inputLayers' in dic:
return any(LayerWithInputParser.grad_consumers_below(l) for l in dic['inputLayers'])
def verify_no_grads(self):
if LayerWithInputParser.grad_consumers_below(self.dic):
raise LayerParsingError("Layer '%s': layers of type '%s' cannot propagate gradient and must not be placed over layers with parameters." % (self.dic['name'], self.dic['type']))
class NailbedLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self, num_inputs=1)
def parse(self, name, mcp, prev_layers, model=None):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['forceOwnActs'] = False
dic['usesActs'] = False
dic['usesInputs'] = False
dic['channels'] = mcp.safe_get_int(name, 'channels')
dic['stride'] = mcp.safe_get_int(name, 'stride')
self.verify_num_range(dic['channels'], 'channels', 1, None)
# Computed values
dic['imgPixels'] = dic['numInputs'][0] / dic['channels']
dic['imgSize'] = int(n.sqrt(dic['imgPixels']))
dic['outputsX'] = (dic['imgSize'] + dic['stride'] - 1) / dic['stride']
dic['start'] = (dic['imgSize'] - dic['stride'] * (dic['outputsX'] - 1)) / 2
dic['outputs'] = dic['channels'] * dic['outputsX']**2
self.verify_num_range(dic['outputsX'], 'outputsX', 0, None)
self.verify_img_size()
print "Initialized bed-of-nails layer '%s', producing %dx%d %d-channel output" % (name, dic['outputsX'], dic['outputsX'], dic['channels'])
return dic
class GaussianBlurLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self, num_inputs=1)
def parse(self, name, mcp, prev_layers, model=None):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['forceOwnActs'] = False
dic['usesActs'] = False
dic['usesInputs'] = False
dic['outputs'] = dic['numInputs'][0]
dic['channels'] = mcp.safe_get_int(name, 'channels')
dic['filterSize'] = mcp.safe_get_int(name, 'filterSize')
dic['stdev'] = mcp.safe_get_float(name, 'stdev')
self.verify_num_range(dic['channels'], 'channels', 1, None)
self.verify_int_in(dic['filterSize'], [3, 5, 7, 9])
# Computed values
dic['imgPixels'] = dic['numInputs'][0] / dic['channels']
dic['imgSize'] = int(n.sqrt(dic['imgPixels']))
dic['filter'] = n.array([exp(-(dic['filterSize']/2 - i)**2 / float(2 * dic['stdev']**2))
for i in xrange(dic['filterSize'])], dtype=n.float32).reshape(1, dic['filterSize'])
dic['filter'] /= dic['filter'].sum()
self.verify_img_size()
if dic['filterSize'] > dic['imgSize']:
raise LayerParsingError("Later '%s': filter size (%d) must be smaller than image size (%d)." % (dic['name'], dic['filterSize'], dic['imgSize']))
print "Initialized Gaussian blur layer '%s', producing %dx%d %d-channel output" % (name, dic['imgSize'], dic['imgSize'], dic['channels'])
return dic
class ResizeLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self, num_inputs=1)
def parse(self, name, mcp, prev_layers, model=None):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['forceOwnActs'] = False
dic['usesActs'] = False
dic['usesInputs'] = False
dic['channels'] = mcp.safe_get_int(name, 'channels')
dic['imgPixels'] = dic['numInputs'][0] / dic['channels']
dic['imgSize'] = int(n.sqrt(dic['imgPixels']))
dic['scale'] = mcp.safe_get_float(name, 'scale')
dic['tgtSize'] = int(floor(dic['imgSize'] / dic['scale']))
dic['tgtPixels'] = dic['tgtSize']**2
self.verify_num_range(dic['channels'], 'channels', 1, None)
# Really not recommended to use this for such severe scalings
self.verify_float_range(dic['scale'], 'scale', 0.5, 2)
dic['outputs'] = dic['channels'] * dic['tgtPixels']
self.verify_img_size()
self.verify_no_grads()
print "Initialized resize layer '%s', producing %dx%d %d-channel output" % (name, dic['tgtSize'], dic['tgtSize'], dic['channels'])
return dic
class RandomScaleLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self, num_inputs=1)
def parse(self, name, mcp, prev_layers, model=None):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['forceOwnActs'] = False
dic['usesActs'] = False
dic['usesInputs'] = False
dic['channels'] = mcp.safe_get_int(name, 'channels')
self.verify_num_range(dic['channels'], 'channels', 1, None)
# Computed values
dic['imgPixels'] = dic['numInputs'][0] / dic['channels']
dic['imgSize'] = int(n.sqrt(dic['imgPixels']))
dic['maxScale'] = mcp.safe_get_float(name, 'maxScale')
dic['tgtSize'] = int(floor(dic['imgSize'] / dic['maxScale']))
dic['tgtPixels'] = dic['tgtSize']**2
self.verify_float_range(dic['maxScale'], 'maxScale', 1, 2)
dic['outputs'] = dic['channels'] * dic['tgtPixels']
self.verify_img_size()
self.verify_no_grads()
print "Initialized random scale layer '%s', producing %dx%d %d-channel output" % (name, dic['tgtSize'], dic['tgtSize'], dic['channels'])
return dic
class ColorTransformLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self, num_inputs=1)
def parse(self, name, mcp, prev_layers, model=None):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['forceOwnActs'] = False
dic['usesActs'] = False
dic['usesInputs'] = False
# Computed values
dic['imgPixels'] = dic['numInputs'][0] / 3
dic['imgSize'] = int(n.sqrt(dic['imgPixels']))
dic['channels'] = 3
dic['outputs'] = dic['numInputs'][0]
self.verify_img_size()
self.verify_no_grads()
return dic
class RGBToYUVLayerParser(ColorTransformLayerParser):
def __init__(self):
ColorTransformLayerParser.__init__(self)
def parse(self, name, mcp, prev_layers, model=None):
dic = ColorTransformLayerParser.parse(self, name, mcp, prev_layers, model)
print "Initialized RGB --> YUV layer '%s', producing %dx%d %d-channel output" % (name, dic['imgSize'], dic['imgSize'], dic['channels'])
return dic
class RGBToLABLayerParser(ColorTransformLayerParser):
def __init__(self):
ColorTransformLayerParser.__init__(self)
def parse(self, name, mcp, prev_layers, model=None):
dic = ColorTransformLayerParser.parse(self, name, mcp, prev_layers, model)
dic['center'] = mcp.safe_get_bool(name, 'center', default=False)
print "Initialized RGB --> LAB layer '%s', producing %dx%d %d-channel output" % (name, dic['imgSize'], dic['imgSize'], dic['channels'])
return dic
class NeuronLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self, num_inputs=1)
@staticmethod
def get_unused_layer_name(layers, wish):
layer_names = set([l['name'] for l in layers])
if wish not in layer_names:
return wish
for i in xrange(1, 100):
name = '%s.%d' % (wish, i)
if name not in layer_names:
return name
raise LayerParsingError("This is insane.")
def parse_neuron(self, neuron_str):
for n in neuron_parsers:
p = n.parse(neuron_str)
if p: # Successfully parsed neuron, return it
self.dic['neuron'] = p
self.dic['usesActs'] = self.dic['neuron']['usesActs']
self.dic['usesInputs'] = self.dic['neuron']['usesInputs']
return
# Could not parse neuron
# Print available neuron types
colnames = ['Neuron type', 'Function']
m = max(len(colnames[0]), OptionsParser._longest_value(neuron_parsers, key=lambda x:x.type)) + 2
ntypes = [OptionsParser._bold(colnames[0].ljust(m))] + [n.type.ljust(m) for n in neuron_parsers]
fnames = [OptionsParser._bold(colnames[1])] + [n.func_str for n in neuron_parsers]
usage_lines = NL.join(ntype + fname for ntype,fname in zip(ntypes, fnames))
raise LayerParsingError("Layer '%s': unable to parse neuron type '%s'. Valid neuron types: %sWhere neurons have parameters, they must be floats." % (self.dic['name'], neuron_str, NL + usage_lines + NL))
def detach_neuron_layer(self, idx, layers, layers_new):
dic = self.dic
self.set_defaults()
dic['name'] = NeuronLayerParser.get_unused_layer_name(layers, '%s_neuron' % layers[idx]['name'])
dic['type'] = 'neuron'
dic['inputs'] = layers[idx]['name']
dic['neuron'] = layers[idx]['neuron']
dic = self.parse(dic['name'], FakeConfigParser(dic), layers_new)
# Link upper layers to this new one
for l in layers[idx+1:]:
if 'inputs' in l:
l['inputs'] = [i + (i >= len(layers_new) - 1) for i in l['inputs']]
if 'weightSourceLayerIndices' in l:
l['weightSourceLayerIndices'] = [i + (i >= len(layers_new)) for i in l['weightSourceLayerIndices']]
layers_new += [dic]
# print "Initialized implicit neuron layer '%s', producing %d outputs" % (dic['name'], dic['outputs'])
def parse(self, name, mcp, prev_layers, model=None):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['outputs'] = dic['numInputs'][0]
self.parse_neuron(dic['neuron'])
dic['forceOwnActs'] = False
print "Initialized neuron layer '%s', producing %d outputs" % (name, dic['outputs'])
return dic
class EltwiseSumLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self)
def parse(self, name, mcp, prev_layers, model):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
if len(set(dic['numInputs'])) != 1:
raise LayerParsingError("Layer '%s': all inputs must have the same dimensionality. Got dimensionalities: %s" % (name, ", ".join(str(s) for s in dic['numInputs'])))
dic['outputs'] = dic['numInputs'][0]
dic['usesInputs'] = False
dic['usesActs'] = False
dic['forceOwnActs'] = False
dic['coeffs'] = mcp.safe_get_float_list(name, 'coeffs', default=[1.0] * len(dic['inputs']))
print "Initialized elementwise sum layer '%s', producing %d outputs" % (name, dic['outputs'])
return dic
class EltwiseMaxLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self)
def parse(self, name, mcp, prev_layers, model):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
if len(dic['inputs']) < 2:
raise LayerParsingError("Layer '%s': elementwise max layer must have at least 2 inputs, got %d." % (name, len(dic['inputs'])))
if len(set(dic['numInputs'])) != 1:
raise LayerParsingError("Layer '%s': all inputs must have the same dimensionality. Got dimensionalities: %s" % (name, ", ".join(str(s) for s in dic['numInputs'])))
dic['outputs'] = dic['numInputs'][0]
print "Initialized elementwise max layer '%s', producing %d outputs" % (name, dic['outputs'])
return dic
class WeightLayerParser(LayerWithInputParser):
LAYER_PAT = re.compile(r'^\s*([^\s\[]+)(?:\[(\d+)\])?\s*$') # matches things like layername[5], etc
def __init__(self):
LayerWithInputParser.__init__(self)
@staticmethod
def get_layer_name(name_str):
m = WeightLayerParser.LAYER_PAT.match(name_str)
if not m:
return None
return m.group(1), m.group(2)
def add_params(self, mcp):
LayerWithInputParser.add_params(self, mcp)
dic, name = self.dic, self.dic['name']
dic['epsW'] = mcp.safe_get_float_list(name, 'epsW')
dic['epsB'] = mcp.safe_get_float(name, 'epsB')
dic['momW'] = mcp.safe_get_float_list(name, 'momW')
dic['momB'] = mcp.safe_get_float(name, 'momB')
dic['wc'] = mcp.safe_get_float_list(name, 'wc')
self.verify_num_params(['epsW', 'momW', 'wc'])
dic['gradConsumer'] = dic['epsB'] > 0 or any(w > 0 for w in dic['epsW'])
@staticmethod
def unshare_weights(layer, layers, matrix_idx=None):
def unshare(layer, layers, indices):
for i in indices:
if layer['weightSourceLayerIndices'][i] >= 0:
src_name = layers[layer['weightSourceLayerIndices'][i]]['name']
src_matrix_idx = layer['weightSourceMatrixIndices'][i]
layer['weightSourceLayerIndices'][i] = -1
layer['weightSourceMatrixIndices'][i] = -1
layer['weights'][i] = layer['weights'][i].copy()
layer['weightsInc'][i] = n.zeros_like(layer['weights'][i])
print "Unshared weight matrix %s[%d] from %s[%d]." % (layer['name'], i, src_name, src_matrix_idx)
else:
print "Weight matrix %s[%d] already unshared." % (layer['name'], i)
if 'weightSourceLayerIndices' in layer:
unshare(layer, layers, range(len(layer['inputs'])) if matrix_idx is None else [matrix_idx])
# Load weight/biases initialization module
def call_init_func(self, param_name, shapes, input_idx=-1):
dic = self.dic
func_pat = re.compile('^([^\.]+)\.([^\(\)]+)\s*(?:\(([^,]+(?:,[^,]+)*)\))?$')
m = func_pat.match(dic[param_name])
if not m:
raise LayerParsingError("Layer '%s': '%s' parameter must have format 'moduleName.functionName(param1,param2,...)'; got: %s." % (dic['name'], param_name, dic['initWFunc']))
module, func = m.group(1), m.group(2)
params = m.group(3).split(',') if m.group(3) is not None else []
try:
mod = __import__(module)
return getattr(mod, func)(dic['name'], input_idx, shapes, params=params) if input_idx >= 0 else getattr(mod, func)(dic['name'], shapes, params=params)
except (ImportError, AttributeError, TypeError), e:
raise LayerParsingError("Layer '%s': %s." % (dic['name'], e))
def make_weights(self, initW, rows, cols, order='C'):
dic = self.dic
dic['weights'], dic['weightsInc'] = [], []
if dic['initWFunc']: # Initialize weights from user-supplied python function
            # Initialization function is supplied in the format
            # module.func or module.func(param1,param2,...)
for i in xrange(len(dic['inputs'])):
dic['weights'] += [self.call_init_func('initWFunc', (rows[i], cols[i]), input_idx=i)]
if type(dic['weights'][i]) != n.ndarray:
raise LayerParsingError("Layer '%s[%d]': weight initialization function %s must return numpy.ndarray object. Got: %s." % (dic['name'], i, dic['initWFunc'], type(dic['weights'][i])))
if dic['weights'][i].dtype != n.float32:
raise LayerParsingError("Layer '%s[%d]': weight initialization function %s must weight matrices consisting of single-precision floats. Got: %s." % (dic['name'], i, dic['initWFunc'], dic['weights'][i].dtype))
if dic['weights'][i].shape != (rows[i], cols[i]):
raise LayerParsingError("Layer '%s[%d]': weight matrix returned by weight initialization function %s has wrong shape. Should be: %s; got: %s." % (dic['name'], i, dic['initWFunc'], (rows[i], cols[i]), dic['weights'][i].shape))
# Convert to desired order
dic['weights'][i] = n.require(dic['weights'][i], requirements=order)
dic['weightsInc'] += [n.zeros_like(dic['weights'][i])]
print "Layer '%s[%d]' initialized weight matrices from function %s" % (dic['name'], i, dic['initWFunc'])
else:
for i in xrange(len(dic['inputs'])):
if dic['weightSourceLayerIndices'][i] >= 0: # Shared weight matrix
src_layer = self.prev_layers[dic['weightSourceLayerIndices'][i]] if dic['weightSourceLayerIndices'][i] < len(self.prev_layers) else dic
dic['weights'] += [src_layer['weights'][dic['weightSourceMatrixIndices'][i]]]
dic['weightsInc'] += [src_layer['weightsInc'][dic['weightSourceMatrixIndices'][i]]]
if dic['weights'][i].shape != (rows[i], cols[i]):
raise LayerParsingError("Layer '%s': weight sharing source matrix '%s' has shape %dx%d; should be %dx%d."
% (dic['name'], dic['weightSource'][i], dic['weights'][i].shape[0], dic['weights'][i].shape[1], rows[i], cols[i]))
print "Layer '%s' initialized weight matrix %d from %s" % (dic['name'], i, dic['weightSource'][i])
else:
dic['weights'] += [n.array(initW[i] * nr.randn(rows[i], cols[i]), dtype=n.single, order=order)]
dic['weightsInc'] += [n.zeros_like(dic['weights'][i])]
def make_biases(self, rows, cols, order='C'):
dic = self.dic
if dic['initBFunc']:
dic['biases'] = self.call_init_func('initBFunc', (rows, cols))
if type(dic['biases']) != n.ndarray:
raise LayerParsingError("Layer '%s': bias initialization function %s must return numpy.ndarray object. Got: %s." % (dic['name'], dic['initBFunc'], type(dic['biases'])))
if dic['biases'].dtype != n.float32:
raise LayerParsingError("Layer '%s': bias initialization function %s must return numpy.ndarray object consisting of single-precision floats. Got: %s." % (dic['name'], dic['initBFunc'], dic['biases'].dtype))
if dic['biases'].shape != (rows, cols):
raise LayerParsingError("Layer '%s': bias vector returned by bias initialization function %s has wrong shape. Should be: %s; got: %s." % (dic['name'], dic['initBFunc'], (rows, cols), dic['biases'].shape))
dic['biases'] = n.require(dic['biases'], requirements=order)
print "Layer '%s' initialized bias vector from function %s" % (dic['name'], dic['initBFunc'])
else:
dic['biases'] = dic['initB'] * n.ones((rows, cols), order='C', dtype=n.single)
dic['biasesInc'] = n.zeros_like(dic['biases'])
def parse(self, name, mcp, prev_layers, model):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['requiresParams'] = True
dic['gradConsumer'] = True
dic['initW'] = mcp.safe_get_float_list(name, 'initW', default=0.01)
dic['initB'] = mcp.safe_get_float(name, 'initB', default=0)
dic['initWFunc'] = mcp.safe_get(name, 'initWFunc', default="")
dic['initBFunc'] = mcp.safe_get(name, 'initBFunc', default="")
# Find shared weight matrices
dic['weightSource'] = mcp.safe_get_list(name, 'weightSource', default=[''] * len(dic['inputs']))
self.verify_num_params(['initW', 'weightSource'])
prev_names = map(lambda x: x['name'], prev_layers)
dic['weightSourceLayerIndices'] = []
dic['weightSourceMatrixIndices'] = []
for i, src_name in enumerate(dic['weightSource']):
src_layer_idx = src_layer_matrix_idx = -1
if src_name != '':
src_layer_match = WeightLayerParser.get_layer_name(src_name)
if src_layer_match is None:
raise LayerParsingError("Layer '%s': unable to parse weight sharing source '%s'. Format is layer[idx] or just layer, in which case idx=0 is used." % (name, src_name))
src_layer_name = src_layer_match[0]
src_layer_matrix_idx = int(src_layer_match[1]) if src_layer_match[1] is not None else 0
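                # e.g. weightSource=conv1[1] shares matrix 1 of layer 'conv1'; weightSource=conv1 shares matrix 0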
if prev_names.count(src_layer_name) == 0 and src_layer_name != name:
raise LayerParsingError("Layer '%s': weight sharing source layer '%s' does not exist." % (name, src_layer_name))
src_layer_idx = prev_names.index(src_layer_name) if src_layer_name != name else len(prev_names)
src_layer = prev_layers[src_layer_idx] if src_layer_name != name else dic
if src_layer['type'] != dic['type']:
raise LayerParsingError("Layer '%s': weight sharing source layer '%s' is of type '%s'; should be '%s'." % (name, src_layer_name, src_layer['type'], dic['type']))
if src_layer_name != name and len(src_layer['weights']) <= src_layer_matrix_idx:
raise LayerParsingError("Layer '%s': weight sharing source layer '%s' has %d weight matrices, but '%s[%d]' requested." % (name, src_layer_name, len(src_layer['weights']), src_name, src_layer_matrix_idx))
if src_layer_name == name and src_layer_matrix_idx >= i:
raise LayerParsingError("Layer '%s': weight sharing source '%s[%d]' not defined yet." % (name, name, src_layer_matrix_idx))
dic['weightSourceLayerIndices'] += [src_layer_idx]
dic['weightSourceMatrixIndices'] += [src_layer_matrix_idx]
return dic
class FCLayerParser(WeightLayerParser):
def __init__(self):
WeightLayerParser.__init__(self)
def parse(self, name, mcp, prev_layers, model):
dic = WeightLayerParser.parse(self, name, mcp, prev_layers, model)
dic['usesActs'] = False
dic['outputs'] = mcp.safe_get_int(name, 'outputs')
self.verify_num_range(dic['outputs'], 'outputs', 1, None)
self.make_weights(dic['initW'], dic['numInputs'], [dic['outputs']] * len(dic['numInputs']), order='F')
self.make_biases(1, dic['outputs'], order='F')
print "Initialized fully-connected layer '%s', producing %d outputs" % (name, dic['outputs'])
return dic
class FCDropOutLayerParser(FCLayerParser):
    def __init__(self):
        FCLayerParser.__init__(self)
    def parse(self, name, mcp, prev_layers, model):
        dic = FCLayerParser.parse(self, name, mcp, prev_layers, model)
        dic['rate'] = mcp.safe_get_float(name, 'rate')
        assert 0 <= dic['rate'] <= 1
print "Output Drop rate: ", dic['rate']
return dic
class FCDropConnectLayerParser(FCLayerParser):
    def __init__(self):
        FCLayerParser.__init__(self)
    def parse(self, name, mcp, prev_layers, model):
        dic = FCLayerParser.parse(self, name, mcp, prev_layers, model)
        dic['rate'] = mcp.safe_get_float(name, 'rate')
        assert 0 <= dic['rate'] <= 1
print "Connection Drop rate: ", dic['rate']
return dic
class FCDropConnectFastLayerParser(FCLayerParser):
    def __init__(self):
        FCLayerParser.__init__(self)
    def parse(self, name, mcp, prev_layers, model):
        dic = FCLayerParser.parse(self, name, mcp, prev_layers, model)
        dic['rate'] = mcp.safe_get_float(name, 'rate')
        assert 0 <= dic['rate'] <= 1
print "Connection Drop rate(fast): ", dic['rate']
return dic
class LocalLayerParser(WeightLayerParser):
def __init__(self):
WeightLayerParser.__init__(self)
# Convert convolutional layer to unshared, locally-connected layer
@staticmethod
def conv_to_local(layers, idx):
layer = layers[idx]
if layer['type'] == 'conv':
layer['type'] = 'local'
for inp in xrange(len(layer['inputs'])):
src_layer_idx = layer['weightSourceLayerIndices'][inp]
if layer['weightSourceLayerIndices'][inp] >= 0:
src_layer = layers[src_layer_idx]
src_matrix_idx = layer['weightSourceMatrixIndices'][inp]
LocalLayerParser.conv_to_local(layers, src_layer_idx)
for w in ('weights', 'weightsInc'):
layer[w][inp] = src_layer[w][src_matrix_idx]
else:
layer['weights'][inp] = n.require(n.reshape(n.tile(n.reshape(layer['weights'][inp], (1, n.prod(layer['weights'][inp].shape))), (layer['modules'], 1)),
(layer['modules'] * layer['filterChannels'][inp] * layer['filterPixels'][inp], layer['filters'])),
requirements='C')
layer['weightsInc'][inp] = n.zeros_like(layer['weights'][inp])
if layer['sharedBiases']:
layer['biases'] = n.require(n.repeat(layer['biases'], layer['modules'], axis=0), requirements='C')
layer['biasesInc'] = n.zeros_like(layer['biases'])
print "Converted layer '%s' from convolutional to unshared, locally-connected" % layer['name']
# Also call this function on any layers sharing my weights
for i, l in enumerate(layers):
if 'weightSourceLayerIndices' in l and idx in l['weightSourceLayerIndices']:
LocalLayerParser.conv_to_local(layers, i)
return layer
# Returns (groups, filterChannels) array that represents the set
# of image channels to which each group is connected
def gen_rand_conns(self, groups, channels, filterChannels, inputIdx):
dic = self.dic
overSample = groups * filterChannels / channels
filterConns = [x for i in xrange(overSample) for x in nr.permutation(range(channels))]
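        # e.g. groups=4, channels=8, filterChannels=4 -> overSample = 4*4/8 = 2, so the default
        # connectivity above is 2 concatenated random permutations of range(8)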
if dic['initCFunc']: # Initialize connectivity from outside source
filterConns = self.call_init_func('initCFunc', (groups, channels, filterChannels), input_idx=inputIdx)
if len(filterConns) != overSample * channels:
raise LayerParsingError("Layer '%s[%d]': random connectivity initialization function %s must return list of length <groups> * <filterChannels> = %d; got: %d" % (dic['name'], inputIdx, dic['initCFunc'], len(filterConns)))
if any(c not in range(channels) for c in filterConns):
raise LayerParsingError("Layer '%s[%d]': random connectivity initialization function %s must return list of channel indices in the range 0-<channels-1> = 0-%d." % (dic['name'], inputIdx, dic['initCFunc'], channels-1))
# Every "channels" sub-slice should be a permutation of range(channels)
if any(len(set(c)) != len(c) for c in [filterConns[o*channels:(o+1)*channels] for o in xrange(overSample)]):
raise LayerParsingError("Layer '%s[%d]': random connectivity initialization function %s must return list of channel indices such that every non-overlapping sub-list of <channels> = %d elements is a permutation of the integers 0-<channels-1> = 0-%d." % (dic['name'], inputIdx, dic['initCFunc'], channels, channels-1))
elif dic['weightSourceLayerIndices'][inputIdx] >= 0: # Shared weight matrix
src_layer = self.prev_layers[dic['weightSourceLayerIndices'][inputIdx]] if dic['weightSourceLayerIndices'][inputIdx] < len(self.prev_layers) else dic
src_inp = dic['weightSourceMatrixIndices'][inputIdx]
if 'randSparse' not in src_layer or not src_layer['randSparse']:
raise LayerParsingError("Layer '%s[%d]': randSparse is true in this layer but false in weight sharing source layer '%s[%d]'." % (dic['name'], inputIdx, src_layer['name'], src_inp))
if (groups, channels, filterChannels) != (src_layer['groups'][src_inp], src_layer['channels'][src_inp], src_layer['filterChannels'][src_inp]):
raise LayerParsingError("Layer '%s[%d]': groups, channels, filterChannels set to %d, %d, %d, respectively. Does not match setting in weight sharing source layer '%s[%d]': %d, %d, %d." % (dic['name'], inputIdx, groups, channels, filterChannels, src_layer['name'], src_inp, src_layer['groups'][src_inp], src_layer['channels'][src_inp], src_layer['filterChannels'][src_inp]))
filterConns = src_layer['filterConns'][src_inp]
return filterConns
def parse(self, name, mcp, prev_layers, model):
dic = WeightLayerParser.parse(self, name, mcp, prev_layers, model)
dic['requiresParams'] = True
dic['usesActs'] = False
# Supplied values
dic['channels'] = mcp.safe_get_int_list(name, 'channels')
dic['padding'] = mcp.safe_get_int_list(name, 'padding', default=[0]*len(dic['inputs']))
dic['stride'] = mcp.safe_get_int_list(name, 'stride', default=[1]*len(dic['inputs']))
dic['filterSize'] = mcp.safe_get_int_list(name, 'filterSize')
dic['filters'] = mcp.safe_get_int_list(name, 'filters')
dic['groups'] = mcp.safe_get_int_list(name, 'groups', default=[1]*len(dic['inputs']))
dic['randSparse'] = mcp.safe_get_bool_list(name, 'randSparse', default=[False]*len(dic['inputs']))
dic['initW'] = mcp.safe_get_float_list(name, 'initW')
dic['initCFunc'] = mcp.safe_get(name, 'initCFunc', default='')
self.verify_num_params(['channels', 'padding', 'stride', 'filterSize', \
'filters', 'groups', 'randSparse', 'initW'])
self.verify_num_range(dic['stride'], 'stride', 1, None)
self.verify_num_range(dic['filterSize'],'filterSize', 1, None)
self.verify_num_range(dic['padding'], 'padding', 0, None)
self.verify_num_range(dic['channels'], 'channels', 1, None)
self.verify_num_range(dic['groups'], 'groups', 1, None)
# Computed values
dic['imgPixels'] = [numInputs/channels for numInputs,channels in zip(dic['numInputs'], dic['channels'])]
dic['imgSize'] = [int(n.sqrt(imgPixels)) for imgPixels in dic['imgPixels']]
self.verify_num_range(dic['imgSize'], 'imgSize', 1, None)
dic['filters'] = [filters*groups for filters,groups in zip(dic['filters'], dic['groups'])]
dic['filterPixels'] = [filterSize**2 for filterSize in dic['filterSize']]
dic['modulesX'] = [1 + int(ceil((2 * padding + imgSize - filterSize) / float(stride))) for padding,imgSize,filterSize,stride in zip(dic['padding'], dic['imgSize'], dic['filterSize'], dic['stride'])]
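        # e.g. imgSize=32, filterSize=5, padding=2, stride=1 -> modulesX = 1 + ceil((2*2 + 32 - 5)/1) = 32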
dic['filterChannels'] = [channels/groups for channels,groups in zip(dic['channels'], dic['groups'])]
if max(dic['randSparse']): # When randSparse is turned on for any input, filterChannels must be given for all of them
dic['filterChannels'] = mcp.safe_get_int_list(name, 'filterChannels', default=dic['filterChannels'])
self.verify_num_params(['filterChannels'])
if len(set(dic['modulesX'])) != 1 or len(set(dic['filters'])) != 1:
raise LayerParsingError("Layer '%s': all inputs must produce equally-dimensioned output. Dimensions are: %s." % (name, ", ".join("%dx%dx%d" % (filters, modulesX, modulesX) for filters,modulesX in zip(dic['filters'], dic['modulesX']))))
dic['modulesX'] = dic['modulesX'][0]
dic['modules'] = dic['modulesX']**2
dic['filters'] = dic['filters'][0]
dic['outputs'] = dic['modules'] * dic['filters']
dic['filterConns'] = [[]] * len(dic['inputs'])
for i in xrange(len(dic['inputs'])):
if dic['numInputs'][i] % dic['imgPixels'][i] != 0 or dic['imgSize'][i] * dic['imgSize'][i] != dic['imgPixels'][i]:
raise LayerParsingError("Layer '%s[%d]': has %-d dimensional input, not interpretable as square %d-channel images" % (name, i, dic['numInputs'][i], dic['channels'][i]))
if dic['channels'][i] > 3 and dic['channels'][i] % 4 != 0:
raise LayerParsingError("Layer '%s[%d]': number of channels must be smaller than 4 or divisible by 4" % (name, i))
if dic['filterSize'][i] > 2 * dic['padding'][i] + dic['imgSize'][i]:
raise LayerParsingError("Layer '%s[%d]': filter size (%d) greater than image size + 2 * padding (%d)" % (name, i, dic['filterSize'][i], 2 * dic['padding'][i] + dic['imgSize'][i]))
if dic['randSparse'][i]: # Random sparse connectivity requires some extra checks
if dic['groups'][i] == 1:
raise LayerParsingError("Layer '%s[%d]': number of groups must be greater than 1 when using random sparse connectivity" % (name, i))
self.verify_divisible(dic['channels'][i], dic['filterChannels'][i], 'channels', 'filterChannels', input_idx=i)
self.verify_divisible(dic['filterChannels'][i], 4, 'filterChannels', input_idx=i)
                self.verify_divisible(dic['groups'][i]*dic['filterChannels'][i], dic['channels'][i], 'groups * filterChannels', 'channels', input_idx=i)
dic['filterConns'][i] = self.gen_rand_conns(dic['groups'][i], dic['channels'][i], dic['filterChannels'][i], i)
else:
if dic['groups'][i] > 1:
self.verify_divisible(dic['channels'][i], 4*dic['groups'][i], 'channels', '4 * groups', input_idx=i)
self.verify_divisible(dic['channels'][i], dic['groups'][i], 'channels', 'groups', input_idx=i)
self.verify_divisible(dic['filters'], 16*dic['groups'][i], 'filters * groups', input_idx=i)
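            # note: padding is stored negated below, presumably because the GPU kernels take a
            # (negative) start offset rather than a pad amount -- an assumption about the backend convention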
dic['padding'][i] = -dic['padding'][i]
dic['overSample'] = [groups*filterChannels/channels for groups,filterChannels,channels in zip(dic['groups'], dic['filterChannels'], dic['channels'])]
return dic
class ConvLayerParser(LocalLayerParser):
def __init__(self):
LocalLayerParser.__init__(self)
def parse(self, name, mcp, prev_layers, model):
dic = LocalLayerParser.parse(self, name, mcp, prev_layers, model)
dic['partialSum'] = mcp.safe_get_int(name, 'partialSum')
dic['sharedBiases'] = mcp.safe_get_bool(name, 'sharedBiases', default=True)
if dic['partialSum'] != 0 and dic['modules'] % dic['partialSum'] != 0:
raise LayerParsingError("Layer '%s': convolutional layer produces %dx%d=%d outputs per filter, but given partialSum parameter (%d) does not divide this number" % (name, dic['modulesX'], dic['modulesX'], dic['modules'], dic['partialSum']))
num_biases = dic['filters'] if dic['sharedBiases'] else dic['modules']*dic['filters']
eltmult = lambda list1, list2: [l1 * l2 for l1,l2 in zip(list1, list2)]
self.make_weights(dic['initW'], eltmult(dic['filterPixels'], dic['filterChannels']), [dic['filters']] * len(dic['inputs']), order='C')
self.make_biases(num_biases, 1, order='C')
print "Initialized convolutional layer '%s', producing %dx%d %d-channel output" % (name, dic['modulesX'], dic['modulesX'], dic['filters'])
return dic
class LocalUnsharedLayerParser(LocalLayerParser):
def __init__(self):
LocalLayerParser.__init__(self)
def parse(self, name, mcp, prev_layers, model):
dic = LocalLayerParser.parse(self, name, mcp, prev_layers, model)
eltmult = lambda list1, list2: [l1 * l2 for l1,l2 in zip(list1, list2)]
scmult = lambda x, lst: [x * l for l in lst]
self.make_weights(dic['initW'], scmult(dic['modules'], eltmult(dic['filterPixels'], dic['filterChannels'])), [dic['filters']] * len(dic['inputs']), order='C')
self.make_biases(dic['modules'] * dic['filters'], 1, order='C')
print "Initialized locally-connected layer '%s', producing %dx%d %d-channel output" % (name, dic['modulesX'], dic['modulesX'], dic['filters'])
return dic
class DataLayerParser(LayerParser):
def __init__(self):
LayerParser.__init__(self)
def parse(self, name, mcp, prev_layers, model):
dic = LayerParser.parse(self, name, mcp, prev_layers, model)
dic['dataIdx'] = mcp.safe_get_int(name, 'dataIdx')
dic['outputs'] = model.train_data_provider.get_data_dims(idx=dic['dataIdx'])
print "Initialized data layer '%s', producing %d outputs" % (name, dic['outputs'])
return dic
class SoftmaxLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self, num_inputs=1)
def parse(self, name, mcp, prev_layers, model):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['outputs'] = prev_layers[dic['inputs'][0]]['outputs']
print "Initialized softmax layer '%s', producing %d outputs" % (name, dic['outputs'])
return dic
class PoolLayerParser(LayerWithInputParser):
def __init__(self):
LayerWithInputParser.__init__(self, num_inputs=1)
def parse(self, name, mcp, prev_layers, model):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['channels'] = mcp.safe_get_int(name, 'channels')
dic['sizeX'] = mcp.safe_get_int(name, 'sizeX')
dic['start'] = mcp.safe_get_int(name, 'start', default=0)
dic['stride'] = mcp.safe_get_int(name, 'stride')
dic['outputsX'] = mcp.safe_get_int(name, 'outputsX', default=0)
dic['pool'] = mcp.safe_get(name, 'pool')
# Avg pooler does not use its acts or inputs
        dic['usesActs'] = dic['pool'] != 'avg'
        dic['usesInputs'] = dic['pool'] != 'avg'
dic['imgPixels'] = dic['numInputs'][0] / dic['channels']
dic['imgSize'] = int(n.sqrt(dic['imgPixels']))
self.verify_num_range(dic['sizeX'], 'sizeX', 1, dic['imgSize'])
self.verify_num_range(dic['stride'], 'stride', 1, dic['sizeX'])
self.verify_num_range(dic['outputsX'], 'outputsX', 0, None)
self.verify_num_range(dic['channels'], 'channels', 1, None)
if LayerWithInputParser.grad_consumers_below(dic):
self.verify_divisible(dic['channels'], 16, 'channels')
self.verify_str_in(dic['pool'], ['max', 'avg'])
self.verify_img_size()
if dic['outputsX'] <= 0:
            dic['outputsX'] = int(ceil((dic['imgSize'] - dic['start'] - dic['sizeX']) / float(dic['stride']))) + 1
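            # e.g. imgSize=32, start=0, sizeX=3, stride=2 -> outputsX = ceil((32 - 0 - 3)/2) + 1 = 16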
dic['outputs'] = dic['outputsX']**2 * dic['channels']
print "Initialized %s-pooling layer '%s', producing %dx%d %d-channel output" % (dic['pool'], name, dic['outputsX'], dic['outputsX'], dic['channels'])
return dic
class NormLayerParser(LayerWithInputParser):
RESPONSE_NORM = 'response'
CONTRAST_NORM = 'contrast'
CROSSMAP_RESPONSE_NORM = 'cross-map response'
def __init__(self, norm_type):
LayerWithInputParser.__init__(self, num_inputs=1)
self.norm_type = norm_type
def add_params(self, mcp):
LayerWithInputParser.add_params(self, mcp)
dic, name = self.dic, self.dic['name']
dic['scale'] = mcp.safe_get_float(name, 'scale')
dic['scale'] /= dic['size'] if self.norm_type == self.CROSSMAP_RESPONSE_NORM else dic['size']**2
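        # e.g. a response-norm with size=5 divides scale by 25; a cross-map norm with size=5 divides it by 5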
dic['pow'] = mcp.safe_get_float(name, 'pow')
def parse(self, name, mcp, prev_layers, model):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['requiresParams'] = True
dic['channels'] = mcp.safe_get_int(name, 'channels')
dic['size'] = mcp.safe_get_int(name, 'size')
dic['blocked'] = mcp.safe_get_bool(name, 'blocked', default=False)
dic['imgPixels'] = dic['numInputs'][0] / dic['channels']
dic['imgSize'] = int(n.sqrt(dic['imgPixels']))
# Contrast normalization layer does not use its inputs
dic['usesInputs'] = self.norm_type != self.CONTRAST_NORM
self.verify_num_range(dic['channels'], 'channels', 1, None)
if self.norm_type == self.CROSSMAP_RESPONSE_NORM:
self.verify_num_range(dic['size'], 'size', 2, dic['channels'])
if dic['channels'] % 16 != 0:
raise LayerParsingError("Layer '%s': number of channels must be divisible by 16 when using crossMap" % name)
else:
self.verify_num_range(dic['size'], 'size', 1, dic['imgSize'])
if self.norm_type != self.CROSSMAP_RESPONSE_NORM and dic['channels'] > 3 and dic['channels'] % 4 != 0:
raise LayerParsingError("Layer '%s': number of channels must be smaller than 4 or divisible by 4" % name)
self.verify_img_size()
dic['outputs'] = dic['imgPixels'] * dic['channels']
print "Initialized %s-normalization layer '%s', producing %dx%d %d-channel output" % (self.norm_type, name, dic['imgSize'], dic['imgSize'], dic['channels'])
return dic
class CostParser(LayerWithInputParser):
def __init__(self, num_inputs=-1):
LayerWithInputParser.__init__(self, num_inputs=num_inputs)
def parse(self, name, mcp, prev_layers, model):
dic = LayerWithInputParser.parse(self, name, mcp, prev_layers, model)
dic['requiresParams'] = True
del dic['neuron']
return dic
def add_params(self, mcp):
LayerWithInputParser.add_params(self, mcp)
dic, name = self.dic, self.dic['name']
dic['coeff'] = mcp.safe_get_float(name, 'coeff')
class LogregCostParser(CostParser):
def __init__(self):
CostParser.__init__(self, num_inputs=2)
def parse(self, name, mcp, prev_layers, model):
dic = CostParser.parse(self, name, mcp, prev_layers, model)
if dic['numInputs'][0] != 1: # first input must be labels
raise LayerParsingError("Layer '%s': dimensionality of first input must be 1" % name)
if prev_layers[dic['inputs'][1]]['type'] != 'softmax':
raise LayerParsingError("Layer '%s': second input must be softmax layer" % name)
if dic['numInputs'][1] != model.train_data_provider.get_num_classes():
raise LayerParsingError("Layer '%s': softmax input '%s' must produce %d outputs, because that is the number of classes in the dataset" \
% (name, prev_layers[dic['inputs'][1]]['name'], model.train_data_provider.get_num_classes()))
print "Initialized logistic regression cost '%s'" % name
return dic
class SumOfSquaresCostParser(CostParser):
def __init__(self):
CostParser.__init__(self, num_inputs=1)
def parse(self, name, mcp, prev_layers, model):
dic = CostParser.parse(self, name, mcp, prev_layers, model)
print "Initialized sum-of-squares cost '%s'" % name
return dic
# All the layer parsers
layer_parsers = {'data': lambda : DataLayerParser(),
'fc': lambda : FCLayerParser(),
'fcdropo': lambda : FCDropOutLayerParser(),
'fcdropc': lambda : FCDropConnectLayerParser(),
'fcdropcf': lambda : FCDropConnectFastLayerParser(),
'conv': lambda : ConvLayerParser(),
'local': lambda : LocalUnsharedLayerParser(),
'softmax': lambda : SoftmaxLayerParser(),
'eltsum': lambda : EltwiseSumLayerParser(),
'eltmax': lambda : EltwiseMaxLayerParser(),
'neuron': lambda : NeuronLayerParser(),
'pool': lambda : PoolLayerParser(),
'rnorm': lambda : NormLayerParser(NormLayerParser.RESPONSE_NORM),
'cnorm': lambda : NormLayerParser(NormLayerParser.CONTRAST_NORM),
'cmrnorm': lambda : NormLayerParser(NormLayerParser.CROSSMAP_RESPONSE_NORM),
'nailbed': lambda : NailbedLayerParser(),
'blur': lambda : GaussianBlurLayerParser(),
'resize': lambda : ResizeLayerParser(),
'rgb2yuv': lambda : RGBToYUVLayerParser(),
'rgb2lab': lambda : RGBToLABLayerParser(),
'rscale': lambda : RandomScaleLayerParser(),
'cost.logreg': lambda : LogregCostParser(),
'cost.sum2': lambda : SumOfSquaresCostParser()}
# All the neuron parsers
# This isn't a name --> parser mapping as the layer parsers above because neurons don't have fixed names.
# A user may write tanh[0.5,0.25], etc.
neuron_parsers = sorted([NeuronParser('ident', 'f(x) = x', uses_acts=False, uses_inputs=False),
NeuronParser('logistic', 'f(x) = 1 / (1 + e^-x)', uses_acts=True, uses_inputs=False),
NeuronParser('abs', 'f(x) = |x|', uses_acts=False, uses_inputs=True),
NeuronParser('relu', 'f(x) = max(0, x)', uses_acts=True, uses_inputs=False),
NeuronParser('softrelu', 'f(x) = log(1 + e^x)', uses_acts=True, uses_inputs=False),
NeuronParser('square', 'f(x) = x^2', uses_acts=False, uses_inputs=True),
NeuronParser('sqrt', 'f(x) = sqrt(x)', uses_acts=True, uses_inputs=False),
ParamNeuronParser('tanh[a,b]', 'f(x) = a * tanh(b * x)', uses_acts=True, uses_inputs=False),
ParamNeuronParser('brelu[a]', 'f(x) = min(a, max(0, x))', uses_acts=True, uses_inputs=False),
ParamNeuronParser('linear[a,b]', 'f(x) = a * x + b', uses_acts=True, uses_inputs=False)],
key=lambda x:x.type)
|
azide0x37/modocDB
|
refs/heads/master
|
venv/lib/python2.7/site-packages/setuptools/command/upload_docs.py
|
332
|
# -*- coding: utf-8 -*-
"""upload_docs
Implements a Distutils 'upload_docs' subcommand (upload documentation to
PyPI's pythonhosted.org).
"""
import os
import socket
import zipfile
import tempfile
import sys
import shutil
from base64 import standard_b64encode
from pkg_resources import iter_entry_points
from distutils import log
from distutils.errors import DistutilsOptionError
from distutils.command.upload import upload
from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3
errors = 'surrogateescape' if PY3 else 'strict'
# This is not just a replacement for byte literals
# but works as a general purpose encoder
def b(s, encoding='utf-8'):
if isinstance(s, unicode):
return s.encode(encoding, errors)
return s
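# e.g. b(u'caf\xe9') -> 'caf\xc3\xa9' (utf-8 bytes), while byte strings pass through unchanged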
class upload_docs(upload):
description = 'Upload documentation to PyPI'
user_options = [
('repository=', 'r',
"url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
('show-response', None,
'display full response text from server'),
('upload-dir=', None, 'directory to upload'),
]
boolean_options = upload.boolean_options
def has_sphinx(self):
if self.upload_dir is None:
for ep in iter_entry_points('distutils.commands', 'build_sphinx'):
return True
sub_commands = [('build_sphinx', has_sphinx)]
def initialize_options(self):
upload.initialize_options(self)
self.upload_dir = None
self.target_dir = None
def finalize_options(self):
upload.finalize_options(self)
if self.upload_dir is None:
if self.has_sphinx():
build_sphinx = self.get_finalized_command('build_sphinx')
self.target_dir = build_sphinx.builder_target_dir
else:
build = self.get_finalized_command('build')
self.target_dir = os.path.join(build.build_base, 'docs')
else:
self.ensure_dirname('upload_dir')
self.target_dir = self.upload_dir
self.announce('Using upload directory %s' % self.target_dir)
def create_zipfile(self, filename):
zip_file = zipfile.ZipFile(filename, "w")
try:
self.mkpath(self.target_dir) # just in case
for root, dirs, files in os.walk(self.target_dir):
if root == self.target_dir and not files:
raise DistutilsOptionError(
"no files found in upload directory '%s'"
% self.target_dir)
for name in files:
full = os.path.join(root, name)
relative = root[len(self.target_dir):].lstrip(os.path.sep)
dest = os.path.join(relative, name)
zip_file.write(full, dest)
finally:
zip_file.close()
def run(self):
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
tmp_dir = tempfile.mkdtemp()
name = self.distribution.metadata.get_name()
zip_file = os.path.join(tmp_dir, "%s.zip" % name)
try:
self.create_zipfile(zip_file)
self.upload_file(zip_file)
finally:
shutil.rmtree(tmp_dir)
def upload_file(self, filename):
f = open(filename, 'rb')
content = f.read()
f.close()
meta = self.distribution.metadata
data = {
':action': 'doc_upload',
'name': meta.get_name(),
'content': (os.path.basename(filename), content),
}
# set up the authentication
credentials = b(self.username + ':' + self.password)
credentials = standard_b64encode(credentials)
if PY3:
credentials = credentials.decode('ascii')
auth = "Basic " + credentials
# Build up the MIME payload for the POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = b('\n--') + b(boundary)
end_boundary = sep_boundary + b('--')
body = []
for key, values in iteritems(data):
title = '\nContent-Disposition: form-data; name="%s"' % key
# handle multiple entries for the same name
if not isinstance(values, list):
values = [values]
for value in values:
if type(value) is tuple:
title += '; filename="%s"' % value[0]
value = value[1]
else:
value = b(value)
body.append(sep_boundary)
body.append(b(title))
body.append(b("\n\n"))
body.append(value)
if value and value[-1:] == b('\r'):
body.append(b('\n')) # write an extra newline (lurve Macs)
body.append(end_boundary)
body.append(b("\n"))
body = b('').join(body)
self.announce("Submitting documentation to %s" % (self.repository),
log.INFO)
# build the Request
# We can't use urllib2 since we need to send the Basic
# auth right with the first request
schema, netloc, url, params, query, fragments = \
urlparse(self.repository)
assert not params and not query and not fragments
if schema == 'http':
conn = httplib.HTTPConnection(netloc)
elif schema == 'https':
conn = httplib.HTTPSConnection(netloc)
else:
raise AssertionError("unsupported schema "+schema)
data = ''
try:
conn.connect()
conn.putrequest("POST", url)
content_type = 'multipart/form-data; boundary=%s' % boundary
conn.putheader('Content-type', content_type)
conn.putheader('Content-length', str(len(body)))
conn.putheader('Authorization', auth)
conn.endheaders()
conn.send(body)
except socket.error:
e = sys.exc_info()[1]
self.announce(str(e), log.ERROR)
return
r = conn.getresponse()
if r.status == 200:
self.announce('Server response (%s): %s' % (r.status, r.reason),
log.INFO)
elif r.status == 301:
location = r.getheader('Location')
if location is None:
location = 'https://pythonhosted.org/%s/' % meta.get_name()
self.announce('Upload successful. Visit %s' % location,
log.INFO)
else:
self.announce('Upload failed (%s): %s' % (r.status, r.reason),
log.ERROR)
if self.show_response:
print('-'*75, r.read(), '-'*75)
|
bnrubin/ubuntu-bots
|
refs/heads/master
|
Webcal/icalendar/prop.py
|
1
|
# -*- coding: latin-1 -*-
"""
This module contains the parser/generators (or coders/encoders if you prefer)
for the classes/datatypes that are used in Icalendar:
###########################################################################
# This module defines these property value data types and property parameters
4.2 Defined property parameters are:
ALTREP, CN, CUTYPE, DELEGATED-FROM, DELEGATED-TO, DIR, ENCODING, FMTTYPE,
FBTYPE, LANGUAGE, MEMBER, PARTSTAT, RANGE, RELATED, RELTYPE, ROLE, RSVP,
SENT-BY, TZID, VALUE
4.3 Defined value data types are:
BINARY, BOOLEAN, CAL-ADDRESS, DATE, DATE-TIME, DURATION, FLOAT, INTEGER,
PERIOD, RECUR, TEXT, TIME, URI, UTC-OFFSET
###########################################################################
iCalendar properties have values. The values are strongly typed. This module
defines these types; calling val.ical() on them will render them as defined in
rfc2445.
If you pass any of these classes a Python primitive, you will get an object
that can render itself as iCalendar formatted data.
Property Value Data Types start with a 'v'. They all have an ical() and
from_ical() method. The ical() method generates a text string in the iCalendar
format. The from_ical() method can parse this format and return a primitive
Python datatype. So it should always be true that:
    x == vDataType.from_ical(VDataType(x).ical())
These types are mainly used for parsing and file generation. But you can set
them directly.
"""
# from python >= 2.3
from datetime import datetime, timedelta, time, date, tzinfo
from types import IntType, StringType, UnicodeType, TupleType, ListType
SequenceTypes = [TupleType, ListType]
import re
import time as _time
# from this package
from icalendar.caselessdict import CaselessDict
from icalendar.parser import Parameters
DATE_PART = r'(\d+)D'
TIME_PART = r'T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?'
DATETIME_PART = '(?:%s)?(?:%s)?' % (DATE_PART, TIME_PART)
WEEKS_PART = r'(\d+)W'
DURATION_REGEX = re.compile(r'([-+]?)P(?:%s|%s)$'
% (WEEKS_PART, DATETIME_PART))
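# e.g. DURATION_REGEX.match('-P1DT2H3M4S').groups() -> ('-', None, '1', '2', '3', '4');
# 'P7W' takes the weeks branch instead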
WEEKDAY_RULE = re.compile('(?P<signal>[+-]?)(?P<relative>[\d]?)'
'(?P<weekday>[\w]{2})$')
class vBinary:
"""
Binary property values are base 64 encoded
>>> b = vBinary('This is gibberish')
>>> b.ical()
'VGhpcyBpcyBnaWJiZXJpc2g='
>>> b = vBinary.from_ical('VGhpcyBpcyBnaWJiZXJpc2g=')
>>> b
'This is gibberish'
The roundtrip test
>>> x = 'Binary data æ ø å \x13 \x56'
>>> vBinary(x).ical()
'QmluYXJ5IGRhdGEg5iD4IOUgEyBW'
>>> vBinary.from_ical('QmluYXJ5IGRhdGEg5iD4IOUgEyBW')
'Binary data \\xe6 \\xf8 \\xe5 \\x13 V'
>>> b = vBinary('txt')
>>> b.params
Parameters({'VALUE': 'BINARY', 'ENCODING': 'BASE64'})
"""
def __init__(self, obj):
self.obj = obj
self.params = Parameters(encoding='BASE64', value="BINARY")
def __repr__(self):
return "vBinary(%s)" % str.__repr__(self.obj)
def ical(self):
return self.obj.encode('base-64')[:-1]
def from_ical(ical):
"Parses the data format from ical text format"
try:
return ical.decode('base-64')
except:
raise ValueError, 'Not valid base 64 encoding.'
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vBoolean(int):
"""
Returns specific string according to state
>>> bin = vBoolean(True)
>>> bin.ical()
'TRUE'
>>> bin = vBoolean(0)
>>> bin.ical()
'FALSE'
The roundtrip test
>>> x = True
>>> x == vBoolean.from_ical(vBoolean(x).ical())
True
>>> vBoolean.from_ical('true')
True
"""
def __init__(self, *args, **kwargs):
int.__init__(self, *args, **kwargs)
self.params = Parameters()
def ical(self):
if self:
return 'TRUE'
return 'FALSE'
bool_map = CaselessDict(true=True, false=False)
def from_ical(ical):
"Parses the data format from ical text format"
try:
return vBoolean.bool_map[ical]
except:
raise ValueError, "Expected 'TRUE' or 'FALSE'. Got %s" % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vCalAddress(str):
"""
This just returns an unquoted string
>>> a = vCalAddress('MAILTO:maxm@mxm.dk')
>>> a.params['cn'] = 'Max M'
>>> a.ical()
'MAILTO:maxm@mxm.dk'
>>> str(a)
'MAILTO:maxm@mxm.dk'
>>> a.params
Parameters({'CN': 'Max M'})
>>> vCalAddress.from_ical('MAILTO:maxm@mxm.dk')
'MAILTO:maxm@mxm.dk'
"""
def __init__(self, *args, **kwargs):
str.__init__(self, *args, **kwargs)
self.params = Parameters()
def __repr__(self):
return u"vCalAddress(%s)" % str.__repr__(self)
def ical(self):
return str(self)
def from_ical(ical):
"Parses the data format from ical text format"
try:
return str(ical)
except:
raise ValueError, 'Expected vCalAddress, got: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return str.__str__(self)
####################################################
# handy tzinfo classes you can use.
ZERO = timedelta(0)
HOUR = timedelta(hours=1)
STDOFFSET = timedelta(seconds = -_time.timezone)
if _time.daylight:
DSTOFFSET = timedelta(seconds = -_time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
class FixedOffset(tzinfo):
"""Fixed offset in minutes east from UTC."""
def __init__(self, offset, name):
self.__offset = timedelta(minutes = offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return ZERO
class Utc(tzinfo):
"""UTC tzinfo subclass"""
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
UTC = Utc()
class LocalTimezone(tzinfo):
"""
Timezone of the machine where the code is running
"""
def utcoffset(self, dt):
if self._isdst(dt):
return DSTOFFSET
else:
return STDOFFSET
def dst(self, dt):
if self._isdst(dt):
return DSTDIFF
else:
return ZERO
def tzname(self, dt):
return _time.tzname[self._isdst(dt)]
def _isdst(self, dt):
tt = (dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.weekday(), 0, -1)
stamp = _time.mktime(tt)
tt = _time.localtime(stamp)
return tt.tm_isdst > 0
####################################################
class vDatetime:
"""
Render and generates iCalendar datetime format.
Important: if tzinfo is defined it renders itself as "date with utc time"
Meaning that it has a 'Z' appended, and is in absolute time.
>>> d = datetime(2001, 1,1, 12, 30, 0)
>>> dt = vDatetime(d)
>>> dt.ical()
'20010101T123000'
>>> vDatetime.from_ical('20000101T120000')
datetime.datetime(2000, 1, 1, 12, 0)
>>> dutc = datetime(2001, 1,1, 12, 30, 0, tzinfo=UTC)
>>> vDatetime(dutc).ical()
'20010101T123000Z'
>>> vDatetime.from_ical('20010101T000000')
datetime.datetime(2001, 1, 1, 0, 0)
>>> vDatetime.from_ical('20010101T000000A')
Traceback (most recent call last):
...
ValueError: Wrong datetime format: 20010101T000000A
>>> utc = vDatetime.from_ical('20010101T000000Z')
>>> vDatetime(utc).ical()
'20010101T000000Z'
"""
def __init__(self, dt):
self.dt = dt
self.params = Parameters()
def ical(self):
if self.dt.tzinfo:
            offset = self.dt.tzinfo.utcoffset(datetime.now())
            utc_time = self.dt - offset
return utc_time.strftime("%Y%m%dT%H%M%SZ")
return self.dt.strftime("%Y%m%dT%H%M%S")
def from_ical(ical):
"Parses the data format from ical text format"
try:
timetuple = map(int, ((
ical[:4], # year
ical[4:6], # month
ical[6:8], # day
ical[9:11], # hour
ical[11:13], # minute
ical[13:15], # second
)))
if not ical[15:]:
return datetime(*timetuple)
elif ical[15:16] == 'Z':
timetuple += [0, UTC]
return datetime(*timetuple)
else:
raise ValueError, ical
except:
raise ValueError, 'Wrong datetime format: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vDate:
"""
Render and generates iCalendar date format.
>>> d = date(2001, 1,1)
>>> vDate(d).ical()
'20010101'
>>> vDate.from_ical('20010102')
datetime.date(2001, 1, 2)
>>> vDate('d').ical()
Traceback (most recent call last):
...
ValueError: Value MUST be a date instance
"""
def __init__(self, dt):
if not isinstance(dt, date):
raise ValueError('Value MUST be a date instance')
self.dt = dt
self.params = Parameters()
def ical(self):
return self.dt.strftime("%Y%m%d")
def from_ical(ical):
"Parses the data format from ical text format"
try:
timetuple = map(int, ((
ical[:4], # year
ical[4:6], # month
ical[6:8], # day
)))
return date(*timetuple)
except:
raise ValueError, 'Wrong date format %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vDuration:
"""
    Wraps a timedelta value and renders it in the iCalendar DURATION format.
>>> vDuration(timedelta(11)).ical()
'P11D'
>>> vDuration(timedelta(-14)).ical()
'-P14D'
>>> vDuration(timedelta(1, 7384)).ical()
'P1DT2H3M4S'
>>> vDuration(timedelta(1, 7380)).ical()
'P1DT2H3M'
>>> vDuration(timedelta(1, 7200)).ical()
'P1DT2H'
>>> vDuration(timedelta(0, 7200)).ical()
'PT2H'
>>> vDuration(timedelta(0, 7384)).ical()
'PT2H3M4S'
>>> vDuration(timedelta(0, 184)).ical()
'PT3M4S'
>>> vDuration(timedelta(0, 22)).ical()
'PT22S'
>>> vDuration(timedelta(0, 3622)).ical()
'PT1H0M22S'
>>> vDuration(timedelta(days=1, hours=5)).ical()
'P1DT5H'
>>> vDuration(timedelta(hours=-5)).ical()
'-PT5H'
>>> vDuration(timedelta(days=-1, hours=-5)).ical()
'-P1DT5H'
How does the parsing work?
>>> vDuration.from_ical('PT1H0M22S')
datetime.timedelta(0, 3622)
>>> vDuration.from_ical('kox')
Traceback (most recent call last):
...
ValueError: Invalid iCalendar duration: kox
>>> vDuration.from_ical('-P14D')
datetime.timedelta(-14)
>>> vDuration(11)
Traceback (most recent call last):
...
ValueError: Value MUST be a timedelta instance
"""
def __init__(self, td):
if not isinstance(td, timedelta):
raise ValueError('Value MUST be a timedelta instance')
self.td = td
self.params = Parameters()
def ical(self):
sign = ""
if self.td.days < 0:
sign = "-"
self.td = -self.td
timepart = ""
if self.td.seconds:
timepart = "T"
hours = self.td.seconds // 3600
minutes = self.td.seconds % 3600 // 60
seconds = self.td.seconds % 60
if hours:
timepart += "%dH" % hours
if minutes or (hours and seconds):
timepart += "%dM" % minutes
if seconds:
timepart += "%dS" % seconds
if self.td.days == 0 and timepart:
return "%sP%s" % (sign, timepart)
else:
return "%sP%dD%s" % (sign, abs(self.td.days), timepart)
def from_ical(ical):
"""
Parses the data format from ical text format.
"""
try:
match = DURATION_REGEX.match(ical)
sign, weeks, days, hours, minutes, seconds = match.groups()
if weeks:
value = timedelta(weeks=int(weeks))
else:
value = timedelta(days=int(days or 0),
hours=int(hours or 0),
minutes=int(minutes or 0),
seconds=int(seconds or 0))
if sign == '-':
value = -value
return value
except:
raise ValueError('Invalid iCalendar duration: %s' % ical)
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vFloat(float):
"""
Just a float.
>>> f = vFloat(1.0)
>>> f.ical()
'1.0'
>>> vFloat.from_ical('42')
42.0
>>> vFloat(42).ical()
'42.0'
"""
def __init__(self, *args, **kwargs):
float.__init__(self, *args, **kwargs)
self.params = Parameters()
def ical(self):
return str(self)
def from_ical(ical):
"Parses the data format from ical text format"
try:
return float(ical)
except:
raise ValueError, 'Expected float value, got: %s' % ical
from_ical = staticmethod(from_ical)
class vInt(int):
"""
Just an int.
>>> f = vInt(42)
>>> f.ical()
'42'
>>> vInt.from_ical('13')
13
>>> vInt.from_ical('1s3')
Traceback (most recent call last):
...
ValueError: Expected int, got: 1s3
"""
def __init__(self, *args, **kwargs):
int.__init__(self, *args, **kwargs)
self.params = Parameters()
def ical(self):
return str(self)
def from_ical(ical):
"Parses the data format from ical text format"
try:
return int(ical)
except:
raise ValueError, 'Expected int, got: %s' % ical
from_ical = staticmethod(from_ical)
class vDDDTypes:
"""
    A combined Datetime, Date or Duration parser/generator. Their formats cannot
    be confused, and often values can be of either type. So this is practical.
>>> d = vDDDTypes.from_ical('20010101T123000')
>>> type(d)
<type 'datetime.datetime'>
>>> repr(vDDDTypes.from_ical('20010101T123000Z'))[:65]
'datetime.datetime(2001, 1, 1, 12, 30, tzinfo=<icalendar.prop.Utc '
>>> d = vDDDTypes.from_ical('20010101')
>>> type(d)
<type 'datetime.date'>
>>> vDDDTypes.from_ical('P31D')
datetime.timedelta(31)
>>> vDDDTypes.from_ical('-P31D')
datetime.timedelta(-31)
Bad input
>>> vDDDTypes(42)
Traceback (most recent call last):
...
ValueError: You must use datetime, date or timedelta
"""
    def __init__(self, dt):
        "Wraps a datetime, date or timedelta value"
        if not isinstance(dt, (datetime, date, timedelta)):
            raise ValueError('You must use datetime, date or timedelta')
        self.dt = dt
def ical(self):
dt = self.dt
if isinstance(dt, datetime):
return vDatetime(dt).ical()
elif isinstance(dt, date):
return vDate(dt).ical()
elif isinstance(dt, timedelta):
return vDuration(dt).ical()
else:
            raise ValueError('Unknown date type')
def from_ical(ical):
"Parses the data format from ical text format"
u = ical.upper()
if u.startswith('-P') or u.startswith('P'):
return vDuration.from_ical(ical)
try:
return vDatetime.from_ical(ical)
except:
return vDate.from_ical(ical)
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vDDDLists:
"""
A list of vDDDTypes values.
>>> dt_list = vDDDLists.from_ical('19960402T010000Z')
>>> type(dt_list)
<type 'list'>
>>> len(dt_list)
1
>>> type(dt_list[0])
<type 'datetime.datetime'>
>>> str(dt_list[0])
'1996-04-02 01:00:00+00:00'
>>> dt_list = vDDDLists.from_ical('19960402T010000Z,19960403T010000Z,19960404T010000Z')
>>> len(dt_list)
3
>>> str(dt_list[0])
'1996-04-02 01:00:00+00:00'
>>> str(dt_list[2])
'1996-04-04 01:00:00+00:00'
>>> dt_list = vDDDLists('19960402T010000Z')
Traceback (most recent call last):
...
ValueError: Value MUST be a list (of date instances)
>>> dt_list = vDDDLists([])
>>> str(dt_list)
''
>>> dt_list = vDDDLists([datetime(2000,1,1)])
>>> str(dt_list)
'20000101T000000'
>>> dt_list = vDDDLists([datetime(2000,1,1), datetime(2000,11,11)])
>>> str(dt_list)
'20000101T000000,20001111T000000'
"""
def __init__(self, dt_list):
if not isinstance(dt_list, list):
raise ValueError('Value MUST be a list (of date instances)')
vDDD = []
for dt in dt_list:
vDDD.append(vDDDTypes(dt))
self.dts = vDDD
def ical(self):
'''
Generates the text string in the iCalendar format.
'''
dts_ical = [dt.ical() for dt in self.dts]
return ",".join(dts_ical)
def from_ical(ical):
'''
Parses the list of data formats from ical text format.
@param ical: ical text format
'''
out = []
ical_dates = ical.split(",")
for ical_dt in ical_dates:
out.append(vDDDTypes.from_ical(ical_dt))
return out
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vPeriod:
"""
A precise period of time.
One day in exact datetimes
>>> per = (datetime(2000,1,1), datetime(2000,1,2))
>>> p = vPeriod(per)
>>> p.ical()
'20000101T000000/20000102T000000'
>>> per = (datetime(2000,1,1), timedelta(days=31))
>>> p = vPeriod(per)
>>> p.ical()
'20000101T000000/P31D'
Roundtrip
>>> p = vPeriod.from_ical('20000101T000000/20000102T000000')
>>> p
(datetime.datetime(2000, 1, 1, 0, 0), datetime.datetime(2000, 1, 2, 0, 0))
>>> vPeriod(p).ical()
'20000101T000000/20000102T000000'
>>> vPeriod.from_ical('20000101T000000/P31D')
(datetime.datetime(2000, 1, 1, 0, 0), datetime.timedelta(31))
Roundtrip with absolute time
>>> p = vPeriod.from_ical('20000101T000000Z/20000102T000000Z')
>>> vPeriod(p).ical()
'20000101T000000Z/20000102T000000Z'
And an error
>>> vPeriod.from_ical('20000101T000000/Psd31D')
Traceback (most recent call last):
...
ValueError: Expected period format, got: 20000101T000000/Psd31D
Utc datetime
>>> da_tz = FixedOffset(+1.0, 'da_DK')
>>> start = datetime(2000,1,1, tzinfo=da_tz)
>>> end = datetime(2000,1,2, tzinfo=da_tz)
>>> per = (start, end)
>>> vPeriod(per).ical()
'19991231T235900Z/20000101T235900Z'
>>> p = vPeriod((datetime(2000,1,1, tzinfo=da_tz), timedelta(days=31)))
>>> p.ical()
'19991231T235900Z/P31D'
"""
def __init__(self, per):
start, end_or_duration = per
if not (isinstance(start, datetime) or isinstance(start, date)):
raise ValueError('Start value MUST be a datetime or date instance')
if not (isinstance(end_or_duration, datetime) or
isinstance(end_or_duration, date) or
isinstance(end_or_duration, timedelta)):
raise ValueError('end_or_duration MUST be a datetime, date or timedelta instance')
self.start = start
self.end_or_duration = end_or_duration
self.by_duration = 0
if isinstance(end_or_duration, timedelta):
self.by_duration = 1
self.duration = end_or_duration
self.end = self.start + self.duration
else:
self.end = end_or_duration
self.duration = self.end - self.start
if self.start > self.end:
raise ValueError("Start time is greater than end time")
self.params = Parameters()
def __cmp__(self, other):
if not isinstance(other, vPeriod):
raise NotImplementedError(
'Cannot compare vPeriod with %s' % repr(other))
return cmp((self.start, self.end), (other.start, other.end))
def overlaps(self, other):
if self.start > other.start:
return other.overlaps(self)
if self.start <= other.start < self.end:
return True
return False
def ical(self):
if self.by_duration:
return '%s/%s' % (vDatetime(self.start).ical(), vDuration(self.duration).ical())
return '%s/%s' % (vDatetime(self.start).ical(), vDatetime(self.end).ical())
def from_ical(ical):
"Parses the data format from ical text format"
try:
start, end_or_duration = ical.split('/')
start = vDDDTypes.from_ical(start)
end_or_duration = vDDDTypes.from_ical(end_or_duration)
return (start, end_or_duration)
except:
raise ValueError, 'Expected period format, got: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
def __repr__(self):
if self.by_duration:
p = (self.start, self.duration)
else:
p = (self.start, self.end)
return 'vPeriod(%s)' % repr(p)
class vWeekday(str):
"""
    This returns an unquoted weekday abbreviation
>>> a = vWeekday('mo')
>>> a.ical()
'MO'
>>> a = vWeekday('erwer')
Traceback (most recent call last):
...
    ValueError: Expected weekday abbreviation, got: erwer
>>> vWeekday.from_ical('mo')
'MO'
>>> vWeekday.from_ical('+3mo')
'+3MO'
>>> vWeekday.from_ical('Saturday')
Traceback (most recent call last):
...
    ValueError: Expected weekday abbreviation, got: Saturday
>>> a = vWeekday('+mo')
>>> a.ical()
'+MO'
>>> a = vWeekday('+3mo')
>>> a.ical()
'+3MO'
>>> a = vWeekday('-tu')
>>> a.ical()
'-TU'
"""
week_days = CaselessDict({"SU":0, "MO":1, "TU":2, "WE":3,
"TH":4, "FR":5, "SA":6})
def __init__(self, *args, **kwargs):
str.__init__(self, *args, **kwargs)
match = WEEKDAY_RULE.match(self)
if match is None:
            raise ValueError, 'Expected weekday abbreviation, got: %s' % self
match = match.groupdict()
sign = match['signal']
weekday = match['weekday']
relative = match['relative']
        if weekday not in vWeekday.week_days or sign not in '+-':
            raise ValueError, 'Expected weekday abbreviation, got: %s' % self
self.relative = relative and int(relative) or None
self.params = Parameters()
def ical(self):
return self.upper()
def from_ical(ical):
"Parses the data format from ical text format"
try:
return vWeekday(ical.upper())
except:
            raise ValueError, 'Expected weekday abbreviation, got: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vFrequency(str):
"""
A simple class that catches illegal values.
>>> f = vFrequency('bad test')
Traceback (most recent call last):
...
ValueError: Expected frequency, got: BAD TEST
>>> vFrequency('daily').ical()
'DAILY'
>>> vFrequency('daily').from_ical('MONTHLY')
'MONTHLY'
"""
frequencies = CaselessDict({
"SECONDLY":"SECONDLY",
"MINUTELY":"MINUTELY",
"HOURLY":"HOURLY",
"DAILY":"DAILY",
"WEEKLY":"WEEKLY",
"MONTHLY":"MONTHLY",
"YEARLY":"YEARLY",
})
def __init__(self, *args, **kwargs):
str.__init__(self, *args, **kwargs)
        if self not in vFrequency.frequencies:
raise ValueError, 'Expected frequency, got: %s' % self
self.params = Parameters()
def ical(self):
return self.upper()
def from_ical(ical):
"Parses the data format from ical text format"
try:
return vFrequency(ical.upper())
except:
            raise ValueError, 'Expected frequency, got: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vRecur(CaselessDict):
"""
Let's see how close we can get to one from the rfc:
FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=SU;BYHOUR=8,9;BYMINUTE=30
>>> r = dict(freq='yearly', interval=2)
>>> r['bymonth'] = 1
>>> r['byday'] = 'su'
>>> r['byhour'] = [8,9]
>>> r['byminute'] = 30
>>> r = vRecur(r)
>>> r.ical()
'BYHOUR=8,9;BYDAY=SU;BYMINUTE=30;BYMONTH=1;FREQ=YEARLY;INTERVAL=2'
>>> r = vRecur(FREQ='yearly', INTERVAL=2)
>>> r['BYMONTH'] = 1
>>> r['BYDAY'] = 'su'
>>> r['BYHOUR'] = [8,9]
>>> r['BYMINUTE'] = 30
>>> r.ical()
'BYDAY=SU;BYMINUTE=30;BYMONTH=1;INTERVAL=2;FREQ=YEARLY;BYHOUR=8,9'
>>> r = vRecur(freq='DAILY', count=10)
>>> r['bysecond'] = [0, 15, 30, 45]
>>> r.ical()
'COUNT=10;FREQ=DAILY;BYSECOND=0,15,30,45'
>>> r = vRecur(freq='DAILY', until=datetime(2005,1,1,12,0,0))
>>> r.ical()
'FREQ=DAILY;UNTIL=20050101T120000'
How do we fare with regards to parsing?
>>> r = vRecur.from_ical('FREQ=DAILY;INTERVAL=2;COUNT=10')
>>> r
{'COUNT': [10], 'FREQ': ['DAILY'], 'INTERVAL': [2]}
>>> vRecur(r).ical()
'COUNT=10;FREQ=DAILY;INTERVAL=2'
>>> r = vRecur.from_ical('FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=-SU;BYHOUR=8,9;BYMINUTE=30')
>>> r
{'BYHOUR': [8, 9], 'BYDAY': ['-SU'], 'BYMINUTE': [30], 'BYMONTH': [1], 'FREQ': ['YEARLY'], 'INTERVAL': [2]}
>>> vRecur(r).ical()
'BYDAY=-SU;BYMINUTE=30;INTERVAL=2;BYMONTH=1;FREQ=YEARLY;BYHOUR=8,9'
Some examples from the spec
>>> r = vRecur.from_ical('FREQ=MONTHLY;BYDAY=MO,TU,WE,TH,FR;BYSETPOS=-1')
>>> vRecur(r).ical()
'BYSETPOS=-1;FREQ=MONTHLY;BYDAY=MO,TU,WE,TH,FR'
>>> r = vRecur.from_ical('FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=SU;BYHOUR=8,9;BYMINUTE=30')
>>> vRecur(r).ical()
'BYDAY=SU;BYMINUTE=30;INTERVAL=2;BYMONTH=1;FREQ=YEARLY;BYHOUR=8,9'
and some errors
>>> r = vRecur.from_ical('BYDAY=12')
Traceback (most recent call last):
...
ValueError: Error in recurrence rule: BYDAY=12
"""
frequencies = ["SECONDLY", "MINUTELY", "HOURLY", "DAILY", "WEEKLY",
"MONTHLY", "YEARLY"]
types = CaselessDict({
'COUNT':vInt,
'INTERVAL':vInt,
'BYSECOND':vInt,
'BYMINUTE':vInt,
'BYHOUR':vInt,
'BYMONTHDAY':vInt,
'BYYEARDAY':vInt,
'BYMONTH':vInt,
'UNTIL':vDDDTypes,
'BYSETPOS':vInt,
'WKST':vWeekday,
'BYDAY':vWeekday,
'FREQ':vFrequency
})
def __init__(self, *args, **kwargs):
CaselessDict.__init__(self, *args, **kwargs)
self.params = Parameters()
def ical(self):
# SequenceTypes
result = []
for key, vals in self.items():
typ = self.types[key]
if not type(vals) in SequenceTypes:
vals = [vals]
vals = ','.join([typ(val).ical() for val in vals])
result.append('%s=%s' % (key, vals))
return ';'.join(result)
def parse_type(key, values):
        # look up the value parser for this rule part; unknown keys fall back to vText
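        # e.g. parse_type('BYHOUR', '8,9') -> [8, 9]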
parser = vRecur.types.get(key, vText)
return [parser.from_ical(v) for v in values.split(',')]
parse_type = staticmethod(parse_type)
def from_ical(ical):
"Parses the data format from ical text format"
try:
recur = vRecur()
for pairs in ical.split(';'):
key, vals = pairs.split('=')
recur[key] = vRecur.parse_type(key, vals)
return dict(recur)
except:
raise ValueError, 'Error in recurrence rule: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vText(unicode):
"""
Simple text
>>> t = vText(u'Simple text')
>>> t.ical()
'Simple text'
Escaped text
>>> t = vText('Text ; with escaped, chars')
>>> t.ical()
'Text \\\\; with escaped\\\\, chars'
Escaped newlines
>>> vText('Text with escaped\N chars').ical()
'Text with escaped\\\\n chars'
If you pass a unicode object, it will be utf-8 encoded. As this is the
(only) standard that RFC 2445 support.
>>> t = vText(u'international chars æøå ÆØÅ ü')
>>> t.ical()
'international chars \\xc3\\xa6\\xc3\\xb8\\xc3\\xa5 \\xc3\\x86\\xc3\\x98\\xc3\\x85 \\xc3\\xbc'
Unicode is converted to utf-8
>>> t = vText(u'international æ ø å')
>>> str(t)
'international \\xc3\\xa6 \\xc3\\xb8 \\xc3\\xa5'
and parsing?
>>> vText.from_ical('Text \\; with escaped\\, chars')
u'Text ; with escaped, chars'
>>> print vText.from_ical('A string with\\; some\\\\ characters in\\Nit')
A string with; some\\ characters in
it
"""
encoding = 'utf-8'
def __init__(self, *args, **kwargs):
unicode.__init__(self, *args, **kwargs)
self.params = Parameters()
def escape(self):
"""
Format value according to iCalendar TEXT escaping rules.
"""
return (self.replace('\N', '\n')
.replace('\\', '\\\\')
.replace(';', r'\;')
.replace(',', r'\,')
.replace('\r\n', r'\n')
.replace('\n', r'\n')
)
def __repr__(self):
return u"vText(%s)" % unicode.__repr__(self)
def ical(self):
return self.escape().encode(self.encoding)
def from_ical(ical):
"Parses the data format from ical text format"
try:
ical = (ical.replace(r'\N', r'\n')
.replace(r'\r\n', '\n')
.replace(r'\n', '\n')
.replace(r'\,', ',')
.replace(r'\;', ';')
.replace('\\\\', '\\'))
return ical.decode(vText.encoding)
except:
raise ValueError, 'Expected ical text, got: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vTime(time):
"""
    A subclass of time that renders itself in the iCalendar time
format.
>>> dt = vTime(12, 30, 0)
>>> dt.ical()
'123000'
>>> vTime.from_ical('123000')
datetime.time(12, 30)
We should also fail, right?
>>> vTime.from_ical('263000')
Traceback (most recent call last):
...
ValueError: Expected time, got: 263000
"""
def __init__(self, *args, **kwargs):
time.__init__(self, *args, **kwargs)
self.params = Parameters()
def ical(self):
return self.strftime("%H%M%S")
def from_ical(ical):
"Parses the data format from ical text format"
try:
timetuple = map(int, (ical[:2],ical[2:4],ical[4:6]))
return time(*timetuple)
except:
raise ValueError, 'Expected time, got: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vUri(str):
"""
Uniform resource identifier is basically just an unquoted string.
>>> u = vUri('http://www.example.com/')
>>> u.ical()
'http://www.example.com/'
>>> vUri.from_ical('http://www.example.com/') # doh!
'http://www.example.com/'
"""
def __init__(self, *args, **kwargs):
str.__init__(self, *args, **kwargs)
self.params = Parameters()
def ical(self):
return str(self)
def from_ical(ical):
"Parses the data format from ical text format"
try:
return str(ical)
except:
raise ValueError, 'Expected uri, got: %s' % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return str.__str__(self)
class vGeo:
"""
A special type that is only indirectly defined in the RFC.
>>> g = vGeo((1.2, 3.0))
>>> g.ical()
'1.2;3.0'
>>> g = vGeo.from_ical('37.386013;-122.082932')
>>> g
(37.386012999999998, -122.082932)
>>> vGeo(g).ical()
'37.386013;-122.082932'
>>> vGeo('g').ical()
Traceback (most recent call last):
...
ValueError: Input must be (float, float) for latitude and longitude
"""
def __init__(self, geo):
try:
latitude, longitude = geo
latitude = float(latitude)
longitude = float(longitude)
except:
raise ValueError('Input must be (float, float) for latitude and longitude')
self.latitude = latitude
self.longitude = longitude
self.params = Parameters()
def ical(self):
return '%s;%s' % (self.latitude, self.longitude)
def from_ical(ical):
"Parses the data format from ical text format"
try:
latitude, longitude = ical.split(';')
return (float(latitude), float(longitude))
except:
raise ValueError, "Expected 'float;float' , got: %s" % ical
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vUTCOffset:
"""
Renders itself as a utc offset
>>> u = vUTCOffset(timedelta(hours=2))
>>> u.ical()
'+0200'
>>> u = vUTCOffset(timedelta(hours=-5))
>>> u.ical()
'-0500'
>>> u = vUTCOffset(timedelta())
>>> u.ical()
'0000'
>>> u = vUTCOffset(timedelta(minutes=-30))
>>> u.ical()
'-0030'
>>> u = vUTCOffset(timedelta(hours=2, minutes=-30))
>>> u.ical()
'+0130'
>>> u = vUTCOffset(timedelta(hours=1, minutes=30))
>>> u.ical()
'+0130'
Parsing
>>> vUTCOffset.from_ical('0000')
datetime.timedelta(0)
>>> vUTCOffset.from_ical('-0030')
datetime.timedelta(-1, 84600)
>>> vUTCOffset.from_ical('+0200')
datetime.timedelta(0, 7200)
>>> o = vUTCOffset.from_ical('+0230')
>>> vUTCOffset(o).ical()
'+0230'
And a few failures
>>> vUTCOffset.from_ical('+323k')
Traceback (most recent call last):
...
ValueError: Expected utc offset, got: +323k
>>> vUTCOffset.from_ical('+2400')
Traceback (most recent call last):
...
ValueError: Offset must be less than 24 hours, was +2400
"""
def __init__(self, td):
if not isinstance(td, timedelta):
raise ValueError('Offset value MUST be a timedelta instance')
self.td = td
self.params = Parameters()
def ical(self):
td = self.td
day_in_minutes = (td.days * 24 * 60)
seconds_in_minutes = td.seconds // 60
total_minutes = day_in_minutes + seconds_in_minutes
if total_minutes == 0:
sign = '%s'
elif total_minutes < 0:
sign = '-%s'
else:
sign = '+%s'
hours = abs(total_minutes) // 60
minutes = total_minutes % 60
duration = '%02i%02i' % (hours, minutes)
return sign % duration
def from_ical(ical):
"Parses the data format from ical text format"
try:
sign, hours, minutes = (ical[-5:-4], int(ical[-4:-2]), int(ical[-2:]))
offset = timedelta(hours=hours, minutes=minutes)
except:
raise ValueError, 'Expected utc offset, got: %s' % ical
if offset >= timedelta(hours=24):
raise ValueError, 'Offset must be less than 24 hours, was %s' % ical
if sign == '-':
return -offset
return offset
from_ical = staticmethod(from_ical)
def __str__(self):
return self.ical()
class vInline(str):
"""
This is an especially dumb class that just holds raw unparsed text and has
parameters. Conversion of inline values is handled by the Component class,
so no further processing is needed.
>>> vInline('Some text')
'Some text'
>>> vInline.from_ical('Some text')
'Some text'
>>> t2 = vInline('other text')
>>> t2.params['cn'] = 'Test Osterone'
>>> t2.params
Parameters({'CN': 'Test Osterone'})
"""
def __init__(self, obj):
self.obj = obj
self.params = Parameters()
def ical(self):
return str(self)
def from_ical(ical):
return str(ical)
from_ical = staticmethod(from_ical)
def __str__(self):
return str(self.obj)
class TypesFactory(CaselessDict):
"""
All value types defined in RFC 2445 are registered in this factory class. To
get a type you can use it like this.
>>> factory = TypesFactory()
>>> datetime_parser = factory['date-time']
>>> dt = datetime_parser(datetime(2001, 1, 1))
>>> dt.ical()
'20010101T000000'
A typical use is when the parser tries to find a content type and use text
as the default
>>> value = '20050101T123000'
>>> value_type = 'date-time'
>>> typ = factory.get(value_type, 'text')
>>> typ.from_ical(value)
datetime.datetime(2005, 1, 1, 12, 30)
It can also be used to directly encode property and parameter values
>>> comment = factory.ical('comment', u'by Rasmussen, Max Møller')
>>> str(comment)
'by Rasmussen\\\\, Max M\\xc3\\xb8ller'
>>> factory.ical('priority', 1)
'1'
>>> factory.ical('cn', u'Rasmussen, Max Møller')
'Rasmussen\\\\, Max M\\xc3\\xb8ller'
>>> factory.from_ical('cn', 'Rasmussen\\\\, Max M\\xc3\\xb8ller')
u'Rasmussen, Max M\\xf8ller'
The value and parameter names don't overlap. So one factory is enough for
both kinds.
"""
def __init__(self, *args, **kwargs):
"Set keys to upper for initial dict"
CaselessDict.__init__(self, *args, **kwargs)
self['binary'] = vBinary
self['boolean'] = vBoolean
self['cal-address'] = vCalAddress
self['date'] = vDDDTypes
self['date-time'] = vDDDTypes
self['duration'] = vDDDTypes
self['float'] = vFloat
self['integer'] = vInt
self['period'] = vPeriod
self['recur'] = vRecur
self['text'] = vText
self['time'] = vTime
self['uri'] = vUri
self['utc-offset'] = vUTCOffset
self['geo'] = vGeo
self['inline'] = vInline
self['date-time-list'] = vDDDLists
#################################################
# Property types
# These are the default types
types_map = CaselessDict({
####################################
# Property value types
# Calendar Properties
'calscale' : 'text',
'method' : 'text',
'prodid' : 'text',
'version' : 'text',
# Descriptive Component Properties
'attach' : 'uri',
'categories' : 'text',
'class' : 'text',
'comment' : 'text',
'description' : 'text',
'geo' : 'geo',
'location' : 'text',
'percent-complete' : 'integer',
'priority' : 'integer',
'resources' : 'text',
'status' : 'text',
'summary' : 'text',
# Date and Time Component Properties
'completed' : 'date-time',
'dtend' : 'date-time',
'due' : 'date-time',
'dtstart' : 'date-time',
'duration' : 'duration',
'freebusy' : 'period',
'transp' : 'text',
# Time Zone Component Properties
'tzid' : 'text',
'tzname' : 'text',
'tzoffsetfrom' : 'utc-offset',
'tzoffsetto' : 'utc-offset',
'tzurl' : 'uri',
# Relationship Component Properties
'attendee' : 'cal-address',
'contact' : 'text',
'organizer' : 'cal-address',
'recurrence-id' : 'date-time',
'related-to' : 'text',
'url' : 'uri',
'uid' : 'text',
# Recurrence Component Properties
'exdate' : 'date-time-list',
'exrule' : 'recur',
'rdate' : 'date-time-list',
'rrule' : 'recur',
# Alarm Component Properties
'action' : 'text',
'repeat' : 'integer',
'trigger' : 'duration',
# Change Management Component Properties
'created' : 'date-time',
'dtstamp' : 'date-time',
'last-modified' : 'date-time',
'sequence' : 'integer',
# Miscellaneous Component Properties
'request-status' : 'text',
####################################
# parameter types (luckily there is no name overlap)
'altrep' : 'uri',
'cn' : 'text',
'cutype' : 'text',
'delegated-from' : 'cal-address',
'delegated-to' : 'cal-address',
'dir' : 'uri',
'encoding' : 'text',
'fmttype' : 'text',
'fbtype' : 'text',
'language' : 'text',
'member' : 'cal-address',
'partstat' : 'text',
'range' : 'text',
'related' : 'text',
'reltype' : 'text',
'role' : 'text',
'rsvp' : 'boolean',
'sent-by' : 'cal-address',
'tzid' : 'text',
'value' : 'text',
})
def for_property(self, name):
"Returns a the default type for a property or parameter"
return self[self.types_map.get(name, 'text')]
def ical(self, name, value):
"""
Encodes a named value from a primitive python type to an
icalendar encoded string.
"""
type_class = self.for_property(name)
return type_class(value).ical()
def from_ical(self, name, value):
"""
Decodes a named property or parameter value from an icalendar encoded
string to a primitive python type.
"""
type_class = self.for_property(name)
decoded = type_class.from_ical(str(value))
return decoded
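# A minimal round-trip sketch (not part of the original module). It assumes
# the `datetime` import at the top of this module, as the doctests above do.
if __name__ == '__main__':
    factory = TypesFactory()
    # encode a python datetime as an iCalendar DTSTART value ...
    encoded = factory.ical('dtstart', datetime(2005, 1, 1, 12, 30))
    assert encoded == '20050101T123000'
    # ... and decode it back to the primitive python value
    assert factory.from_ical('dtstart', encoded) == datetime(2005, 1, 1, 12, 30)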
|
schrockntemp/graphscaletemp
|
refs/heads/master
|
graphscale/kvetch/kvetch.py
|
1
|
from uuid import UUID, uuid4
from graphscale.utils import param_check, async_array, print_error
class KvetchShard:
def check_insert_object_vars(self, new_id, type_id, data):
param_check(new_id, UUID, 'new_id')
param_check(type_id, int, 'type_id')
param_check(data, dict, 'data')
if 'obj_id' in data:
raise ValueError('Cannot specify obj_id')
if 'type_id' in data:
raise ValueError('Cannot specify type_id')
class KvetchIndexDefinition:
async def gen_all(self, _shard, _value):
raise NotImplementedError('subclasses must implement gen_all')
class KvetchEdgeDefinition:
def __init__(self, *, edge_name, edge_id, from_id_attr):
self._edge_name = edge_name
self._edge_id = edge_id
self._from_id_attr = from_id_attr
def edge_name(self):
return self._edge_name
def edge_id(self):
return self._edge_id
def from_id_attr(self):
return self._from_id_attr
class Kvetch:
def __init__(self, *, shards, edges, indexes):
param_check(shards, list, 'shards')
param_check(edges, list, 'edges')
param_check(indexes, list, 'indexes')
self._shards = shards
# shard => shard_id
self._shard_lookup = dict(zip(self._shards, range(0, len(shards))))
# index_name => index
self._index_dict = dict(zip([index.index_name() for index in indexes], indexes))
# edge_name => edge
self._edge_dict = dict(zip([edge.edge_name() for edge in edges], edges))
def get_index(self, index_name):
param_check(index_name, str, 'index_name')
return self._index_dict[index_name]
def get_edge_definition_by_name(self, edge_name):
for edge in self._edge_dict.values():
if edge.edge_name() == edge_name:
return edge
raise Exception('edge %s not found in Kvetch' % edge_name)
def get_shard_from_obj_id(self, obj_id):
param_check(obj_id, UUID, 'obj_id')
shard_id = self.get_shard_id_from_obj_id(obj_id)
return self._shards[shard_id]
def get_shard_id_from_obj_id(self, obj_id):
# TODO: do something less naive, such as consistent hashing
# (a hashed-ring sketch appears at the end of this module);
# excellent description here http://michaelnielsen.org/blog/consistent-hashing/
param_check(obj_id, UUID, 'obj_id')
return int(obj_id) % len(self._shards)
async def gen_update_object(self, obj_id, data):
param_check(obj_id, UUID, 'obj_id')
param_check(data, dict, 'data')
shard = self.get_shard_from_obj_id(obj_id)
return await shard.gen_update_object(obj_id, data)
async def gen_delete_object(self, obj_id):
param_check(obj_id, UUID, 'obj_id')
shard = self.get_shard_from_obj_id(obj_id)
return await shard.gen_delete_object(obj_id)
async def gen_insert_object(self, type_id, data):
param_check(type_id, int, 'type_id')
param_check(data, dict, 'data')
new_id = uuid4()
shard = self.get_shard_from_obj_id(new_id)
await shard.gen_insert_object(new_id, type_id, data)
for edge_definition in self._edge_dict.values():
attr = edge_definition.from_id_attr()
if attr not in data or not data[attr]:
continue
from_id = data[attr]
from_id_shard = self.get_shard_from_obj_id(from_id)
await from_id_shard.gen_insert_edge(edge_definition, from_id, new_id, {})
for index in self._index_dict.values():
if index.indexed_type_id() != type_id:
continue
attr = index.indexed_attr()
if attr not in data or not data[attr]:
continue
indexed_value = data[attr]
indexed_shard = self.get_shard_from_obj_id(new_id)
await indexed_shard.gen_insert_index_entry(index, indexed_value, new_id)
return new_id
async def gen_insert_objects(self, type_id, datas):
param_check(datas, list, 'datas')
if len(self._shards) > 1:
raise Exception('shards > 1 currently not supported')
shard = self._shards[0]
new_ids = [uuid4() for _ in datas]
await shard.gen_insert_objects(new_ids, type_id, datas)
return new_ids
async def gen_object(self, obj_id):
param_check(obj_id, UUID, 'obj_id')
shard = self.get_shard_from_obj_id(obj_id)
return await shard.gen_object(obj_id)
async def gen_objects(self, ids):
# construct dictionary of shard_id to all ids in that shard
shard_to_ids = {} # shard_id => [id]
for obj_id in ids:
shard_id = self.get_shard_id_from_obj_id(obj_id)
if shard_id not in shard_to_ids:
shard_to_ids[shard_id] = []
shard_to_ids[shard_id].append(obj_id)
# construct list of coros (one per shard) in order to fetch in parallel
unawaited_gens = []
for shard_id, ids_in_shard in shard_to_ids.items():
shard = self._shards[shard_id]
unawaited_gens.append(shard.gen_objects(ids_in_shard))
obj_dict_per_shard = await async_array(unawaited_gens)
# flatten results into single dict
results = {}
for obj_dict in obj_dict_per_shard:
for obj_id, obj in obj_dict.items():
results[obj_id] = obj
return results
async def gen_objects_of_type(self, type_id, after=None, first=None):
if len(self._shards) > 1:
raise Exception('shards > 1 currently not supported')
shard = self._shards[0]
return await shard.gen_objects_of_type(type_id, after, first)
async def gen_edges(self, edge_definition, from_id, after=None, first=None):
shard = self.get_shard_from_obj_id(from_id)
return await shard.gen_edges(edge_definition, from_id, after=after, first=first)
async def gen_from_index(self, index, index_value):
ids = []
for shard in self._shards:
index_entries = await shard.gen_index_entries(index, index_value)
ids.extend([entry['target_id'] for entry in index_entries])
return await self.gen_objects(ids)
async def gen_id_from_index(self, index_name, index_value):
index = self.get_index(index_name)
ids = await self.gen_ids_from_index(index, index_value)
if not ids:
return None
return ids[0]
async def gen_ids_from_index(self, index, index_value):
ids = []
for shard in self._shards:
index_entries = await shard.gen_index_entries(index, index_value)
ids.extend([entry['target_id'] for entry in index_entries])
return ids
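# A hashed-ring sketch of the consistent hashing mentioned in
# Kvetch.get_shard_id_from_obj_id. Illustrative only: the md5 hash and the
# replica count below are assumptions, not part of kvetch.
import hashlib
from bisect import bisect

def build_ring(num_shards, replicas=64):
    """Place `replicas` pseudo-random points per shard on a hash ring."""
    points = []
    for shard_id in range(num_shards):
        for replica in range(replicas):
            digest = hashlib.md5(('%d:%d' % (shard_id, replica)).encode()).hexdigest()
            points.append((int(digest, 16), shard_id))
    return sorted(points)

def shard_for(ring, obj_id):
    """Walk clockwise from hash(obj_id) to the next shard point on the ring."""
    point = int(hashlib.md5(str(obj_id).encode()).hexdigest(), 16)
    index = bisect([p for p, _ in ring], point) % len(ring)
    return ring[index][1]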
|
TaskEvolution/Task-Coach-Evolution
|
refs/heads/master
|
taskcoach/taskcoachlib/thirdparty/filters/optionaltags.py
|
1727
|
from __future__ import absolute_import, division, unicode_literals
from . import _base
class Filter(_base.Filter):
def slider(self):
previous1 = previous2 = None
for token in self.source:
if previous1 is not None:
yield previous2, previous1, token
previous2 = previous1
previous1 = token
yield previous2, previous1, None
def __iter__(self):
for previous, token, next in self.slider():
type = token["type"]
if type == "StartTag":
if (token["data"] or
not self.is_optional_start(token["name"], previous, next)):
yield token
elif type == "EndTag":
if not self.is_optional_end(token["name"], next):
yield token
else:
yield token
def is_optional_start(self, tagname, previous, next):
type = next and next["type"] or None
if tagname == 'html':
# An html element's start tag may be omitted if the first thing
# inside the html element is not a space character or a comment.
return type not in ("Comment", "SpaceCharacters")
elif tagname == 'head':
# A head element's start tag may be omitted if the first thing
# inside the head element is an element.
# XXX: we also omit the start tag if the head element is empty
if type in ("StartTag", "EmptyTag"):
return True
elif type == "EndTag":
return next["name"] == "head"
elif tagname == 'body':
# A body element's start tag may be omitted if the first thing
# inside the body element is not a space character or a comment,
# except if the first thing inside the body element is a script
# or style element and the node immediately preceding the body
# element is a head element whose end tag has been omitted.
if type in ("Comment", "SpaceCharacters"):
return False
elif type == "StartTag":
# XXX: we do not look at the preceding event, so we never omit
# the body element's start tag if it's followed by a script or
# a style element.
return next["name"] not in ('script', 'style')
else:
return True
elif tagname == 'colgroup':
# A colgroup element's start tag may be omitted if the first thing
# inside the colgroup element is a col element, and if the element
# is not immediately preceded by another colgroup element whose
# end tag has been omitted.
if type in ("StartTag", "EmptyTag"):
# XXX: we do not look at the preceding event, so instead we never
# omit the colgroup element's end tag when it is immediately
# followed by another colgroup element. See is_optional_end.
return next["name"] == "col"
else:
return False
elif tagname == 'tbody':
# A tbody element's start tag may be omitted if the first thing
# inside the tbody element is a tr element, and if the element is
# not immediately preceded by a tbody, thead, or tfoot element
# whose end tag has been omitted.
if type == "StartTag":
# omit the thead and tfoot elements' end tag when they are
# immediately followed by a tbody element. See is_optional_end.
if previous and previous['type'] == 'EndTag' and \
previous['name'] in ('tbody', 'thead', 'tfoot'):
return False
return next["name"] == 'tr'
else:
return False
return False
def is_optional_end(self, tagname, next):
type = next and next["type"] or None
if tagname in ('html', 'head', 'body'):
# An html element's end tag may be omitted if the html element
# is not immediately followed by a space character or a comment.
return type not in ("Comment", "SpaceCharacters")
elif tagname in ('li', 'optgroup', 'tr'):
# A li element's end tag may be omitted if the li element is
# immediately followed by another li element or if there is
# no more content in the parent element.
# An optgroup element's end tag may be omitted if the optgroup
# element is immediately followed by another optgroup element,
# or if there is no more content in the parent element.
# A tr element's end tag may be omitted if the tr element is
# immediately followed by another tr element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] == tagname
else:
return type == "EndTag" or type is None
elif tagname in ('dt', 'dd'):
# A dt element's end tag may be omitted if the dt element is
# immediately followed by another dt element or a dd element.
# A dd element's end tag may be omitted if the dd element is
# immediately followed by another dd element or a dt element,
# or if there is no more content in the parent element.
if type == "StartTag":
return next["name"] in ('dt', 'dd')
elif tagname == 'dd':
return type == "EndTag" or type is None
else:
return False
elif tagname == 'p':
# A p element's end tag may be omitted if the p element is
# immediately followed by an address, article, aside,
# blockquote, datagrid, dialog, dir, div, dl, fieldset,
# footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
# nav, ol, p, pre, section, table, or ul, element, or if
# there is no more content in the parent element.
if type in ("StartTag", "EmptyTag"):
return next["name"] in ('address', 'article', 'aside',
'blockquote', 'datagrid', 'dialog',
'dir', 'div', 'dl', 'fieldset', 'footer',
'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
'header', 'hr', 'menu', 'nav', 'ol',
'p', 'pre', 'section', 'table', 'ul')
else:
return type == "EndTag" or type is None
elif tagname == 'option':
# An option element's end tag may be omitted if the option
# element is immediately followed by another option element,
# or if it is immediately followed by an <code>optgroup</code>
# element, or if there is no more content in the parent
# element.
if type == "StartTag":
return next["name"] in ('option', 'optgroup')
else:
return type == "EndTag" or type is None
elif tagname in ('rt', 'rp'):
# An rt element's end tag may be omitted if the rt element is
# immediately followed by an rt or rp element, or if there is
# no more content in the parent element.
# An rp element's end tag may be omitted if the rp element is
# immediately followed by an rt or rp element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] in ('rt', 'rp')
else:
return type == "EndTag" or type is None
elif tagname == 'colgroup':
# A colgroup element's end tag may be omitted if the colgroup
# element is not immediately followed by a space character or
# a comment.
if type in ("Comment", "SpaceCharacters"):
return False
elif type == "StartTag":
# XXX: we also look for an immediately following colgroup
# element. See is_optional_start.
return next["name"] != 'colgroup'
else:
return True
elif tagname in ('thead', 'tbody'):
# A thead element's end tag may be omitted if the thead element
# is immediately followed by a tbody or tfoot element.
# A tbody element's end tag may be omitted if the tbody element
# is immediately followed by a tbody or tfoot element, or if
# there is no more content in the parent element.
# A tfoot element's end tag may be omitted if the tfoot element
# is immediately followed by a tbody element, or if there is no
# more content in the parent element.
# XXX: we never omit the end tag when the following element is
# a tbody. See is_optional_start.
if type == "StartTag":
return next["name"] in ['tbody', 'tfoot']
elif tagname == 'tbody':
return type == "EndTag" or type is None
else:
return False
elif tagname == 'tfoot':
# A tfoot element's end tag may be omitted if the tfoot element
# is immediately followed by a tbody element, or if there is no
# more content in the parent element.
# XXX: we never omit the end tag when the following element is
# a tbody. See is_optional_start.
if type == "StartTag":
return next["name"] == 'tbody'
else:
return type == "EndTag" or type is None
elif tagname in ('td', 'th'):
# A td element's end tag may be omitted if the td element is
# immediately followed by a td or th element, or if there is
# no more content in the parent element.
# A th element's end tag may be omitted if the th element is
# immediately followed by a td or th element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] in ('td', 'th')
else:
return type == "EndTag" or type is None
return False
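# A minimal sketch (not part of html5lib) of what `slider` yields: each token
# alongside its two predecessors, padded with None at both edges. It assumes
# _base.Filter.__init__ simply stores `source`, as it does in html5lib.
if __name__ == '__main__':
    triples = list(Filter(['a', 'b', 'c']).slider())
    assert triples == [(None, 'a', 'b'), ('a', 'b', 'c'), ('b', 'c', None)]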
|
freedesktop-unofficial-mirror/gstreamer__cerbero
|
refs/heads/master
|
cerbero/commands/__init__.py
|
27
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
__all__ = ['Command', 'register_command', 'run']
from cerbero.errors import FatalError
from cerbero.utils import _
from cerbero.utils import messages as m
class Command:
"""Base class for Command objects"""
doc = ''
name = None
def __init__(self, arguments=None):
self.arguments = arguments or []
def run(self, config, args):
"""The body of the command"""
raise NotImplementedError
def add_parser(self, subparsers):
self.parser = subparsers.add_parser(self.name, help=_(self.doc))
for arg in self.arguments:
arg.add_to_parser(self.parser)
# dictionary with the list of commands
# command_name -> command_instance
_commands = {}
def register_command(command_class):
command = command_class()
_commands[command.name] = command
def load_commands(subparsers):
import os
commands_dir = os.path.abspath(os.path.dirname(__file__))
for name in os.listdir(commands_dir):
name, extension = os.path.splitext(name)
if extension != '.py':
continue
try:
__import__('cerbero.commands.%s' % name)
except ImportError, e:
m.warning("Error importing command %s:\n %s" % (name, e))
for command in _commands.values():
command.add_parser(subparsers)
def run(command, config, args):
# if the command hasn't been registered, load a module by the same name
if command not in _commands:
raise FatalError(_('command not found'))
return _commands[command].run(config, args)
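# A minimal sketch of how a command plugs into this registry. The command
# below is invented for illustration; real commands live in the sibling
# modules of this package and are picked up by load_commands():
#
#     class Hello(Command):
#         doc = 'Print a greeting'
#         name = 'hello'
#
#         def run(self, config, args):
#             print('hello from cerbero')
#
#     register_command(Hello)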
|
tms/node-gyp
|
refs/heads/master
|
gyp/pylib/gyp/MSVSUserFile.py
|
2710
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio user preferences file writer."""
import os
import re
import socket # for gethostname
import gyp.common
import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------
def _FindCommandInPath(command):
"""If there are no slashes in the command given, this function
searches the PATH env to find the given command, and converts it
to an absolute path. We have to do this because MSVS is looking
for an actual file to launch a debugger on, not just a command
line. Note that this happens at GYP time, so anything needing to
be built needs to have a full path."""
if '/' in command or '\\' in command:
# If the command already has path elements (either relative or
# absolute), then assume it is constructed properly.
return command
else:
# Search through the path list and find an existing file that
# we can access.
paths = os.environ.get('PATH','').split(os.pathsep)
for path in paths:
item = os.path.join(path, command)
if os.path.isfile(item) and os.access(item, os.X_OK):
return item
return command
def _QuoteWin32CommandLineArgs(args):
new_args = []
for arg in args:
# Replace all double-quotes with double-double-quotes to escape
# them for cmd shell, and then quote the whole thing if there
# are any.
if arg.find('"') != -1:
arg = '""'.join(arg.split('"'))
arg = '"%s"' % arg
# Otherwise, if there are any spaces, quote the whole arg.
elif re.search(r'[ \t\n]', arg):
arg = '"%s"' % arg
new_args.append(arg)
return new_args
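# For example (a sketch, not part of the original file):
#   _QuoteWin32CommandLineArgs(['a b', 'say "hi"'])
#   returns ['"a b"', '"say ""hi"""']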
class Writer(object):
"""Visual Studio XML user user file writer."""
def __init__(self, user_file_path, version, name):
"""Initializes the user file.
Args:
user_file_path: Path to the user file.
version: Version info.
name: Name of the user file.
"""
self.user_file_path = user_file_path
self.version = version
self.name = name
self.configurations = {}
def AddConfig(self, name):
"""Adds a configuration to the project.
Args:
name: Configuration name.
"""
self.configurations[name] = ['Configuration', {'Name': name}]
def AddDebugSettings(self, config_name, command, environment=None,
working_directory=""):
"""Adds a DebugSettings node to the user file for a particular config.
Args:
config_name: name of the configuration the settings apply to.
command: command line to run. First element in the list is the
executable. All elements of the command will be quoted if
necessary.
environment: dict of environment variables for the command. (optional)
working_directory: working directory for the command. (optional)
"""
command = _QuoteWin32CommandLineArgs(command)
abs_command = _FindCommandInPath(command[0])
if environment and isinstance(environment, dict):
env_list = ['%s="%s"' % (key, val)
for (key,val) in environment.iteritems()]
environment = ' '.join(env_list)
else:
environment = ''
n_cmd = ['DebugSettings',
{'Command': abs_command,
'WorkingDirectory': working_directory,
'CommandArguments': " ".join(command[1:]),
'RemoteMachine': socket.gethostname(),
'Environment': environment,
'EnvironmentMerge': 'true',
# Currently these are all "dummy" values that we're just setting
# in the default manner that MSVS does it. We could use some of
# these to add additional capabilities, I suppose, but they might
# not have parity with other platforms then.
'Attach': 'false',
'DebuggerType': '3', # 'auto' debugger
'Remote': '1',
'RemoteCommand': '',
'HttpUrl': '',
'PDBPath': '',
'SQLDebugging': '',
'DebuggerFlavor': '0',
'MPIRunCommand': '',
'MPIRunArguments': '',
'MPIRunWorkingDirectory': '',
'ApplicationCommand': '',
'ApplicationArguments': '',
'ShimCommand': '',
'MPIAcceptMode': '',
'MPIAcceptFilter': ''
}]
# Find the config, and add it if it doesn't exist.
if config_name not in self.configurations:
self.AddConfig(config_name)
# Add the DebugSettings onto the appropriate config.
self.configurations[config_name].append(n_cmd)
def WriteIfChanged(self):
"""Writes the user file."""
configs = ['Configurations']
for config, spec in sorted(self.configurations.iteritems()):
configs.append(spec)
content = ['VisualStudioUserFile',
{'Version': self.version.ProjectVersion(),
'Name': self.name
},
configs]
easy_xml.WriteXmlIfChanged(content, self.user_file_path,
encoding="Windows-1252")
|
devinbalkind/eden
|
refs/heads/master
|
private/templates/EVASS/maintenance.py
|
15
|
# -*- coding: utf-8 -*-
import datetime
import os
import time
from gluon import current
# =============================================================================
class Daily(object):
""" Daily Maintenance Tasks """
def __call__(self):
db = current.db
s3db = current.s3db
request = current.request
now = request.utcnow
month_past = now - datetime.timedelta(weeks=4)
# Cleanup Scheduler logs
table = s3db.scheduler_run
db(table.start_time < month_past).delete()
# Cleanup Sync logs
table = s3db.sync_log
db(table.timestmp < month_past).delete()
# Cleanup Sessions
osjoin = os.path.join
osstat = os.stat
osremove = os.remove
folder = osjoin(request.global_settings.applications_parent,
request.folder,
"sessions")
# Convert to UNIX time
month_past_u = time.mktime(month_past.timetuple())
for file in os.listdir(folder):
filepath = osjoin(folder, file)
status = osstat(filepath)
if status.st_mtime < month_past_u:
try:
osremove(filepath)
except:
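# the session file may be locked or already removed; skip it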
pass
# END =========================================================================
|
SpaceGroupUCL/qgisSpaceSyntaxToolkit
|
refs/heads/master
|
esstoolkit/external/networkx/generators/atlas.py
|
1
|
"""
Generators for the small graph atlas.
"""
import gzip
from itertools import islice
import os
import os.path
import networkx as nx
__all__ = ["graph_atlas", "graph_atlas_g"]
#: The total number of graphs in the atlas.
#:
#: The graphs are labeled starting from 0 and extending to (but not
#: including) this number.
NUM_GRAPHS = 1253
#: The absolute path representing the directory containing this file.
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
#: The path to the data file containing the graph edge lists.
#:
#: This is the absolute filename of the gzipped text file containing the
#: edge list for each graph in the atlas. The file contains one entry
#: per graph in the atlas, in sequential order, starting from graph
#: number 0 and extending through graph number 1252 (see
#: :data:`NUM_GRAPHS`). Each entry looks like
#:
#: .. sourcecode:: text
#:
#: GRAPH 6
#: NODES 3
#: 0 1
#: 0 2
#:
#: where the first two lines are the graph's index in the atlas and the
#: number of nodes in the graph, and the remaining lines are the edge
#: list.
#:
#: This file was generated from a Python list of graphs via code like
#: the following::
#:
#: import gzip
#: from networkx.generators.atlas import graph_atlas_g
#: from networkx.readwrite.edgelist import write_edgelist
#:
#: with gzip.open('atlas.dat.gz', 'wb') as f:
#: for i, G in enumerate(graph_atlas_g()):
#: f.write(bytes(f'GRAPH {i}\n', encoding='utf-8'))
#: f.write(bytes(f'NODES {len(G)}\n', encoding='utf-8'))
#: write_edgelist(G, f, data=False)
#:
ATLAS_FILE = os.path.join(THIS_DIR, "atlas.dat.gz")
def _generate_graphs():
"""Sequentially read the file containing the edge list data for the
graphs in the atlas and generate the graphs one at a time.
This function reads the file given in :data:`.ATLAS_FILE`.
"""
with gzip.open(ATLAS_FILE, "rb") as f:
line = f.readline()
while line and line.startswith(b"GRAPH"):
# The first two lines of each entry tell us the index of the
# graph in the list and the number of nodes in the graph.
# They look like this:
#
# GRAPH 3
# NODES 2
#
graph_index = int(line[6:].rstrip())
line = f.readline()
num_nodes = int(line[6:].rstrip())
# The remaining lines contain the edge list, until the next
# GRAPH line (or until the end of the file).
edgelist = []
line = f.readline()
while line and not line.startswith(b"GRAPH"):
edgelist.append(line.rstrip())
line = f.readline()
G = nx.Graph()
G.name = f"G{graph_index}"
G.add_nodes_from(range(num_nodes))
G.add_edges_from(tuple(map(int, e.split())) for e in edgelist)
yield G
def graph_atlas(i):
"""Returns graph number `i` from the Graph Atlas.
For more information, see :func:`.graph_atlas_g`.
Parameters
----------
i : int
The index of the graph from the atlas to get. The graph at index
0 is assumed to be the null graph.
Returns
-------
:class:`~networkx.Graph`
The graph with index `i` in the Graph Atlas.
See also
--------
graph_atlas_g
Notes
-----
The time required by this function increases linearly with the
argument `i`, since it reads a large file sequentially in order to
generate the graph [1]_.
References
----------
.. [1] Ronald C. Read and Robin J. Wilson, *An Atlas of Graphs*.
Oxford University Press, 1998.
"""
if not (0 <= i < NUM_GRAPHS):
raise ValueError(f"index must be between 0 and {NUM_GRAPHS}")
return next(islice(_generate_graphs(), i, None))
def graph_atlas_g():
"""Returns the list of all graphs with up to seven nodes named in the
Graph Atlas.
The graphs are listed in increasing order by
1. number of nodes,
2. number of edges,
3. degree sequence (for example 111223 < 112222),
4. number of automorphisms,
in that order, with three exceptions as described in the *Notes*
section below. This causes the list to correspond with the index of
the graphs in the Graph Atlas [atlas]_, with the first graph,
``G[0]``, being the null graph.
Returns
-------
list
A list of :class:`~networkx.Graph` objects, the one at index *i*
corresponding to the graph *i* in the Graph Atlas.
See also
--------
graph_atlas
Notes
-----
This function may be expensive in both time and space, since it
reads a large file sequentially in order to populate the list.
Although the NetworkX atlas functions match the order of graphs
given in the "Atlas of Graphs" book, there are (at least) three
errors in the ordering described in the book. The following three
pairs of nodes violate the lexicographically nondecreasing sorted
degree sequence rule:
- graphs 55 and 56 with degree sequences 001111 and 000112,
- graphs 1007 and 1008 with degree sequences 3333444 and 3333336,
- graphs 1012 and 1213 with degree sequences 1244555 and 1244456.
References
----------
.. [atlas] Ronald C. Read and Robin J. Wilson,
*An Atlas of Graphs*.
Oxford University Press, 1998.
"""
return list(_generate_graphs())
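# A short usage sketch (not part of networkx), grounded in the sample entry
# shown in the ATLAS_FILE comment above: graph number 6 has three nodes and
# two edges.
if __name__ == '__main__':
    G = graph_atlas(6)
    assert G.name == "G6"
    assert G.number_of_nodes() == 3
    assert G.number_of_edges() == 2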
|
mortenm12/P7-CBR-for-Timed-I-O
|
refs/heads/master
|
python_dbm/test.py
|
2
|
import udbm
import unittest
import pdb
class UDBMTest(unittest.TestCase):
def setUp(self):
self.c = udbm.Context(["x", "y", "z"], name = "c")
def test_int_valuation(self):
c = self.c
v = udbm.IntValuation(c)
self.assertRaises(KeyError, lambda :(v["not_in_federation"]))
self.assertRaises(TypeError, v.__setitem__, ("x", 0.1)) # too bad we aren't using python 2.7 where it's possible to use "with self.assertRaises"
v["x"] = 1
v["y"] = 1
v["z"] = 1
self.assertTrue(( (c.x == 1) & (c.y == 1) & (c.z == 1)).contains(v))
self.assertFalse(((c.x == 2) & (c.y == 1) & (c.z == 1)).contains(v))
def test_float_valuation(self):
c = self.c
v = udbm.FloatValuation(c)
self.assertRaises(KeyError, lambda :(v["not_in_federation"]))
v["x"] = 1.0
v["y"] = 1.01
v["z"] = 1
self.assertFalse(( (c.x == 1) & (c.y == 1) & (c.z == 1)).contains(v))
self.assertTrue(((c.x == 1) & (c.y < 2) & (c.y > 1) & (c.z == 1)).contains(v))
def test_set_operations(self):
c = self.c
self.assertTrue( (c.x == 1) == (c.x >= 1) & (c.x <= 1))
self.assertFalse( (c.x == 1) == (c.x >= 1) & (c.x < 1))
self.assertTrue( (c.x != 1) == ((c.x > 1) | (c.x < 1)))
self.assertFalse( (c.x != 1) == ((c.x > 1) | (c.x <= 1)))
self.assertTrue( (c.x == 1) & (c.y == 1) == (c.y == 1) & (c.x == 1))
self.assertTrue( (c.x == 1) | (c.y == 1) == (c.y == 1) | (c.x == 1))
self.assertFalse( (c.x == 1) | (c.y == 1) != (c.y == 1) | (c.x == 1))
self.assertFalse( (c.x == 1) & (c.y == 1) != (c.y == 1) & (c.x == 1))
self.assertTrue( (c.x == 1) & (c.y == 1) != (c.y == 1) | (c.x == 1))
self.assertTrue( (c.x == 1) & ((c.y == 1) | (c.z ==1)) == (c.x == 1) & (c.y == 1) |(c.x == 1) & (c.z ==1) )
self.assertFalse( (c.x == 1) & ((c.y == 1) | (c.z ==1)) == (c.x == 1) & (c.y == 1) |(c.x == 1) )
self.assertTrue( (c.x - c.y <= 1) == (c.y - c.x >= -1))
self.assertFalse( (c.x - c.y <= 1) == (c.y - c.x > -1))
self.assertTrue( ((c.x - c.y == 1) & (c.x == 1) ) == ((c.x == 1) & (c.y == 0)) )
self.assertTrue( (c.x - c.y != 1) == ((c.x - c.y > 1) | (c.x - c.y < 1)) )
def test_zero(self):
c = self.c
self.assertFalse( (c.x == 1).hasZero())
self.assertFalse( (c.x > 1).hasZero())
self.assertTrue( (c.x < 1).hasZero())
self.assertTrue( ((c.x == 1) & (c.z == 2)).setZero() == ((c.x == 0 ) & (c.z == 0) & (c.y ==0) ))
self.assertTrue( ((c.x == 1) & (c.z == 2)).setZero().hasZero())
def test_update_clocks(self):
c = self.c
self.assertTrue( ((c.x == 1) | (c.z == 2)).updateValue(c.x, 2) == (c.x == 2 ) )
self.assertTrue( ((c.x == 1) & (c.z == 2)).resetValue(c.x) == ((c.x == 0 ) & (c.z == 2) ) )
self.assertTrue( ((c.x == 1) & (c.x - c.y == 0)).updateValue(c.x, 2) == ((c.x == 2) & (c.y == 1) ) )
def test_str(self):
c = self.c
self.assertTrue(str((c.x == 1) & (c.y == 1)) == "(c.x==1 & c.x==c.y & c.y==1)")
def test_copy(self):
c = self.c
a = ((c.x - c.y)==1)
b = a.copy()
d = b.copy()
self.assertTrue( a == b)
b &= (c.z == 1)
d |= (c.z == 1)
self.assertFalse( a == b)
self.assertFalse( d == b)
def test_reduce(self):
c = self.c
a = (c.x >= 1) | (c.x <= 1)
self.assertTrue(a.getSize() == 2)
a.reduce()
self.assertTrue(a.getSize() == 1)
def test_convex_hull(self):
c = self.c
d1 = (c.x >= 1) & (c.x <=2) & (c.y>=1) & (c.y <=2)
d2 = (c.x >= 3) & (c.x <=4) & (c.y>=3) & (c.y <=4)
d3 = (c.x - c.y <= 1) & (c.y - c.x <= 1) & (c.x >= 1) & (c.y >= 1) & (c.x <= 4) & (c.y <= 4)
self.assertTrue((d1 + d2) == d3)
self.assertTrue((d1 | d2).convexHull() == d3)
d1 += d2
self.assertTrue(d1 == d3)
def test_sub(self):
c = self.c
d1 = (c.x >= 1) & (c.x <=2) & (c.y>=1) & (c.y <=2)
d2 = (c.x >= 3) & (c.x <=4) & (c.y>=3) & (c.y <=4)
d3 = d1 | d2
self.assertTrue(d3 - d1 == d2)
d3 -= d2
self.assertTrue(d3 == d1)
def test_up_down(self):
c = udbm.Context(["x", "y"]) # we need only two variables here
d1 = (c.x >= 1) & (c.x <=2) & (c.y>=1) & (c.y <=2)
d2 = (c.x - c.y <= 1) & (c.y - c.x <= 1) & (c.x >= 1) & (c.y >= 1)
d3 = (c.x - c.y <= 1) & (c.y - c.x <= 1) & (c.x <= 2) & (c.y <= 2)
self.assertTrue(d1.up() == d2)
self.assertTrue(d1.down() == d3)
def test_isnt_mutable(self):
c = self.c
d1 = (c.x - c.y <= 1) & (c.y - c.x <= 1) & (c.x >= 1) & (c.y >= 1) & (c.y <= 4) # some random
d2 = d1.copy()
d2.up()
self.assertTrue(d1 == d2)
d2.down()
self.assertTrue(d1 == d2)
d2.down()
self.assertTrue(d1 == d2)
d2.freeClock(c.x)
self.assertTrue(d1 == d2)
d2.convexHull()
self.assertTrue(d1 == d2)
d2.predt(d2)
self.assertTrue(d1 == d2)
d2.resetValue(c.x)
self.assertTrue(d1 == d2)
def test_set_init(self):
c = self.c
d = (c.x - c.y <= 1) & (c.y - c.x <= 1) & (c.x >= 1) & (c.y >= 1) & (c.y <= 4) # some random
d.setInit()
self.assertTrue(d == ((c.x >= 0) & (c.y >= 0) & (c.z >= 0)))
self.assertTrue(d != ((c.x >= 1) & (c.y >= 0) & (c.z >= 0)))
def test_federation_ops(self):
c = self.c
d1 = (c.x - c.y <= 1) & (c.y - c.x <= 1) & (c.x >= 1) & (c.y >= 1) & (c.y <= 4) # some random
d2 = (c.x - c.y <= 1) & (c.y - c.x <= 1) & (c.x >= 1)
self.assertTrue(d1 <= d2)
self.assertTrue(d1 < d2)
self.assertTrue(d2 >= d1)
self.assertTrue(d2 > d1)
self.assertFalse(d1 == d2)
self.assertTrue(d1 != d2)
def test_intern(self):
c = self.c
d = (c["x"] - c["y"] <= 1)
d.intern()
def testExtrapolateMaxBounds(self):
c = self.c
v = (c.x - c.y <= 1) & (c.x < 150) & (c.z < 150) & (c.x - c.z <= 1000)
a = {c.x: 100, c.y:300, c.z:400}
self.assertTrue(v.extrapolateMaxBounds(a) == ((c.x-c.y<=1) & (c.z<150)))
def test_free_clock(self):
c = self.c
self.assertTrue(((c.x >= 10) & (c.y >= 10)).freeClock(c.x) == (c.y >= 10))
def test_zero_federation(self):
c = self.c
self.assertTrue(c.getZeroFederation().isZero())
self.assertTrue(c.getZeroFederation().hasZero())
self.assertTrue(udbm.Federation(c).isZero())
self.assertFalse((c.x==1).isZero())
self.assertFalse((c.x==1) == c.getZeroFederation())
def test_hash(self):
c = self.c
self.assertTrue((c.x==1).hash() == (c.x==1).hash())
self.assertFalse((c.y==1).hash() == (c.x==1).hash())
self.assertTrue(((c.x==1) | (c.y == 1)).hash() == ((c.y == 1) | (c.x==1)).hash())
def test_isempty(self):
c = self.c
self.assertTrue(((c.x==1) & (c.x !=1)).isEmpty())
self.assertFalse(((c.x==1) | (c.x !=1)).isEmpty())
self.assertFalse((c.x==1).isEmpty())
self.assertFalse(((c.x==1) & (c.y !=1)).isEmpty())
self.assertTrue( (((c.x==1) & (c.x !=1)) | ((c.y==1) & (c.y !=1))).isEmpty())
def test_tautology(self):
c = self.c
self.assertTrue(c.getTautologyFederation() == c.getTautologyFederation())
a = c.getTautologyFederation()
a &= (c.x == 1) # checking that we don't affect tautology federation
self.assertTrue(c.getTautologyFederation() > (c.x == 1))
self.assertTrue((c.getTautologyFederation() & (c.x == 1)) == (c.x == 1))
c.addClockByName('xx')
assert((c.x <=2) & c.getTautologyFederation() == (c.x <= 2))
assert(c.getTautologyFederation() & (c.x <=2) == (c.x <= 2))
def test_context_items(self):
c = self.c
assert(len(c.items()) == 3)
assert(('x', c.x) in c.items())
assert(('y', c.y) in c.items())
assert(('z', c.z) in c.items())
def test_tighten_relax(self):
c = self.c
a = ((c.x >= 1 ) & (c.x < 2) & (c.y >3) & (c.y<=4)) | (c.z == 4)
self.assertTrue(a.tightenUp() == (c.x >= 1 ) & (c.x < 2) & (c.y >3) & (c.y<4))
self.assertTrue(a.tightenDown() == (c.z > 0) & (c.x > 1 ) & (c.x < 2) & (c.y >3) & (c.y<=4))
self.assertTrue(a.relaxDown() == ((c.x >= 1 ) & (c.x < 2) & (c.y >= 3) & (c.y<=4) | (c.z == 4)))
# self.assertTrue(a.relaxUp() == ((c.x >= 1 ) & (c.x <= 2) & (c.y >3) & (c.y<=4) | (c.z == 4))) # TODO add test here
self.assertTrue(a.relaxAll() == (c.x >= 1 ) & (c.x <= 2) & (c.y >= 3) & (c.y<=4) | (c.z == 4))
def test_invert(self):
c = self.c
assert(~(c.x>=1) == (c.x<1))
assert(~~(c.x>=1) == (c.x>=1))
assert( ~((c.x>=1) & (c.y <= 4)) == ((c.x< 1) | (c.y > 4)))
assert(~~((c.x>=1) & (c.y <= 4)) == ((c.x>=1) & (c.y <= 4)))
def test_empty(self):
c = self.c
assert( ((c.x >=1) | c.getEmptyFederation()) == (c.x >=1))
assert( ((c.x >=1) & c.getEmptyFederation()) != (c.x >=1))
assert( ((c.x >=1) & c.getEmptyFederation()) == c.getEmptyFederation())
def test_fed_clockaccess(self):
c = self.c
f = c.getZeroFederation()
self.assertTrue( (f.x == 1) == (f.x >= 1) & (f.x <= 1))
self.assertFalse( (f.x == 1) == (f.x >= 1) & (f.x < 1))
self.assertTrue( (f.x != 1) == ((f.x > 1) | (f.x < 1)))
self.assertFalse( (f.x != 1) == ((f.x > 1) | (f.x <= 1)))
self.assertTrue( (f.x == 1) & (f.y == 1) == (f.y == 1) & (f.x == 1))
self.assertTrue( (f.x == 1) | (f.y == 1) == (f.y == 1) | (f.x == 1))
self.assertFalse( (f.x == 1) | (f.y == 1) != (f.y == 1) | (f.x == 1))
self.assertFalse( (f.x == 1) & (f.y == 1) != (f.y == 1) & (f.x == 1))
self.assertTrue( (f.x == 1) & (f.y == 1) != (f.y == 1) | (f.x == 1))
self.assertTrue( (f.x == 1) & ((f.y == 1) | (f.z ==1)) == (f.x == 1) & (f.y == 1) |(f.x == 1) & (f.z ==1) )
self.assertFalse( (f.x == 1) & ((f.y == 1) | (f.z ==1)) == (f.x == 1) & (f.y == 1) |(f.x == 1) )
self.assertTrue( (f.x - f.y <= 1) == (f.y - f.x >= -1))
self.assertFalse( (f.x - f.y <= 1) == (f.y - f.x > -1))
self.assertTrue( ((f.x - f.y == 1) & (f.x == 1) ) == ((f.x == 1) & (f.y == 0)) )
self.assertTrue( (f.x - f.y != 1) == ((f.x - f.y > 1) | (f.x - f.y < 1)) )
def test_clock_name(self):
c = self.c
self.assertTrue(c.hasClockByName('x'))
self.assertFalse(c.hasClockByName('w'))
self.assertTrue(str(c.x) == 'c.x')
def test_depend(self):
c = self.c
(c.x > 1).depends(c.x)
(c.x > 2).depends(c.x)
(c.x > 3).depends(c.x)
self.assertTrue((c.x > 1).depends(c.x))
self.assertFalse((c.x > 1).depends(c.y))
self.assertFalse(((c.x > 1)|(c.x <=1)).reduce().depends(c.x))
self.assertTrue(((c.x > 2)|(c.x <=1)).reduce().depends(c.x))
self.assertTrue(((c.x > 1) & (c.y > 1)).reduce().depends(c.y))
self.assertTrue(((c.x > 1) & (c.y > 1)).reduce().depends(c.x))
self.assertTrue(((c.x > 1) | (c.y > 1)).reduce().depends(c.y))
self.assertTrue(((c.x > 1) | (c.y > 1)).reduce().depends(c.x))
#def test_pointer_fun(self):
# c = self.c
# f = c.getZeroFederation()
# a = str(f._fed)
# print a
# #import pdb; pdb.set_trace()
# address = int(a.split('at 0x')[1][:-3], 16)
# print address
# f2 = udbm.udbm_int.fed_t_pointer_to_Federation(address)
# self.assertEqual(f, f2)
def test_updateIncrement(self):
c = self.c
a = (c.y == 5)
b = a & (c.x - c.y == 0)
self.assertEqual(b.updateIncrement(c.x, 1), (c.x == 6) & (c.x-c.y == 1) & (c.y == 5))
self.assertEqual(b, (c.x == 5) & (c.x-c.y == 0) & (c.y == 5))
b.updateIncrementInPlace(c.x, 1)
self.assertEqual(b, (c.x == 6) & (c.x-c.y == 1) & (c.y == 5))
if __name__ == '__main__':
unittest.main()
|
flyfei/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Tools/msi/uisample.py
|
89
|
import msilib,os;dirname=os.path.dirname(__file__)
AdminExecuteSequence = [
(u'InstallValidate', None, 1400),
(u'InstallInitialize', None, 1500),
(u'InstallFinalize', None, 6600),
(u'InstallFiles', None, 4000),
(u'InstallAdminPackage', None, 3900),
(u'FileCost', None, 900),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
]
AdminUISequence = [
(u'AdminWelcomeDlg', None, 1230),
(u'FileCost', None, 900),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
(u'ExecuteAction', None, 1300),
(u'ExitDialog', None, -1),
(u'FatalError', None, -3),
(u'PrepareDlg', None, 140),
(u'ProgressDlg', None, 1280),
(u'UserExit', None, -2),
]
AdvtExecuteSequence = [
(u'InstallValidate', None, 1400),
(u'InstallInitialize', None, 1500),
(u'InstallFinalize', None, 6600),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
(u'CreateShortcuts', None, 4500),
(u'PublishComponents', None, 6200),
(u'PublishFeatures', None, 6300),
(u'PublishProduct', None, 6400),
(u'RegisterClassInfo', None, 4600),
(u'RegisterExtensionInfo', None, 4700),
(u'RegisterMIMEInfo', None, 4900),
(u'RegisterProgIdInfo', None, 4800),
]
BBControl = [
]
Billboard = [
]
Binary = [
(u'bannrbmp', msilib.Binary(os.path.join(dirname,"bannrbmp.bin"))),
(u'completi', msilib.Binary(os.path.join(dirname,"completi.bin"))),
(u'custicon', msilib.Binary(os.path.join(dirname,"custicon.bin"))),
(u'dlgbmp', msilib.Binary(os.path.join(dirname,"dlgbmp.bin"))),
(u'exclamic', msilib.Binary(os.path.join(dirname,"exclamic.bin"))),
(u'info', msilib.Binary(os.path.join(dirname,"info.bin"))),
(u'insticon', msilib.Binary(os.path.join(dirname,"insticon.bin"))),
(u'New', msilib.Binary(os.path.join(dirname,"New.bin"))),
(u'removico', msilib.Binary(os.path.join(dirname,"removico.bin"))),
(u'repairic', msilib.Binary(os.path.join(dirname,"repairic.bin"))),
(u'Up', msilib.Binary(os.path.join(dirname,"Up.bin"))),
]
CheckBox = [
]
Property = [
(u'BannerBitmap', u'bannrbmp'),
(u'IAgree', u'No'),
(u'ProductID', u'none'),
(u'ARPHELPLINK', u'http://www.microsoft.com/management'),
(u'ButtonText_Back', u'< &Back'),
(u'ButtonText_Browse', u'Br&owse'),
(u'ButtonText_Cancel', u'Cancel'),
(u'ButtonText_Exit', u'&Exit'),
(u'ButtonText_Finish', u'&Finish'),
(u'ButtonText_Ignore', u'&Ignore'),
(u'ButtonText_Install', u'&Install'),
(u'ButtonText_Next', u'&Next >'),
(u'ButtonText_No', u'&No'),
(u'ButtonText_OK', u'OK'),
(u'ButtonText_Remove', u'&Remove'),
(u'ButtonText_Repair', u'&Repair'),
(u'ButtonText_Reset', u'&Reset'),
(u'ButtonText_Resume', u'&Resume'),
(u'ButtonText_Retry', u'&Retry'),
(u'ButtonText_Return', u'&Return'),
(u'ButtonText_Yes', u'&Yes'),
(u'CompleteSetupIcon', u'completi'),
(u'ComponentDownload', u'ftp://anonymous@microsoft.com/components/'),
(u'CustomSetupIcon', u'custicon'),
(u'DefaultUIFont', u'DlgFont8'),
(u'DialogBitmap', u'dlgbmp'),
(u'DlgTitleFont', u'{&DlgFontBold8}'),
(u'ErrorDialog', u'ErrorDlg'),
(u'ExclamationIcon', u'exclamic'),
(u'InfoIcon', u'info'),
(u'InstallerIcon', u'insticon'),
(u'INSTALLLEVEL', u'3'),
(u'InstallMode', u'Typical'),
(u'PIDTemplate', u'12345<###-%%%%%%%>@@@@@'),
#(u'ProductLanguage', u'1033'),
(u'Progress1', u'Installing'),
(u'Progress2', u'installs'),
(u'PROMPTROLLBACKCOST', u'P'),
(u'RemoveIcon', u'removico'),
(u'RepairIcon', u'repairic'),
(u'Setup', u'Setup'),
(u'ShowUserRegistrationDlg', u'1'),
(u'Wizard', u'Setup Wizard'),
]
ComboBox = [
]
Control = [
(u'AdminWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'AdminWelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminWelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'AdminWelcomeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will create a server image of [ProductName], at a specified network location. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'AdminWelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'AdminWelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminWelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'ExitDialog', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'ExitDialog', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ExitDialog', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'ExitDialog', u'Description', u'Text', 135, 70, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'ExitDialog', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Completing the [ProductName] [Wizard]', None, None),
(u'ExitDialog', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'ExitDialog', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'FatalError', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'FatalError', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'FatalError', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'FatalError', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}[ProductName] [Wizard] ended prematurely', None, None),
(u'FatalError', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'FatalError', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'FatalError', u'Description1', u'Text', 135, 70, 220, 40, 196611, None, u'[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.', None, None),
(u'FatalError', u'Description2', u'Text', 135, 115, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'PrepareDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Cancel', None),
(u'PrepareDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'PrepareDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'PrepareDlg', u'Description', u'Text', 135, 70, 220, 20, 196611, None, u'Please wait while the [Wizard] prepares to guide you through the installation.', None, None),
(u'PrepareDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'PrepareDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', None, None),
(u'PrepareDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', None, None),
(u'PrepareDlg', u'ActionData', u'Text', 135, 125, 220, 30, 196611, None, None, None, None),
(u'PrepareDlg', u'ActionText', u'Text', 135, 100, 220, 20, 196611, None, None, None, None),
(u'ProgressDlg', u'Text', u'Text', 35, 65, 300, 20, 3, None, u'Please wait while the [Wizard] [Progress2] [ProductName]. This may take several minutes.', None, None),
(u'ProgressDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'ProgressDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'ProgressDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ProgressDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'ProgressDlg', u'Title', u'Text', 20, 15, 200, 15, 196611, None, u'[DlgTitleFont][Progress1] [ProductName]', None, None),
(u'ProgressDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'ProgressDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'ProgressDlg', u'ActionText', u'Text', 70, 100, 265, 10, 3, None, None, None, None),
(u'ProgressDlg', u'ProgressBar', u'ProgressBar', 35, 115, 300, 10, 65537, None, u'Progress done', None, None),
(u'ProgressDlg', u'StatusLabel', u'Text', 35, 100, 35, 10, 3, None, u'Status:', None, None),
(u'UserExit', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'UserExit', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'UserExit', u'Cancel', u'PushButton', 304, 243, 56, 17, 1, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'UserExit', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}[ProductName] [Wizard] was interrupted', None, None),
(u'UserExit', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Finish', None),
(u'UserExit', u'Finish', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Finish]', u'Cancel', None),
(u'UserExit', u'Description1', u'Text', 135, 70, 220, 40, 196611, None, u'[ProductName] setup was interrupted. Your system has not been modified. To install this program at a later time, please run the installation again.', None, None),
(u'UserExit', u'Description2', u'Text', 135, 115, 220, 20, 196611, None, u'Click the Finish button to exit the [Wizard].', None, None),
(u'AdminBrowseDlg', u'Up', u'PushButton', 298, 55, 19, 19, 3670019, None, u'Up', u'NewFolder', u'Up One Level|'),
(u'AdminBrowseDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'PathEdit', None),
(u'AdminBrowseDlg', u'PathEdit', u'PathEdit', 84, 202, 261, 17, 3, u'TARGETDIR', None, u'OK', None),
(u'AdminBrowseDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminBrowseDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminBrowseDlg', u'Cancel', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'ComboLabel', None),
(u'AdminBrowseDlg', u'ComboLabel', u'Text', 25, 58, 44, 10, 3, None, u'&Look in:', u'DirectoryCombo', None),
(u'AdminBrowseDlg', u'DirectoryCombo', u'DirectoryCombo', 70, 55, 220, 80, 458755, u'TARGETDIR', None, u'Up', None),
(u'AdminBrowseDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Browse to the destination folder', None, None),
(u'AdminBrowseDlg', u'DirectoryList', u'DirectoryList', 25, 83, 320, 110, 7, u'TARGETDIR', None, u'PathLabel', None),
(u'AdminBrowseDlg', u'PathLabel', u'Text', 25, 205, 59, 10, 3, None, u'&Folder name:', u'BannerBitmap', None),
(u'AdminBrowseDlg', u'NewFolder', u'PushButton', 325, 55, 19, 19, 3670019, None, u'New', u'DirectoryList', u'Create A New Folder|'),
(u'AdminBrowseDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'Cancel', None),
(u'AdminBrowseDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Change current destination folder', None, None),
(u'AdminInstallPointDlg', u'Text', u'Text', 25, 80, 320, 10, 3, None, u'&Enter a new network location or click Browse to browse to one.', u'PathEdit', None),
(u'AdminInstallPointDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Text', None),
(u'AdminInstallPointDlg', u'PathEdit', u'PathEdit', 25, 93, 320, 18, 3, u'TARGETDIR', None, u'Browse', None),
(u'AdminInstallPointDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminInstallPointDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminInstallPointDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'AdminInstallPointDlg', u'Description', u'Text', 25, 20, 280, 20, 196611, None, u'Please specify a network location for the server image of [ProductName] product', None, None),
(u'AdminInstallPointDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Network Location', None, None),
(u'AdminInstallPointDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminInstallPointDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'AdminInstallPointDlg', u'Browse', u'PushButton', 289, 119, 56, 17, 3, None, u'[ButtonText_Browse]', u'Back', None),
(u'AdminRegistrationDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OrganizationLabel', None),
(u'AdminRegistrationDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'AdminRegistrationDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'AdminRegistrationDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'AdminRegistrationDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please enter your company information', None, None),
(u'AdminRegistrationDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Company Information', None, None),
(u'AdminRegistrationDlg', u'Back', u'PushButton', 180, 243, 56, 17, 65539, None, u'[ButtonText_Back]', u'Next', None),
(u'AdminRegistrationDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'AdminRegistrationDlg', u'OrganizationLabel', u'Text', 45, 71, 285, 30, 3, None, u'&Please enter the name of your organization in the box below. This will be used as default company name for subsequent installations of [ProductName]:', u'OrganizationEdit', None),
(u'AdminRegistrationDlg', u'CDKeyEdit', u'MaskedEdit', 45, 143, 250, 16, 3, u'PIDKEY', u'[PIDTemplate]', u'Back', None),
(u'AdminRegistrationDlg', u'CDKeyLabel', u'Text', 45, 130, 50, 10, 3, None, u'CD &Key:', u'CDKeyEdit', None),
(u'AdminRegistrationDlg', u'OrganizationEdit', u'Edit', 45, 105, 220, 18, 3, u'COMPANYNAME', u'{80}', u'CDKeyLabel', None),
(u'BrowseDlg', u'Up', u'PushButton', 298, 55, 19, 19, 3670019, None, u'Up', u'NewFolder', u'Up One Level|'),
(u'BrowseDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'PathEdit', None),
(u'BrowseDlg', u'PathEdit', u'PathEdit', 84, 202, 261, 18, 11, u'_BrowseProperty', None, u'OK', None),
(u'BrowseDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'BrowseDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'BrowseDlg', u'Cancel', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'ComboLabel', None),
(u'BrowseDlg', u'ComboLabel', u'Text', 25, 58, 44, 10, 3, None, u'&Look in:', u'DirectoryCombo', None),
(u'BrowseDlg', u'DirectoryCombo', u'DirectoryCombo', 70, 55, 220, 80, 393227, u'_BrowseProperty', None, u'Up', None),
(u'BrowseDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Browse to the destination folder', None, None),
(u'BrowseDlg', u'DirectoryList', u'DirectoryList', 25, 83, 320, 110, 15, u'_BrowseProperty', None, u'PathLabel', None),
(u'BrowseDlg', u'PathLabel', u'Text', 25, 205, 59, 10, 3, None, u'&Folder name:', u'BannerBitmap', None),
(u'BrowseDlg', u'NewFolder', u'PushButton', 325, 55, 19, 19, 3670019, None, u'New', u'DirectoryList', u'Create A New Folder|'),
(u'BrowseDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'Cancel', None),
(u'BrowseDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Change current destination folder', None, None),
(u'CancelDlg', u'Text', u'Text', 48, 15, 194, 30, 3, None, u'Are you sure you want to cancel [ProductName] installation?', None, None),
(u'CancelDlg', u'Icon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[InfoIcon]', None, u'Information icon|'),
(u'CancelDlg', u'No', u'PushButton', 132, 57, 56, 17, 3, None, u'[ButtonText_No]', u'Yes', None),
(u'CancelDlg', u'Yes', u'PushButton', 72, 57, 56, 17, 3, None, u'[ButtonText_Yes]', u'No', None),
(u'CustomizeDlg', u'Text', u'Text', 25, 55, 320, 20, 3, None, u'Click on the icons in the tree below to change the way features will be installed.', None, None),
(u'CustomizeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Tree', None),
(u'CustomizeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'CustomizeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'CustomizeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'CustomizeDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Select the way you want features to be installed.', None, None),
(u'CustomizeDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Custom Setup', None, None),
(u'CustomizeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'CustomizeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'CustomizeDlg', u'Browse', u'PushButton', 304, 200, 56, 17, 3, None, u'[ButtonText_Browse]', u'Reset', None),
(u'CustomizeDlg', u'Tree', u'SelectionTree', 25, 85, 175, 95, 7, u'_BrowseProperty', u'Tree of selections', u'Browse', None),
(u'CustomizeDlg', u'Box', u'GroupBox', 210, 81, 140, 98, 1, None, None, None, None),
(u'CustomizeDlg', u'Reset', u'PushButton', 42, 243, 56, 17, 3, None, u'[ButtonText_Reset]', u'DiskCost', None),
(u'CustomizeDlg', u'DiskCost', u'PushButton', 111, 243, 56, 17, 3, None, u'Disk &Usage', u'Back', None),
(u'CustomizeDlg', u'ItemDescription', u'Text', 215, 90, 131, 30, 3, None, u'Multiline description of the currently selected item.', None, None),
(u'CustomizeDlg', u'ItemSize', u'Text', 215, 130, 131, 45, 3, None, u'The size of the currently selected item.', None, None),
(u'CustomizeDlg', u'Location', u'Text', 75, 200, 215, 20, 3, None, u"<The selection's path>", None, None),
(u'CustomizeDlg', u'LocationLabel', u'Text', 25, 200, 50, 10, 3, None, u'Location:', None, None),
(u'DiskCostDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes (if any) do not have enough disk space available for the currently selected features. You can either remove some files from the highlighted volumes, or choose to install fewer features onto local drive(s), or select different destination drive(s).', None, None),
(u'DiskCostDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OK', None),
(u'DiskCostDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'DiskCostDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'DiskCostDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'The disk space required for the installation of the selected features.', None, None),
(u'DiskCostDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'BannerBitmap', None),
(u'DiskCostDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Disk Space Requirements', None, None),
(u'DiskCostDlg', u'VolumeList', u'VolumeCostList', 20, 100, 330, 120, 393223, None, u'{120}{70}{70}{70}{70}', None, None),
(u'ErrorDlg', u'Y', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Yes]', None, None),
(u'ErrorDlg', u'A', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Cancel]', None, None),
(u'ErrorDlg', u'C', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Cancel]', None, None),
(u'ErrorDlg', u'ErrorIcon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[InfoIcon]', None, u'Information icon|'),
(u'ErrorDlg', u'ErrorText', u'Text', 48, 15, 205, 60, 3, None, u'Information text', None, None),
(u'ErrorDlg', u'I', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Ignore]', None, None),
(u'ErrorDlg', u'N', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_No]', None, None),
(u'ErrorDlg', u'O', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_OK]', None, None),
(u'ErrorDlg', u'R', u'PushButton', 100, 80, 56, 17, 3, None, u'[ButtonText_Retry]', None, None),
(u'FilesInUse', u'Text', u'Text', 20, 55, 330, 30, 3, None, u'The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.', None, None),
(u'FilesInUse', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Retry', None),
(u'FilesInUse', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'FilesInUse', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'FilesInUse', u'Description', u'Text', 20, 23, 280, 20, 196611, None, u'Some files that need to be updated are currently in use.', None, None),
(u'FilesInUse', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Files in Use', None, None),
(u'FilesInUse', u'Retry', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Retry]', u'Ignore', None),
(u'FilesInUse', u'Exit', u'PushButton', 166, 243, 56, 17, 3, None, u'[ButtonText_Exit]', u'BannerBitmap', None),
(u'FilesInUse', u'Ignore', u'PushButton', 235, 243, 56, 17, 3, None, u'[ButtonText_Ignore]', u'Exit', None),
(u'FilesInUse', u'List', u'ListBox', 20, 87, 330, 130, 7, u'FileInUseProcess', None, None, None),
(u'LicenseAgreementDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'AgreementText', None),
(u'LicenseAgreementDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'LicenseAgreementDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'LicenseAgreementDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'LicenseAgreementDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please read the following license agreement carefully', None, None),
(u'LicenseAgreementDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]End-User License Agreement', None, None),
(u'LicenseAgreementDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'LicenseAgreementDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'LicenseAgreementDlg', u'AgreementText', u'ScrollableText', 20, 60, 330, 120, 7, None, u'{\\rtf1\\ansi\\ansicpg1252\\deff0\\deftab720{\\fonttbl{\\f0\\froman\\fprq2 Times New Roman;}}{\\colortbl\\red0\\green0\\blue0;} \\deflang1033\\horzdoc{\\*\\fchars }{\\*\\lchars }\\pard\\plain\\f0\\fs20 <Your license agreement should go here.>\\par }', u'Buttons', None),
(u'LicenseAgreementDlg', u'Buttons', u'RadioButtonGroup', 20, 187, 330, 40, 3, u'IAgree', None, u'Back', None),
(u'MaintenanceTypeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'ChangeLabel', None),
(u'MaintenanceTypeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'MaintenanceTypeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'MaintenanceTypeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'MaintenanceTypeDlg', u'Description', u'Text', 25, 23, 280, 20, 196611, None, u'Select the operation you wish to perform.', None, None),
(u'MaintenanceTypeDlg', u'Title', u'Text', 15, 6, 240, 15, 196611, None, u'[DlgTitleFont]Modify, Repair or Remove installation', None, None),
(u'MaintenanceTypeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'MaintenanceTypeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'MaintenanceTypeDlg', u'ChangeLabel', u'Text', 105, 65, 100, 10, 3, None, u'[DlgTitleFont]&Modify', u'ChangeButton', None),
(u'MaintenanceTypeDlg', u'ChangeButton', u'PushButton', 50, 65, 38, 38, 5767171, None, u'[CustomSetupIcon]', u'RepairLabel', u'Modify Installation|'),
(u'MaintenanceTypeDlg', u'RepairLabel', u'Text', 105, 114, 100, 10, 3, None, u'[DlgTitleFont]Re&pair', u'RepairButton', None),
(u'MaintenanceTypeDlg', u'ChangeText', u'Text', 105, 78, 230, 20, 3, None, u'Allows users to change the way features are installed.', None, None),
(u'MaintenanceTypeDlg', u'RemoveButton', u'PushButton', 50, 163, 38, 38, 5767171, None, u'[RemoveIcon]', u'Back', u'Remove Installation|'),
(u'MaintenanceTypeDlg', u'RemoveLabel', u'Text', 105, 163, 100, 10, 3, None, u'[DlgTitleFont]&Remove', u'RemoveButton', None),
(u'MaintenanceTypeDlg', u'RemoveText', u'Text', 105, 176, 230, 20, 3, None, u'Removes [ProductName] from your computer.', None, None),
(u'MaintenanceTypeDlg', u'RepairButton', u'PushButton', 50, 114, 38, 38, 5767171, None, u'[RepairIcon]', u'RemoveLabel', u'Repair Installation|'),
(u'MaintenanceTypeDlg', u'RepairText', u'Text', 105, 127, 230, 30, 3, None, u'Repairs errors in the most recent installation state - fixes missing or corrupt files, shortcuts and registry entries.', None, None),
(u'MaintenanceWelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'MaintenanceWelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'MaintenanceWelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'MaintenanceWelcomeDlg', u'Description', u'Text', 135, 70, 220, 60, 196611, None, u'The [Wizard] will allow you to change the way [ProductName] features are installed on your computer or even to remove [ProductName] from your computer. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'MaintenanceWelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'MaintenanceWelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'MaintenanceWelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'OutOfDiskDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes do not have enough disk space available for the currently selected features. You can either remove some files from the highlighted volumes, or choose to install fewer features onto local drive(s), or select different destination drive(s).', None, None),
(u'OutOfDiskDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'OK', None),
(u'OutOfDiskDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'OutOfDiskDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'OutOfDiskDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'Disk space required for the installation exceeds available disk space.', None, None),
(u'OutOfDiskDlg', u'OK', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_OK]', u'BannerBitmap', None),
(u'OutOfDiskDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Out of Disk Space', None, None),
(u'OutOfDiskDlg', u'VolumeList', u'VolumeCostList', 20, 100, 330, 120, 393223, None, u'{120}{70}{70}{70}{70}', None, None),
(u'OutOfRbDiskDlg', u'Text', u'Text', 20, 53, 330, 40, 3, None, u'The highlighted volumes do not have enough disk space available for the currently selected features. You can either remove some files from the highlighted volumes, or choose to install fewer features onto local drive(s), or select different destination drive(s).', None, None),
(u'OutOfRbDiskDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'No', None),
(u'OutOfRbDiskDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'OutOfRbDiskDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'OutOfRbDiskDlg', u'Description', u'Text', 20, 20, 280, 20, 196611, None, u'Disk space required for the installation exceeds available disk space.', None, None),
(u'OutOfRbDiskDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Out of Disk Space', None, None),
(u'OutOfRbDiskDlg', u'No', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_No]', u'Yes', None),
(u'OutOfRbDiskDlg', u'Yes', u'PushButton', 240, 243, 56, 17, 3, None, u'[ButtonText_Yes]', u'BannerBitmap', None),
(u'OutOfRbDiskDlg', u'VolumeList', u'VolumeCostList', 20, 140, 330, 80, 4587527, None, u'{120}{70}{70}{70}{70}', None, None),
(u'OutOfRbDiskDlg', u'Text2', u'Text', 20, 94, 330, 40, 3, None, u"Alternatively, you may choose to disable the installer's rollback functionality. This allows the installer to restore your computer's original state should the installation be interrupted in any way. Click Yes if you wish to take the risk to disable rollback.", None, None),
(u'ResumeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'ResumeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'ResumeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'ResumeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will complete the installation of [ProductName] on your computer. Click Install to continue or Cancel to exit the [Wizard].', None, None),
(u'ResumeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Resuming the [ProductName] [Wizard]', None, None),
(u'ResumeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Install', None),
(u'ResumeDlg', u'Install', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Install]', u'Cancel', None),
(u'SetupTypeDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'TypicalLabel', None),
(u'SetupTypeDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'SetupTypeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'SetupTypeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'SetupTypeDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Choose the setup type that best suits your needs', None, None),
(u'SetupTypeDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Choose Setup Type', None, None),
(u'SetupTypeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'SetupTypeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 1, None, u'[ButtonText_Next]', u'Cancel', None),
(u'SetupTypeDlg', u'TypicalLabel', u'Text', 105, 65, 100, 10, 3, None, u'[DlgTitleFont]&Typical', u'TypicalButton', None),
(u'SetupTypeDlg', u'CompleteButton', u'PushButton', 50, 171, 38, 38, 5767171, None, u'[CompleteSetupIcon]', u'Back', u'Complete Installation|'),
(u'SetupTypeDlg', u'CompleteLabel', u'Text', 105, 171, 100, 10, 3, None, u'[DlgTitleFont]C&omplete', u'CompleteButton', None),
(u'SetupTypeDlg', u'CompleteText', u'Text', 105, 184, 230, 20, 3, None, u'All program features will be installed. (Requires most disk space)', None, None),
(u'SetupTypeDlg', u'CustomButton', u'PushButton', 50, 118, 38, 38, 5767171, None, u'[CustomSetupIcon]', u'CompleteLabel', u'Custom Installation|'),
(u'SetupTypeDlg', u'CustomLabel', u'Text', 105, 118, 100, 10, 3, None, u'[DlgTitleFont]C&ustom', u'CustomButton', None),
(u'SetupTypeDlg', u'CustomText', u'Text', 105, 131, 230, 30, 3, None, u'Allows users to choose which program features will be installed and where they will be installed. Recommended for advanced users.', None, None),
(u'SetupTypeDlg', u'TypicalButton', u'PushButton', 50, 65, 38, 38, 5767171, None, u'[InstallerIcon]', u'CustomLabel', u'Typical Installation|'),
(u'SetupTypeDlg', u'TypicalText', u'Text', 105, 78, 230, 20, 3, None, u'Installs the most common program features. Recommended for most users.', None, None),
(u'UserRegistrationDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'NameLabel', None),
(u'UserRegistrationDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'UserRegistrationDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'UserRegistrationDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'UserRegistrationDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'Please enter your customer information', None, None),
(u'UserRegistrationDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Customer Information', None, None),
(u'UserRegistrationDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Next', None),
(u'UserRegistrationDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
(u'UserRegistrationDlg', u'OrganizationLabel', u'Text', 45, 110, 100, 15, 3, None, u'&Organization:', u'OrganizationEdit', None),
(u'UserRegistrationDlg', u'CDKeyEdit', u'MaskedEdit', 45, 159, 250, 16, 3, u'PIDKEY', u'[PIDTemplate]', u'Back', None),
(u'UserRegistrationDlg', u'CDKeyLabel', u'Text', 45, 147, 50, 10, 3, None, u'CD &Key:', u'CDKeyEdit', None),
(u'UserRegistrationDlg', u'OrganizationEdit', u'Edit', 45, 122, 220, 18, 3, u'COMPANYNAME', u'{80}', u'CDKeyLabel', None),
(u'UserRegistrationDlg', u'NameLabel', u'Text', 45, 73, 100, 15, 3, None, u'&User Name:', u'NameEdit', None),
(u'UserRegistrationDlg', u'NameEdit', u'Edit', 45, 85, 220, 18, 3, u'USERNAME', u'{80}', u'OrganizationLabel', None),
(u'VerifyReadyDlg', u'Text', u'Text', 25, 70, 320, 20, 3, None, u'Click Install to begin the installation. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyReadyDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyReadyDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyReadyDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyReadyDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyReadyDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'The [Wizard] is ready to begin the [InstallMode] installation', None, None),
(u'VerifyReadyDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Ready to Install', None, None),
(u'VerifyReadyDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Install', None),
(u'VerifyReadyDlg', u'Install', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Install]', u'Cancel', None),
(u'VerifyRemoveDlg', u'Text', u'Text', 25, 70, 320, 30, 3, None, u'Click Remove to remove [ProductName] from your computer. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyRemoveDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyRemoveDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyRemoveDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyRemoveDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyRemoveDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'You have chosen to remove the program from your computer.', None, None),
(u'VerifyRemoveDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Remove [ProductName]', None, None),
(u'VerifyRemoveDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Remove', None),
(u'VerifyRemoveDlg', u'Remove', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Remove]', u'Cancel', None),
(u'VerifyRepairDlg', u'Text', u'Text', 25, 70, 320, 30, 3, None, u'Click Repair to repair the installation of [ProductName]. If you want to review or change any of your installation settings, click Back. Click Cancel to exit the wizard.', None, None),
(u'VerifyRepairDlg', u'BannerBitmap', u'Bitmap', 0, 0, 374, 44, 1, None, u'[BannerBitmap]', u'Back', None),
(u'VerifyRepairDlg', u'BannerLine', u'Line', 0, 44, 374, 0, 1, None, None, None, None),
(u'VerifyRepairDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'VerifyRepairDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'BannerBitmap', None),
(u'VerifyRepairDlg', u'Description', u'Text', 25, 23, 280, 15, 196611, None, u'The [Wizard] is ready to begin the repair of [ProductName].', None, None),
(u'VerifyRepairDlg', u'Title', u'Text', 15, 6, 200, 15, 196611, None, u'[DlgTitleFont]Repair [ProductName]', None, None),
(u'VerifyRepairDlg', u'Back', u'PushButton', 180, 243, 56, 17, 3, None, u'[ButtonText_Back]', u'Repair', None),
(u'VerifyRepairDlg', u'Repair', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Repair]', u'Cancel', None),
(u'WaitForCostingDlg', u'Text', u'Text', 48, 15, 194, 30, 3, None, u'Please wait while the installer finishes determining your disk space requirements.', None, None),
(u'WaitForCostingDlg', u'Icon', u'Icon', 15, 15, 24, 24, 5242881, None, u'[ExclamationIcon]', None, u'Exclamation icon|'),
(u'WaitForCostingDlg', u'Return', u'PushButton', 102, 57, 56, 17, 3, None, u'[ButtonText_Return]', None, None),
(u'WelcomeDlg', u'Bitmap', u'Bitmap', 0, 0, 370, 234, 1, None, u'[DialogBitmap]', u'Back', None),
(u'WelcomeDlg', u'BottomLine', u'Line', 0, 234, 374, 0, 1, None, None, None, None),
(u'WelcomeDlg', u'Cancel', u'PushButton', 304, 243, 56, 17, 3, None, u'[ButtonText_Cancel]', u'Bitmap', None),
(u'WelcomeDlg', u'Description', u'Text', 135, 70, 220, 30, 196611, None, u'The [Wizard] will install [ProductName] on your computer. Click Next to continue or Cancel to exit the [Wizard].', None, None),
(u'WelcomeDlg', u'Title', u'Text', 135, 20, 220, 60, 196611, None, u'{\\VerdanaBold13}Welcome to the [ProductName] [Wizard]', None, None),
(u'WelcomeDlg', u'Back', u'PushButton', 180, 243, 56, 17, 1, None, u'[ButtonText_Back]', u'Next', None),
(u'WelcomeDlg', u'Next', u'PushButton', 236, 243, 56, 17, 3, None, u'[ButtonText_Next]', u'Cancel', None),
]
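# The rows above follow the standard Windows Installer Control table
# column order: (Dialog_, Control, Type, X, Y, Width, Height, Attributes,
# Property, Text, Control_Next, Help).  Control_Next chains the tab order
# within each dialog; Help holds a 'tooltip|context help' string.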
ListBox = [
]
ActionText = [
(u'InstallValidate', u'Validating install', None),
(u'InstallFiles', u'Copying new files', u'File: [1], Directory: [9], Size: [6]'),
(u'InstallAdminPackage', u'Copying network install files', u'File: [1], Directory: [9], Size: [6]'),
(u'FileCost', u'Computing space requirements', None),
(u'CostInitialize', u'Computing space requirements', None),
(u'CostFinalize', u'Computing space requirements', None),
(u'CreateShortcuts', u'Creating shortcuts', u'Shortcut: [1]'),
(u'PublishComponents', u'Publishing Qualified Components', u'Component ID: [1], Qualifier: [2]'),
(u'PublishFeatures', u'Publishing Product Features', u'Feature: [1]'),
(u'PublishProduct', u'Publishing product information', None),
(u'RegisterClassInfo', u'Registering Class servers', u'Class Id: [1]'),
(u'RegisterExtensionInfo', u'Registering extension servers', u'Extension: [1]'),
(u'RegisterMIMEInfo', u'Registering MIME info', u'MIME Content Type: [1], Extension: [2]'),
(u'RegisterProgIdInfo', u'Registering program identifiers', u'ProgId: [1]'),
(u'AllocateRegistrySpace', u'Allocating registry space', u'Free space: [1]'),
(u'AppSearch', u'Searching for installed applications', u'Property: [1], Signature: [2]'),
(u'BindImage', u'Binding executables', u'File: [1]'),
(u'CCPSearch', u'Searching for qualifying products', None),
(u'CreateFolders', u'Creating folders', u'Folder: [1]'),
(u'DeleteServices', u'Deleting services', u'Service: [1]'),
(u'DuplicateFiles', u'Creating duplicate files', u'File: [1], Directory: [9], Size: [6]'),
(u'FindRelatedProducts', u'Searching for related applications', u'Found application: [1]'),
(u'InstallODBC', u'Installing ODBC components', None),
(u'InstallServices', u'Installing new services', u'Service: [2]'),
(u'LaunchConditions', u'Evaluating launch conditions', None),
(u'MigrateFeatureStates', u'Migrating feature states from related applications', u'Application: [1]'),
(u'MoveFiles', u'Moving files', u'File: [1], Directory: [9], Size: [6]'),
(u'PatchFiles', u'Patching files', u'File: [1], Directory: [2], Size: [3]'),
(u'ProcessComponents', u'Updating component registration', None),
(u'RegisterComPlus', u'Registering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2], Users: [3], RSN: [4]}}'),
(u'RegisterFonts', u'Registering fonts', u'Font: [1]'),
(u'RegisterProduct', u'Registering product', u'[1]'),
(u'RegisterTypeLibraries', u'Registering type libraries', u'LibID: [1]'),
(u'RegisterUser', u'Registering user', u'[1]'),
(u'RemoveDuplicateFiles', u'Removing duplicated files', u'File: [1], Directory: [9]'),
(u'RemoveEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action: [3]'),
(u'RemoveExistingProducts', u'Removing applications', u'Application: [1], Command line: [2]'),
(u'RemoveFiles', u'Removing files', u'File: [1], Directory: [9]'),
(u'RemoveFolders', u'Removing folders', u'Folder: [1]'),
(u'RemoveIniValues', u'Removing INI file entries', u'File: [1], Section: [2], Key: [3], Value: [4]'),
(u'RemoveODBC', u'Removing ODBC components', None),
(u'RemoveRegistryValues', u'Removing system registry values', u'Key: [1], Name: [2]'),
(u'RemoveShortcuts', u'Removing shortcuts', u'Shortcut: [1]'),
(u'RMCCPSearch', u'Searching for qualifying products', None),
(u'SelfRegModules', u'Registering modules', u'File: [1], Folder: [2]'),
(u'SelfUnregModules', u'Unregistering modules', u'File: [1], Folder: [2]'),
(u'SetODBCFolders', u'Initializing ODBC directories', None),
(u'StartServices', u'Starting services', u'Service: [1]'),
(u'StopServices', u'Stopping services', u'Service: [1]'),
(u'UnpublishComponents', u'Unpublishing Qualified Components', u'Component ID: [1], Qualifier: [2]'),
(u'UnpublishFeatures', u'Unpublishing Product Features', u'Feature: [1]'),
(u'UnregisterClassInfo', u'Unregistering Class servers', u'Class Id: [1]'),
(u'UnregisterComPlus', u'Unregistering COM+ Applications and Components', u'AppId: [1]{{, AppType: [2]}}'),
(u'UnregisterExtensionInfo', u'Unregistering extension servers', u'Extension: [1]'),
(u'UnregisterFonts', u'Unregistering fonts', u'Font: [1]'),
(u'UnregisterMIMEInfo', u'Unregistering MIME info', u'MIME Content Type: [1], Extension: [2]'),
(u'UnregisterProgIdInfo', u'Unregistering program identifiers', u'ProgId: [1]'),
(u'UnregisterTypeLibraries', u'Unregistering type libraries', u'LibID: [1]'),
(u'WriteEnvironmentStrings', u'Updating environment strings', u'Name: [1], Value: [2], Action: [3]'),
(u'WriteIniValues', u'Writing INI file values', u'File: [1], Section: [2], Key: [3], Value: [4]'),
(u'WriteRegistryValues', u'Writing system registry values', u'Key: [1], Name: [2], Value: [3]'),
(u'Advertise', u'Advertising application', None),
(u'GenerateScript', u'Generating script operations for action:', u'[1]'),
(u'InstallSFPCatalogFile', u'Installing system catalog', u'File: [1], Dependencies: [2]'),
(u'MsiPublishAssemblies', u'Publishing assembly information', u'Application Context:[1], Assembly Name:[2]'),
(u'MsiUnpublishAssemblies', u'Unpublishing assembly information', u'Application Context:[1], Assembly Name:[2]'),
(u'Rollback', u'Rolling back action:', u'[1]'),
(u'RollbackCleanup', u'Removing backup files', u'File: [1]'),
(u'UnmoveFiles', u'Removing moved files', u'File: [1], Directory: [9]'),
(u'UnpublishProduct', u'Unpublishing product information', None),
]
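# ActionText rows: (Action, Description, Template).  Description is shown
# in the progress dialog while the action runs; Template formats the
# per-record ActionData messages, with [1], [2], ... standing for the
# fields of each record (e.g. u'File: [1], Directory: [9], Size: [6]').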
ControlCondition = [
(u'CustomizeDlg', u'Browse', u'Hide', u'Installed'),
(u'CustomizeDlg', u'Location', u'Hide', u'Installed'),
(u'CustomizeDlg', u'LocationLabel', u'Hide', u'Installed'),
(u'LicenseAgreementDlg', u'Next', u'Disable', u'IAgree <> "Yes"'),
(u'LicenseAgreementDlg', u'Next', u'Enable', u'IAgree = "Yes"'),
]
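# ControlCondition rows: (Dialog_, Control_, Action, Condition).  When
# Condition evaluates true, Action (one of Default, Disable, Enable,
# Hide, Show) is applied to the control -- here, hiding the browse
# controls on maintenance runs and gating Next on license acceptance.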
ControlEvent = [
(u'AdminWelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminWelcomeDlg', u'Next', u'NewDialog', u'AdminRegistrationDlg', u'1', 2),
(u'AdminWelcomeDlg', u'Next', u'[InstallMode]', u'Server Image', u'1', 1),
(u'ExitDialog', u'Finish', u'EndDialog', u'Return', u'1', None),
(u'FatalError', u'Finish', u'EndDialog', u'Exit', u'1', None),
(u'PrepareDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'ProgressDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'UserExit', u'Finish', u'EndDialog', u'Exit', u'1', None),
(u'AdminBrowseDlg', u'Up', u'DirectoryListUp', u'0', u'1', None),
(u'AdminBrowseDlg', u'Cancel', u'Reset', u'0', u'1', 1),
(u'AdminBrowseDlg', u'Cancel', u'EndDialog', u'Return', u'1', 2),
(u'AdminBrowseDlg', u'NewFolder', u'DirectoryListNew', u'0', u'1', None),
(u'AdminBrowseDlg', u'OK', u'EndDialog', u'Return', u'1', 2),
(u'AdminBrowseDlg', u'OK', u'SetTargetPath', u'TARGETDIR', u'1', 1),
(u'AdminInstallPointDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminInstallPointDlg', u'Back', u'NewDialog', u'AdminRegistrationDlg', u'1', None),
(u'AdminInstallPointDlg', u'Next', u'SetTargetPath', u'TARGETDIR', u'1', 1),
(u'AdminInstallPointDlg', u'Next', u'NewDialog', u'VerifyReadyDlg', u'1', 2),
(u'AdminInstallPointDlg', u'Browse', u'SpawnDialog', u'AdminBrowseDlg', u'1', None),
(u'AdminRegistrationDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'AdminRegistrationDlg', u'Back', u'NewDialog', u'AdminWelcomeDlg', u'1', None),
(u'AdminRegistrationDlg', u'Next', u'NewDialog', u'AdminInstallPointDlg', u'ProductID', 2),
(u'AdminRegistrationDlg', u'Next', u'ValidateProductID', u'0', u'0', 1),
(u'BrowseDlg', u'Up', u'DirectoryListUp', u'0', u'1', None),
(u'BrowseDlg', u'Cancel', u'Reset', u'0', u'1', 1),
(u'BrowseDlg', u'Cancel', u'EndDialog', u'Return', u'1', 2),
(u'BrowseDlg', u'NewFolder', u'DirectoryListNew', u'0', u'1', None),
(u'BrowseDlg', u'OK', u'EndDialog', u'Return', u'1', 2),
(u'BrowseDlg', u'OK', u'SetTargetPath', u'[_BrowseProperty]', u'1', 1),
(u'CancelDlg', u'No', u'EndDialog', u'Return', u'1', None),
(u'CancelDlg', u'Yes', u'EndDialog', u'Exit', u'1', None),
(u'CustomizeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'CustomizeDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'InstallMode = "Change"', None),
(u'CustomizeDlg', u'Back', u'NewDialog', u'SetupTypeDlg', u'InstallMode = "Custom"', None),
(u'CustomizeDlg', u'Next', u'NewDialog', u'VerifyReadyDlg', u'1', None),
(u'CustomizeDlg', u'Browse', u'SelectionBrowse', u'BrowseDlg', u'1', None),
(u'CustomizeDlg', u'Reset', u'Reset', u'0', u'1', None),
(u'CustomizeDlg', u'DiskCost', u'SpawnDialog', u'DiskCostDlg', u'1', 2),
(u'DiskCostDlg', u'OK', u'EndDialog', u'Return', u'1', None),
(u'ErrorDlg', u'Y', u'EndDialog', u'ErrorYes', u'1', None),
(u'ErrorDlg', u'A', u'EndDialog', u'ErrorAbort', u'1', None),
(u'ErrorDlg', u'C', u'EndDialog', u'ErrorCancel', u'1', None),
(u'ErrorDlg', u'I', u'EndDialog', u'ErrorIgnore', u'1', None),
(u'ErrorDlg', u'N', u'EndDialog', u'ErrorNo', u'1', None),
(u'ErrorDlg', u'O', u'EndDialog', u'ErrorOk', u'1', None),
(u'ErrorDlg', u'R', u'EndDialog', u'ErrorRetry', u'1', None),
(u'FilesInUse', u'Retry', u'EndDialog', u'Retry', u'1', None),
(u'FilesInUse', u'Exit', u'EndDialog', u'Exit', u'1', None),
(u'FilesInUse', u'Ignore', u'EndDialog', u'Ignore', u'1', None),
(u'LicenseAgreementDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'LicenseAgreementDlg', u'Back', u'NewDialog', u'WelcomeDlg', u'1', None),
(u'LicenseAgreementDlg', u'Next', u'NewDialog', u'SetupTypeDlg', u'IAgree = "Yes" AND ShowUserRegistrationDlg <> 1', 3),
(u'LicenseAgreementDlg', u'Next', u'NewDialog', u'UserRegistrationDlg', u'IAgree = "Yes" AND ShowUserRegistrationDlg = 1', 1),
(u'LicenseAgreementDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 2),
(u'MaintenanceTypeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'MaintenanceTypeDlg', u'Back', u'NewDialog', u'MaintenanceWelcomeDlg', u'1', None),
(u'MaintenanceTypeDlg', u'ChangeButton', u'NewDialog', u'CustomizeDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[InstallMode]', u'Change', u'1', 1),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[Progress1]', u'Changing', u'1', 2),
(u'MaintenanceTypeDlg', u'ChangeButton', u'[Progress2]', u'changes', u'1', 3),
(u'MaintenanceTypeDlg', u'RemoveButton', u'NewDialog', u'VerifyRemoveDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[InstallMode]', u'Remove', u'1', 1),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[Progress1]', u'Removing', u'1', 2),
(u'MaintenanceTypeDlg', u'RemoveButton', u'[Progress2]', u'removes', u'1', 3),
(u'MaintenanceTypeDlg', u'RepairButton', u'NewDialog', u'VerifyRepairDlg', u'1', 4),
(u'MaintenanceTypeDlg', u'RepairButton', u'[InstallMode]', u'Repair', u'1', 1),
(u'MaintenanceTypeDlg', u'RepairButton', u'[Progress1]', u'Repairing', u'1', 2),
(u'MaintenanceTypeDlg', u'RepairButton', u'[Progress2]', u'repairs', u'1', 3),
(u'MaintenanceWelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'MaintenanceWelcomeDlg', u'Next', u'NewDialog', u'MaintenanceTypeDlg', u'1', 2),
(u'MaintenanceWelcomeDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 1),
(u'OutOfDiskDlg', u'OK', u'EndDialog', u'Return', u'1', None),
(u'OutOfRbDiskDlg', u'No', u'EndDialog', u'Return', u'1', None),
(u'OutOfRbDiskDlg', u'Yes', u'EndDialog', u'Return', u'1', 2),
(u'OutOfRbDiskDlg', u'Yes', u'EnableRollback', u'False', u'1', 1),
(u'ResumeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'ResumeDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'ResumeDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 2),
(u'ResumeDlg', u'Install', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 6),
(u'ResumeDlg', u'Install', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 3),
(u'ResumeDlg', u'Install', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 1),
(u'ResumeDlg', u'Install', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'SetupTypeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'SetupTypeDlg', u'Back', u'NewDialog', u'LicenseAgreementDlg', u'ShowUserRegistrationDlg <> 1', None),
(u'SetupTypeDlg', u'Back', u'NewDialog', u'UserRegistrationDlg', u'ShowUserRegistrationDlg = 1', None),
(u'SetupTypeDlg', u'CompleteButton', u'NewDialog', u'VerifyReadyDlg', u'1', 3),
(u'SetupTypeDlg', u'CompleteButton', u'[InstallMode]', u'Complete', u'1', 1),
(u'SetupTypeDlg', u'CompleteButton', u'SetInstallLevel', u'1000', u'1', 2),
(u'SetupTypeDlg', u'CustomButton', u'NewDialog', u'CustomizeDlg', u'1', 2),
(u'SetupTypeDlg', u'CustomButton', u'[InstallMode]', u'Custom', u'1', 1),
(u'SetupTypeDlg', u'TypicalButton', u'NewDialog', u'VerifyReadyDlg', u'1', 3),
(u'SetupTypeDlg', u'TypicalButton', u'[InstallMode]', u'Typical', u'1', 1),
(u'SetupTypeDlg', u'TypicalButton', u'SetInstallLevel', u'3', u'1', 2),
(u'UserRegistrationDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'UserRegistrationDlg', u'Back', u'NewDialog', u'LicenseAgreementDlg', u'1', None),
(u'UserRegistrationDlg', u'Next', u'NewDialog', u'SetupTypeDlg', u'ProductID', 3),
(u'UserRegistrationDlg', u'Next', u'ValidateProductID', u'0', u'0', 1),
(u'UserRegistrationDlg', u'Next', u'SpawnWaitDialog', u'WaitForCostingDlg', u'CostingComplete = 1', 2),
(u'VerifyReadyDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'AdminInstallPointDlg', u'InstallMode = "Server Image"', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'CustomizeDlg', u'InstallMode = "Custom" OR InstallMode = "Change"', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'InstallMode = "Repair"', None),
(u'VerifyReadyDlg', u'Back', u'NewDialog', u'SetupTypeDlg', u'InstallMode = "Typical" OR InstallMode = "Complete"', None),
(u'VerifyReadyDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 3),
(u'VerifyReadyDlg', u'Install', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 1),
(u'VerifyReadyDlg', u'Install', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 5),
(u'VerifyReadyDlg', u'Install', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 2),
(u'VerifyReadyDlg', u'Install', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'VerifyRemoveDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyRemoveDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'1', None),
(u'VerifyRemoveDlg', u'Remove', u'Remove', u'All', u'OutOfDiskSpace <> 1', 1),
(u'VerifyRemoveDlg', u'Remove', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 4),
(u'VerifyRemoveDlg', u'Remove', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 2),
(u'VerifyRemoveDlg', u'Remove', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 6),
(u'VerifyRemoveDlg', u'Remove', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 3),
(u'VerifyRemoveDlg', u'Remove', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'VerifyRepairDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'VerifyRepairDlg', u'Back', u'NewDialog', u'MaintenanceTypeDlg', u'1', None),
(u'VerifyRepairDlg', u'Repair', u'EndDialog', u'Return', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 5),
(u'VerifyRepairDlg', u'Repair', u'EndDialog', u'Return', u'OutOfDiskSpace <> 1', 3),
(u'VerifyRepairDlg', u'Repair', u'SpawnDialog', u'OutOfDiskDlg', u'(OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 1) OR (OutOfDiskSpace = 1 AND PROMPTROLLBACKCOST="F")', 7),
(u'VerifyRepairDlg', u'Repair', u'SpawnDialog', u'OutOfRbDiskDlg', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND (PROMPTROLLBACKCOST="P" OR NOT PROMPTROLLBACKCOST)', 4),
(u'VerifyRepairDlg', u'Repair', u'EnableRollback', u'False', u'OutOfDiskSpace = 1 AND OutOfNoRbDiskSpace = 0 AND PROMPTROLLBACKCOST="D"', 6),
(u'VerifyRepairDlg', u'Repair', u'Reinstall', u'All', u'OutOfDiskSpace <> 1', 2),
(u'VerifyRepairDlg', u'Repair', u'ReinstallMode', u'ecmus', u'OutOfDiskSpace <> 1', 1),
(u'WaitForCostingDlg', u'Return', u'EndDialog', u'Exit', u'1', None),
(u'WelcomeDlg', u'Cancel', u'SpawnDialog', u'CancelDlg', u'1', None),
(u'WelcomeDlg', u'Next', u'NewDialog', u'LicenseAgreementDlg', u'1', None),
]
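# ControlEvent rows: (Dialog_, Control_, Event, Argument, Condition,
# Ordering).  Multiple events attached to one control fire in Ordering
# sequence when their Condition holds; an Event written in [brackets]
# (e.g. [InstallMode]) sets that property to Argument instead.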
Dialog = [
(u'AdminWelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
(u'ExitDialog', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'FatalError', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'PrepareDlg', 50, 50, 370, 270, 1, u'[ProductName] [Setup]', u'Cancel', u'Cancel', u'Cancel'),
(u'ProgressDlg', 50, 50, 370, 270, 1, u'[ProductName] [Setup]', u'Cancel', u'Cancel', u'Cancel'),
(u'UserExit', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Finish', u'Finish', u'Finish'),
(u'AdminBrowseDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'PathEdit', u'OK', u'Cancel'),
(u'AdminInstallPointDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Text', u'Next', u'Cancel'),
(u'AdminRegistrationDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OrganizationLabel', u'Next', u'Cancel'),
(u'BrowseDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'PathEdit', u'OK', u'Cancel'),
(u'CancelDlg', 50, 10, 260, 85, 3, u'[ProductName] [Setup]', u'No', u'No', u'No'),
(u'CustomizeDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Tree', u'Next', u'Cancel'),
(u'DiskCostDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OK', u'OK', u'OK'),
(u'ErrorDlg', 50, 10, 270, 105, 65539, u'Installer Information', u'ErrorText', None, None),
(u'FilesInUse', 50, 50, 370, 270, 19, u'[ProductName] [Setup]', u'Retry', u'Retry', u'Retry'),
(u'LicenseAgreementDlg', 50, 50, 370, 270, 3, u'[ProductName] License Agreement', u'Buttons', u'Next', u'Cancel'),
(u'MaintenanceTypeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'ChangeLabel', u'ChangeButton', u'Cancel'),
(u'MaintenanceWelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
(u'OutOfDiskDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'OK', u'OK', u'OK'),
(u'OutOfRbDiskDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'No', u'No', u'No'),
(u'ResumeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Install', u'Install', u'Cancel'),
(u'SetupTypeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'TypicalLabel', u'TypicalButton', u'Cancel'),
(u'UserRegistrationDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'NameLabel', u'Next', u'Cancel'),
(u'VerifyReadyDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Install', u'Install', u'Cancel'),
(u'VerifyRemoveDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Back', u'Back', u'Cancel'),
(u'VerifyRepairDlg', 50, 50, 370, 270, 35, u'[ProductName] [Setup]', u'Repair', u'Repair', u'Cancel'),
(u'WaitForCostingDlg', 50, 10, 260, 85, 3, u'[ProductName] [Setup]', u'Return', u'Return', u'Return'),
(u'WelcomeDlg', 50, 50, 370, 270, 3, u'[ProductName] [Setup]', u'Next', u'Next', u'Cancel'),
]
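# Dialog rows: (Dialog, HCentering, VCentering, Width, Height, Attributes,
# Title, Control_First, Control_Default, Control_Cancel), matching the
# standard MSI Dialog table.  Coordinates and sizes are in installer
# units, not pixels.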
EventMapping = [
(u'PrepareDlg', u'ActionData', u'ActionData', u'Text'),
(u'PrepareDlg', u'ActionText', u'ActionText', u'Text'),
(u'ProgressDlg', u'ActionText', u'ActionText', u'Text'),
(u'ProgressDlg', u'ProgressBar', u'SetProgress', u'Progress'),
(u'AdminBrowseDlg', u'DirectoryCombo', u'IgnoreChange', u'IgnoreChange'),
(u'BrowseDlg', u'DirectoryCombo', u'IgnoreChange', u'IgnoreChange'),
(u'CustomizeDlg', u'Next', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'Reset', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'DiskCost', u'SelectionNoItems', u'Enabled'),
(u'CustomizeDlg', u'ItemDescription', u'SelectionDescription', u'Text'),
(u'CustomizeDlg', u'ItemSize', u'SelectionSize', u'Text'),
(u'CustomizeDlg', u'Location', u'SelectionPath', u'Text'),
(u'CustomizeDlg', u'Location', u'SelectionPathOn', u'Visible'),
(u'CustomizeDlg', u'LocationLabel', u'SelectionPathOn', u'Visible'),
]
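# EventMapping rows: (Dialog_, Control_, Event, Attribute).  Each entry
# subscribes a control attribute (Text, Progress, Enabled, Visible, ...)
# to a published installer event such as ActionText or SetProgress.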
InstallExecuteSequence = [
(u'InstallValidate', None, 1400),
(u'InstallInitialize', None, 1500),
(u'InstallFinalize', None, 6600),
(u'InstallFiles', None, 4000),
(u'FileCost', None, 900),
(u'CostInitialize', None, 800),
(u'CostFinalize', None, 1000),
(u'CreateShortcuts', None, 4500),
(u'PublishComponents', None, 6200),
(u'PublishFeatures', None, 6300),
(u'PublishProduct', None, 6400),
(u'RegisterClassInfo', None, 4600),
(u'RegisterExtensionInfo', None, 4700),
(u'RegisterMIMEInfo', None, 4900),
(u'RegisterProgIdInfo', None, 4800),
(u'ValidateProductID', None, 700),
(u'AllocateRegistrySpace', u'NOT Installed', 1550),
(u'AppSearch', None, 400),
(u'BindImage', None, 4300),
(u'CCPSearch', u'NOT Installed', 500),
(u'CreateFolders', None, 3700),
(u'DeleteServices', u'VersionNT', 2000),
(u'DuplicateFiles', None, 4210),
(u'FindRelatedProducts', None, 200),
(u'InstallODBC', None, 5400),
(u'InstallServices', u'VersionNT', 5800),
(u'LaunchConditions', None, 100),
(u'MigrateFeatureStates', None, 1200),
(u'MoveFiles', None, 3800),
(u'PatchFiles', None, 4090),
(u'ProcessComponents', None, 1600),
(u'RegisterComPlus', None, 5700),
(u'RegisterFonts', None, 5300),
(u'RegisterProduct', None, 6100),
(u'RegisterTypeLibraries', None, 5500),
(u'RegisterUser', None, 6000),
(u'RemoveDuplicateFiles', None, 3400),
(u'RemoveEnvironmentStrings', None, 3300),
(u'RemoveExistingProducts', None, 6700),
(u'RemoveFiles', None, 3500),
(u'RemoveFolders', None, 3600),
(u'RemoveIniValues', None, 3100),
(u'RemoveODBC', None, 2400),
(u'RemoveRegistryValues', None, 2600),
(u'RemoveShortcuts', None, 3200),
(u'RMCCPSearch', u'NOT Installed', 600),
(u'SelfRegModules', None, 5600),
(u'SelfUnregModules', None, 2200),
(u'SetODBCFolders', None, 1100),
(u'StartServices', u'VersionNT', 5900),
(u'StopServices', u'VersionNT', 1900),
(u'UnpublishComponents', None, 1700),
(u'UnpublishFeatures', None, 1800),
(u'UnregisterClassInfo', None, 2700),
(u'UnregisterComPlus', None, 2100),
(u'UnregisterExtensionInfo', None, 2800),
(u'UnregisterFonts', None, 2500),
(u'UnregisterMIMEInfo', None, 3000),
(u'UnregisterProgIdInfo', None, 2900),
(u'UnregisterTypeLibraries', None, 2300),
(u'WriteEnvironmentStrings', None, 5200),
(u'WriteIniValues', None, 5100),
(u'WriteRegistryValues', None, 5000),
]
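# InstallExecuteSequence rows: (Action, Condition, Sequence).  The engine
# runs actions in ascending Sequence order, skipping any whose Condition
# evaluates false; a Condition of None means the action is unconditional.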
InstallUISequence = [
#(u'FileCost', None, 900),
#(u'CostInitialize', None, 800),
#(u'CostFinalize', None, 1000),
#(u'ExecuteAction', None, 1300),
#(u'ExitDialog', None, -1),
#(u'FatalError', None, -3),
(u'PrepareDlg', None, 140),
(u'ProgressDlg', None, 1280),
#(u'UserExit', None, -2),
(u'MaintenanceWelcomeDlg', u'Installed AND NOT RESUME AND NOT Preselected', 1250),
(u'ResumeDlg', u'Installed AND (RESUME OR Preselected)', 1240),
(u'WelcomeDlg', u'NOT Installed', 1230),
#(u'AppSearch', None, 400),
#(u'CCPSearch', u'NOT Installed', 500),
#(u'FindRelatedProducts', None, 200),
#(u'LaunchConditions', None, 100),
#(u'MigrateFeatureStates', None, 1200),
#(u'RMCCPSearch', u'NOT Installed', 600),
]
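# InstallUISequence rows share the (Action, Condition, Sequence) layout.
# The commented-out entries are standard actions kept here for reference
# but excluded from the table; uncommenting a line restores it.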
ListView = [
]
RadioButton = [
(u'IAgree', 1, u'Yes', 5, 0, 250, 15, u'{\\DlgFont8}I &accept the terms in the License Agreement', None),
(u'IAgree', 2, u'No', 5, 20, 250, 15, u'{\\DlgFont8}I &do not accept the terms in the License Agreement', None),
]
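# RadioButton rows: (Property, Order, Value, X, Y, Width, Height, Text,
# Help).  Both buttons belong to the IAgree group that drives the
# license-agreement ControlCondition entries above.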
TextStyle = [
(u'DlgFont8', u'Tahoma', 8, None, 0),
(u'DlgFontBold8', u'Tahoma', 8, None, 1),
(u'VerdanaBold13', u'Verdana', 13, None, 1),
]
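# TextStyle rows: (TextStyle, FaceName, Size, Color, StyleBits); a
# StyleBits value of 1 requests bold.  Styles are referenced from control
# text as {\DlgFont8}, {\VerdanaBold13}, and so on.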
UIText = [
(u'AbsentPath', None),
(u'bytes', u'bytes'),
(u'GB', u'GB'),
(u'KB', u'KB'),
(u'MB', u'MB'),
(u'MenuAbsent', u'Entire feature will be unavailable'),
(u'MenuAdvertise', u'Feature will be installed when required'),
(u'MenuAllCD', u'Entire feature will be installed to run from CD'),
(u'MenuAllLocal', u'Entire feature will be installed on local hard drive'),
(u'MenuAllNetwork', u'Entire feature will be installed to run from network'),
(u'MenuCD', u'Will be installed to run from CD'),
(u'MenuLocal', u'Will be installed on local hard drive'),
(u'MenuNetwork', u'Will be installed to run from network'),
(u'ScriptInProgress', u'Gathering required information...'),
(u'SelAbsentAbsent', u'This feature will remain uninstalled'),
(u'SelAbsentAdvertise', u'This feature will be set to be installed when required'),
(u'SelAbsentCD', u'This feature will be installed to run from CD'),
(u'SelAbsentLocal', u'This feature will be installed on the local hard drive'),
(u'SelAbsentNetwork', u'This feature will be installed to run from the network'),
(u'SelAdvertiseAbsent', u'This feature will become unavailable'),
(u'SelAdvertiseAdvertise', u'Will be installed when required'),
(u'SelAdvertiseCD', u'This feature will be available to run from CD'),
(u'SelAdvertiseLocal', u'This feature will be installed on your local hard drive'),
(u'SelAdvertiseNetwork', u'This feature will be available to run from the network'),
(u'SelCDAbsent', u"This feature will be uninstalled completely; you won't be able to run it from CD"),
(u'SelCDAdvertise', u'This feature will change from run from CD state to set to be installed when required'),
(u'SelCDCD', u'This feature will remain set to run from CD'),
(u'SelCDLocal', u'This feature will change from run from CD state to be installed on the local hard drive'),
(u'SelChildCostNeg', u'This feature frees up [1] on your hard drive.'),
(u'SelChildCostPos', u'This feature requires [1] on your hard drive.'),
(u'SelCostPending', u'Compiling cost for this feature...'),
(u'SelLocalAbsent', u'This feature will be completely removed'),
(u'SelLocalAdvertise', u'This feature will be removed from your local hard drive, but will be set to be installed when required'),
(u'SelLocalCD', u'This feature will be removed from your local hard drive, but will still be available to run from CD'),
(u'SelLocalLocal', u'This feature will remain on your local hard drive'),
(u'SelLocalNetwork', u'This feature will be removed from your local hard drive, but will still be available to run from the network'),
(u'SelNetworkAbsent', u"This feature will be uninstalled completely; you won't be able to run it from the network"),
(u'SelNetworkAdvertise', u'This feature will change from run from network state to set to be installed when required'),
(u'SelNetworkLocal', u'This feature will change from run from network state to be installed on the local hard drive'),
(u'SelNetworkNetwork', u'This feature will remain set to run from the network'),
(u'SelParentCostNegNeg', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'),
(u'SelParentCostNegPos', u'This feature frees up [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'),
(u'SelParentCostPosNeg', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures free up [4] on your hard drive.'),
(u'SelParentCostPosPos', u'This feature requires [1] on your hard drive. It has [2] of [3] subfeatures selected. The subfeatures require [4] on your hard drive.'),
(u'TimeRemaining', u'Time remaining: {[1] minutes }{[2] seconds}'),
(u'VolumeCostAvailable', u'Available'),
(u'VolumeCostDifference', u'Difference'),
(u'VolumeCostRequired', u'Required'),
(u'VolumeCostSize', u'Disk Size'),
(u'VolumeCostVolume', u'Volume'),
]
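# UIText rows: (Key, Text) -- short strings the running installer looks up
# by key (feature-selection menu items, volume-cost column headers, etc.).
#
# Minimal usage sketch (an illustration, not part of the original data):
# on Windows, the standard-library msilib module can copy these tables
# into an .msi database.  The product metadata in the example call below
# is purely hypothetical.

def _add_ui_tables(db):
    # Copy the UI tables defined above into an open msilib database.
    # The dialog control rows near the top of this module belong in the
    # MSI 'Control' table and would be added the same way under the name
    # that list is bound to (its definition is not shown here).
    import msilib  # Windows-only standard library module
    for name in ('ActionText', 'ControlCondition', 'ControlEvent',
                 'Dialog', 'EventMapping', 'InstallExecuteSequence',
                 'InstallUISequence', 'ListBox', 'ListView',
                 'RadioButton', 'TextStyle', 'UIText'):
        msilib.add_data(db, name, globals()[name])
    db.Commit()

# Example call (sketch):
#   import msilib
#   db = msilib.init_database('example.msi', msilib.schema,
#                             'Example Product', msilib.gen_uuid(),
#                             '1.0.0', 'Example Manufacturer')
#   _add_ui_tables(db)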
_Validation = [
(u'AdminExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'AdminExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'AdminExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'AdminUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'AdminUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'AdminUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'Condition', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Expression evaluated to determine if Level in the Feature table is to change.'),
(u'Condition', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Reference to a Feature entry in Feature table.'),
(u'Condition', u'Level', u'N', 0, 32767, None, None, None, None, u'New selection Level to set in Feature table if Condition evaluates to TRUE.'),
(u'AdvtExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'AdvtExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'AdvtExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'BBControl', u'Type', u'N', None, None, None, None, u'Identifier', None, u'The type of the control.'),
(u'BBControl', u'BBControl', u'N', None, None, None, None, u'Identifier', None, u'Name of the control. This name must be unique within a billboard, but can repeat on different billboards.'),
(u'BBControl', u'Billboard_', u'N', None, None, u'Billboard', 1, u'Identifier', None, u'External key to the Billboard table, name of the billboard.'),
(u'BBControl', u'X', u'N', 0, 32767, None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.'),
(u'BBControl', u'Y', u'N', 0, 32767, None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.'),
(u'BBControl', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the control.'),
(u'BBControl', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the control.'),
(u'BBControl', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.'),
(u'BBControl', u'Text', u'Y', None, None, None, None, u'Text', None, u'A string used to set the initial text contained within a control (if appropriate).'),
(u'Billboard', u'Action', u'Y', None, None, None, None, u'Identifier', None, u'The name of an action. The billboard is displayed during the progress messages received from this action.'),
(u'Billboard', u'Billboard', u'N', None, None, None, None, u'Identifier', None, u'Name of the billboard.'),
(u'Billboard', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'An external key to the Feature Table. The billboard is shown only if this feature is being installed.'),
(u'Billboard', u'Ordering', u'Y', 0, 32767, None, None, None, None, u'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.'),
(u'Binary', u'Name', u'N', None, None, None, None, u'Identifier', None, u'Unique key identifying the binary data.'),
(u'Binary', u'Data', u'N', None, None, None, None, u'Binary', None, u'The unformatted binary data.'),
(u'CheckBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to the item.'),
(u'CheckBox', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value string associated with the item.'),
(u'Property', u'Property', u'N', None, None, None, None, u'Identifier', None, u'Name of property, uppercase if settable by launcher or loader.'),
(u'Property', u'Value', u'N', None, None, None, None, u'Text', None, u'String value for property. Never null or empty.'),
(u'ComboBox', u'Text', u'Y', None, None, None, None, u'Formatted', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
(u'ComboBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.'),
(u'ComboBox', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
(u'ComboBox', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
(u'Control', u'Type', u'N', None, None, None, None, u'Identifier', None, u'The type of the control.'),
(u'Control', u'X', u'N', 0, 32767, None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.'),
(u'Control', u'Y', u'N', 0, 32767, None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.'),
(u'Control', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the control.'),
(u'Control', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the control.'),
(u'Control', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.'),
(u'Control', u'Text', u'Y', None, None, None, None, u'Formatted', None, u'A string used to set the initial text contained within a control (if appropriate).'),
(u'Control', u'Property', u'Y', None, None, None, None, u'Identifier', None, u'The name of a defined property to be linked to this control. '),
(u'Control', u'Control', u'N', None, None, None, None, u'Identifier', None, u'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. '),
(u'Control', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'External key to the Dialog table, name of the dialog.'),
(u'Control', u'Control_Next', u'Y', None, None, u'Control', 2, u'Identifier', None, u'The name of another control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!'),
(u'Control', u'Help', u'Y', None, None, None, None, u'Text', None, u'The help strings used with the button. The text is optional. '),
(u'Icon', u'Name', u'N', None, None, None, None, u'Identifier', None, u'Primary key. Name of the icon file.'),
(u'Icon', u'Data', u'N', None, None, None, None, u'Binary', None, u'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.'),
(u'ListBox', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
(u'ListBox', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.'),
(u'ListBox', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
(u'ListBox', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
(u'ActionText', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to be described.'),
(u'ActionText', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description displayed in progress dialog and log when action is executing.'),
(u'ActionText', u'Template', u'Y', None, None, None, None, u'Template', None, u'Optional localized format template used to format action data records for display during action execution.'),
(u'ControlCondition', u'Action', u'N', None, None, None, None, None, u'Default;Disable;Enable;Hide;Show', u'The desired action to be taken on the specified control.'),
(u'ControlCondition', u'Condition', u'N', None, None, None, None, u'Condition', None, u'A standard conditional statement that specifies under which conditions the action should be triggered.'),
(u'ControlCondition', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the dialog.'),
(u'ControlCondition', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'),
(u'ControlEvent', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'A standard conditional statement that specifies under which conditions an event should be triggered.'),
(u'ControlEvent', u'Ordering', u'Y', 0, 2147483647, None, None, None, None, u'An integer used to order several events tied to the same control. Can be left blank.'),
(u'ControlEvent', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the dialog.'),
(u'ControlEvent', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control'),
(u'ControlEvent', u'Event', u'N', None, None, None, None, u'Formatted', None, u'An identifier that specifies the type of the event that should take place when the user interacts with the control specified by the first two entries.'),
(u'ControlEvent', u'Argument', u'N', None, None, None, None, u'Formatted', None, u'A value to be used as a modifier when triggering a particular event.'),
(u'Dialog', u'Width', u'N', 0, 32767, None, None, None, None, u'Width of the bounding rectangle of the dialog.'),
(u'Dialog', u'Height', u'N', 0, 32767, None, None, None, None, u'Height of the bounding rectangle of the dialog.'),
(u'Dialog', u'Attributes', u'Y', 0, 2147483647, None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this dialog.'),
(u'Dialog', u'Title', u'Y', None, None, None, None, u'Formatted', None, u"A text string specifying the title to be displayed in the title bar of the dialog's window."),
(u'Dialog', u'Dialog', u'N', None, None, None, None, u'Identifier', None, u'Name of the dialog.'),
(u'Dialog', u'HCentering', u'N', 0, 100, None, None, None, None, u'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.'),
(u'Dialog', u'VCentering', u'N', 0, 100, None, None, None, None, u'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.'),
(u'Dialog', u'Control_First', u'N', None, None, u'Control', 2, u'Identifier', None, u'Defines the control that has the focus when the dialog is created.'),
(u'Dialog', u'Control_Default', u'Y', None, None, u'Control', 2, u'Identifier', None, u'Defines the default control. Hitting return is equivalent to pushing this button.'),
(u'Dialog', u'Control_Cancel', u'Y', None, None, u'Control', 2, u'Identifier', None, u'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.'),
(u'EventMapping', u'Dialog_', u'N', None, None, u'Dialog', 1, u'Identifier', None, u'A foreign key to the Dialog table, name of the Dialog.'),
(u'EventMapping', u'Control_', u'N', None, None, u'Control', 2, u'Identifier', None, u'A foreign key to the Control table, name of the control.'),
(u'EventMapping', u'Event', u'N', None, None, None, None, u'Identifier', None, u'An identifier that specifies the type of the event that the control subscribes to.'),
(u'EventMapping', u'Attribute', u'N', None, None, None, None, u'Identifier', None, u'The name of the control attribute, that is set when this event is received.'),
(u'InstallExecuteSequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'InstallExecuteSequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'InstallExecuteSequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'AppSearch', u'Property', u'N', None, None, None, None, u'Identifier', None, u'The property associated with a Signature'),
(u'AppSearch', u'Signature_', u'N', None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator', 1, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.'),
(u'BindImage', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'The index into the File table. This must be an executable file.'),
(u'BindImage', u'Path', u'Y', None, None, None, None, u'Paths', None, u'A list of ; delimited paths that represent the paths to be searched for the import DLLS. The list is usually a list of properties each enclosed within square brackets [] .'),
(u'CCPSearch', u'Signature_', u'N', None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator', 1, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.'),
(u'InstallUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'InstallUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'InstallUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'ListView', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.'),
(u'ListView', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listview.'),
(u'ListView', u'Value', u'N', None, None, None, None, u'Identifier', None, u'The value string associated with this item. Selecting the line will set the associated property to this value.'),
(u'ListView', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
(u'ListView', u'Binary_', u'Y', None, None, u'Binary', 1, u'Identifier', None, u'The name of the icon to be displayed with the item. The binary information is looked up from the Binary Table.'),
(u'RadioButton', u'X', u'N', 0, 32767, None, None, None, None, u'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.'),
(u'RadioButton', u'Y', u'N', 0, 32767, None, None, None, None, u'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.'),
(u'RadioButton', u'Width', u'N', 0, 32767, None, None, None, None, u'The width of the button.'),
(u'RadioButton', u'Height', u'N', 0, 32767, None, None, None, None, u'The height of the button.'),
(u'RadioButton', u'Text', u'Y', None, None, None, None, u'Text', None, u'The visible title to be assigned to the radio button.'),
(u'RadioButton', u'Property', u'N', None, None, None, None, u'Identifier', None, u'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.'),
(u'RadioButton', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value string associated with this button. Selecting the button will set the associated property to this value.'),
(u'RadioButton', u'Order', u'N', 1, 32767, None, None, None, None, u'A positive integer used to determine the ordering of the items within one list. The integers do not have to be consecutive.'),
(u'RadioButton', u'Help', u'Y', None, None, None, None, u'Text', None, u'The help strings used with the button. The text is optional.'),
(u'TextStyle', u'TextStyle', u'N', None, None, None, None, u'Identifier', None, u'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.'),
(u'TextStyle', u'FaceName', u'N', None, None, None, None, u'Text', None, u'A string indicating the name of the font used. Required. The string must be at most 31 characters long.'),
(u'TextStyle', u'Size', u'N', 0, 32767, None, None, None, None, u'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.'),
(u'TextStyle', u'Color', u'Y', 0, 16777215, None, None, None, None, u'A long integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).'),
(u'TextStyle', u'StyleBits', u'Y', 0, 15, None, None, None, None, u'A combination of style bits.'),
(u'UIText', u'Text', u'Y', None, None, None, None, u'Text', None, u'The localized version of the string.'),
(u'UIText', u'Key', u'N', None, None, None, None, u'Identifier', None, u'A unique key that identifies the particular string.'),
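# The next ten rows describe the '_Validation' table itself; they are the
# authoritative source for the tuple layout noted at the top of this list.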
(u'_Validation', u'Table', u'N', None, None, None, None, u'Identifier', None, u'Name of table'),
(u'_Validation', u'Description', u'Y', None, None, None, None, u'Text', None, u'Description of column'),
(u'_Validation', u'Column', u'N', None, None, None, None, u'Identifier', None, u'Name of column'),
(u'_Validation', u'Nullable', u'N', None, None, None, None, None, u'Y;N;@', u'Whether the column is nullable'),
(u'_Validation', u'MinValue', u'Y', -2147483647, 2147483647, None, None, None, None, u'Minimum value allowed'),
(u'_Validation', u'MaxValue', u'Y', -2147483647, 2147483647, None, None, None, None, u'Maximum value allowed'),
(u'_Validation', u'KeyTable', u'Y', None, None, None, None, u'Identifier', None, u'For foreign key, Name of table to which data must link'),
(u'_Validation', u'KeyColumn', u'Y', 1, 32, None, None, None, None, u'Column to which foreign key connects'),
(u'_Validation', u'Category', u'Y', None, None, None, None, None, u'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL', u'String category'),
(u'_Validation', u'Set', u'Y', None, None, None, None, u'Text', None, u'Set of values that are permitted'),
(u'AdvtUISequence', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Name of action to invoke, either in the engine or the handler DLL.'),
(u'AdvtUISequence', u'Sequence', u'Y', -4, 32767, None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.'),
(u'AdvtUISequence', u'Condition', u'Y', None, None, None, None, u'Condition', None, u'Optional expression which skips the action if it evaluates to expFalse. If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.'),
(u'AppId', u'AppId', u'N', None, None, None, None, u'Guid', None, None),
(u'AppId', u'ActivateAtStorage', u'Y', 0, 1, None, None, None, None, None),
(u'AppId', u'DllSurrogate', u'Y', None, None, None, None, u'Text', None, None),
(u'AppId', u'LocalService', u'Y', None, None, None, None, u'Text', None, None),
(u'AppId', u'RemoteServerName', u'Y', None, None, None, None, u'Formatted', None, None),
(u'AppId', u'RunAsInteractiveUser', u'Y', 0, 1, None, None, None, None, None),
(u'AppId', u'ServiceParameters', u'Y', None, None, None, None, u'Text', None, None),
(u'Feature', u'Attributes', u'N', None, None, None, None, None, u'0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54', u'Feature attributes'),
(u'Feature', u'Description', u'Y', None, None, None, None, u'Text', None, u'Longer descriptive text describing a visible feature item.'),
(u'Feature', u'Title', u'Y', None, None, None, None, u'Text', None, u'Short text identifying a visible feature item.'),
(u'Feature', u'Feature', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular feature record.'),
(u'Feature', u'Directory_', u'Y', None, None, u'Directory', 1, u'UpperCase', None, u'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.'),
(u'Feature', u'Level', u'N', 0, 32767, None, None, None, None, u'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.'),
(u'Feature', u'Display', u'Y', 0, 32767, None, None, None, None, u'Numeric sort order, used to force a specific display ordering.'),
(u'Feature', u'Feature_Parent', u'Y', None, None, u'Feature', 1, u'Identifier', None, u'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.'),
(u'File', u'Sequence', u'N', 1, 32767, None, None, None, None, u'Sequence with respect to the media images; order must track cabinet order.'),
(u'File', u'Attributes', u'Y', 0, 32767, None, None, None, None, u'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)'),
(u'File', u'File', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.'),
(u'File', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the file.'),
(u'File', u'FileName', u'N', None, None, None, None, u'Filename', None, u'File name used for installation, may be localized. This may contain a "short name|long name" pair.'),
(u'File', u'FileSize', u'N', 0, 2147483647, None, None, None, None, u'Size of file in bytes (long integer).'),
(u'File', u'Language', u'Y', None, None, None, None, u'Language', None, u'List of decimal language Ids, comma-separated if more than one.'),
(u'File', u'Version', u'Y', None, None, u'File', 1, u'Version', None, u'Version string for versioned files; Blank for unversioned files.'),
(u'Class', u'Attributes', u'Y', None, 32767, None, None, None, None, u'Class registration attributes.'),
(u'Class', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.'),
(u'Class', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description for the Class.'),
(u'Class', u'Argument', u'Y', None, None, None, None, u'Formatted', None, u'Optional argument for LocalServers.'),
(u'Class', u'AppId_', u'Y', None, None, u'AppId', 1, u'Guid', None, u'Optional AppID containing DCOM information for associated application (string GUID).'),
(u'Class', u'CLSID', u'N', None, None, None, None, u'Guid', None, u'The CLSID of an OLE factory.'),
(u'Class', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
(u'Class', u'Context', u'N', None, None, None, None, u'Identifier', None, u'The numeric server context for this server. CLSCTX_xxxx'),
(u'Class', u'DefInprocHandler', u'Y', None, None, None, None, u'Filename', u'1;2;3', u'Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"'),
(u'Class', u'FileTypeMask', u'Y', None, None, None, None, u'Text', None, u'Optional string containing information for the HKCR\\FileType\\(this CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...'),
(u'Class', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.'),
(u'Class', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'Optional icon index.'),
(u'Class', u'ProgId_Default', u'Y', None, None, u'ProgId', 1, u'Text', None, u'Optional ProgId associated with this CLSID.'),
(u'Component', u'Condition', u'Y', None, None, None, None, u'Condition', None, u"A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component."),
(u'Component', u'Attributes', u'N', None, None, None, None, None, None, u'Remote execution option, one of irsEnum'),
(u'Component', u'Component', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular component record.'),
(u'Component', u'ComponentId', u'Y', None, None, None, None, u'Guid', None, u'A string GUID unique to this component, version, and language.'),
(u'Component', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.'),
(u'Component', u'KeyPath', u'Y', None, None, u'File;Registry;ODBCDataSource', 1, u'Identifier', None, u'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.'),
(u'ProgId', u'Description', u'Y', None, None, None, None, u'Text', None, u'Localized description for the Program identifier.'),
(u'ProgId', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.'),
(u'ProgId', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'Optional icon index.'),
(u'ProgId', u'ProgId', u'N', None, None, None, None, u'Text', None, u'The Program Identifier. Primary key.'),
(u'ProgId', u'Class_', u'Y', None, None, u'Class', 1, u'Guid', None, u'The CLSID of an OLE factory corresponding to the ProgId.'),
(u'ProgId', u'ProgId_Parent', u'Y', None, None, u'ProgId', 1, u'Text', None, u'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.'),
(u'CompLocator', u'Type', u'Y', 0, 1, None, None, None, None, u'A boolean value that determines if the registry value is a filename or a directory location.'),
(u'CompLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
(u'CompLocator', u'ComponentId', u'N', None, None, None, None, u'Guid', None, u'A string GUID unique to this component, version, and language.'),
(u'Complus', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the ComPlus component.'),
(u'Complus', u'ExpType', u'Y', 0, 32767, None, None, None, None, u'ComPlus component attributes.'),
(u'Directory', u'Directory', u'N', None, None, None, None, u'Identifier', None, u'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.'),
(u'Directory', u'DefaultDir', u'N', None, None, None, None, u'DefaultDir', None, u"The default sub-path under parent's path."),
(u'Directory', u'Directory_Parent', u'Y', None, None, u'Directory', 1, u'Identifier', None, u'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.'),
(u'CreateFolder', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table.'),
(u'CreateFolder', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Primary key, could be foreign key into the Directory table.'),
(u'CustomAction', u'Type', u'N', 1, 16383, None, None, None, None, u'The numeric custom action type, consisting of source location, code type, entry, option flags.'),
(u'CustomAction', u'Action', u'N', None, None, None, None, u'Identifier', None, u'Primary key, name of action, normally appears in sequence table unless private use.'),
(u'CustomAction', u'Source', u'Y', None, None, None, None, u'CustomSource', None, u'The table reference of the source of the code.'),
(u'CustomAction', u'Target', u'Y', None, None, None, None, u'Formatted', None, u'Execution parameter, depends on the type of custom action'),
(u'DrLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
(u'DrLocator', u'Path', u'Y', None, None, None, None, u'AnyPath', None, u'The path on the user system. This is either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.'),
(u'DrLocator', u'Depth', u'Y', 0, 32767, None, None, None, None, u'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.'),
(u'DrLocator', u'Parent', u'Y', None, None, None, None, u'Identifier', None, u'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.'),
(u'DuplicateFile', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Foreign key referencing the source file to be duplicated.'),
(u'DuplicateFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the duplicate file.'),
(u'DuplicateFile', u'DestFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.'),
(u'DuplicateFile', u'DestName', u'Y', None, None, None, None, u'Filename', None, u'Filename to be given to the duplicate file.'),
(u'DuplicateFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular file entry'),
(u'Environment', u'Name', u'N', None, None, None, None, u'Text', None, u'The name of the environmental value.'),
(u'Environment', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value to set in the environmental settings.'),
(u'Environment', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the environmental value.'),
(u'Environment', u'Environment', u'N', None, None, None, None, u'Identifier', None, u'Unique identifier for the environmental variable setting'),
(u'Error', u'Error', u'N', 0, 32767, None, None, None, None, u'Integer error number, obtained from header file IError(...) macros.'),
(u'Error', u'Message', u'Y', None, None, None, None, u'Template', None, u'Error formatting template, obtained from user ed. or localizers.'),
(u'Extension', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.'),
(u'Extension', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
(u'Extension', u'Extension', u'N', None, None, None, None, u'Text', None, u'The extension associated with the table row.'),
(u'Extension', u'MIME_', u'Y', None, None, u'MIME', 1, u'Text', None, u'Optional Context identifier, typically "type/format" associated with the extension'),
(u'Extension', u'ProgId_', u'Y', None, None, u'ProgId', 1, u'Text', None, u'Optional ProgId associated with this extension.'),
(u'MIME', u'CLSID', u'Y', None, None, None, None, u'Guid', None, u'Optional associated CLSID.'),
(u'MIME', u'ContentType', u'N', None, None, None, None, u'Text', None, u'Primary key. Context identifier, typically "type/format".'),
(u'MIME', u'Extension_', u'N', None, None, u'Extension', 1, u'Text', None, u'Optional associated extension (without dot)'),
(u'FeatureComponents', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into Feature table.'),
(u'FeatureComponents', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
(u'FileSFPCatalog', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'File associated with the catalog'),
(u'FileSFPCatalog', u'SFPCatalog_', u'N', None, None, u'SFPCatalog', 1, u'Filename', None, u'Catalog associated with the file'),
(u'SFPCatalog', u'SFPCatalog', u'N', None, None, None, None, u'Filename', None, u'File name for the catalog.'),
(u'SFPCatalog', u'Catalog', u'N', None, None, None, None, u'Binary', None, u'SFP Catalog'),
(u'SFPCatalog', u'Dependency', u'Y', None, None, None, None, u'Formatted', None, u'Parent catalog - only used by SFP'),
(u'Font', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Primary key, foreign key into File table referencing font file.'),
(u'Font', u'FontTitle', u'Y', None, None, None, None, u'Text', None, u'Font name.'),
(u'IniFile', u'Action', u'N', None, None, None, None, None, u'0;1;3', u'The type of modification to be made, one of iifEnum'),
(u'IniFile', u'Value', u'N', None, None, None, None, u'Formatted', None, u'The value to be written.'),
(u'IniFile', u'Key', u'N', None, None, None, None, u'Formatted', None, u'The .INI file key below Section.'),
(u'IniFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the .INI value.'),
(u'IniFile', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name in which to write the information'),
(u'IniFile', u'IniFile', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'IniFile', u'DirProperty', u'Y', None, None, None, None, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the .INI file is.'),
(u'IniFile', u'Section', u'N', None, None, None, None, u'Formatted', None, u'The .INI file Section.'),
(u'IniLocator', u'Type', u'Y', 0, 2, None, None, None, None, u'An integer value that determines if the .INI value read is a filename or a directory location or to be used as is without interpretation.'),
(u'IniLocator', u'Key', u'N', None, None, None, None, u'Text', None, u'Key value (followed by an equals sign in INI file).'),
(u'IniLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.'),
(u'IniLocator', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name.'),
(u'IniLocator', u'Section', u'N', None, None, None, None, u'Text', None, u'Section name within in file (within square brackets in INI file).'),
(u'IniLocator', u'Field', u'Y', 0, 32767, None, None, None, None, u'The field in the .INI line. If Field is null or 0 the entire line is read.'),
(u'IsolatedComponent', u'Component_Application', u'N', None, None, u'Component', 1, u'Identifier', None, u'Key to Component table item for application'),
(u'IsolatedComponent', u'Component_Shared', u'N', None, None, u'Component', 1, u'Identifier', None, u'Key to Component table item to be isolated'),
(u'LaunchCondition', u'Condition', u'N', None, None, None, None, u'Condition', None, u'Expression which must evaluate to TRUE in order for install to commence.'),
(u'LaunchCondition', u'Description', u'N', None, None, None, None, u'Formatted', None, u'Localizable text to display when condition fails and install must abort.'),
(u'LockPermissions', u'Table', u'N', None, None, None, None, u'Identifier', u'Directory;File;Registry', u'Reference to another table name'),
(u'LockPermissions', u'Domain', u'Y', None, None, None, None, u'Formatted', None, u'Domain name for user whose permissions are being set. (usually a property)'),
(u'LockPermissions', u'LockObject', u'N', None, None, None, None, u'Identifier', None, u'Foreign key into Registry or File table'),
(u'LockPermissions', u'Permission', u'Y', -2147483647, 2147483647, None, None, None, None, u'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)'),
(u'LockPermissions', u'User', u'N', None, None, None, None, u'Formatted', None, u'User for permissions to be set. (usually a property)'),
(u'Media', u'Source', u'Y', None, None, None, None, u'Property', None, u'The property defining the location of the cabinet file.'),
(u'Media', u'Cabinet', u'Y', None, None, None, None, u'Cabinet', None, u'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.'),
(u'Media', u'DiskId', u'N', 1, 32767, None, None, None, None, u'Primary key, integer to determine sort order for table.'),
(u'Media', u'DiskPrompt', u'Y', None, None, None, None, u'Text', None, u'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.'),
(u'Media', u'LastSequence', u'N', 0, 32767, None, None, None, None, u'File sequence number for the last file for this media.'),
(u'Media', u'VolumeLabel', u'Y', None, None, None, None, u'Text', None, u'The label attributed to the volume.'),
(u'ModuleComponents', u'Component', u'N', None, None, u'Component', 1, u'Identifier', None, u'Component contained in the module.'),
(u'ModuleComponents', u'Language', u'N', None, None, u'ModuleSignature', 2, None, None, u'Default language ID for module (may be changed by transform).'),
(u'ModuleComponents', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'Module containing the component.'),
(u'ModuleSignature', u'Language', u'N', None, None, None, None, None, None, u'Default decimal language of module.'),
(u'ModuleSignature', u'Version', u'N', None, None, None, None, u'Version', None, u'Version of the module.'),
(u'ModuleSignature', u'ModuleID', u'N', None, None, None, None, u'Identifier', None, u'Module identifier (String.GUID).'),
(u'ModuleDependency', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'Module requiring the dependency.'),
(u'ModuleDependency', u'ModuleLanguage', u'N', None, None, u'ModuleSignature', 2, None, None, u'Language of module requiring the dependency.'),
(u'ModuleDependency', u'RequiredID', u'N', None, None, None, None, None, None, u'String.GUID of required module.'),
(u'ModuleDependency', u'RequiredLanguage', u'N', None, None, None, None, None, None, u'LanguageID of the required module.'),
(u'ModuleDependency', u'RequiredVersion', u'Y', None, None, None, None, u'Version', None, u'Version of the required module.'),
(u'ModuleExclusion', u'ModuleID', u'N', None, None, u'ModuleSignature', 1, u'Identifier', None, u'String.GUID of module with exclusion requirement.'),
(u'ModuleExclusion', u'ModuleLanguage', u'N', None, None, u'ModuleSignature', 2, None, None, u'LanguageID of module with exclusion requirement.'),
(u'ModuleExclusion', u'ExcludedID', u'N', None, None, None, None, None, None, u'String.GUID of excluded module.'),
(u'ModuleExclusion', u'ExcludedLanguage', u'N', None, None, None, None, None, None, u'Language of excluded module.'),
(u'ModuleExclusion', u'ExcludedMaxVersion', u'Y', None, None, None, None, u'Version', None, u'Maximum version of excluded module.'),
(u'ModuleExclusion', u'ExcludedMinVersion', u'Y', None, None, None, None, u'Version', None, u'Minimum version of excluded module.'),
(u'MoveFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry'),
(u'MoveFile', u'DestFolder', u'N', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory'),
(u'MoveFile', u'DestName', u'Y', None, None, None, None, u'Filename', None, u'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file'),
(u'MoveFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key that uniquely identifies a particular MoveFile record'),
(u'MoveFile', u'Options', u'N', 0, 1, None, None, None, None, u'Integer value specifying the MoveFile operating mode, one of imfoEnum'),
(u'MoveFile', u'SourceFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the source directory'),
(u'MoveFile', u'SourceName', u'Y', None, None, None, None, u'Text', None, u"Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards."),
(u'MsiAssembly', u'Attributes', u'Y', None, None, None, None, None, None, u'Assembly attributes'),
(u'MsiAssembly', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into Feature table.'),
(u'MsiAssembly', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
(u'MsiAssembly', u'File_Application', u'Y', None, None, u'File', 1, u'Identifier', None, u'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.'),
(u'MsiAssembly', u'File_Manifest', u'Y', None, None, u'File', 1, u'Identifier', None, u'Foreign key into the File table denoting the manifest file for the assembly.'),
(u'MsiAssemblyName', u'Name', u'N', None, None, None, None, u'Text', None, u'The name part of the name-value pairs for the assembly name.'),
(u'MsiAssemblyName', u'Value', u'N', None, None, None, None, u'Text', None, u'The value part of the name-value pairs for the assembly name.'),
(u'MsiAssemblyName', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into Component table.'),
(u'MsiDigitalCertificate', u'CertData', u'N', None, None, None, None, u'Binary', None, u'A certificate context blob for a signer certificate'),
(u'MsiDigitalCertificate', u'DigitalCertificate', u'N', None, None, None, None, u'Identifier', None, u'A unique identifier for the row'),
(u'MsiDigitalSignature', u'Table', u'N', None, None, None, None, None, u'Media', u'Reference to another table name (only Media table is supported)'),
(u'MsiDigitalSignature', u'DigitalCertificate_', u'N', None, None, u'MsiDigitalCertificate', 1, u'Identifier', None, u'Foreign key to MsiDigitalCertificate table identifying the signer certificate'),
(u'MsiDigitalSignature', u'Hash', u'Y', None, None, None, None, u'Binary', None, u'The encoded hash blob from the digital signature'),
(u'MsiDigitalSignature', u'SignObject', u'N', None, None, None, None, u'Text', None, u'Foreign key to Media table'),
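# MsiFileHash stores a 128-bit hash of an unversioned file, split across the
# four 32-bit HashPart columns that follow.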
(u'MsiFileHash', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Primary key, foreign key into File table referencing file with this hash'),
(u'MsiFileHash', u'Options', u'N', 0, 32767, None, None, None, None, u'Various options and attributes for this hash.'),
(u'MsiFileHash', u'HashPart1', u'N', None, None, None, None, None, None, u'Part 1 of the 128-bit file hash (long integer).'),
(u'MsiFileHash', u'HashPart2', u'N', None, None, None, None, None, None, u'Part 2 of the 128-bit file hash (long integer).'),
(u'MsiFileHash', u'HashPart3', u'N', None, None, None, None, None, None, u'Part 3 of the 128-bit file hash (long integer).'),
(u'MsiFileHash', u'HashPart4', u'N', None, None, None, None, None, None, u'Part 4 of the 128-bit file hash (long integer).'),
(u'MsiPatchHeaders', u'StreamRef', u'N', None, None, None, None, u'Identifier', None, u'Primary key. A unique identifier for the row.'),
(u'MsiPatchHeaders', u'Header', u'N', None, None, None, None, u'Binary', None, u'Binary stream. The patch header, used for patch validation.'),
(u'ODBCAttribute', u'Value', u'Y', None, None, None, None, u'Text', None, u'Value for ODBC driver attribute'),
(u'ODBCAttribute', u'Attribute', u'N', None, None, None, None, u'Text', None, u'Name of ODBC driver attribute'),
(u'ODBCAttribute', u'Driver_', u'N', None, None, u'ODBCDriver', 1, u'Identifier', None, u'Reference to ODBC driver in ODBCDriver table'),
(u'ODBCDriver', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for driver, non-localized'),
(u'ODBCDriver', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Reference to key driver file'),
(u'ODBCDriver', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
(u'ODBCDriver', u'Driver', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for driver'),
(u'ODBCDriver', u'File_Setup', u'Y', None, None, u'File', 1, u'Identifier', None, u'Optional reference to key driver setup DLL'),
(u'ODBCDataSource', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for data source'),
(u'ODBCDataSource', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
(u'ODBCDataSource', u'DataSource', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for data source'),
(u'ODBCDataSource', u'DriverDescription', u'N', None, None, None, None, u'Text', None, u'Reference to driver description, may be existing driver'),
(u'ODBCDataSource', u'Registration', u'N', 0, 1, None, None, None, None, u'Registration option: 0=machine, 1=user, others t.b.d.'),
(u'ODBCSourceAttribute', u'Value', u'Y', None, None, None, None, u'Text', None, u'Value for ODBC data source attribute'),
(u'ODBCSourceAttribute', u'Attribute', u'N', None, None, None, None, u'Text', None, u'Name of ODBC data source attribute'),
(u'ODBCSourceAttribute', u'DataSource_', u'N', None, None, u'ODBCDataSource', 1, u'Identifier', None, u'Reference to ODBC data source in ODBCDataSource table'),
(u'ODBCTranslator', u'Description', u'N', None, None, None, None, u'Text', None, u'Text used as registered name for translator'),
(u'ODBCTranslator', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Reference to key translator file'),
(u'ODBCTranslator', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reference to associated component'),
(u'ODBCTranslator', u'File_Setup', u'Y', None, None, u'File', 1, u'Identifier', None, u'Optional reference to key translator setup DLL'),
(u'ODBCTranslator', u'Translator', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized internal token for translator'),
(u'Patch', u'Sequence', u'N', 0, 32767, None, None, None, None, u'Primary key, sequence with respect to the media images; order must track cabinet order.'),
(u'Patch', u'Attributes', u'N', 0, 32767, None, None, None, None, u'Integer containing bit flags representing patch attributes'),
(u'Patch', u'File_', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.'),
(u'Patch', u'Header', u'Y', None, None, None, None, u'Binary', None, u'Binary stream. The patch header, used for patch validation.'),
(u'Patch', u'PatchSize', u'N', 0, 2147483647, None, None, None, None, u'Size of patch in bytes (long integer).'),
(u'Patch', u'StreamRef_', u'Y', None, None, None, None, u'Identifier', None, u'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.'),
(u'PatchPackage', u'Media_', u'N', 0, 32767, None, None, None, None, u'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.'),
(u'PatchPackage', u'PatchId', u'N', None, None, None, None, u'Guid', None, u'A unique string GUID representing this patch.'),
(u'PublishComponent', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Foreign key into the Feature table.'),
(u'PublishComponent', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table.'),
(u'PublishComponent', u'ComponentId', u'N', None, None, None, None, u'Guid', None, u'A string GUID that represents the component id that will be requested by the alien product.'),
(u'PublishComponent', u'AppData', u'Y', None, None, None, None, u'Text', None, u'This is localizable, application-specific data that can be associated with a Qualified Component.'),
(u'PublishComponent', u'Qualifier', u'N', None, None, None, None, u'Text', None, u'This is defined only when the ComponentId column is a Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.'),
(u'Registry', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
(u'Registry', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The registry value.'),
(u'Registry', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
(u'Registry', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the installing of the registry value.'),
(u'Registry', u'Registry', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'Registry', u'Root', u'N', -1, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.'),
(u'RegLocator', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
(u'RegLocator', u'Type', u'Y', 0, 18, None, None, None, None, u'An integer value that determines if the registry value is a filename or a directory location or to be used as is without interpretation.'),
(u'RegLocator', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
(u'RegLocator', u'Signature_', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry value refers to a directory, and Signature_ is not a foreign key.'),
(u'RegLocator', u'Root', u'N', 0, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.'),
(u'RemoveFile', u'InstallMode', u'N', None, None, None, None, None, u'1;2;3', u'Installation option, one of iimEnum.'),
(u'RemoveFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key referencing Component that controls the file to be removed.'),
(u'RemoveFile', u'FileKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key used to identify a particular file entry'),
(u'RemoveFile', u'FileName', u'Y', None, None, None, None, u'WildCardFilename', None, u'Name of the file to be removed.'),
(u'RemoveFile', u'DirProperty', u'N', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.'),
(u'RemoveIniFile', u'Action', u'N', None, None, None, None, None, u'2;4', u'The type of modification to be made, one of iifEnum.'),
(u'RemoveIniFile', u'Value', u'Y', None, None, None, None, u'Formatted', None, u'The value to be deleted. The value is required when Action is iifIniRemoveTag'),
(u'RemoveIniFile', u'Key', u'N', None, None, None, None, u'Formatted', None, u'The .INI file key below Section.'),
(u'RemoveIniFile', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the deletion of the .INI value.'),
(u'RemoveIniFile', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The .INI file name in which to delete the information'),
(u'RemoveIniFile', u'DirProperty', u'Y', None, None, None, None, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the .INI file is.'),
(u'RemoveIniFile', u'Section', u'N', None, None, None, None, u'Formatted', None, u'The .INI file Section.'),
(u'RemoveIniFile', u'RemoveIniFile', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'RemoveRegistry', u'Name', u'Y', None, None, None, None, u'Formatted', None, u'The registry value name.'),
(u'RemoveRegistry', u'Key', u'N', None, None, None, None, u'RegPath', None, u'The key for the registry value.'),
(u'RemoveRegistry', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table referencing component that controls the deletion of the registry value.'),
(u'RemoveRegistry', u'Root', u'N', -1, 3, None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum'),
(u'RemoveRegistry', u'RemoveRegistry', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'ReserveCost', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Reserve a specified amount of space if this component is to be installed.'),
(u'ReserveCost', u'ReserveFolder', u'Y', None, None, None, None, u'Identifier', None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory'),
(u'ReserveCost', u'ReserveKey', u'N', None, None, None, None, u'Identifier', None, u'Primary key that uniquely identifies a particular ReserveCost record'),
(u'ReserveCost', u'ReserveLocal', u'N', 0, 2147483647, None, None, None, None, u'Disk space to reserve if linked component is installed locally.'),
(u'ReserveCost', u'ReserveSource', u'N', 0, 2147483647, None, None, None, None, u'Disk space to reserve if linked component is installed to run from the source location.'),
(u'SelfReg', u'File_', u'N', None, None, u'File', 1, u'Identifier', None, u'Foreign key into the File table denoting the module that needs to be registered.'),
(u'SelfReg', u'Cost', u'Y', 0, 32767, None, None, None, None, u'The cost of registering the module.'),
(u'ServiceControl', u'Name', u'N', None, None, None, None, u'Formatted', None, u'Name of a service. /, \\, comma and space are invalid'),
(u'ServiceControl', u'Event', u'N', 0, 187, None, None, None, None, u'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete'),
(u'ServiceControl', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table that controls the startup of the service'),
(u'ServiceControl', u'ServiceControl', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'ServiceControl', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'Arguments for the service. Separate by [~].'),
(u'ServiceControl', u'Wait', u'Y', 0, 1, None, None, None, None, u'Boolean for whether to wait for the service to fully start'),
(u'ServiceInstall', u'Name', u'N', None, None, None, None, u'Formatted', None, u'Internal Name of the Service'),
(u'ServiceInstall', u'Description', u'Y', None, None, None, None, u'Text', None, u'Description of service.'),
(u'ServiceInstall', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table that controls the startup of the service'),
(u'ServiceInstall', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'Arguments to include in every start of the service, passed to WinMain'),
(u'ServiceInstall', u'ServiceInstall', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'ServiceInstall', u'Dependencies', u'Y', None, None, None, None, u'Formatted', None, u'Other services this depends on to start. Separate by [~], and end with [~][~]'),
(u'ServiceInstall', u'DisplayName', u'Y', None, None, None, None, u'Formatted', None, u'External Name of the Service'),
(u'ServiceInstall', u'ErrorControl', u'N', -2147483647, 2147483647, None, None, None, None, u'Severity of error if service fails to start'),
(u'ServiceInstall', u'LoadOrderGroup', u'Y', None, None, None, None, u'Formatted', None, u'LoadOrderGroup'),
(u'ServiceInstall', u'Password', u'Y', None, None, None, None, u'Formatted', None, u'Password to run service with (with StartName).'),
(u'ServiceInstall', u'ServiceType', u'N', -2147483647, 2147483647, None, None, None, None, u'Type of the service'),
(u'ServiceInstall', u'StartName', u'Y', None, None, None, None, u'Formatted', None, u'User or object name to run service as'),
(u'ServiceInstall', u'StartType', u'N', 0, 4, None, None, None, None, u'Start type of the service'),
(u'Shortcut', u'Name', u'N', None, None, None, None, u'Filename', None, u'The name of the shortcut to be created.'),
(u'Shortcut', u'Description', u'Y', None, None, None, None, u'Text', None, u'The description for the shortcut.'),
(u'Shortcut', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Foreign key into the Component table denoting the component whose selection gates the shortcut creation/deletion.'),
(u'Shortcut', u'Icon_', u'Y', None, None, u'Icon', 1, u'Identifier', None, u'Foreign key into the Icon table denoting the external icon file for the shortcut.'),
(u'Shortcut', u'IconIndex', u'Y', -32767, 32767, None, None, None, None, u'The icon index for the shortcut.'),
(u'Shortcut', u'Directory_', u'N', None, None, u'Directory', 1, u'Identifier', None, u'Foreign key into the Directory table denoting the directory where the shortcut file is created.'),
(u'Shortcut', u'Target', u'N', None, None, None, None, u'Shortcut', None, u'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.'),
(u'Shortcut', u'Arguments', u'Y', None, None, None, None, u'Formatted', None, u'The command-line arguments for the shortcut.'),
(u'Shortcut', u'Shortcut', u'N', None, None, None, None, u'Identifier', None, u'Primary key, non-localized token.'),
(u'Shortcut', u'Hotkey', u'Y', 0, 32767, None, None, None, None, u'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. '),
(u'Shortcut', u'ShowCmd', u'Y', None, None, None, None, None, u'1;3;7', u'The show command for the application window. The following values may be used.'),
(u'Shortcut', u'WkDir', u'Y', None, None, None, None, u'Identifier', None, u'Name of property defining location of working directory.'),
(u'Signature', u'FileName', u'N', None, None, None, None, u'Filename', None, u'The name of the file. This may contain a "short name|long name" pair.'),
(u'Signature', u'Signature', u'N', None, None, None, None, u'Identifier', None, u'The table key. The Signature represents a unique file signature.'),
(u'Signature', u'Languages', u'Y', None, None, None, None, u'Language', None, u'The languages supported by the file.'),
(u'Signature', u'MaxDate', u'Y', 0, 2147483647, None, None, None, None, u'The maximum creation date of the file.'),
(u'Signature', u'MaxSize', u'Y', 0, 2147483647, None, None, None, None, u'The maximum size of the file. '),
(u'Signature', u'MaxVersion', u'Y', None, None, None, None, u'Text', None, u'The maximum version of the file.'),
(u'Signature', u'MinDate', u'Y', 0, 2147483647, None, None, None, None, u'The minimum creation date of the file.'),
(u'Signature', u'MinSize', u'Y', 0, 2147483647, None, None, None, None, u'The minimum size of the file.'),
(u'Signature', u'MinVersion', u'Y', None, None, None, None, u'Text', None, u'The minimum version of the file.'),
(u'TypeLib', u'Feature_', u'N', None, None, u'Feature', 1, u'Identifier', None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.'),
(u'TypeLib', u'Description', u'Y', None, None, None, None, u'Text', None, None),
(u'TypeLib', u'Component_', u'N', None, None, u'Component', 1, u'Identifier', None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.'),
(u'TypeLib', u'Directory_', u'Y', None, None, u'Directory', 1, u'Identifier', None, u'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.'),
(u'TypeLib', u'Language', u'N', 0, 32767, None, None, None, None, u'The language of the library.'),
(u'TypeLib', u'Version', u'Y', 0, 16777215, None, None, None, None, u'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. '),
(u'TypeLib', u'Cost', u'Y', 0, 2147483647, None, None, None, None, u'The cost associated with the registration of the typelib. This column is currently optional.'),
(u'TypeLib', u'LibID', u'N', None, None, None, None, u'Guid', None, u'The GUID that represents the library.'),
(u'Upgrade', u'Attributes', u'N', 0, 2147483647, None, None, None, None, u'The attributes of this product set.'),
(u'Upgrade', u'Remove', u'Y', None, None, None, None, u'Formatted', None, u'The list of features to remove when uninstalling a product from this set. The default is "ALL".'),
(u'Upgrade', u'Language', u'Y', None, None, None, None, u'Language', None, u'A comma-separated list of languages for either products in this set or products not in this set.'),
(u'Upgrade', u'ActionProperty', u'N', None, None, None, None, u'UpperCase', None, u'The property to set when a product in this set is found.'),
(u'Upgrade', u'UpgradeCode', u'N', None, None, None, None, u'Guid', None, u'The UpgradeCode GUID belonging to the products in this set.'),
(u'Upgrade', u'VersionMax', u'Y', None, None, None, None, u'Text', None, u'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.'),
(u'Upgrade', u'VersionMin', u'Y', None, None, None, None, u'Text', None, u'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.'),
(u'Verb', u'Sequence', u'Y', 0, 32767, None, None, None, None, u'Order within the verbs for a particular extension. Also used simply to specify the default verb.'),
(u'Verb', u'Argument', u'Y', None, None, None, None, u'Formatted', None, u'Optional value for the command arguments.'),
(u'Verb', u'Extension_', u'N', None, None, u'Extension', 1, u'Text', None, u'The extension associated with the table row.'),
(u'Verb', u'Verb', u'N', None, None, None, None, u'Text', None, u'The verb for the command.'),
(u'Verb', u'Command', u'Y', None, None, None, None, u'Formatted', None, u'The command text.'),
]
Error = [
(0, u'{{Fatal error: }}'),
(1, u'{{Error [1]. }}'),
(2, u'Warning [1]. '),
(3, None),
(4, u'Info [1]. '),
(5, u'The installer has encountered an unexpected error installing this package. This may indicate a problem with this package. The error code is [1]. {{The arguments are: [2], [3], [4]}}'),
(6, None),
(7, u'{{Disk full: }}'),
(8, u'Action [Time]: [1]. [2]'),
(9, u'[ProductName]'),
(10, u'{[2]}{, [3]}{, [4]}'),
(11, u'Message type: [1], Argument: [2]'),
(12, u'=== Logging started: [Date] [Time] ==='),
(13, u'=== Logging stopped: [Date] [Time] ==='),
(14, u'Action start [Time]: [1].'),
(15, u'Action ended [Time]: [1]. Return value [2].'),
(16, u'Time remaining: {[1] minutes }{[2] seconds}'),
(17, u'Out of memory. Shut down other applications before retrying.'),
(18, u'Installer is no longer responding.'),
(19, u'Installer stopped prematurely.'),
(20, u'Please wait while Windows configures [ProductName]'),
(21, u'Gathering required information...'),
(22, u'Removing older versions of this application...'),
(23, u'Preparing to remove older versions of this application...'),
(32, u'{[ProductName] }Setup completed successfully.'),
(33, u'{[ProductName] }Setup failed.'),
(1101, u'Error reading from file: [2]. {{ System error [3].}} Verify that the file exists and that you can access it.'),
(1301, u"Cannot create the file '[2]'. A directory with this name already exists. Cancel the install and try installing to a different location."),
(1302, u'Please insert the disk: [2]'),
(1303, u'The installer has insufficient privileges to access this directory: [2]. The installation cannot continue. Log on as administrator or contact your system administrator.'),
(1304, u'Error writing to file: [2]. Verify that you have access to that directory.'),
(1305, u'Error reading from file [2]. {{ System error [3].}} Verify that the file exists and that you can access it.'),
(1306, u"Another application has exclusive access to the file '[2]'. Please shut down all other applications, then click Retry."),
(1307, u'There is not enough disk space to install this file: [2]. Free some disk space and click Retry, or click Cancel to exit.'),
(1308, u'Source file not found: [2]. Verify that the file exists and that you can access it.'),
(1309, u'Error reading from file: [3]. {{ System error [2].}} Verify that the file exists and that you can access it.'),
(1310, u'Error writing to file: [3]. {{ System error [2].}} Verify that you have access to that directory.'),
(1311, u'Source file not found{{(cabinet)}}: [2]. Verify that the file exists and that you can access it.'),
(1312, u"Cannot create the directory '[2]'. A file with this name already exists. Please rename or remove the file and click retry, or click Cancel to exit."),
(1313, u'The volume [2] is currently unavailable. Please select another.'),
(1314, u"The specified path '[2]' is unavailable."),
(1315, u'Unable to write to the specified folder: [2].'),
(1316, u'A network error occurred while attempting to read from the file: [2]'),
(1317, u'An error occurred while attempting to create the directory: [2]'),
(1318, u'A network error occurred while attempting to create the directory: [2]'),
(1319, u'A network error occurred while attempting to open the source file cabinet: [2]'),
(1320, u'The specified path is too long: [2]'),
(1321, u'The Installer has insufficient privileges to modify this file: [2].'),
(1322, u"A portion of the folder path '[2]' is invalid. It is either empty or exceeds the length allowed by the system."),
(1323, u"The folder path '[2]' contains words that are not valid in folder paths."),
(1324, u"The folder path '[2]' contains an invalid character."),
(1325, u"'[2]' is not a valid short file name."),
(1326, u'Error getting file security: [3] GetLastError: [2]'),
(1327, u'Invalid Drive: [2]'),
(1328, u'Error applying patch to file [2]. It has probably been updated by other means, and can no longer be modified by this patch. For more information contact your patch vendor. {{System Error: [3]}}'),
(1329, u'A file that is required cannot be installed because the cabinet file [2] is not digitally signed. This may indicate that the cabinet file is corrupt.'),
(1330, u'A file that is required cannot be installed because the cabinet file [2] has an invalid digital signature. This may indicate that the cabinet file is corrupt.{{ Error [3] was returned by WinVerifyTrust.}}'),
(1331, u'Failed to correctly copy [2] file: CRC error.'),
(1332, u'Failed to correctly move [2] file: CRC error.'),
(1333, u'Failed to correctly patch [2] file: CRC error.'),
(1334, u"The file '[2]' cannot be installed because the file cannot be found in cabinet file '[3]'. This could indicate a network error, an error reading from the CD-ROM, or a problem with this package."),
(1335, u"The cabinet file '[2]' required for this installation is corrupt and cannot be used. This could indicate a network error, an error reading from the CD-ROM, or a problem with this package."),
(1336, u'There was an error creating a temporary file that is needed to complete this installation.{{ Folder: [3]. System error code: [2]}}'),
(1401, u'Could not create key: [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1402, u'Could not open key: [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1403, u'Could not delete value [2] from key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1404, u'Could not delete key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1405, u'Could not read value [2] from key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel. '),
(1406, u'Could not write value [2] to key [3]. {{ System error [4].}} Verify that you have sufficient access to that key, or contact your support personnel.'),
(1407, u'Could not get value names for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'),
(1408, u'Could not get sub key names for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'),
(1409, u'Could not read security information for key [2]. {{ System error [3].}} Verify that you have sufficient access to that key, or contact your support personnel.'),
(1410, u'Could not increase the available registry space. [2] KB of free registry space is required for the installation of this application.'),
(1500, u'Another installation is in progress. You must complete that installation before continuing this one.'),
(1501, u'Error accessing secured data. Please make sure the Windows Installer is configured properly and try the install again.'),
(1502, u"User '[2]' has previously initiated an install for product '[3]'. That user will need to run that install again before they can use that product. Your current install will now continue."),
(1503, u"User '[2]' has previously initiated an install for product '[3]'. That user will need to run that install again before they can use that product."),
(1601, u"Out of disk space -- Volume: '[2]'; required space: [3] KB; available space: [4] KB. Free some disk space and retry."),
(1602, u'Are you sure you want to cancel?'),
(1603, u"The file [2][3] is being held in use{ by the following process: Name: [4], Id: [5], Window Title: '[6]'}. Close that application and retry."),
(1604, u"The product '[2]' is already installed, preventing the installation of this product. The two products are incompatible."),
(1605, u"There is not enough disk space on the volume '[2]' to continue the install with recovery enabled. [3] KB are required, but only [4] KB are available. Click Ignore to continue the install without saving recovery information, click Retry to check for available space again, or click Cancel to quit the installation."),
(1606, u'Could not access network location [2].'),
(1607, u'The following applications should be closed before continuing the install:'),
(1608, u'Could not find any previously installed compliant products on the machine for installing this product.'),
(1609, u"An error occurred while applying security settings. [2] is not a valid user or group. This could be a problem with the package, or a problem connecting to a domain controller on the network. Check your network connection and click Retry, or Cancel to end the install. {{Unable to locate the user's SID, system error [3]}}"),
(1701, u'The key [2] is not valid. Verify that you entered the correct key.'),
(1702, u'The installer must restart your system before configuration of [2] can continue. Click Yes to restart now or No if you plan to manually restart later.'),
(1703, u'You must restart your system for the configuration changes made to [2] to take effect. Click Yes to restart now or No if you plan to manually restart later.'),
(1704, u'An installation for [2] is currently suspended. You must undo the changes made by that installation to continue. Do you want to undo those changes?'),
(1705, u'A previous installation for this product is in progress. You must undo the changes made by that installation to continue. Do you want to undo those changes?'),
(1706, u"An installation package for the product [2] cannot be found. Try the installation again using a valid copy of the installation package '[3]'."),
(1707, u'Installation completed successfully.'),
(1708, u'Installation failed.'),
(1709, u'Product: [2] -- [3]'),
(1710, u'You may either restore your computer to its previous state or continue the install later. Would you like to restore?'),
(1711, u'An error occurred while writing installation information to disk. Check to make sure enough disk space is available, and click Retry, or Cancel to end the install.'),
(1712, u'One or more of the files required to restore your computer to its previous state could not be found. Restoration will not be possible.'),
(1713, u'[2] cannot install one of its required products. Contact your technical support group. {{System Error: [3].}}'),
(1714, u'The older version of [2] cannot be removed. Contact your technical support group. {{System Error [3].}}'),
(1715, u'Installed [2]'),
(1716, u'Configured [2]'),
(1717, u'Removed [2]'),
(1718, u'File [2] was rejected by digital signature policy.'),
(1719, u'The Windows Installer Service could not be accessed. This can occur if you are running Windows in safe mode, or if the Windows Installer is not correctly installed. Contact your support personnel for assistance.'),
(1720, u'There is a problem with this Windows Installer package. A script required for this install to complete could not be run. Contact your support personnel or package vendor. {{Custom action [2] script error [3], [4]: [5] Line [6], Column [7], [8] }}'),
(1721, u'There is a problem with this Windows Installer package. A program required for this install to complete could not be run. Contact your support personnel or package vendor. {{Action: [2], location: [3], command: [4] }}'),
(1722, u'There is a problem with this Windows Installer package. A program run as part of the setup did not finish as expected. Contact your support personnel or package vendor. {{Action [2], location: [3], command: [4] }}'),
(1723, u'There is a problem with this Windows Installer package. A DLL required for this install to complete could not be run. Contact your support personnel or package vendor. {{Action [2], entry: [3], library: [4] }}'),
(1724, u'Removal completed successfully.'),
(1725, u'Removal failed.'),
(1726, u'Advertisement completed successfully.'),
(1727, u'Advertisement failed.'),
(1728, u'Configuration completed successfully.'),
(1729, u'Configuration failed.'),
(1730, u'You must be an Administrator to remove this application. To remove this application, you can log on as an Administrator, or contact your technical support group for assistance.'),
(1801, u'The path [2] is not valid. Please specify a valid path.'),
(1802, u'Out of memory. Shut down other applications before retrying.'),
(1803, u'There is no disk in drive [2]. Please insert one and click Retry, or click Cancel to go back to the previously selected volume.'),
(1804, u'There is no disk in drive [2]. Please insert one and click Retry, or click Cancel to return to the browse dialog and select a different volume.'),
(1805, u'The folder [2] does not exist. Please enter a path to an existing folder.'),
(1806, u'You have insufficient privileges to read this folder.'),
(1807, u'A valid destination folder for the install could not be determined.'),
(1901, u'Error attempting to read from the source install database: [2].'),
(1902, u'Scheduling reboot operation: Renaming file [2] to [3]. Must reboot to complete operation.'),
(1903, u'Scheduling reboot operation: Deleting file [2]. Must reboot to complete operation.'),
(1904, u'Module [2] failed to register. HRESULT [3]. Contact your support personnel.'),
(1905, u'Module [2] failed to unregister. HRESULT [3]. Contact your support personnel.'),
(1906, u'Failed to cache package [2]. Error: [3]. Contact your support personnel.'),
(1907, u'Could not register font [2]. Verify that you have sufficient permissions to install fonts, and that the system supports this font.'),
(1908, u'Could not unregister font [2]. Verify that you have sufficient permissions to remove fonts.'),
(1909, u'Could not create Shortcut [2]. Verify that the destination folder exists and that you can access it.'),
(1910, u'Could not remove Shortcut [2]. Verify that the shortcut file exists and that you can access it.'),
(1911, u'Could not register type library for file [2]. Contact your support personnel.'),
(1912, u'Could not unregister type library for file [2]. Contact your support personnel.'),
(1913, u'Could not update the ini file [2][3]. Verify that the file exists and that you can access it.'),
(1914, u'Could not schedule file [2] to replace file [3] on reboot. Verify that you have write permissions to file [3].'),
(1915, u'Error removing ODBC driver manager, ODBC error [2]: [3]. Contact your support personnel.'),
(1916, u'Error installing ODBC driver manager, ODBC error [2]: [3]. Contact your support personnel.'),
(1917, u'Error removing ODBC driver: [4], ODBC error [2]: [3]. Verify that you have sufficient privileges to remove ODBC drivers.'),
(1918, u'Error installing ODBC driver: [4], ODBC error [2]: [3]. Verify that the file [4] exists and that you can access it.'),
(1919, u'Error configuring ODBC data source: [4], ODBC error [2]: [3]. Verify that the file [4] exists and that you can access it.'),
(1920, u"Service '[2]' ([3]) failed to start. Verify that you have sufficient privileges to start system services."),
(1921, u"Service '[2]' ([3]) could not be stopped. Verify that you have sufficient privileges to stop system services."),
(1922, u"Service '[2]' ([3]) could not be deleted. Verify that you have sufficient privileges to remove system services."),
(1923, u"Service '[2]' ([3]) could not be installed. Verify that you have sufficient privileges to install system services."),
(1924, u"Could not update environment variable '[2]'. Verify that you have sufficient privileges to modify environment variables."),
(1925, u'You do not have sufficient privileges to complete this installation for all users of the machine. Log on as administrator and then retry this installation.'),
(1926, u"Could not set file security for file '[3]'. Error: [2]. Verify that you have sufficient privileges to modify the security permissions for this file."),
(1927, u'Component Services (COM+ 1.0) are not installed on this computer. This installation requires Component Services in order to complete successfully. Component Services are available on Windows 2000.'),
(1928, u'Error registering COM+ Application. Contact your support personnel for more information.'),
(1929, u'Error unregistering COM+ Application. Contact your support personnel for more information.'),
(1930, u"The description for service '[2]' ([3]) could not be changed."),
(1931, u'The Windows Installer service cannot update the system file [2] because the file is protected by Windows. You may need to update your operating system for this program to work correctly. {{Package version: [3], OS Protected version: [4]}}'),
(1932, u'The Windows Installer service cannot update the protected Windows file [2]. {{Package version: [3], OS Protected version: [4], SFP Error: [5]}}'),
(1933, u'The Windows Installer service cannot update one or more protected Windows files. {{SFP Error: [2]. List of protected files:\\r\\n[3]}}'),
(1934, u'User installations are disabled via policy on the machine.'),
(1935, u'An error occurred during the installation of assembly component [2]. HRESULT: [3]. {{assembly interface: [4], function: [5], assembly name: [6]}}'),
]
tables=['AdminExecuteSequence', 'AdminUISequence', 'AdvtExecuteSequence', 'BBControl', 'Billboard', 'Binary', 'CheckBox', 'Property', 'ComboBox', 'Control', 'ListBox', 'ActionText', 'ControlCondition', 'ControlEvent', 'Dialog', 'EventMapping', 'InstallExecuteSequence', 'InstallUISequence', 'ListView', 'RadioButton', 'TextStyle', 'UIText', '_Validation', 'Error']
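# Usage sketch (an assumption: this module follows the stdlib msilib
# convention of one module-level record list per table plus a `tables`
# index list). msilib.add_tables() looks each name in `tables` up as an
# attribute of the module and bulk-loads its records.
if __name__ == "__main__":
    import sys
    import msilib
    import msilib.schema
    # Create an empty database from the standard schema, then load the
    # record lists defined above into it.
    db = msilib.init_database("demo.msi", msilib.schema,
                              "Demo", msilib.gen_uuid())
    msilib.add_tables(db, sys.modules[__name__])
    db.Commit()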
|
onceuponatimeforever/oh-mainline
|
refs/heads/master
|
vendor/packages/twisted/doc/web/howto/listings/render_1.py
|
19
|
from twisted.web.template import flattenString
from element_1 import ExampleElement
def renderDone(output):
print output
flattenString(None, ExampleElement()).addCallback(renderDone)
|
shineyear/catawampus
|
refs/heads/master
|
tr/cwmpdate_test.py
|
6
|
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# unittest requires method names starting in 'test'
#pylint: disable-msg=C6409
"""Unit tests for cwmpdate.py."""
__author__ = 'dgentry@google.com (Denton Gentry)'
import calendar
import datetime
import unittest
import google3
import cwmpdate
class UTC(datetime.tzinfo):
def utcoffset(self, dt):
return datetime.timedelta(0)
def tzname(self, dt):
return 'UTC'
def dst(self, dt):
return datetime.timedelta(0)
class OTH(datetime.tzinfo):
def utcoffset(self, dt):
return datetime.timedelta(0, 3600)
def tzname(self, dt):
return 'OTH'
def dst(self, dt):
return datetime.timedelta(0, 3600)
class CwmpDateTest(unittest.TestCase):
"""Tests for date formatting."""
def testDatetimeNone(self):
self.assertEqual('0001-01-01T00:00:00Z', cwmpdate.format(None))
def testDatetimeNaive(self):
dt = datetime.datetime(1999, 12, 31, 23, 59, 58, 999999)
self.assertEqual('1999-12-31T23:59:58.999999Z', cwmpdate.format(dt))
dt2 = datetime.datetime(1999, 12, 31, 23, 59, 58)
self.assertEqual('1999-12-31T23:59:58Z', cwmpdate.format(dt2))
def testDatetimeUTC(self):
dt = datetime.datetime(1999, 12, 31, 23, 59, 58, 999999, tzinfo=UTC())
self.assertEqual('1999-12-31T23:59:58.999999Z', cwmpdate.format(dt))
dt2 = datetime.datetime(1999, 12, 31, 23, 59, 58, tzinfo=UTC())
self.assertEqual('1999-12-31T23:59:58Z', cwmpdate.format(dt2))
def testDatetimeOTH(self):
dt = datetime.datetime(1999, 12, 31, 23, 59, 58, 999999, tzinfo=OTH())
self.assertEqual('1999-12-31T23:59:58.999999+01:00',
cwmpdate.format(dt))
def testTimedelta(self):
t = 1234567890.987654
self.assertEqual('2009-02-13T23:31:30.987654Z', cwmpdate.format(t))
def testParse(self):
dt = cwmpdate.parse('2012-01-12T00:20:03.217691Z')
timestamp = calendar.timegm(dt.timetuple())
self.assertEqual(timestamp, 1326327603.0)
def testValid(self):
self.assertTrue(cwmpdate.valid('2009-02-13T23:31:30.987654Z'))
self.assertTrue(cwmpdate.valid('2009-02-13T23:31:30Z'))
self.assertFalse(cwmpdate.valid('2009-02-13T23:31:30'))
self.assertFalse(cwmpdate.valid('booga'))
if __name__ == '__main__':
unittest.main()
|
tlakshman26/cinder-new-branch
|
refs/heads/master
|
cinder/tests/unit/objects/test_volume_type.py
|
9
|
# Copyright 2015 SimpliVity Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from cinder import context
from cinder import objects
from cinder.tests.unit import fake_volume
from cinder.tests.unit import objects as test_objects
class TestVolumeType(test_objects.BaseObjectsTestCase):
def setUp(self):
super(TestVolumeType, self).setUp()
# NOTE (e0ne): base tests contains original RequestContext from
# oslo_context. We change it to our RequestContext implementation
# to have 'elevated' method
self.context = context.RequestContext(self.user_id, self.project_id,
is_admin=False)
@staticmethod
def _compare(test, db, obj):
for field, value in db.items():
test.assertEqual(db[field], obj[field])
@mock.patch('cinder.db.volume_type_get')
def test_get_by_id(self, volume_type_get):
db_volume_type = fake_volume.fake_db_volume_type()
volume_type_get.return_value = db_volume_type
volume_type = objects.VolumeType.get_by_id(self.context, '1')
self._compare(self, db_volume_type, volume_type)
@mock.patch('cinder.volume.volume_types.create')
def test_create(self, volume_type_create):
db_volume_type = fake_volume.fake_db_volume_type()
volume_type_create.return_value = db_volume_type
volume_type = objects.VolumeType(context=self.context)
volume_type.name = db_volume_type['name']
volume_type.extra_specs = db_volume_type['extra_specs']
volume_type.is_public = db_volume_type['is_public']
volume_type.projects = db_volume_type['projects']
volume_type.description = db_volume_type['description']
volume_type.create()
volume_type_create.assert_called_once_with(
self.context, db_volume_type['name'],
db_volume_type['extra_specs'], db_volume_type['is_public'],
db_volume_type['projects'], db_volume_type['description'])
@mock.patch('cinder.volume.volume_types.update')
def test_save(self, volume_type_update):
db_volume_type = fake_volume.fake_db_volume_type()
volume_type = objects.VolumeType._from_db_object(self.context,
objects.VolumeType(),
db_volume_type)
volume_type.description = 'foobar'
volume_type.save()
volume_type_update.assert_called_once_with(self.context,
volume_type.id,
volume_type.name,
volume_type.description)
@mock.patch('cinder.volume.volume_types.destroy')
def test_destroy(self, volume_type_destroy):
db_volume_type = fake_volume.fake_db_volume_type()
volume_type = objects.VolumeType._from_db_object(self.context,
objects.VolumeType(),
db_volume_type)
volume_type.destroy()
self.assertTrue(volume_type_destroy.called)
admin_context = volume_type_destroy.call_args[0][0]
self.assertTrue(admin_context.is_admin)
class TestVolumeTypeList(test_objects.BaseObjectsTestCase):
@mock.patch('cinder.volume.volume_types.get_all_types')
def test_get_all(self, get_all_types):
db_volume_type = fake_volume.fake_db_volume_type()
get_all_types.return_value = [db_volume_type]
volume_types = objects.VolumeTypeList.get_all(self.context)
self.assertEqual(1, len(volume_types))
TestVolumeType._compare(self, db_volume_type, volume_types[0])
|
AkA84/edx-platform
|
refs/heads/master
|
cms/djangoapps/contentstore/views/xblock.py
|
70
|
"""
Views dedicated to rendering xblocks.
"""
from __future__ import absolute_import
import logging
import mimetypes
from xblock.core import XBlock
from django.conf import settings
from django.http import Http404, HttpResponse
log = logging.getLogger(__name__)
def xblock_resource(request, block_type, uri): # pylint: disable=unused-argument
"""
Return a package resource for the specified XBlock.
"""
try:
xblock_class = XBlock.load_class(block_type, select=settings.XBLOCK_SELECT_FUNCTION)
content = xblock_class.open_local_resource(uri)
except IOError:
log.info('Failed to load xblock resource', exc_info=True)
raise Http404
except Exception: # pylint: disable=broad-except
log.error('Failed to load xblock resource', exc_info=True)
raise Http404
mimetype, _ = mimetypes.guess_type(uri)
return HttpResponse(content, mimetype=mimetype)
|
Milli4272/MoneyCoin
|
refs/heads/master
|
share/qt/clean_mac_info_plist.py
|
229
|
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Litecoin-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date
bitcoinDir = "./";
inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "Litecoin-Qt.app/Contents/Info.plist"
version = "unknown";
fileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro"
for line in open(fileForGrabbingVersion):
lineArr = line.replace(" ", "").split("=");
if lineArr[0].startswith("VERSION"):
version = lineArr[1].replace("\n", "");
fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=date.today().year)
fOut = open(outFile, "w");
fOut.write(newFileContent);
print "Info.plist fresh created"
|
jbenden/ansible
|
refs/heads/devel
|
lib/ansible/plugins/terminal/__init__.py
|
71
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
from abc import ABCMeta, abstractmethod
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils.six import with_metaclass
class TerminalBase(with_metaclass(ABCMeta, object)):
'''
A base class for implementing cli connections
.. note:: Unlike most of Ansible, nearly all strings in
:class:`TerminalBase` plugins are byte strings. This is because of
how close to the underlying platform these plugins operate. Remember
to mark literal strings as byte string (``b"string"``) and to use
:func:`~ansible.module_utils._text.to_bytes` and
:func:`~ansible.module_utils._text.to_text` to avoid unexpected
problems.
'''
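    # A sketch of the byte-string convention described above; to_bytes and
    # to_text are the helpers named in the docstring, while `stdout` and
    # `display` are hypothetical:
    #
    #   from ansible.module_utils._text import to_bytes, to_text
    #   if to_bytes('#') in stdout:      # compare as bytes
    #       display(to_text(stdout))     # convert to text only at the edge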
#: compiled bytes regular expressions as stdout
terminal_stdout_re = []
#: compiled bytes regular expressions as stderr
terminal_stderr_re = []
#: compiled bytes regular expressions to remove ANSI codes
ansi_re = [
re.compile(br'(\x1b\[\?1h\x1b=)'),
re.compile(br'\x08.')
]
def __init__(self, connection):
self._connection = connection
def _exec_cli_command(self, cmd, check_rc=True):
"""
Executes a CLI command on the device
:arg cmd: Byte string consisting of the command to execute
:kwarg check_rc: If True, the default, raise an
:exc:`AnsibleConnectionFailure` if the return code from the
command is nonzero
:returns: A tuple of return code, stdout, and stderr from running the
command. stdout and stderr are both byte strings.
"""
rc, out, err = self._connection.exec_command(cmd)
if check_rc and rc != 0:
raise AnsibleConnectionFailure(err)
return rc, out, err
def _get_prompt(self):
"""
Returns the current prompt from the device
:returns: A byte string of the prompt
"""
for cmd in (b'\n', b'prompt()'):
rc, out, err = self._exec_cli_command(cmd)
return out
def on_open_shell(self):
"""Called after the SSH session is established
This method is called right after the invoke_shell() is called from
        the Paramiko SSHClient instance. It provides an opportunity to set up
        terminal parameters such as disabling paging, for instance.
"""
pass
def on_close_shell(self):
"""Called before the connection is closed
This method gets called once the connection close has been requested
but before the connection is actually closed. It provides an
opportunity to clean up any terminal resources before the shell is
actually closed
"""
pass
def on_authorize(self, passwd=None):
"""Called when privilege escalation is requested
:kwarg passwd: String containing the password
This method is called when the privilege is requested to be elevated
in the play context by setting become to True. It is the responsibility
of the terminal plugin to actually do the privilege escalation such
as entering `enable` mode for instance
"""
pass
def on_deauthorize(self):
"""Called when privilege deescalation is requested
This method is called when the privilege changed from escalated
(become=True) to non escalated (become=False). It is the responsibility
of this method to actually perform the deauthorization procedure
"""
pass
|
quanvm009/codev7
|
refs/heads/master
|
openerp/addons/purchase/company.py
|
51
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv,fields
class company(osv.osv):
_inherit = 'res.company'
_columns = {
'po_lead': fields.float(
'Purchase Lead Time', required=True,
help="Margin of error for supplier lead times. When the system"\
"generates Purchase Orders for procuring products,"\
"they will be scheduled that many days earlier "\
"to cope with unexpected supplier delays."),
}
_defaults = {
'po_lead': lambda *a: 1.0,
}
company()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
arvinsingla/CouchPotatoServer
|
refs/heads/master
|
libs/suds/xsd/doctor.py
|
205
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{doctor} module provides classes for fixing broken (sick)
schema(s).
"""
from logging import getLogger
from suds.sax import splitPrefix, Namespace
from suds.sax.element import Element
from suds.plugin import DocumentPlugin, DocumentContext
log = getLogger(__name__)
class Doctor:
"""
Schema Doctor.
"""
def examine(self, root):
"""
Examine and repair the schema (if necessary).
@param root: A schema root element.
@type root: L{Element}
"""
pass
class Practice(Doctor):
"""
A collection of doctors.
@ivar doctors: A list of doctors.
@type doctors: list
"""
def __init__(self):
self.doctors = []
def add(self, doctor):
"""
Add a doctor to the practice
@param doctor: A doctor to add.
@type doctor: L{Doctor}
"""
self.doctors.append(doctor)
def examine(self, root):
for d in self.doctors:
d.examine(root)
return root
class TnsFilter:
"""
Target Namespace filter.
@ivar tns: A list of target namespaces.
@type tns: [str,...]
"""
def __init__(self, *tns):
"""
@param tns: A list of target namespaces.
@type tns: [str,...]
"""
self.tns = []
self.add(*tns)
def add(self, *tns):
"""
        Add I{targetNamespaces} to be matched.
@param tns: A list of target namespaces.
@type tns: [str,...]
"""
self.tns += tns
def match(self, root, ns):
"""
Match by I{targetNamespace} excluding those that
are equal to the specified namespace to prevent
adding an import to itself.
@param root: A schema root.
@type root: L{Element}
"""
tns = root.get('targetNamespace')
if len(self.tns):
matched = ( tns in self.tns )
else:
matched = 1
itself = ( ns == tns )
return ( matched and not itself )
class Import:
"""
An <xs:import/> to be applied.
@cvar xsdns: The XSD namespace.
@type xsdns: (p,u)
@ivar ns: An import namespace.
@type ns: str
@ivar location: An optional I{schemaLocation}.
@type location: str
@ivar filter: A filter used to restrict application to
a particular schema.
@type filter: L{TnsFilter}
"""
xsdns = Namespace.xsdns
def __init__(self, ns, location=None):
"""
@param ns: An import namespace.
@type ns: str
@param location: An optional I{schemaLocation}.
@type location: str
"""
self.ns = ns
self.location = location
self.filter = TnsFilter()
def setfilter(self, filter):
"""
Set the filter.
@param filter: A filter to set.
@type filter: L{TnsFilter}
"""
self.filter = filter
def apply(self, root):
"""
Apply the import (rule) to the specified schema.
If the schema does not already contain an import for the
I{namespace} specified here, it is added.
@param root: A schema root.
@type root: L{Element}
"""
if not self.filter.match(root, self.ns):
return
if self.exists(root):
return
node = Element('import', ns=self.xsdns)
node.set('namespace', self.ns)
if self.location is not None:
node.set('schemaLocation', self.location)
log.debug('inserting: %s', node)
root.insert(node)
def add(self, root):
"""
Add an <xs:import/> to the specified schema root.
@param root: A schema root.
@type root: L{Element}
"""
node = Element('import', ns=self.xsdns)
node.set('namespace', self.ns)
if self.location is not None:
node.set('schemaLocation', self.location)
log.debug('%s inserted', node)
root.insert(node)
def exists(self, root):
"""
Check to see if the <xs:import/> already exists
        in the specified schema root by matching I{namespace}.
@param root: A schema root.
@type root: L{Element}
"""
for node in root.children:
if node.name != 'import':
continue
ns = node.get('namespace')
if self.ns == ns:
return 1
return 0
class ImportDoctor(Doctor, DocumentPlugin):
"""
Doctor used to fix missing imports.
@ivar imports: A list of imports to apply.
@type imports: [L{Import},...]
"""
def __init__(self, *imports):
"""
"""
self.imports = []
self.add(*imports)
def add(self, *imports):
"""
        Add a namespace to be checked.
@param imports: A list of L{Import} objects.
@type imports: [L{Import},..]
"""
self.imports += imports
def examine(self, node):
for imp in self.imports:
imp.apply(node)
def parsed(self, context):
node = context.document
# xsd root
if node.name == 'schema' and Namespace.xsd(node.namespace()):
self.examine(node)
return
# look deeper
context = DocumentContext()
for child in node:
context.document = child
self.parsed(context)
|
thedrow/cython
|
refs/heads/master
|
Cython/Compiler/Tests/__init__.py
|
1472
|
# empty file
|
shannonjlove/namebench
|
refs/heads/master
|
tools/ordered-uniq.py
|
175
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Like uniq, but does not require sort-order to change."""
import sys
seen = {}
for full_line in sys.stdin:
line = full_line.rstrip()
if line not in seen:
sys.stdout.write(full_line)
seen[line] = 1
sys.stdout.close()
sys.stdin.close()
|
deepakiam/bot
|
refs/heads/master
|
bot/server/ctrl_server.py
|
1
|
#!/usr/bin/env python
"""Server that accepts and executes control-type commands on the bot."""
import sys
import os
from inspect import getmembers, ismethod
from simplejson.decoder import JSONDecodeError
import zmq
import signal
# This is required to make imports work
sys.path = [os.getcwd()] + sys.path
import bot.lib.lib as lib
import pub_server as pub_server_mod
import bot.lib.messages as msgs
from bot.driver.mec_driver import MecDriver
def is_api_method(obj, name):
"""Tests whether named method exists in obj and is flagged for API export.
:param obj: API-exported object to search for the given method on.
    :type obj: string
:param name: Name of method to check for.
:type name: string
:returns: True if given method is on given obj and is exported, else False.
"""
try:
method = getattr(obj, name)
except AttributeError:
return False
return (ismethod(method) and hasattr(method, "__api_call"))
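# is_api_method() keys off an "__api_call" marker attribute, so the
# @lib.api_call decorator is assumed to be roughly equivalent to this
# sketch (not the actual bot.lib.lib implementation):
#
#   def api_call(func):
#       func.__api_call = True
#       return func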
class CtrlServer(object):
"""Exports bot control via ZMQ.
Most functionally exported by CtrlServer is in the form of methods
exposed by the API. @lib.api_call decorators can be added to bot
systems, which tags them for export. They can then be called
remotely via CtrlClient, which is typically owned by an interface
like the CLI, which typically accepts commands from an agent like
a human.
Some control is exported directly by CtrlServer, not through the
API. For example, CtrlServer responds directly to ping messages,
list messages (which give the objects/methods exposed by the API),
and exit messages.
CtrlServer is the primary owner of bot resources, which we call
systems. For example, it's CtrlServer that instantiates gunner
and follower. Through those two, CtrlServer owns the gun, the
IR hub, the turret and basically every other bot system.
The messages that CtrlServer accepts and responds with are fully
specified in lib.messages. Make any changes to messages there.
CtrlServer can be instructed (via the API) to spawn a new thread
for a PubServer. When that happens, CtrlServer passes its systems
to PubServer, which can read their state and publish it over a
ZMQ PUB socket.
"""
def __init__(self, testing=None, config_file="bot/config.yaml"):
"""Build ZMQ REP socket and instantiate bot systems.
:param testing: True if running on simulated HW, False if on bot.
:type testing: boolean
:param config_file: Name of file to read configuration from.
:type config_file: string
"""
# Register signal handler, shut down cleanly (think motors)
signal.signal(signal.SIGINT, self.signal_handler)
# Load configuration and logger
self.config = lib.get_config(config_file)
self.logger = lib.get_logger()
# Testing flag will cause objects to run on simulated hardware
if testing is True or testing == "True":
self.logger.info("CtrlServer running in test mode")
lib.set_testing(True)
elif testing is None:
self.logger.info(
"Defaulting to config testing flag: {}".format(
self.config["testing"]))
lib.set_testing(self.config["testing"])
else:
self.logger.info("CtrlServer running in non-test mode")
lib.set_testing(False)
# Build socket to listen for requests
self.context = zmq.Context()
self.ctrl_sock = self.context.socket(zmq.REP)
self.server_bind_addr = "{protocol}://{host}:{port}".format(
protocol=self.config["server_protocol"],
host=self.config["server_bind_host"],
port=self.config["ctrl_server_port"])
try:
self.ctrl_sock.bind(self.server_bind_addr)
except zmq.ZMQError:
self.logger.error("ZMQ error. Is a server already running?")
self.logger.warning("May be connected to an old server instance.")
sys.exit(1)
self.systems = self.assign_subsystems()
self.logger.info("Control server initialized")
# Don't spawn pub_server until told to
self.pub_server = None
def signal_handler(self, signal, frame):
self.logger.info("Caught SIGINT (Ctrl+C), closing cleanly")
self.clean_up()
self.logger.info("Cleaned up bot, exiting...")
sys.exit(0)
def assign_subsystems(self):
"""Instantiates and stores references to bot subsystems.
:returns: Dict of subsystems, maps system name to instantiated object.
"""
self.driver = MecDriver()
systems = {}
systems["ctrl"] = self
systems["driver"] = self.driver
self.logger.debug("Systems: {}".format(systems))
return systems
def listen(self):
"""Perpetually listen for messages, pass them to generic handler."""
self.logger.info("Control server: {}".format(self.server_bind_addr))
while True:
try:
msg = self.ctrl_sock.recv_json()
reply = self.handle_msg(msg)
self.logger.debug("Sending: {}".format(reply))
self.ctrl_sock.send_json(reply)
except JSONDecodeError:
err_msg = "Not a JSON message!"
self.logger.warning(err_msg)
self.ctrl_sock.send_json(msgs.error(err_msg))
except KeyboardInterrupt:
self.logger.info("Exiting control server. Bye!")
self.clean_up()
sys.exit(0)
def handle_msg(self, msg):
"""Generic message handler. Hands-off based on type of message.
:param msg: Message, received via ZMQ from client, to handle.
:type msg: dict
:returns: An appropriate message reply dict, from lib.messages.
"""
self.logger.debug("Received: {}".format(msg))
try:
msg_type = msg["type"]
except KeyError as e:
return msgs.error(e)
if msg_type == "ping_req":
reply = msgs.ping_reply()
elif msg_type == "list_req":
reply = self.list_callables()
elif msg_type == "call_req":
try:
obj_name = msg["obj_name"]
method = msg["method"]
params = msg["params"]
reply = self.call_method(obj_name, method, params)
except KeyError as e:
return msgs.error(e)
elif msg_type == "exit_req":
self.logger.info("Received message to die. Bye!")
reply = msgs.exit_reply()
# Need to actually send reply here as we're about to exit
self.logger.debug("Sending: {}".format(reply))
self.ctrl_sock.send_json(reply)
self.clean_up()
sys.exit(0)
else:
err_msg = "Unrecognized message: {}".format(msg)
self.logger.warning(err_msg)
reply = msgs.error(err_msg)
return reply
def list_callables(self):
"""Build list of callable methods on each exported subsystem object.
Uses introspection to create a list of callable methods for each
registered subsystem object. Only methods which are flagged using the
@lib.api_call decorator will be included.
:returns: list_reply message with callable objects and their methods.
"""
self.logger.debug("List of callable API objects requested")
# Dict of subsystem object names to their callable methods.
callables = {}
for name, obj in self.systems.items():
methods = []
# Filter out methods which are not explicitly flagged for export
for member in getmembers(obj):
if is_api_method(obj, member[0]):
methods.append(member[0])
callables[name] = methods
return msgs.list_reply(callables)
def call_method(self, name, method, params):
"""Call a previously registered subsystem method by name. Only
methods tagged with the @api_call decorator can be called.
:param name: Assigned name of the registered subsystem.
:type name: string
:param method: Subsystem method to be called.
:type method: string
:param params: Additional parameters for the called method.
:type params: dict
:returns: call_reply or error message dict to be sent to caller.
"""
self.logger.debug("API call: {}.{}({})".format(name, method, params))
if name in self.systems:
obj = self.systems[name]
if is_api_method(obj, method):
try:
# Calls given obj.method, unpacking and passing params dict
call_return = getattr(obj, method)(**params)
msg = "Called {}.{}".format(name, method)
self.logger.debug(msg + ",returned:{}".format(call_return))
return msgs.call_reply(msg, call_return)
except TypeError:
# Raised when we have a mismatch of the method's kwargs
# TODO: Return argspec here?
err_msg = "Invalid params for {}.{}".format(name, method)
self.logger.warning(err_msg)
return msgs.error(err_msg)
except Exception as e:
# Catch exception raised by called method, notify client
err_msg = "Exception: '{}'".format(str(e))
self.logger.warning(err_msg)
return msgs.error(err_msg)
else:
err_msg = "Invalid method: '{}.{}'".format(name, method)
self.logger.warning(err_msg)
return msgs.error(err_msg)
else:
err_msg = "Invalid object: '{}'".format(name)
self.logger.warning(err_msg)
return msgs.error(err_msg)
@lib.api_call
def echo(self, msg=None):
"""Echo a message back to the caller.
:param msg: Message to be echoed back to caller, default is None.
:returns: Message given by param, defaults to None.
"""
return msg
@lib.api_call
def exception(self):
"""Raise a test exception which will be returned to the caller."""
raise Exception("Exception test")
@lib.api_call
def spawn_pub_server(self):
"""Spawn publisher thread."""
if self.pub_server is None:
self.pub_server = pub_server_mod.PubServer(self.systems)
# Prevent pub_server thread from blocking the process from closing
self.pub_server.setDaemon(True)
self.pub_server.start()
msg = "Spawned pub server"
self.logger.info(msg)
return msg
else:
err_msg = "PubServer is already running"
self.logger.warning(err_msg)
return err_msg
@lib.api_call
def stop_full(self):
"""Stop all drive and gun motors, set turret to safe state."""
self.systems["driver"].move(0, 0)
def clean_up(self):
"""Tear down ZMQ socket."""
self.stop_full()
self.ctrl_sock.close()
self.context.term()
if __name__ == "__main__":
if len(sys.argv) == 2:
server = CtrlServer(sys.argv[1])
else:
server = CtrlServer()
server.listen()
|
eviljeff/olympia
|
refs/heads/master
|
src/olympia/scanners/migrations/0010_auto_20191023_0908.py
|
6
|
# Generated by Django 2.2.6 on 2019-10-23 09:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('scanners', '0009_auto_20191023_0906')]
# Nothing to do...
operations = []
|
JeyZeta/Dangerous
|
refs/heads/master
|
Dangerous/Golismero/tools/sqlmap/plugins/dbms/firebird/__init__.py
|
8
|
#!/usr/bin/env python
"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
from lib.core.enums import DBMS
from lib.core.settings import FIREBIRD_SYSTEM_DBS
from lib.core.unescaper import unescaper
from plugins.dbms.firebird.enumeration import Enumeration
from plugins.dbms.firebird.filesystem import Filesystem
from plugins.dbms.firebird.fingerprint import Fingerprint
from plugins.dbms.firebird.syntax import Syntax
from plugins.dbms.firebird.takeover import Takeover
from plugins.generic.misc import Miscellaneous
class FirebirdMap(Syntax, Fingerprint, Enumeration, Filesystem, Miscellaneous, Takeover):
"""
This class defines Firebird methods
"""
def __init__(self):
self.excludeDbsList = FIREBIRD_SYSTEM_DBS
Syntax.__init__(self)
Fingerprint.__init__(self)
Enumeration.__init__(self)
Filesystem.__init__(self)
Miscellaneous.__init__(self)
Takeover.__init__(self)
unescaper[DBMS.FIREBIRD] = Syntax.escape
|
OpenRCE/sulley
|
refs/heads/master
|
sulley/pedrpc.py
|
15
|
import sys
import struct
import time
import socket
import cPickle
########################################################################################################################
class client:
def __init__ (self, host, port):
self.__host = host
self.__port = port
self.__dbg_flag = False
self.__server_sock = None
self.__retry = 0
self.NOLINGER = struct.pack('ii', 1, 0)
####################################################################################################################
def __getattr__ (self, method_name):
'''
This routine is called by default when a requested attribute (or method) is accessed that has no definition.
Unfortunately __getattr__ only passes the requested method name and not the arguments. So we extend the
functionality with a little lambda magic to the routine method_missing(). Which is actually how Ruby handles
missing methods by default ... with arguments. Now we are just as cool as Ruby.
@type method_name: String
@param method_name: The name of the requested and undefined attribute (or method in our case).
@rtype: Lambda
@return: Lambda magic passing control (and in turn the arguments we want) to self.method_missing().
'''
return lambda *args, **kwargs: self.__method_missing(method_name, *args, **kwargs)
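        # e.g. client(host, port).ping(1, x=2) has no ping() defined, so it
        # resolves here as self.__method_missing("ping", 1, x=2) and is
        # shipped to the server (the method name "ping" is hypothetical).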
####################################################################################################################
def __connect (self):
'''
Connect to the PED-RPC server.
'''
# if we have a pre-existing server socket, ensure it's closed.
self.__disconnect()
# connect to the server, timeout on failure.
try:
self.__server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.__server_sock.settimeout(3.0)
self.__server_sock.connect((self.__host, self.__port))
except:
if self.__retry != 5:
self.__retry += 1
time.sleep(5)
self.__connect()
else:
sys.stderr.write("PED-RPC> unable to connect to server %s:%d\n" % (self.__host, self.__port))
raise Exception
# disable timeouts and lingering.
self.__server_sock.settimeout(None)
self.__server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, self.NOLINGER)
####################################################################################################################
def __disconnect (self):
'''
Ensure the socket is torn down.
'''
if self.__server_sock != None:
self.__debug("closing server socket")
self.__server_sock.close()
self.__server_sock = None
####################################################################################################################
def __debug (self, msg):
if self.__dbg_flag:
print "PED-RPC> %s" % msg
####################################################################################################################
def __method_missing (self, method_name, *args, **kwargs):
'''
        See __getattr__ for related notes. This method is called, in the Ruby fashion, with the method
name and arguments for any requested but undefined class method.
@type method_name: String
@param method_name: The name of the requested and undefined attribute (or method in our case).
@type *args: Tuple
@param *args: Tuple of arguments.
        @type **kwargs: Dictionary
        @param **kwargs: Dictionary of arguments.
@rtype: Mixed
@return: Return value of the mirrored method.
'''
# return a value so lines of code like the following work:
# x = pedrpc.client(host, port)
# if x:
# x.do_something()
if method_name == "__nonzero__":
return 1
# ignore all other attempts to access a private member.
if method_name.startswith("__"):
return
# connect to the PED-RPC server.
self.__connect()
# transmit the method name and arguments.
while 1:
try:
self.__pickle_send((method_name, (args, kwargs)))
break
except:
# re-connect to the PED-RPC server if the sock died.
self.__connect()
# snag the return value.
ret = self.__pickle_recv()
# close the sock and return.
self.__disconnect()
return ret
####################################################################################################################
def __pickle_recv (self):
'''
        This routine is used for marshaling arbitrary data from the PED-RPC server. We can send pretty much anything here.
For example a tuple containing integers, strings, arbitrary objects and structures. Our "protocol" is a simple
length-value protocol where each datagram is prefixed by a 4-byte length of the data to be received.
@raise pdx: An exception is raised if the connection was severed.
@rtype: Mixed
@return: Whatever is received over the socket.
'''
try:
# TODO: this should NEVER fail, but alas, it does and for the time being i can't figure out why.
# it gets worse. you would think that simply returning here would break things, but it doesn't.
# gotta track this down at some point.
length = struct.unpack("<L", self.__server_sock.recv(4))[0]
except:
return
try:
received = ""
while length:
chunk = self.__server_sock.recv(length)
if not chunk:
# the peer closed the connection mid-message; trigger the except below.
raise Exception
received += chunk
length -= len(chunk)
except:
sys.stderr.write("PED-RPC> connection to server severed during recv()\n")
raise Exception
return cPickle.loads(received)
####################################################################################################################
def __pickle_send (self, data):
'''
This routine is used for marshaling arbitrary data to the PED-RPC server. We can send pretty much anything here,
for example a tuple containing integers, strings, arbitrary objects and structures. Our "protocol" is a simple
length-value protocol where each message is prefixed by a 4-byte length of the data to be transmitted.
@type data: Mixed
@param data: Data to marshal and transmit. Data can *pretty much* contain anything you throw at it.
@raise pdx: An exception is raised if the connection was severed.
'''
data = cPickle.dumps(data, protocol=2)
self.__debug("sending %d bytes" % len(data))
try:
self.__server_sock.send(struct.pack("<L", len(data)))
self.__server_sock.send(data)
except:
sys.stderr.write("PED-RPC> connection to server severed during send()\n")
raise Exception
########################################################################################################################
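# A minimal, standalone sketch of the length-value framing implemented by
# __pickle_send() / __pickle_recv() above, demonstrated on an in-memory byte
# string instead of a live socket. The helper names frame() / unframe() are
# illustrative and not part of the original module.
import cPickle
import struct

def frame (obj):
    # prefix the pickled payload with its 4-byte little-endian length.
    payload = cPickle.dumps(obj, protocol=2)
    return struct.pack("<L", len(payload)) + payload

def unframe (blob):
    # read the 4-byte length header, then unpickle exactly that many bytes.
    (length,) = struct.unpack("<L", blob[:4])
    return cPickle.loads(blob[4:4 + length])

assert unframe(frame(("echo", ((1, "two"), {})))) == ("echo", ((1, "two"), {}))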
class server:
def __init__ (self, host, port):
self.__host = host
self.__port = port
self.__dbg_flag = False
self.__client_sock = None
self.__client_address = None
try:
# create a socket and bind to the specified port.
self.__server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.__server.settimeout(None)
self.__server.bind((host, port))
self.__server.listen(1)
except:
sys.stderr.write("unable to bind to %s:%d\n" % (host, port))
sys.exit(1)
####################################################################################################################
def __disconnect (self):
'''
Ensure the socket is torn down.
'''
if self.__client_sock is not None:
self.__debug("closing client socket")
self.__client_sock.close()
self.__client_sock = None
####################################################################################################################
def __debug (self, msg):
if self.__dbg_flag:
print "PED-RPC> %s" % msg
####################################################################################################################
def __pickle_recv (self):
'''
This routine is used for unmarshaling arbitrary data from the PED-RPC client. We can receive pretty much anything
here, for example a tuple containing integers, strings, arbitrary objects and structures. Our "protocol" is a simple
length-value protocol where each message is prefixed by a 4-byte length of the data to be received.
@raise pdx: An exception is raised if the connection was severed.
@rtype: Mixed
@return: Whatever is received over the socket.
'''
try:
length = struct.unpack("<L", self.__client_sock.recv(4))[0]
received = ""
while length:
chunk = self.__client_sock.recv(length)
if not chunk:
# the peer closed the connection mid-message; trigger the except below.
raise Exception
received += chunk
length -= len(chunk)
except:
sys.stderr.write("PED-RPC> connection client severed during recv()\n")
raise Exception
return cPickle.loads(received)
####################################################################################################################
def __pickle_send (self, data):
'''
This routine is used for marshaling arbitrary data to the PED-RPC client. We can send pretty much anything here,
for example a tuple containing integers, strings, arbitrary objects and structures. Our "protocol" is a simple
length-value protocol where each message is prefixed by a 4-byte length of the data to be transmitted.
@type data: Mixed
@param data: Data to marshal and transmit. Data can *pretty much* contain anything you throw at it.
@raise pdx: An exception is raised if the connection was severed.
'''
data = cPickle.dumps(data, protocol=2)
self.__debug("sending %d bytes" % len(data))
try:
self.__client_sock.send(struct.pack("<L", len(data)))
self.__client_sock.send(data)
except:
sys.stderr.write("PED-RPC> connection to client severed during send()\n")
raise Exception
####################################################################################################################
def serve_forever (self):
self.__debug("serving up a storm")
while 1:
# close any pre-existing socket.
self.__disconnect()
# accept a client connection.
(self.__client_sock, self.__client_address) = self.__server.accept()
self.__debug("accepted connection from %s:%d" % (self.__client_address[0], self.__client_address[1]))
# receive the method name and arguments, continue on socket disconnect.
try:
(method_name, (args, kwargs)) = self.__pickle_recv()
self.__debug("%s(args=%s, kwargs=%s)" % (method_name, args, kwargs))
except:
continue
try:
# resolve a pointer to the requested method and call it.
method_pointer = getattr(self, method_name)
ret = method_pointer(*args, **kwargs)
except AttributeError:
# if the method can't be found, notify the user and wait for the next request.
sys.stderr.write("PED-RPC> remote method %s cannot be found\n" % method_name)
continue
# transmit the return value to the client, continue on socket disconnect.
try:
self.__pickle_send(ret)
except:
continue
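########################################################################################################################
# End-to-end usage sketch (illustrative; the host, port and echo() method are
# hypothetical). The server half is subclassed with concrete methods; the
# client half proxies any call to them transparently via __method_missing():
#
#     class my_rpc_server (server):
#         def echo (self, data):
#             return data
#
#     # process A:  my_rpc_server("0.0.0.0", 7777).serve_forever()
#     # process B:  print client("127.0.0.1", 7777).echo("hello")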
|
andela-earinde/bellatrix-py
|
refs/heads/master
|
app/js/lib/lib/modules/distutils/command/check.py
|
98
|
"""distutils.command.check
Implements the Distutils 'check' command.
"""
__revision__ = "$Id$"
from distutils.core import Command
from distutils.dist import PKG_INFO_ENCODING
from distutils.errors import DistutilsSetupError
try:
# docutils is installed
from docutils.utils import Reporter
from docutils.parsers.rst import Parser
from docutils import frontend
from docutils import nodes
from StringIO import StringIO
class SilentReporter(Reporter):
def __init__(self, source, report_level, halt_level, stream=None,
debug=0, encoding='ascii', error_handler='replace'):
self.messages = []
Reporter.__init__(self, source, report_level, halt_level, stream,
debug, encoding, error_handler)
def system_message(self, level, message, *children, **kwargs):
self.messages.append((level, message, children, kwargs))
return nodes.system_message(message, level=level,
type=self.levels[level],
*children, **kwargs)
HAS_DOCUTILS = True
except ImportError:
# docutils is not installed
HAS_DOCUTILS = False
class check(Command):
"""This command checks the meta-data of the package.
"""
description = ("perform some checks on the package")
user_options = [('metadata', 'm', 'Verify meta-data'),
('restructuredtext', 'r',
('Checks if the long string meta-data syntax '
'is reStructuredText-compliant')),
('strict', 's',
'Will exit with an error if a check fails')]
boolean_options = ['metadata', 'restructuredtext', 'strict']
def initialize_options(self):
"""Sets default values for options."""
self.restructuredtext = 0
self.metadata = 1
self.strict = 0
self._warnings = 0
def finalize_options(self):
pass
def warn(self, msg):
"""Counts the number of warnings that occurs."""
self._warnings += 1
return Command.warn(self, msg)
def run(self):
"""Runs the command."""
# perform the various tests
if self.metadata:
self.check_metadata()
if self.restructuredtext:
if HAS_DOCUTILS:
self.check_restructuredtext()
elif self.strict:
raise DistutilsSetupError('The docutils package is needed.')
# let's raise an error in strict mode, if we have at least
# one warning
if self.strict and self._warnings > 0:
raise DistutilsSetupError('Please correct your package.')
def check_metadata(self):
"""Ensures that all required elements of meta-data are supplied.
name, version, URL, (author and author_email) or
(maintainer and maintainer_email)).
Warns if any are missing.
"""
metadata = self.distribution.metadata
missing = []
for attr in ('name', 'version', 'url'):
if not (hasattr(metadata, attr) and getattr(metadata, attr)):
missing.append(attr)
if missing:
self.warn("missing required meta-data: %s" % ', '.join(missing))
if metadata.author:
if not metadata.author_email:
self.warn("missing meta-data: if 'author' supplied, " +
"'author_email' must be supplied too")
elif metadata.maintainer:
if not metadata.maintainer_email:
self.warn("missing meta-data: if 'maintainer' supplied, " +
"'maintainer_email' must be supplied too")
else:
self.warn("missing meta-data: either (author and author_email) " +
"or (maintainer and maintainer_email) " +
"must be supplied")
def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
data = self.distribution.get_long_description()
if not isinstance(data, unicode):
data = data.decode(PKG_INFO_ENCODING)
for warning in self._check_rst_data(data):
line = warning[-1].get('line')
if line is None:
warning = warning[1]
else:
warning = '%s (line %s)' % (warning[1], line)
self.warn(warning)
def _check_rst_data(self, data):
"""Returns warnings when the provided data doesn't compile."""
source_path = StringIO()
parser = Parser()
settings = frontend.OptionParser().get_default_values()
settings.tab_width = 4
settings.pep_references = None
settings.rfc_references = None
reporter = SilentReporter(source_path,
settings.report_level,
settings.halt_level,
stream=settings.warning_stream,
debug=settings.debug,
encoding=settings.error_encoding,
error_handler=settings.error_encoding_error_handler)
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, -1)
try:
parser.parse(data, document)
except AttributeError:
reporter.messages.append((-1, 'Could not finish the parsing.',
'', {}))
return reporter.messages
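# Typical invocation sketch (illustrative; run from a project root containing
# a setup.py):
#
#     python setup.py check --metadata --restructuredtext --strict
#
# With --strict, any warning counted by warn() above makes the command fail
# with DistutilsSetupError instead of merely printing the messages.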
|
brijeshkesariya/odoo
|
refs/heads/8.0
|
addons/hr_timesheet/__openerp__.py
|
257
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Time Tracking',
'version': '1.0',
'category': 'Human Resources',
'sequence': 23,
'description': """
This module implements a timesheet system.
==========================================
Each employee can encode and track their time spent on the different projects.
A project is an analytic account and the time spent on a project generates costs on
the analytic account.
Plenty of reporting on time and employee tracking is provided.
It is completely integrated with the cost accounting module. It allows you to set
up per-project management.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/employees',
'depends': ['account', 'hr', 'base', 'hr_attendance'],
'data': [
'security/ir.model.access.csv',
'security/hr_timesheet_security.xml',
'hr_timesheet_view.xml',
'wizard/hr_timesheet_sign_in_out_view.xml',
'report/hr_timesheet_report_view.xml',
'hr_timesheet_installer.xml',
'hr_timesheet_data.xml'
],
'demo': ['hr_timesheet_demo.xml'],
'test': [
'test/hr_timesheet_users.yml',
'test/test_hr_timesheet.yml',
'test/hr_timesheet_demo.yml',
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
cntnboys/410Lab6
|
refs/heads/master
|
build/django/build/lib.linux-x86_64-2.7/django/contrib/sites/__init__.py
|
808
|
default_app_config = 'django.contrib.sites.apps.SitesConfig'
|
ChameleonCloud/horizon
|
refs/heads/chameleoncloud/train
|
openstack_auth/exceptions.py
|
2
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class KeystoneAuthException(Exception):
"""Generic error class to identify and catch our own errors."""
class KeystoneTokenExpiredException(KeystoneAuthException):
"""The authentication token issued by the Identity service has expired."""
class KeystoneNoBackendException(KeystoneAuthException):
"""No backend could be determined to handle the provided credentials."""
class KeystoneNoProjectsException(KeystoneAuthException):
"""You are not authorized for any projects or domains."""
class KeystoneRetrieveProjectsException(KeystoneAuthException):
"""Unable to retrieve authorized projects."""
class KeystoneRetrieveDomainsException(KeystoneAuthException):
"""Unable to retrieve authorized domains."""
class KeystoneConnectionException(KeystoneAuthException):
"""Unable to establish connection to keystone endpoint."""
class KeystoneCredentialsException(KeystoneAuthException):
"""Invalid credentials."""
class KeystonePassExpiredException(KeystoneAuthException):
"""The password is expired and needs to be changed."""
|
7ing/kubernetes
|
refs/heads/master
|
examples/cluster-dns/images/backend/server.py
|
504
|
#!/usr/bin/env python
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
PORT_NUMBER = 8000
# This class handles any incoming request.
class HTTPHandler(BaseHTTPRequestHandler):
# Handler for the GET requests
def do_GET(self):
self.send_response(200)
self.send_header('Content-type','text/html')
self.end_headers()
self.wfile.write("Hello World!")
try:
# Create a web server and define the handler to manage the incoming request.
server = HTTPServer(('', PORT_NUMBER), HTTPHandler)
print 'Started httpserver on port ' , PORT_NUMBER
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down the web server'
server.socket.close()
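# Quick check (illustrative): with the server running on the default port,
#
#     curl http://localhost:8000/
#
# should print "Hello World!".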
|
watspidererik/testenv
|
refs/heads/master
|
flask/lib/python2.7/site-packages/pip/_vendor/colorama/winterm.py
|
523
|
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32
# from wincon.h
class WinColor(object):
BLACK = 0
BLUE = 1
GREEN = 2
CYAN = 3
RED = 4
MAGENTA = 5
YELLOW = 6
GREY = 7
# from wincon.h
class WinStyle(object):
NORMAL = 0x00 # dim text, dim background
BRIGHT = 0x08 # bright text, dim background
class WinTerm(object):
def __init__(self):
self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
self.set_attrs(self._default)
self._default_fore = self._fore
self._default_back = self._back
self._default_style = self._style
def get_attrs(self):
return self._fore + self._back * 16 + self._style
def set_attrs(self, value):
self._fore = value & 7
self._back = (value >> 4) & 7
self._style = value & WinStyle.BRIGHT
def reset_all(self, on_stderr=None):
self.set_attrs(self._default)
self.set_console(attrs=self._default)
def fore(self, fore=None, on_stderr=False):
if fore is None:
fore = self._default_fore
self._fore = fore
self.set_console(on_stderr=on_stderr)
def back(self, back=None, on_stderr=False):
if back is None:
back = self._default_back
self._back = back
self.set_console(on_stderr=on_stderr)
def style(self, style=None, on_stderr=False):
if style is None:
style = self._default_style
self._style = style
self.set_console(on_stderr=on_stderr)
def set_console(self, attrs=None, on_stderr=False):
if attrs is None:
attrs = self.get_attrs()
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleTextAttribute(handle, attrs)
def get_position(self, handle):
position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
# Windows coordinates are 0-based,
# but win32.SetConsoleCursorPosition expects 1-based ones.
position.X += 1
position.Y += 1
return position
def set_cursor_position(self, position=None, on_stderr=False):
if position is None:
#I'm not currently tracking the position, so there is no default.
#position = self.get_position()
return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleCursorPosition(handle, position)
def cursor_up(self, num_rows=0, on_stderr=False):
if num_rows == 0:
return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
position = self.get_position(handle)
adjusted_position = (position.Y - num_rows, position.X)
self.set_cursor_position(adjusted_position, on_stderr)
def erase_data(self, mode=0, on_stderr=False):
# 0 (or None) should clear from the cursor to the end of the screen.
# 1 should clear from the cursor to the beginning of the screen.
# 2 should clear the entire screen. (And maybe move cursor to (1,1)?)
#
# At the moment, I only support mode 2. From looking at the API, it
# should be possible to calculate a different number of bytes to clear,
# and to do so relative to the cursor position.
if mode[0] not in (2,):
return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
# here's where we'll home the cursor
coord_screen = win32.COORD(0,0)
csbi = win32.GetConsoleScreenBufferInfo(handle)
# get the number of character cells in the current buffer
dw_con_size = csbi.dwSize.X * csbi.dwSize.Y
# fill the entire screen with blanks
win32.FillConsoleOutputCharacter(handle, ' ', dw_con_size, coord_screen)
# now set the buffer's attributes accordingly
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), dw_con_size, coord_screen );
# put the cursor at (0, 0)
win32.SetConsoleCursorPosition(handle, (coord_screen.X, coord_screen.Y))
|
jicruz/heroku-bot
|
refs/heads/master
|
lib/urllib3/packages/ssl_match_hostname/_implementation.py
|
285
|
"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
# Note: This file is under the PSF license as the code comes from the python
# stdlib. http://docs.python.org/3/license.html
import re
import sys
# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
# system, use it to handle IPAddress ServerAltnames (this was added in
# python-3.5) otherwise only do DNS matching. This allows
# backports.ssl_match_hostname to continue to be used all the way back to
# python-2.4.
try:
import ipaddress
except ImportError:
ipaddress = None
__version__ = '3.5.0.1'
class CertificateError(ValueError):
pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
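# Illustrative behaviour of the matching above (hypothetical names):
#
#     _dnsname_match('www.example.com', 'www.example.com')    # exact -> True
#     _dnsname_match('*.example.com', 'foo.example.com')      # wildcard matches one label
#     _dnsname_match('*.example.com', 'foo.bar.example.com')  # no match: '*' never spans dots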
def _to_unicode(obj):
if isinstance(obj, str) and sys.version_info < (3,):
obj = unicode(obj, encoding='ascii', errors='strict')
return obj
def _ipaddress_match(ipname, host_ip):
"""Exact matching of IP addresses.
RFC 6125 explicitly doesn't define an algorithm for this
(section 1.7.2 - "Out of Scope").
"""
# OpenSSL may add a trailing newline to a subjectAltName's IP address
# Divergence from upstream: ipaddress can't handle byte str
ip = ipaddress.ip_address(_to_unicode(ipname).rstrip())
return ip == host_ip
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate, match_hostname needs a "
"SSL socket or SSL context with either "
"CERT_OPTIONAL or CERT_REQUIRED")
try:
# Divergence from upstream: ipaddress can't handle byte str
host_ip = ipaddress.ip_address(_to_unicode(hostname))
except ValueError:
# Not an IP address (common case)
host_ip = None
except UnicodeError:
# Divergence from upstream: Have to deal with ipaddress not taking
# byte strings. addresses should be all ascii, so we consider it not
# an ipaddress in this case
host_ip = None
except AttributeError:
# Divergence from upstream: Make ipaddress library optional
if ipaddress is None:
host_ip = None
else:
raise
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if host_ip is None and _dnsname_match(value, hostname):
return
dnsnames.append(value)
elif key == 'IP Address':
if host_ip is not None and _ipaddress_match(value, host_ip):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
|
AlexRobson/scikit-learn
|
refs/heads/master
|
sklearn/externals/joblib/parallel.py
|
86
|
"""
Helpers for embarrassingly parallel code.
"""
# Author: Gael Varoquaux < gael dot varoquaux at normalesup dot org >
# Copyright: 2010, Gael Varoquaux
# License: BSD 3 clause
from __future__ import division
import os
import sys
import gc
import warnings
from math import sqrt
import functools
import time
import threading
import itertools
from numbers import Integral
try:
import cPickle as pickle
except:
import pickle
from ._multiprocessing_helpers import mp
if mp is not None:
from .pool import MemmapingPool
from multiprocessing.pool import ThreadPool
from .format_stack import format_exc, format_outer_frames
from .logger import Logger, short_format_time
from .my_exceptions import TransportableException, _mk_exception
from .disk import memstr_to_kbytes
from ._compat import _basestring
VALID_BACKENDS = ['multiprocessing', 'threading']
# Environment variables to protect against bad situations when nesting
JOBLIB_SPAWNED_PROCESS = "__JOBLIB_SPAWNED_PARALLEL__"
# In seconds, should be big enough to hide multiprocessing dispatching
# overhead.
# This settings was found by running benchmarks/bench_auto_batching.py
# with various parameters on various platforms.
MIN_IDEAL_BATCH_DURATION = .2
# Should not be too high to avoid stragglers: long jobs running alone
# on a single worker while other workers have no work to process any more.
MAX_IDEAL_BATCH_DURATION = 2
class BatchedCalls(object):
"""Wrap a sequence of (func, args, kwargs) tuples as a single callable"""
def __init__(self, iterator_slice):
self.items = list(iterator_slice)
self._size = len(self.items)
def __call__(self):
return [func(*args, **kwargs) for func, args, kwargs in self.items]
def __len__(self):
return self._size
###############################################################################
# CPU count that works also when multiprocessing has been disabled via
# the JOBLIB_MULTIPROCESSING environment variable
def cpu_count():
""" Return the number of CPUs.
"""
if mp is None:
return 1
return mp.cpu_count()
###############################################################################
# For verbosity
def _verbosity_filter(index, verbose):
""" Returns False for indices increasingly apart, the distance
depending on the value of verbose.
We use a lag increasing as the square of index
"""
if not verbose:
return True
elif verbose > 10:
return False
if index == 0:
return False
verbose = .5 * (11 - verbose) ** 2
scale = sqrt(index / verbose)
next_scale = sqrt((index + 1) / verbose)
return (int(next_scale) == int(scale))
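# Worked example (illustrative): for verbose=5 the lag is .5 * (11 - 5) ** 2
# == 18, so only indices 0, 17, 71, 161, ... return False (i.e. get printed):
#
#     [_verbosity_filter(i, 5) for i in (0, 1, 17, 18, 71)]
#     # -> [False, True, False, True, False]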
###############################################################################
class WorkerInterrupt(Exception):
""" An exception that is not KeyboardInterrupt to allow subprocesses
to be interrupted.
"""
pass
###############################################################################
class SafeFunction(object):
""" Wraps a function to make it exception with full traceback in
their representation.
Useful for parallel computing with multiprocessing, for which
exceptions cannot be captured.
"""
def __init__(self, func):
self.func = func
def __call__(self, *args, **kwargs):
try:
return self.func(*args, **kwargs)
except KeyboardInterrupt:
# We capture the KeyboardInterrupt and reraise it as
# something different, as multiprocessing does not
# interrupt processing for a KeyboardInterrupt
raise WorkerInterrupt()
except:
e_type, e_value, e_tb = sys.exc_info()
text = format_exc(e_type, e_value, e_tb, context=10,
tb_offset=1)
if issubclass(e_type, TransportableException):
raise
else:
raise TransportableException(text, e_type)
###############################################################################
def delayed(function, check_pickle=True):
"""Decorator used to capture the arguments of a function.
Pass `check_pickle=False` when:
- performing a possibly repeated check is too costly and has been done
already once outside of the call to delayed.
- when used in conjunction with `Parallel(backend='threading')`.
"""
# Try to pickle the input function, to catch the problems early when
# using with multiprocessing:
if check_pickle:
pickle.dumps(function)
def delayed_function(*args, **kwargs):
return function, args, kwargs
try:
delayed_function = functools.wraps(function)(delayed_function)
except AttributeError:
" functools.wraps fails on some callable objects "
return delayed_function
###############################################################################
class ImmediateComputeBatch(object):
"""Sequential computation of a batch of tasks.
This replicates the async computation API but actually does not delay
the computations when joblib.Parallel runs in sequential mode.
"""
def __init__(self, batch):
# Don't delay the application, to avoid keeping the input
# arguments in memory
self.results = batch()
def get(self):
return self.results
###############################################################################
class BatchCompletionCallBack(object):
"""Callback used by joblib.Parallel's multiprocessing backend.
This callable is executed by the parent process whenever a worker process
has returned the results of a batch of tasks.
It is used for progress reporting, to update estimate of the batch
processing duration and to schedule the next batch of tasks to be
processed.
"""
def __init__(self, dispatch_timestamp, batch_size, parallel):
self.dispatch_timestamp = dispatch_timestamp
self.batch_size = batch_size
self.parallel = parallel
def __call__(self, out):
self.parallel.n_completed_tasks += self.batch_size
this_batch_duration = time.time() - self.dispatch_timestamp
if (self.parallel.batch_size == 'auto'
and self.batch_size == self.parallel._effective_batch_size):
# Update the smoothed streaming estimate of the duration of a batch
# from dispatch to completion
old_duration = self.parallel._smoothed_batch_duration
if old_duration == 0:
# First record of duration for this batch size after the last
# reset.
new_duration = this_batch_duration
else:
# Update the exponentially weighted average of the duration of
# batch for the current effective size.
new_duration = 0.8 * old_duration + 0.2 * this_batch_duration
self.parallel._smoothed_batch_duration = new_duration
self.parallel.print_progress()
if self.parallel._original_iterator is not None:
self.parallel.dispatch_next()
###############################################################################
class Parallel(Logger):
''' Helper class for readable parallel mapping.
Parameters
-----------
n_jobs: int, default: 1
The maximum number of concurrently running jobs, such as the number
of Python worker processes when backend="multiprocessing"
or the size of the thread-pool when backend="threading".
If -1 all CPUs are used. If 1 is given, no parallel computing code
is used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all
CPUs but one are used.
backend: str or None, default: 'multiprocessing'
Specify the parallelization backend implementation.
Supported backends are:
- "multiprocessing" used by default, can induce some
communication and memory overhead when exchanging input and
output data with the worker Python processes.
- "threading" is a very low-overhead backend but it suffers
from the Python Global Interpreter Lock if the called function
relies a lot on Python objects. "threading" is mostly useful
when the execution bottleneck is a compiled extension that
explicitly releases the GIL (for instance a Cython loop wrapped
in a "with nogil" block or an expensive call to a library such
as NumPy).
verbose: int, optional
The verbosity level: if non zero, progress messages are
printed. Above 50, the output is sent to stdout.
The frequency of the messages increases with the verbosity level.
If it is more than 10, all iterations are reported.
pre_dispatch: {'all', integer, or expression, as in '3*n_jobs'}
The number of batches (of tasks) to be pre-dispatched.
Default is '2*n_jobs'. When batch_size="auto" this is a reasonable
default and the multiprocessing workers should never starve.
batch_size: int or 'auto', default: 'auto'
The number of atomic tasks to dispatch at once to each
worker. When individual evaluations are very fast, multiprocessing
can be slower than sequential computation because of the overhead.
Batching fast computations together can mitigate this.
The ``'auto'`` strategy keeps track of the time it takes for a batch
to complete, and dynamically adjusts the batch size to keep the time
on the order of half a second, using a heuristic. The initial batch
size is 1.
``batch_size="auto"`` with ``backend="threading"`` will dispatch
batches of a single task at a time as the threading backend has
very little overhead and using larger batch size has not proved to
bring any gain in that case.
temp_folder: str, optional
Folder to be used by the pool for memmaping large arrays
for sharing memory with worker processes. If None, this will try in
order:
- a folder pointed by the JOBLIB_TEMP_FOLDER environment variable,
- /dev/shm if the folder exists and is writable: this is a RAMdisk
filesystem available by default on modern Linux distributions,
- the default system temporary folder that can be overridden
with TMP, TMPDIR or TEMP environment variables, typically /tmp
under Unix operating systems.
Only active when backend="multiprocessing".
max_nbytes: int, str, or None, optional, 1M by default
Threshold on the size of arrays passed to the workers that
triggers automated memory mapping in temp_folder. Can be an int
in Bytes, or a human-readable string, e.g., '1M' for 1 megabyte.
Use None to disable memmaping of large arrays.
Only active when backend="multiprocessing".
Notes
-----
This object uses the multiprocessing module to compute in
parallel the application of a function to many different
arguments. The main functionality it brings in addition to
using the raw multiprocessing API are (see examples for details):
* More readable code, in particular since it avoids
constructing list of arguments.
* Easier debugging:
- informative tracebacks even when the error happens on
the client side
- using 'n_jobs=1' enables to turn off parallel computing
for debugging without changing the codepath
- early capture of pickling errors
* An optional progress meter.
* Interruption of multiprocesses jobs with 'Ctrl-C'
* Flexible pickling control for the communication to and from
the worker processes.
* Ability to use shared memory efficiently with worker
processes for large numpy-based datastructures.
Examples
--------
A simple example:
>>> from math import sqrt
>>> from sklearn.externals.joblib import Parallel, delayed
>>> Parallel(n_jobs=1)(delayed(sqrt)(i**2) for i in range(10))
[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]
Reshaping the output when the function has several return
values:
>>> from math import modf
>>> from sklearn.externals.joblib import Parallel, delayed
>>> r = Parallel(n_jobs=1)(delayed(modf)(i/2.) for i in range(10))
>>> res, i = zip(*r)
>>> res
(0.0, 0.5, 0.0, 0.5, 0.0, 0.5, 0.0, 0.5, 0.0, 0.5)
>>> i
(0.0, 0.0, 1.0, 1.0, 2.0, 2.0, 3.0, 3.0, 4.0, 4.0)
The progress meter: the higher the value of `verbose`, the more
messages::
>>> from time import sleep
>>> from sklearn.externals.joblib import Parallel, delayed
>>> r = Parallel(n_jobs=2, verbose=5)(delayed(sleep)(.1) for _ in range(10)) #doctest: +SKIP
[Parallel(n_jobs=2)]: Done 1 out of 10 | elapsed: 0.1s remaining: 0.9s
[Parallel(n_jobs=2)]: Done 3 out of 10 | elapsed: 0.2s remaining: 0.5s
[Parallel(n_jobs=2)]: Done 6 out of 10 | elapsed: 0.3s remaining: 0.2s
[Parallel(n_jobs=2)]: Done 9 out of 10 | elapsed: 0.5s remaining: 0.1s
[Parallel(n_jobs=2)]: Done 10 out of 10 | elapsed: 0.5s finished
Traceback example, note how the line of the error is indicated
as well as the values of the parameter passed to the function that
triggered the exception, even though the traceback happens in the
child process::
>>> from heapq import nlargest
>>> from sklearn.externals.joblib import Parallel, delayed
>>> Parallel(n_jobs=2)(delayed(nlargest)(2, n) for n in (range(4), 'abcde', 3)) #doctest: +SKIP
#...
---------------------------------------------------------------------------
Sub-process traceback:
---------------------------------------------------------------------------
TypeError Mon Nov 12 11:37:46 2012
PID: 12934 Python 2.7.3: /usr/bin/python
...........................................................................
/usr/lib/python2.7/heapq.pyc in nlargest(n=2, iterable=3, key=None)
419 if n >= size:
420 return sorted(iterable, key=key, reverse=True)[:n]
421
422 # When key is none, use simpler decoration
423 if key is None:
--> 424 it = izip(iterable, count(0,-1)) # decorate
425 result = _nlargest(n, it)
426 return map(itemgetter(0), result) # undecorate
427
428 # General case, slowest method
TypeError: izip argument #1 must support iteration
___________________________________________________________________________
Using pre_dispatch in a producer/consumer situation, where the
data is generated on the fly. Note how the producer is first
called 3 times before the parallel loop is initiated, and then
called to generate new data on the fly. In this case the total
number of iterations cannot be reported in the progress messages::
>>> from math import sqrt
>>> from sklearn.externals.joblib import Parallel, delayed
>>> def producer():
... for i in range(6):
... print('Produced %s' % i)
... yield i
>>> out = Parallel(n_jobs=2, verbose=100, pre_dispatch='1.5*n_jobs')(
... delayed(sqrt)(i) for i in producer()) #doctest: +SKIP
Produced 0
Produced 1
Produced 2
[Parallel(n_jobs=2)]: Done 1 jobs | elapsed: 0.0s
Produced 3
[Parallel(n_jobs=2)]: Done 2 jobs | elapsed: 0.0s
Produced 4
[Parallel(n_jobs=2)]: Done 3 jobs | elapsed: 0.0s
Produced 5
[Parallel(n_jobs=2)]: Done 4 jobs | elapsed: 0.0s
[Parallel(n_jobs=2)]: Done 5 out of 6 | elapsed: 0.0s remaining: 0.0s
[Parallel(n_jobs=2)]: Done 6 out of 6 | elapsed: 0.0s finished
'''
def __init__(self, n_jobs=1, backend='multiprocessing', verbose=0,
pre_dispatch='2 * n_jobs', batch_size='auto', temp_folder=None,
max_nbytes='1M', mmap_mode='r'):
self.verbose = verbose
self._mp_context = None
if backend is None:
# `backend=None` was supported in 0.8.2 with this effect
backend = "multiprocessing"
elif hasattr(backend, 'Pool') and hasattr(backend, 'Lock'):
# Make it possible to pass a custom multiprocessing context as
# backend to change the start method to forkserver or spawn or
# preload modules on the forkserver helper process.
self._mp_context = backend
backend = "multiprocessing"
if backend not in VALID_BACKENDS:
raise ValueError("Invalid backend: %s, expected one of %r"
% (backend, VALID_BACKENDS))
self.backend = backend
self.n_jobs = n_jobs
if (batch_size == 'auto'
or isinstance(batch_size, Integral) and batch_size > 0):
self.batch_size = batch_size
else:
raise ValueError(
"batch_size must be 'auto' or a positive integer, got: %r"
% batch_size)
self.pre_dispatch = pre_dispatch
self._temp_folder = temp_folder
if isinstance(max_nbytes, _basestring):
self._max_nbytes = 1024 * memstr_to_kbytes(max_nbytes)
else:
self._max_nbytes = max_nbytes
self._mmap_mode = mmap_mode
# Not starting the pool in the __init__ is a design decision, to be
# able to close it ASAP, and not burden the user with closing it
# unless they choose to use the context manager API with a with block.
self._pool = None
self._output = None
self._jobs = list()
self._managed_pool = False
# This lock is used to coordinate the main thread of this process with
# the async callback thread of the pool.
self._lock = threading.Lock()
def __enter__(self):
self._managed_pool = True
self._initialize_pool()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._terminate_pool()
self._managed_pool = False
def _effective_n_jobs(self):
n_jobs = self.n_jobs
if n_jobs == 0:
raise ValueError('n_jobs == 0 in Parallel has no meaning')
elif mp is None or n_jobs is None:
# multiprocessing is not available or disabled, fallback
# to sequential mode
return 1
elif n_jobs < 0:
n_jobs = max(mp.cpu_count() + 1 + n_jobs, 1)
return n_jobs
def _initialize_pool(self):
"""Build a process or thread pool and return the number of workers"""
n_jobs = self._effective_n_jobs()
# The list of exceptions that we will capture
self.exceptions = [TransportableException]
if n_jobs == 1:
# Sequential mode: do not use a pool instance to avoid any
# useless dispatching overhead
self._pool = None
elif self.backend == 'threading':
self._pool = ThreadPool(n_jobs)
elif self.backend == 'multiprocessing':
if mp.current_process().daemon:
# Daemonic processes cannot have children
self._pool = None
warnings.warn(
'Multiprocessing-backed parallel loops cannot be nested,'
' setting n_jobs=1',
stacklevel=3)
return 1
elif threading.current_thread().name != 'MainThread':
# Prevent posix fork inside in non-main posix threads
self._pool = None
warnings.warn(
'Multiprocessing backed parallel loops cannot be nested'
' below threads, setting n_jobs=1',
stacklevel=3)
return 1
else:
already_forked = int(os.environ.get(JOBLIB_SPAWNED_PROCESS, 0))
if already_forked:
raise ImportError('[joblib] Attempting to do parallel computing '
'without protecting your import on a system that does '
'not support forking. To use parallel-computing in a '
'script, you must protect your main loop using "if '
"__name__ == '__main__'"
'". Please see the joblib documentation on Parallel '
'for more information'
)
# Set an environment variable to avoid infinite loops
os.environ[JOBLIB_SPAWNED_PROCESS] = '1'
# Make sure to free as much memory as possible before forking
gc.collect()
poolargs = dict(
max_nbytes=self._max_nbytes,
mmap_mode=self._mmap_mode,
temp_folder=self._temp_folder,
verbose=max(0, self.verbose - 50),
context_id=0, # the pool is used only for one call
)
if self._mp_context is not None:
# Use Python 3.4+ multiprocessing context isolation
poolargs['context'] = self._mp_context
self._pool = MemmapingPool(n_jobs, **poolargs)
# We are using multiprocessing, we also want to capture
# KeyboardInterrupts
self.exceptions.extend([KeyboardInterrupt, WorkerInterrupt])
else:
raise ValueError("Unsupported backend: %s" % self.backend)
return n_jobs
def _terminate_pool(self):
if self._pool is not None:
self._pool.close()
self._pool.terminate() # terminate does a join()
self._pool = None
if self.backend == 'multiprocessing':
os.environ.pop(JOBLIB_SPAWNED_PROCESS, 0)
def _dispatch(self, batch):
"""Queue the batch for computing, with or without multiprocessing
WARNING: this method is not thread-safe: it should be only called
indirectly via dispatch_one_batch.
"""
# If job.get() catches an exception, it closes the queue:
if self._aborting:
return
if self._pool is None:
job = ImmediateComputeBatch(batch)
self._jobs.append(job)
self.n_dispatched_batches += 1
self.n_dispatched_tasks += len(batch)
self.n_completed_tasks += len(batch)
if not _verbosity_filter(self.n_dispatched_batches, self.verbose):
self._print('Done %3i tasks | elapsed: %s',
(self.n_completed_tasks,
short_format_time(time.time() - self._start_time)
))
else:
dispatch_timestamp = time.time()
cb = BatchCompletionCallBack(dispatch_timestamp, len(batch), self)
job = self._pool.apply_async(SafeFunction(batch), callback=cb)
self._jobs.append(job)
self.n_dispatched_tasks += len(batch)
self.n_dispatched_batches += 1
def dispatch_next(self):
"""Dispatch more data for parallel processing
This method is meant to be called concurrently by the multiprocessing
callback. We rely on the thread-safety of dispatch_one_batch to protect
against concurrent consumption of the unprotected iterator.
"""
if not self.dispatch_one_batch(self._original_iterator):
self._iterating = False
self._original_iterator = None
def dispatch_one_batch(self, iterator):
"""Prefetch the tasks for the next batch and dispatch them.
The effective size of the batch is computed here.
If there are no more jobs to dispatch, return False, else return True.
The iterator consumption and dispatching is protected by the same
lock so calling this function should be thread safe.
"""
if self.batch_size == 'auto' and self.backend == 'threading':
# Batching is never beneficial with the threading backend
batch_size = 1
elif self.batch_size == 'auto':
old_batch_size = self._effective_batch_size
batch_duration = self._smoothed_batch_duration
if (batch_duration > 0 and
batch_duration < MIN_IDEAL_BATCH_DURATION):
# The current batch size is too small: the duration of the
# processing of a batch of task is not large enough to hide
# the scheduling overhead.
ideal_batch_size = int(
old_batch_size * MIN_IDEAL_BATCH_DURATION / batch_duration)
# Multiply by two to limit oscillations between min and max.
batch_size = max(2 * ideal_batch_size, 1)
self._effective_batch_size = batch_size
if self.verbose >= 10:
self._print("Batch computation too fast (%.4fs.) "
"Setting batch_size=%d.", (
batch_duration, batch_size))
elif (batch_duration > MAX_IDEAL_BATCH_DURATION and
old_batch_size >= 2):
# The current batch size is too big. If we schedule overly long
# running batches some CPUs might wait with nothing left to do
# while a couple of CPUs are left processing a few long running
# batches. Better reduce the batch size a bit to limit the
# likelihood of scheduling such stragglers.
self._effective_batch_size = batch_size = old_batch_size // 2
if self.verbose >= 10:
self._print("Batch computation too slow (%.2fs.) "
"Setting batch_size=%d.", (
batch_duration, batch_size))
else:
# No batch size adjustment
batch_size = old_batch_size
if batch_size != old_batch_size:
# Reset estimation of the smoothed mean batch duration: this
# estimate is updated in the multiprocessing apply_async
# CallBack as long as the batch_size is constant. Therefore
# we need to reset the estimate whenever we re-tune the batch
# size.
self._smoothed_batch_duration = 0
else:
# Fixed batch size strategy
batch_size = self.batch_size
with self._lock:
tasks = BatchedCalls(itertools.islice(iterator, batch_size))
if not tasks:
# No more tasks available in the iterator: tell caller to stop.
return False
else:
self._dispatch(tasks)
return True
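# Numeric sketch of the 'auto' strategy above (illustrative): starting from
# batch_size 1, a smoothed batch duration of 0.005s is below
# MIN_IDEAL_BATCH_DURATION (0.2s), so ideal_batch_size = int(1 * .2 / .005)
# == 40 and the next dispatch uses max(2 * 40, 1) == 80 tasks; a duration
# above MAX_IDEAL_BATCH_DURATION (2s) instead halves the batch size.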
def _print(self, msg, msg_args):
"""Display the message on stout or stderr depending on verbosity"""
# XXX: Not using the logger framework: need to
# learn to use logger better.
if not self.verbose:
return
if self.verbose < 50:
writer = sys.stderr.write
else:
writer = sys.stdout.write
msg = msg % msg_args
writer('[%s]: %s\n' % (self, msg))
def print_progress(self):
"""Display the process of the parallel execution only a fraction
of time, controlled by self.verbose.
"""
if not self.verbose:
return
elapsed_time = time.time() - self._start_time
# This is heuristic code to print a message only a fraction of the time
# The challenge is that we may not know the queue length
if self._original_iterator:
if _verbosity_filter(self.n_dispatched_batches, self.verbose):
return
self._print('Done %3i tasks | elapsed: %s',
(self.n_completed_tasks,
short_format_time(elapsed_time),
))
else:
index = self.n_dispatched_batches
# We are finished dispatching
total_tasks = self.n_dispatched_tasks
# We always display the first loop
if not index == 0:
# Display depending on the number of remaining items
# A message as soon as we finish dispatching, cursor is 0
cursor = (total_tasks - index + 1
- self._pre_dispatch_amount)
frequency = (total_tasks // self.verbose) + 1
is_last_item = (index + 1 == total_tasks)
if (is_last_item or cursor % frequency):
return
remaining_time = (elapsed_time / (index + 1) *
(self.n_dispatched_tasks - index - 1.))
self._print('Done %3i out of %3i | elapsed: %s remaining: %s',
(index + 1,
total_tasks,
short_format_time(elapsed_time),
short_format_time(remaining_time),
))
def retrieve(self):
self._output = list()
while self._iterating or len(self._jobs) > 0:
if len(self._jobs) == 0:
# Wait for an async callback to dispatch new jobs
time.sleep(0.01)
continue
# We need to be careful: the job list can be filling up as
# we empty it and Python lists are not thread-safe by default hence
# the use of the lock
with self._lock:
job = self._jobs.pop(0)
try:
self._output.extend(job.get())
except tuple(self.exceptions) as exception:
# Stop dispatching any new job in the async callback thread
self._aborting = True
if isinstance(exception, TransportableException):
# Capture exception to add information on the local
# stack in addition to the distant stack
this_report = format_outer_frames(context=10,
stack_start=1)
report = """Multiprocessing exception:
%s
---------------------------------------------------------------------------
Sub-process traceback:
---------------------------------------------------------------------------
%s""" % (this_report, exception.message)
# Convert this to a JoblibException
exception_type = _mk_exception(exception.etype)[0]
exception = exception_type(report)
# Kill remaining running processes without waiting for
# the results as we will raise the exception we got back
# to the caller instead of returning any result.
with self._lock:
self._terminate_pool()
if self._managed_pool:
# In case we had to terminate a managed pool, let
# us start a new one to ensure that subsequent calls
# to __call__ on the same Parallel instance will get
# a working pool as they expect.
self._initialize_pool()
raise exception
def __call__(self, iterable):
if self._jobs:
raise ValueError('This Parallel instance is already running')
# A flag used to abort the dispatching of jobs in case an
# exception is found
self._aborting = False
if not self._managed_pool:
n_jobs = self._initialize_pool()
else:
n_jobs = self._effective_n_jobs()
if self.batch_size == 'auto':
self._effective_batch_size = 1
iterator = iter(iterable)
pre_dispatch = self.pre_dispatch
if pre_dispatch == 'all' or n_jobs == 1:
# prevent further dispatch via multiprocessing callback thread
self._original_iterator = None
self._pre_dispatch_amount = 0
else:
self._original_iterator = iterator
if hasattr(pre_dispatch, 'endswith'):
pre_dispatch = eval(pre_dispatch)
self._pre_dispatch_amount = pre_dispatch = int(pre_dispatch)
# The main thread will consume the first pre_dispatch items and
# the remaining items will later be lazily dispatched by async
# callbacks upon task completions.
iterator = itertools.islice(iterator, pre_dispatch)
self._start_time = time.time()
self.n_dispatched_batches = 0
self.n_dispatched_tasks = 0
self.n_completed_tasks = 0
self._smoothed_batch_duration = 0.0
try:
self._iterating = True
while self.dispatch_one_batch(iterator):
pass
if pre_dispatch == "all" or n_jobs == 1:
# The iterable was consumed all at once by the above while loop.
# No need to wait for async callbacks to trigger to
# consumption.
self._iterating = False
self.retrieve()
# Make sure that we get a last message telling us we are done
elapsed_time = time.time() - self._start_time
self._print('Done %3i out of %3i | elapsed: %s finished',
(len(self._output), len(self._output),
short_format_time(elapsed_time)))
finally:
if not self._managed_pool:
self._terminate_pool()
self._jobs = list()
output = self._output
self._output = None
return output
def __repr__(self):
return '%s(n_jobs=%s)' % (self.__class__.__name__, self.n_jobs)
|
JuBra/GEMEditor
|
refs/heads/master
|
GEMEditor/solution/base.py
|
1
|
from cobra.core import LegacySolution, Solution
def set_status_to_label(label, status):
""" Set the status to label
Parameters
----------
label: QLabel
status: str
Returns
-------
"""
if status == "optimal":
label.setStyleSheet("color: ForestGreen; font-weight: bold;")
else:
label.setStyleSheet("")
label.setText(status)
def set_objective_to_label(label, objective):
""" Set objective to label
Parameters
----------
label: QLabel
objective: float or str
Returns
-------
None
"""
try:
label.setText("{0:.2f}".format(objective))
except (TypeError, ValueError):
label.setText("{0!s}".format(objective))
def status_objective_from_solution(solution):
""" Get the status and objective from solution
Parameters
----------
solution: Solution or LegacySolution
Returns
-------
status: str
objective: str or float
"""
status, objective = "NA", "NA"
if isinstance(solution, LegacySolution):
status, objective = solution.status, solution.f
elif isinstance(solution, Solution):
status, objective = solution.status, solution.objective_value
return str(status), objective
def fluxes_from_solution(solution):
""" Get the stored flux values from solution
Parameters
----------
solution: Solution or LegacySolution
Returns
-------
fluxes: dict or panda.Series
"""
if isinstance(solution, LegacySolution):
fluxes = solution.x_dict
elif isinstance(solution, Solution):
fluxes = solution.fluxes
else:
raise TypeError("Expected LegacySolution or Solution object")
return fluxes
def shadow_prices_from_solution(solution):
""" Get the shadow prices from solution
Parameters
----------
solution: Solution or LegacySolution
Returns
-------
prices: dict or panda.Series
"""
if isinstance(solution, LegacySolution):
prices = solution.y_dict
elif isinstance(solution, Solution):
prices = solution.shadow_prices
else:
raise TypeError("Expected LegacySolution or Solution object")
return prices
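# Illustrative use (model here is a hypothetical cobra model, and label a
# QLabel): run an optimization and feed the helpers above.
#
#     solution = model.optimize()
#     status, objective = status_objective_from_solution(solution)
#     set_status_to_label(label, status)
#     set_objective_to_label(label, objective)
#     fluxes = fluxes_from_solution(solution)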
|
deKupini/erp
|
refs/heads/master
|
addons/hr_holidays/hr_holidays.py
|
3
|
# -*- coding: utf-8 -*-
##################################################################################
#
# Copyright (c) 2005-2006 Axelor SARL. (http://www.axelor.com)
# and 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# $Id: hr.py 4656 2006-11-24 09:58:42Z Cyp $
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import calendar
import datetime
from datetime import date
import math
import time
from operator import attrgetter
from openerp.exceptions import UserError, AccessError
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_holidays_status(osv.osv):
_name = "hr.holidays.status"
_description = "Leave Type"
def get_days(self, cr, uid, ids, employee_id, context=None):
result = dict((id, dict(max_leaves=0, leaves_taken=0, remaining_leaves=0,
virtual_remaining_leaves=0)) for id in ids)
holiday_ids = self.pool['hr.holidays'].search(cr, uid, [('employee_id', '=', employee_id),
('state', 'in', ['confirm', 'validate1', 'validate']),
('holiday_status_id', 'in', ids)
], context=context)
for holiday in self.pool['hr.holidays'].browse(cr, uid, holiday_ids, context=context):
status_dict = result[holiday.holiday_status_id.id]
if holiday.type == 'add':
if holiday.state == 'validate':
# note: add only validated allocation even for the virtual
# count; otherwise pending then refused allocation allow
# the employee to create more leaves than possible
status_dict['virtual_remaining_leaves'] += holiday.number_of_days_temp
status_dict['max_leaves'] += holiday.number_of_days_temp
status_dict['remaining_leaves'] += holiday.number_of_days_temp
elif holiday.type == 'remove': # number of days is negative
status_dict['virtual_remaining_leaves'] -= holiday.number_of_days_temp
if holiday.state == 'validate':
status_dict['leaves_taken'] += holiday.number_of_days_temp
status_dict['remaining_leaves'] -= holiday.number_of_days_temp
return result
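# Worked example (illustrative): an employee with one validated allocation
# of 10 days and one confirmed, not yet validated, leave request of 3 days
# ends up with max_leaves == 10, remaining_leaves == 10, leaves_taken == 0
# and virtual_remaining_leaves == 10 - 3 == 7.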
def _user_left_days(self, cr, uid, ids, name, args, context=None):
employee_id = False
if context and 'employee_id' in context:
employee_id = context['employee_id']
else:
employee_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context)
if employee_ids:
employee_id = employee_ids[0]
if employee_id:
res = self.get_days(cr, uid, ids, employee_id, context=context)
else:
res = dict((res_id, {'leaves_taken': 0, 'remaining_leaves': 0, 'max_leaves': 0}) for res_id in ids)
return res
_columns = {
'name': fields.char('Leave Type', size=64, required=True, translate=True),
'categ_id': fields.many2one('calendar.event.type', 'Meeting Type',
help='Once a leave is validated, Odoo will create a corresponding meeting of this type in the calendar.'),
'color_name': fields.selection([('red', 'Red'),('blue','Blue'), ('lightgreen', 'Light Green'), ('lightblue','Light Blue'), ('lightyellow', 'Light Yellow'), ('magenta', 'Magenta'),('lightcyan', 'Light Cyan'),('black', 'Black'),('lightpink', 'Light Pink'),('brown', 'Brown'),('violet', 'Violet'),('lightcoral', 'Light Coral'),('lightsalmon', 'Light Salmon'),('lavender', 'Lavender'),('wheat', 'Wheat'),('ivory', 'Ivory')],'Color in Report', required=True, help='This color will be used in the leaves summary located in Reporting\Leaves by Department.'),
'limit': fields.boolean('Allow to Override Limit', help='If you select this check box, the system allows the employees to take more leaves than the available ones for this type and will not take them into account for the "Remaining Legal Leaves" defined on the employee form.'),
'active': fields.boolean('Active', help="If the active field is set to false, it will allow you to hide the leave type without removing it."),
        'max_leaves': fields.function(_user_left_days, string='Maximum Allowed', help='This value is given by the sum of all holiday requests with a positive value.', multi='user_left_days'),
        'leaves_taken': fields.function(_user_left_days, string='Leaves Already Taken', help='This value is given by the sum of all holiday requests with a negative value.', multi='user_left_days'),
'remaining_leaves': fields.function(_user_left_days, string='Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken', multi='user_left_days'),
'virtual_remaining_leaves': fields.function(_user_left_days, string='Virtual Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken - Leaves Waiting Approval', multi='user_left_days'),
'double_validation': fields.boolean('Apply Double Validation', help="When selected, the Allocation/Leave Requests for this type require a second validation to be approved."),
}
_defaults = {
'color_name': 'red',
'active': True,
}
def name_get(self, cr, uid, ids, context=None):
if context is None:
context = {}
if not context.get('employee_id'):
            # leave counts are based on employee_id and would be inaccurate without the correct employee in context
return super(hr_holidays_status, self).name_get(cr, uid, ids, context=context)
res = []
for record in self.browse(cr, uid, ids, context=context):
name = record.name
if not record.limit:
name = name + (' (%g/%g)' % (record.virtual_remaining_leaves or 0.0, record.max_leaves or 0.0))
res.append((record.id, name))
return res
def _search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
""" Override _search to order the results, according to some employee.
The order is the following
- limit (limited leaves first, such as Legal Leaves)
- virtual remaining leaves (higher the better, so using reverse on sorted)
This override is necessary because those fields are not stored and depends
on an employee_id given in context. This sort will be done when there
is an employee_id in context and that no other order has been given
to the method. """
if context is None:
context = {}
ids = super(hr_holidays_status, self)._search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=count, access_rights_uid=access_rights_uid)
if not count and not order and context.get('employee_id'):
leaves = self.browse(cr, uid, ids, context=context)
            # perform a 2-step sort because the sort on remaining leaves is reversed;
            # sorted() is stable, so sorting on the secondary key first and on the
            # primary key (limit) second yields the documented order
            sorted_leaves = leaves.sorted(key=attrgetter('virtual_remaining_leaves'), reverse=True)
            sorted_leaves = sorted_leaves.sorted(key=attrgetter('limit'))
return [leave.id for leave in sorted_leaves]
return ids
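    # A minimal, self-contained sketch (illustration only, not part of the
    # module) of the stable two-pass sort performed in _search above; sorting
    # by the secondary key first keeps its order within ties of the primary key:
    #
    #   rows = [('Legal', False, 12.0), ('Unpaid', True, 99.0), ('Sick', False, 3.0)]
    #   rows.sort(key=lambda r: r[2], reverse=True)  # virtual remaining, descending
    #   rows.sort(key=lambda r: r[1])                # limited (limit=False) types first
    #   # -> [('Legal', False, 12.0), ('Sick', False, 3.0), ('Unpaid', True, 99.0)]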
class hr_holidays(osv.osv):
_name = "hr.holidays"
_description = "Leave"
_order = "type desc, date_from asc"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_track = {
'state': {
'hr_holidays.mt_holidays_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state == 'confirm',
'hr_holidays.mt_holidays_first_validated': lambda self, cr, uid, obj, ctx=None: obj.state == 'validate1',
'hr_holidays.mt_holidays_approved': lambda self, cr, uid, obj, ctx=None: obj.state == 'validate',
'hr_holidays.mt_holidays_refused': lambda self, cr, uid, obj, ctx=None: obj.state == 'refuse',
},
}
def _employee_get(self, cr, uid, context=None):
emp_id = context.get('default_employee_id', False)
if emp_id:
return emp_id
ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context)
if ids:
return ids[0]
return False
def _compute_number_of_days(self, cr, uid, ids, name, args, context=None):
result = {}
for hol in self.browse(cr, uid, ids, context=context):
if hol.type=='remove':
result[hol.id] = -hol.number_of_days_temp
else:
result[hol.id] = hol.number_of_days_temp
return result
def _get_can_reset(self, cr, uid, ids, name, arg, context=None):
"""User can reset a leave request if it is its own leave request or if
he is an Hr Manager. """
user = self.pool['res.users'].browse(cr, uid, uid, context=context)
group_hr_manager_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'group_hr_manager')[1]
if group_hr_manager_id in [g.id for g in user.groups_id]:
return dict.fromkeys(ids, True)
result = dict.fromkeys(ids, False)
for holiday in self.browse(cr, uid, ids, context=context):
if holiday.employee_id and holiday.employee_id.user_id and holiday.employee_id.user_id.id == uid:
result[holiday.id] = True
return result
def _check_date(self, cr, uid, ids, context=None):
for holiday in self.browse(cr, uid, ids, context=context):
domain = [
('date_from', '<=', holiday.date_to),
('date_to', '>=', holiday.date_from),
('employee_id', '=', holiday.employee_id.id),
('id', '!=', holiday.id),
('state', 'not in', ['cancel', 'refuse']),
]
nholidays = self.search_count(cr, uid, domain, context=context)
if nholidays:
return False
return True
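    # Interval-overlap sketch (illustration only): two ranges [a1, a2] and
    # [b1, b2] overlap iff a1 <= b2 and a2 >= b1, which is exactly the first
    # two terms of the domain above, with the record itself and any
    # cancelled/refused requests filtered out.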
_check_holidays = lambda self, cr, uid, ids, context=None: self.check_holidays(cr, uid, ids, context=context)
_columns = {
'name': fields.char('Description', size=64),
'state': fields.selection([('draft', 'To Submit'), ('cancel', 'Cancelled'),('confirm', 'To Approve'), ('refuse', 'Refused'), ('validate1', 'Second Approval'), ('validate', 'Approved')],
'Status', readonly=True, track_visibility='onchange', copy=False,
help='The status is set to \'To Submit\', when a holiday request is created.\
            \nThe status is \'To Approve\', when the holiday request is confirmed by the user.\
            \nThe status is \'Refused\', when the holiday request is refused by the manager.\
            \nThe status is \'Approved\', when the holiday request is approved by the manager.'),
'payslip_status': fields.boolean(string='Reported in last payslips',
            help='Check this field when the leave has been taken into account in the payslip.'),
'report_note': fields.text('HR Comments'),
'user_id':fields.related('employee_id', 'user_id', type='many2one', relation='res.users', string='User', store=True),
'date_from': fields.datetime('Start Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, select=True, copy=False),
'date_to': fields.datetime('End Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, copy=False),
'holiday_status_id': fields.many2one("hr.holidays.status", "Leave Type", required=True,readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'employee_id': fields.many2one('hr.employee', "Employee", select=True, invisible=False, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'manager_id': fields.many2one('hr.employee', 'First Approval', invisible=False, readonly=True, copy=False,
            help='This area is automatically filled by the user who validates the leave'),
'notes': fields.text('Reasons',readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'number_of_days_temp': fields.float('Allocation', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, copy=False),
'number_of_days': fields.function(_compute_number_of_days, string='Number of Days', store=True),
'meeting_id': fields.many2one('calendar.event', 'Meeting'),
'type': fields.selection([('remove','Leave Request'),('add','Allocation Request')], 'Request Type', required=True, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help="Choose 'Leave Request' if someone wants to take an off-day. \nChoose 'Allocation Request' if you want to increase the number of leaves available for someone", select=True),
'parent_id': fields.many2one('hr.holidays', 'Parent'),
'linked_request_ids': fields.one2many('hr.holidays', 'parent_id', 'Linked Requests',),
'department_id':fields.related('employee_id', 'department_id', string='Department', type='many2one', relation='hr.department', readonly=True, store=True),
'category_id': fields.many2one('hr.employee.category', "Employee Tag", help='Category of Employee', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}),
'holiday_type': fields.selection([('employee','By Employee'),('category','By Employee Tag')], 'Allocation Mode', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help='By Employee: Allocation/Request for individual Employee, By Employee Tag: Allocation/Request for group of employees in category', required=True),
'manager_id2': fields.many2one('hr.employee', 'Second Approval', readonly=True, copy=False,
            help='This area is automatically filled by the user who validates the leave at the second level (if the leave type requires a second validation)'),
'double_validation': fields.related('holiday_status_id', 'double_validation', type='boolean', relation='hr.holidays.status', string='Apply Double Validation'),
'can_reset': fields.function(
_get_can_reset, string="Can reset",
type='boolean'),
}
_defaults = {
'employee_id': _employee_get,
'state': 'confirm',
'type': 'remove',
'user_id': lambda obj, cr, uid, context: uid,
'holiday_type': 'employee',
'payslip_status': False,
}
_constraints = [
        (_check_date, 'You can not have two leave requests that overlap on the same day!', ['date_from', 'date_to']),
(_check_holidays, 'The number of remaining leaves is not sufficient for this leave type.\n'
'Please verify also the leaves waiting for validation.', ['state', 'number_of_days_temp'])
]
_sql_constraints = [
('type_value', "CHECK( (holiday_type='employee' AND employee_id IS NOT NULL) or (holiday_type='category' AND category_id IS NOT NULL))",
"The employee or employee category of this request is missing. Please make sure that your user login is linked to an employee."),
        ('date_check2', "CHECK ( (type='add') OR (date_from <= date_to))", "The start date must be earlier than the end date."),
('date_check', "CHECK ( number_of_days_temp >= 0 )", "The number of days must be greater than 0."),
]
def _create_resource_leave(self, cr, uid, leaves, context=None):
        '''Create an entry in the resource calendar leaves object when a leave is validated.'''
obj_res_leave = self.pool.get('resource.calendar.leaves')
for leave in leaves:
vals = {
'name': leave.name,
'date_from': leave.date_from,
'holiday_id': leave.id,
'date_to': leave.date_to,
'resource_id': leave.employee_id.resource_id.id,
'calendar_id': leave.employee_id.resource_id.calendar_id.id
}
obj_res_leave.create(cr, uid, vals, context=context)
return True
def _remove_resource_leave(self, cr, uid, ids, context=None):
        '''Remove the resource calendar leave entries when a leave is cancelled or removed.'''
obj_res_leave = self.pool.get('resource.calendar.leaves')
leave_ids = obj_res_leave.search(cr, uid, [('holiday_id', 'in', ids)], context=context)
return obj_res_leave.unlink(cr, uid, leave_ids, context=context)
def onchange_type(self, cr, uid, ids, holiday_type, employee_id=False, context=None):
result = {}
if holiday_type == 'employee' and not employee_id:
ids_employee = self.pool.get('hr.employee').search(cr, uid, [('user_id','=', uid)])
if ids_employee:
result['value'] = {
'employee_id': ids_employee[0]
}
elif holiday_type != 'employee':
result['value'] = {
'employee_id': False
}
return result
def onchange_employee(self, cr, uid, ids, employee_id):
result = {'value': {'department_id': False}}
if employee_id:
employee = self.pool.get('hr.employee').browse(cr, uid, employee_id)
result['value'] = {'department_id': employee.department_id.id}
return result
# TODO: can be improved using resource calendar method
def _get_number_of_days(self, date_from, date_to):
"""Returns a float equals to the timedelta between two dates given as string."""
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
from_dt = datetime.datetime.strptime(date_from, DATETIME_FORMAT)
to_dt = datetime.datetime.strptime(date_to, DATETIME_FORMAT)
timedelta = to_dt - from_dt
diff_day = timedelta.days + float(timedelta.seconds) / 86400
return diff_day
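    # Worked example (sketch): from '2015-07-01 09:00:00' to '2015-07-02 17:00:00'
    # the timedelta is 1 day and 28800 seconds, so the method returns
    # 1 + 28800 / 86400.0 = 1.333... days.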
def unlink(self, cr, uid, ids, context=None):
for rec in self.browse(cr, uid, ids, context=context):
if rec.state not in ['draft', 'cancel', 'confirm']:
raise UserError(_('You cannot delete a leave which is in %s state.') % (rec.state,))
return super(hr_holidays, self).unlink(cr, uid, ids, context)
def onchange_date_from(self, cr, uid, ids, date_to, date_from):
"""
        If no date is set for date_to, automatically set one 8 hours later than
        date_from.
Also update the number_of_days.
"""
# date_to has to be greater than date_from
if (date_from and date_to) and (date_from > date_to):
            raise UserError(_('The start date must be earlier than the end date.'))
result = {'value': {}}
# No date_to set so far: automatically compute one 8 hours later
if date_from and not date_to:
date_to_with_delta = datetime.datetime.strptime(date_from, tools.DEFAULT_SERVER_DATETIME_FORMAT) + datetime.timedelta(hours=8)
result['value']['date_to'] = str(date_to_with_delta)
# Compute and update the number of days
if (date_to and date_from) and (date_from <= date_to):
diff_day = self._get_number_of_days(date_from, date_to)
result['value']['number_of_days_temp'] = round(math.floor(diff_day))+1
else:
result['value']['number_of_days_temp'] = 0
return result
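    # Sketch of the day count above: a 1.333-day span yields
    # round(math.floor(1.333)) + 1 = 2, i.e. both calendar days are counted.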
def onchange_date_to(self, cr, uid, ids, date_to, date_from):
"""
Update the number_of_days.
"""
# date_to has to be greater than date_from
if (date_from and date_to) and (date_from > date_to):
            raise UserError(_('The start date must be earlier than the end date.'))
result = {'value': {}}
# Compute and update the number of days
if (date_to and date_from) and (date_from <= date_to):
diff_day = self._get_number_of_days(date_from, date_to)
result['value']['number_of_days_temp'] = round(math.floor(diff_day))+1
else:
result['value']['number_of_days_temp'] = 0
return result
def create(self, cr, uid, values, context=None):
""" Override to avoid automatic logging of creation """
if context is None:
context = {}
context = dict(context, mail_create_nolog=True)
if values.get('state') and values['state'] not in ['draft', 'confirm', 'cancel'] and not self.pool['res.users'].has_group(cr, uid, 'base.group_hr_user'):
raise AccessError(_('You cannot set a leave request as \'%s\'. Contact a human resource manager.') % values.get('state'))
return super(hr_holidays, self).create(cr, uid, values, context=context)
def write(self, cr, uid, ids, vals, context=None):
if vals.get('state') and vals['state'] not in ['draft', 'confirm', 'cancel'] and not self.pool['res.users'].has_group(cr, uid, 'base.group_hr_user'):
raise AccessError(_('You cannot set a leave request as \'%s\'. Contact a human resource manager.') % vals.get('state'))
return super(hr_holidays, self).write(cr, uid, ids, vals, context=context)
def holidays_reset(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {
'state': 'draft',
'manager_id': False,
'manager_id2': False,
})
to_unlink = []
for record in self.browse(cr, uid, ids, context=context):
for record2 in record.linked_request_ids:
self.holidays_reset(cr, uid, [record2.id], context=context)
to_unlink.append(record2.id)
if to_unlink:
self.unlink(cr, uid, to_unlink, context=context)
return True
def holidays_first_validate(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
return self.write(cr, uid, ids, {'state': 'validate1', 'manager_id': manager}, context=context)
def holidays_validate(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
self.write(cr, uid, ids, {'state': 'validate'}, context=context)
data_holiday = self.browse(cr, uid, ids)
for record in data_holiday:
if record.double_validation:
self.write(cr, uid, [record.id], {'manager_id2': manager})
else:
self.write(cr, uid, [record.id], {'manager_id': manager})
if record.holiday_type == 'employee' and record.type == 'remove':
meeting_obj = self.pool.get('calendar.event')
meeting_vals = {
'name': record.name or _('Leave Request'),
'categ_ids': record.holiday_status_id.categ_id and [(6,0,[record.holiday_status_id.categ_id.id])] or [],
'duration': record.number_of_days_temp * 8,
'description': record.notes,
'user_id': record.user_id.id,
'start': record.date_from,
'stop': record.date_to,
'allday': False,
'state': 'open', # to block that meeting date in the calendar
'class': 'confidential'
}
#Add the partner_id (if exist) as an attendee
if record.user_id and record.user_id.partner_id:
meeting_vals['partner_ids'] = [(4,record.user_id.partner_id.id)]
ctx_no_email = dict(context or {}, no_email=True)
meeting_id = meeting_obj.create(cr, uid, meeting_vals, context=ctx_no_email)
self._create_resource_leave(cr, uid, [record], context=context)
                self.write(cr, uid, [record.id], {'meeting_id': meeting_id})
elif record.holiday_type == 'category':
emp_ids = obj_emp.search(cr, uid, [('category_ids', 'child_of', [record.category_id.id])])
leave_ids = []
for emp in obj_emp.browse(cr, uid, emp_ids):
vals = {
'name': record.name,
'type': record.type,
'holiday_type': 'employee',
'holiday_status_id': record.holiday_status_id.id,
'date_from': record.date_from,
'date_to': record.date_to,
'notes': record.notes,
'number_of_days_temp': record.number_of_days_temp,
'parent_id': record.id,
'employee_id': emp.id
}
leave_ids.append(self.create(cr, uid, vals, context=None))
for leave_id in leave_ids:
# TODO is it necessary to interleave the calls?
for sig in ('confirm', 'validate', 'second_validate'):
self.signal_workflow(cr, uid, [leave_id], sig)
return True
def holidays_confirm(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if record.employee_id and record.employee_id.parent_id and record.employee_id.parent_id.user_id:
self.message_subscribe_users(cr, uid, [record.id], user_ids=[record.employee_id.parent_id.user_id.id], context=context)
return self.write(cr, uid, ids, {'state': 'confirm'})
def holidays_refuse(self, cr, uid, ids, context=None):
obj_emp = self.pool.get('hr.employee')
ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)])
manager = ids2 and ids2[0] or False
for holiday in self.browse(cr, uid, ids, context=context):
if holiday.state == 'validate1':
self.write(cr, uid, [holiday.id], {'state': 'refuse', 'manager_id': manager})
else:
self.write(cr, uid, [holiday.id], {'state': 'refuse', 'manager_id2': manager})
self.holidays_cancel(cr, uid, ids, context=context)
return True
def holidays_cancel(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids):
# Delete the meeting
if record.meeting_id:
record.meeting_id.unlink()
            # If a category request created several holidays, cancel all the related ones
self.signal_workflow(cr, uid, map(attrgetter('id'), record.linked_request_ids or []), 'refuse')
self._remove_resource_leave(cr, uid, ids, context=context)
return True
def check_holidays(self, cr, uid, ids, context=None):
for record in self.browse(cr, uid, ids, context=context):
if record.holiday_type != 'employee' or record.type != 'remove' or not record.employee_id or record.holiday_status_id.limit:
continue
leave_days = self.pool.get('hr.holidays.status').get_days(cr, uid, [record.holiday_status_id.id], record.employee_id.id, context=context)[record.holiday_status_id.id]
if leave_days['remaining_leaves'] < 0 or leave_days['virtual_remaining_leaves'] < 0:
return False
return True
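    # Example of the guard above (illustration only): with 2.0 remaining days on
    # a limited leave type, a newly confirmed request for 3.0 days makes
    # get_days() report virtual_remaining_leaves == -1.0, so check_holidays
    # returns False and the related constraint message is raised.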
def toggle_payslip_status(self, cr, uid, ids, context=None):
ids_to_set_true = self.search(cr, uid, [('id', 'in', ids), ('payslip_status', '=', False)], context=context)
ids_to_set_false = list(set(ids) - set(ids_to_set_true))
return self.write(cr, uid, ids_to_set_true, {'payslip_status': True}, context=context) and self.write(cr, uid, ids_to_set_false, {'payslip_status': False}, context=context)
class resource_calendar_leaves(osv.osv):
_inherit = "resource.calendar.leaves"
_description = "Leave Detail"
_columns = {
'holiday_id': fields.many2one("hr.holidays", "Leave Request"),
}
class hr_employee(osv.Model):
_inherit = "hr.employee"
def create(self, cr, uid, vals, context=None):
        # don't pass the value of remaining leaves if it's 0 at creation time, otherwise it will trigger the inverse
        # function _set_remaining_days, which the system may not be configured for. Note that we don't have this problem on
        # write because the clients only send the fields that have been modified.
if 'remaining_leaves' in vals and not vals['remaining_leaves']:
            del vals['remaining_leaves']
return super(hr_employee, self).create(cr, uid, vals, context=context)
def _set_remaining_days(self, cr, uid, empl_id, name, value, arg, context=None):
employee = self.browse(cr, uid, empl_id, context=context)
diff = value - employee.remaining_leaves
type_obj = self.pool.get('hr.holidays.status')
holiday_obj = self.pool.get('hr.holidays')
        # Look up the leave types with a real limit (option 'Allow to Override Limit' unchecked)
status_ids = type_obj.search(cr, uid, [('limit', '=', False)], context=context)
        if len(status_ids) != 1:
            raise UserError(_("The feature behind the field 'Remaining Legal Leaves' can only be used when there is exactly one leave type with the option 'Allow to Override Limit' unchecked (%s found). Otherwise the update is ambiguous, as we cannot decide which leave type it applies to. \nYou may prefer to use the standard menus 'Leave Requests' and 'Allocation Requests' located in 'Human Resources \ Leaves' to manage employees' leave days if the configuration does not allow the use of this field.") % (len(status_ids)))
status_id = status_ids and status_ids[0] or False
if not status_id:
return False
if diff > 0:
leave_id = holiday_obj.create(cr, uid, {'name': _('Allocation for %s') % employee.name, 'employee_id': employee.id, 'holiday_status_id': status_id, 'type': 'add', 'holiday_type': 'employee', 'number_of_days_temp': diff}, context=context)
elif diff < 0:
raise UserError(_('You cannot reduce validated allocation requests'))
else:
return False
for sig in ('confirm', 'validate', 'second_validate'):
holiday_obj.signal_workflow(cr, uid, [leave_id], sig)
return True
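    # Sketch (illustration only): raising 'Remaining Legal Leaves' from 10 to 12
    # on the employee form creates an allocation request of diff = 2 days on the
    # single limited leave type, then fires the 'confirm', 'validate' and
    # 'second_validate' workflow signals so the new balance takes effect at once.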
def _get_remaining_days(self, cr, uid, ids, name, args, context=None):
cr.execute("""SELECT
sum(h.number_of_days) as days,
h.employee_id
from
hr_holidays h
join hr_holidays_status s on (s.id=h.holiday_status_id)
where
h.state='validate' and
s.limit=False and
h.employee_id in %s
group by h.employee_id""", (tuple(ids),))
res = cr.dictfetchall()
remaining = {}
for r in res:
remaining[r['employee_id']] = r['days']
for employee_id in ids:
if not remaining.get(employee_id):
remaining[employee_id] = 0.0
return remaining
def _get_leave_status(self, cr, uid, ids, name, args, context=None):
holidays_obj = self.pool.get('hr.holidays')
holidays_id = holidays_obj.search(cr, uid,
[('employee_id', 'in', ids), ('date_from','<=',time.strftime('%Y-%m-%d %H:%M:%S')),
('date_to','>=',time.strftime('%Y-%m-%d 23:59:59')),('type','=','remove'),('state','not in',('cancel','refuse'))],
context=context)
result = {}
for id in ids:
result[id] = {
'current_leave_state': False,
'current_leave_id': False,
'leave_date_from':False,
'leave_date_to':False,
}
for holiday in self.pool.get('hr.holidays').browse(cr, uid, holidays_id, context=context):
result[holiday.employee_id.id]['leave_date_from'] = holiday.date_from
result[holiday.employee_id.id]['leave_date_to'] = holiday.date_to
result[holiday.employee_id.id]['current_leave_state'] = holiday.state
result[holiday.employee_id.id]['current_leave_id'] = holiday.holiday_status_id.id
return result
def _leaves_count(self, cr, uid, ids, field_name, arg, context=None):
res = {}
Holidays = self.pool['hr.holidays']
date_begin = date.today().replace(day=1)
date_end = date_begin.replace(day=calendar.monthrange(date_begin.year, date_begin.month)[1])
for employee_id in ids:
leaves = Holidays.search_count(cr, uid, [('employee_id', '=', employee_id), ('type', '=', 'remove')], context=context)
approved_leaves = Holidays.search_count(cr, uid, [('employee_id', '=', employee_id), ('type', '=', 'remove'), ('date_from', '>=', date_begin.strftime(tools.DEFAULT_SERVER_DATE_FORMAT)), ('date_from', '<=', date_end.strftime(tools.DEFAULT_SERVER_DATE_FORMAT)), ('state', '=', 'validate'), ('payslip_status', '=', False)], context=context)
res[employee_id] = {'leaves_count': leaves, 'approved_leaves_count': approved_leaves}
return res
_columns = {
'remaining_leaves': fields.function(_get_remaining_days, string='Remaining Legal Leaves', fnct_inv=_set_remaining_days, type="float", help='Total number of legal leaves allocated to this employee, change this value to create allocation/leave request. Total based on all the leave types without overriding limit.'),
'current_leave_state': fields.function(
_get_leave_status, multi="leave_status", string="Current Leave Status", type="selection",
selection=[('draft', 'New'), ('confirm', 'Waiting Approval'), ('refuse', 'Refused'),
('validate1', 'Waiting Second Approval'), ('validate', 'Approved'), ('cancel', 'Cancelled')]),
'current_leave_id': fields.function(_get_leave_status, multi="leave_status", string="Current Leave Type", type='many2one', relation='hr.holidays.status'),
'leave_date_from': fields.function(_get_leave_status, multi='leave_status', type='date', string='From Date'),
'leave_date_to': fields.function(_get_leave_status, multi='leave_status', type='date', string='To Date'),
'leaves_count': fields.function(_leaves_count, multi='_leaves_count', type='integer', string='Number of Leaves (current month)'),
'approved_leaves_count': fields.function(_leaves_count, multi='_leaves_count', type='integer', string='Approved Leaves not in Payslip', help="These leaves are approved but not taken into account for payslip"),
}
|
devs1991/test_edx_docmode
|
refs/heads/master
|
venv/lib/python2.7/site-packages/scipy/io/harwell_boeing/setup.py
|
128
|
#!/usr/bin/env python
from __future__ import division, print_function, absolute_import
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('harwell_boeing',parent_package,top_path)
config.add_data_dir('tests')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
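# Usage sketch (assumption): this subpackage is normally built as part of the
# top-level scipy setup, but the configuration above also supports a
# standalone build:
#   python setup.py build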
|
adelina-t/nova
|
refs/heads/master
|
nova/api/openstack/compute/schemas/v3/agents.py
|
104
|
# Copyright 2013 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
create = {
'type': 'object',
'properties': {
'agent': {
'type': 'object',
'properties': {
'hypervisor': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'pattern': '^[a-zA-Z0-9-._ ]*$'
},
'os': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'pattern': '^[a-zA-Z0-9-._ ]*$'
},
'architecture': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'pattern': '^[a-zA-Z0-9-._ ]*$'
},
'version': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'pattern': '^[a-zA-Z0-9-._ ]*$'
},
'url': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'format': 'uri'
},
'md5hash': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'pattern': '^[a-fA-F0-9]*$'
},
},
'required': ['hypervisor', 'os', 'architecture', 'version',
'url', 'md5hash'],
'additionalProperties': False,
},
},
'required': ['agent'],
'additionalProperties': False,
}
update = {
'type': 'object',
'properties': {
'para': {
'type': 'object',
'properties': {
'version': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'pattern': '^[a-zA-Z0-9-._ ]*$'
},
'url': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'format': 'uri'
},
'md5hash': {
'type': 'string', 'minLength': 0, 'maxLength': 255,
'pattern': '^[a-fA-F0-9]*$'
},
},
'required': ['version', 'url', 'md5hash'],
'additionalProperties': False,
},
},
'required': ['para'],
'additionalProperties': False,
}
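# Validation sketch (assumption; not part of the original module): a request
# body could be checked against these schemas with the standard jsonschema
# package, e.g.:
#
#   import jsonschema
#   body = {'agent': {'hypervisor': 'xen', 'os': 'linux',
#                     'architecture': 'x86_64', 'version': '8.0',
#                     'url': 'http://example.com/agent.tgz',
#                     'md5hash': 'add6bb58e139be103324d04d82d8f545'}}
#   jsonschema.validate(body, create)  # raises ValidationError on bad input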
|