repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
pombredanne/drf_tada | drf_tada/urls.py | 2 | 1698 | from django.conf.urls import patterns, include, url
from django.contrib import admin
from user.views import (UserDetailApiView, UserListApiView,
UserChangePasswordApiView)
from todo.views import (TodoBucketListApiView, TodoBucketDetailApiView)
from task.views import (TaskListApiView, TaskDetailApiView, NoteListApiView,
NoteDetailApiView)
admin.autodiscover()
# URL routing table for the todo REST API: user endpoints, todo buckets,
# and tasks/notes nested beneath them.
# NOTE(review): django.conf.urls.patterns() was deprecated in Django 1.8 and
# removed in 1.10 -- presumably this project targets an older Django; confirm.
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'drsf_task.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
# Django admin site (enabled via admin.autodiscover() above).
url(r'^admin/', include(admin.site.urls)),
#url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
# User endpoints: password change and detail lookup, both keyed by numeric pk.
url(r'^users/(?P<pk>[0-9]+)/change_password/$', UserChangePasswordApiView.as_view(), name='change-password'),
url(r'^users/(?P<pk>[0-9]+)/$', UserDetailApiView.as_view(), name='user-detail'),
# Todo-bucket collection and detail endpoints.
url(r'^todo_bucket/$', TodoBucketListApiView.as_view(),
name='todo-bucket-list'),
url(r'^todo_bucket/(?P<todo_bucket_pk>[0-9]+)/$', TodoBucketDetailApiView.as_view(),
name='todo-bucket-detail'),
# Tasks nested under a todo bucket (todo_bucket_pk captured for the view).
url(r'^todo_bucket/(?P<todo_bucket_pk>[0-9]+)/tasks/$',
TaskListApiView.as_view(),
name='task-list'),
url(r'^todo_bucket/(?P<todo_bucket_pk>[0-9]+)/tasks/(?P<task_pk>[0-9]+)/$',
TaskDetailApiView.as_view(),
name='task-detail'),
# Notes nested one level deeper, under a specific task.
url(r'^todo_bucket/(?P<todo_bucket_pk>[0-9]+)/tasks/(?P<task_pk>[0-9]+)/notes/$',
NoteListApiView.as_view(),
name='note-list'),
url(r'^todo_bucket/(?P<todo_bucket_pk>[0-9]+)/tasks/(?P<task_pk>[0-9]+)/notes/(?P<note_pk>[0-9]+)/$',
NoteDetailApiView.as_view(),
name='note-detail'),
)
| bsd-3-clause |
gangadharkadam/smrterp | erpnext/stock/doctype/landed_cost_voucher/landed_cost_voucher.py | 8 | 3806 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
from frappe.model.document import Document
from erpnext.stock.utils import get_valuation_method
from erpnext.stock.stock_ledger import get_previous_sle
class LandedCostVoucher(Document):
# Frappe Document controller that distributes landed costs (taxes/charges)
# across the stock items of one or more submitted Purchase Receipts, then
# reposts the affected receipts' stock and GL entries on submit/cancel.
# NOTE: source indentation was flattened in this dump; nesting below is the
# code as stored, commented at the same flat level.
#
# Clear the landed_cost_items child table and repopulate it from the item
# rows of every selected Purchase Receipt (stock items only), then spread
# the charges over the new rows if tax/charge rows already exist.
def get_items_from_purchase_receipts(self):
self.set("landed_cost_items", [])
for pr in self.get("landed_cost_purchase_receipts"):
# Only items flagged is_stock_item = 'Yes' participate in landed cost.
pr_items = frappe.db.sql("""select pr_item.item_code, pr_item.description,
pr_item.qty, pr_item.rate, pr_item.amount, pr_item.name
from `tabPurchase Receipt Item` pr_item where parent = %s
and exists(select name from tabItem where name = pr_item.item_code and is_stock_item = 'Yes')""",
pr.purchase_receipt, as_dict=True)
for d in pr_items:
# append() creates and returns a new row in the child table.
item = self.append("landed_cost_items")
item.item_code = d.item_code
item.description = d.description
item.qty = d.qty
item.rate = d.rate
item.amount = d.amount
item.purchase_receipt = pr.purchase_receipt
# d.name is the Purchase Receipt Item row id, kept as a back-reference.
item.purchase_receipt_item = d.name
if self.get("landed_cost_taxes_and_charges"):
self.set_applicable_charges_for_item()
# Standard frappe validation hook: check mandatory tables, verify receipt
# state, total the charges, then (re)build or re-apportion the item rows.
def validate(self):
self.check_mandatory()
self.validate_purchase_receipts()
self.set_total_taxes_and_charges()
if not self.get("landed_cost_items"):
self.get_items_from_purchase_receipts()
else:
self.set_applicable_charges_for_item()
# Both child tables must be non-empty before anything else runs.
def check_mandatory(self):
if not self.get("landed_cost_purchase_receipts"):
frappe.throw(_("Please enter Purchase Receipts"))
if not self.get("landed_cost_taxes_and_charges"):
frappe.throw(_("Please enter Taxes and Charges"))
# Every referenced Purchase Receipt must be submitted (docstatus == 1),
# and every item row must point back at one of the listed receipts.
def validate_purchase_receipts(self):
purchase_receipts = []
for d in self.get("landed_cost_purchase_receipts"):
if frappe.db.get_value("Purchase Receipt", d.purchase_receipt, "docstatus") != 1:
frappe.throw(_("Purchase Receipt must be submitted"))
else:
purchase_receipts.append(d.purchase_receipt)
for item in self.get("landed_cost_items"):
if not item.purchase_receipt:
frappe.throw(_("Item must be added using 'Get Items from Purchase Receipts' button"))
elif item.purchase_receipt not in purchase_receipts:
frappe.throw(_("Item Row {0}: Purchase Receipt {1} does not exist in above 'Purchase Receipts' table")
.format(item.idx, item.purchase_receipt))
# Sum all tax/charge row amounts into the parent total field.
def set_total_taxes_and_charges(self):
self.total_taxes_and_charges = sum([flt(d.amount) for d in self.get("landed_cost_taxes_and_charges")])
# Apportion total charges to each item pro-rata by item amount.
# NOTE(review): divides by flt(total_item_cost) -- raises ZeroDivisionError
# if every item amount is zero; confirm upstream validation prevents that.
def set_applicable_charges_for_item(self):
total_item_cost = sum([flt(d.amount) for d in self.get("landed_cost_items")])
for item in self.get("landed_cost_items"):
item.applicable_charges = flt(item.amount) * flt(self.total_taxes_and_charges) / flt(total_item_cost)
# Submit hook: push the landed cost into the linked Purchase Receipts.
def on_submit(self):
self.update_landed_cost()
# Cancel hook: same repost path, which recomputes without this voucher.
def on_cancel(self):
self.update_landed_cost()
# For each distinct Purchase Receipt touched by this voucher: refresh its
# landed-cost and valuation amounts, save, then replay its stock and GL
# entries by toggling docstatus 2 (cancelled) -> 1 (submitted).
def update_landed_cost(self):
purchase_receipts = list(set([d.purchase_receipt for d in self.get("landed_cost_items")]))
for purchase_receipt in purchase_receipts:
pr = frappe.get_doc("Purchase Receipt", purchase_receipt)
# set landed cost voucher amount in pr item
pr.set_landed_cost_voucher_amount()
# set valuation amount in pr item
pr.update_valuation_rate("purchase_receipt_details")
# save will update landed_cost_voucher_amount and voucher_amount in PR,
# as those fields are ellowed to edit after submit
pr.save()
# update stock & gl entries for cancelled state of PR
pr.docstatus = 2
pr.update_stock()
pr.make_gl_entries_on_cancel()
# update stock & gl entries for submit state of PR
pr.docstatus = 1
pr.update_stock()
pr.make_gl_entries()
| agpl-3.0 |
haripradhan/MissionPlanner | Lib/site-packages/numpy/lib/benchmarks/bench_arraysetops.py | 65 | 1615 | import numpy as np
import time
from numpy.lib.arraysetops import *
# Benchmark np.unique against the deprecated arraysetops.unique1d over array
# sizes 10**2 .. 10**7, printing timings and the dt2/dt1 ratio; optionally
# plots the results with pylab.
# NOTE: Python 2 code (print statements, time.clock); indentation was
# flattened in this dump -- structure commented at the same flat level.
def bench_unique1d( plot_results = False ):
# 9 sizes, log-spaced exponents from 10**2 to 10**7.
exponents = np.linspace( 2, 7, 9 )
ratios = []
nItems = []
dt1s = []
dt2s = []
for ii in exponents:
nItem = 10 ** ii
print 'using %d items:' % nItem
# Random values in [0, nItem/10) so roughly 10x duplication on average.
a = np.fix( nItem / 10 * np.random.random( nItem ) )
print 'unique:'
tt = time.clock()
b = np.unique( a )
dt1 = time.clock() - tt
print dt1
print 'unique1d:'
tt = time.clock()
c = unique1d( a )
dt2 = time.clock() - tt
print dt2
# Guard against a ~zero denominator when the first timing is too small.
if dt1 < 1e-8:
ratio = 'ND'
else:
ratio = dt2 / dt1
print 'ratio:', ratio
print 'nUnique: %d == %d\n' % (len( b ), len( c ))
nItems.append( nItem )
ratios.append( ratio )
dt1s.append( dt1 )
dt2s.append( dt2 )
# Sanity check: both implementations must agree element-wise.
assert np.alltrue( b == c )
print nItems
print dt1s
print dt2s
print ratios
if plot_results:
import pylab
# Helper: draw both timing series in figure `fig` using plot fn `fun`.
def plotMe( fig, fun, nItems, dt1s, dt2s ):
pylab.figure( fig )
fun( nItems, dt1s, 'g-o', linewidth = 2, markersize = 8 )
fun( nItems, dt2s, 'b-x', linewidth = 2, markersize = 8 )
pylab.legend( ('unique', 'unique1d' ) )
pylab.xlabel( 'nItem' )
pylab.ylabel( 'time [s]' )
# Figure 1: log-log scale; figure 2: linear scale.
plotMe( 1, pylab.loglog, nItems, dt1s, dt2s )
plotMe( 2, pylab.plot, nItems, dt1s, dt2s )
pylab.show()
# Script entry point: run the benchmark with plotting enabled.
if __name__ == '__main__':
bench_unique1d( plot_results = True )
| gpl-3.0 |
Salat-Cx65/python-for-android | python3-alpha/python3-src/Lib/msilib/schema.py | 48 | 81587 | from . import Table
# Auto-generated Windows Installer (MSI) schema: the _Validation table, which
# describes the allowed values of every column in every other table.
# Each add_field(index, name, type) call adds a column; the integer third
# argument appears to be msilib's packed column-type/attribute bit field
# (width, nullability, key flags) -- TODO confirm against the msilib docs.
_Validation = Table('_Validation')
_Validation.add_field(1,'Table',11552)
_Validation.add_field(2,'Column',11552)
_Validation.add_field(3,'Nullable',3332)
_Validation.add_field(4,'MinValue',4356)
_Validation.add_field(5,'MaxValue',4356)
_Validation.add_field(6,'KeyTable',7679)
_Validation.add_field(7,'KeyColumn',5378)
_Validation.add_field(8,'Category',7456)
_Validation.add_field(9,'Set',7679)
_Validation.add_field(10,'Description',7679)
ActionText = Table('ActionText')
ActionText.add_field(1,'Action',11592)
ActionText.add_field(2,'Description',7936)
ActionText.add_field(3,'Template',7936)
AdminExecuteSequence = Table('AdminExecuteSequence')
AdminExecuteSequence.add_field(1,'Action',11592)
AdminExecuteSequence.add_field(2,'Condition',7679)
AdminExecuteSequence.add_field(3,'Sequence',5378)
Condition = Table('Condition')
Condition.add_field(1,'Feature_',11558)
Condition.add_field(2,'Level',9474)
Condition.add_field(3,'Condition',7679)
AdminUISequence = Table('AdminUISequence')
AdminUISequence.add_field(1,'Action',11592)
AdminUISequence.add_field(2,'Condition',7679)
AdminUISequence.add_field(3,'Sequence',5378)
AdvtExecuteSequence = Table('AdvtExecuteSequence')
AdvtExecuteSequence.add_field(1,'Action',11592)
AdvtExecuteSequence.add_field(2,'Condition',7679)
AdvtExecuteSequence.add_field(3,'Sequence',5378)
AdvtUISequence = Table('AdvtUISequence')
AdvtUISequence.add_field(1,'Action',11592)
AdvtUISequence.add_field(2,'Condition',7679)
AdvtUISequence.add_field(3,'Sequence',5378)
AppId = Table('AppId')
AppId.add_field(1,'AppId',11558)
AppId.add_field(2,'RemoteServerName',7679)
AppId.add_field(3,'LocalService',7679)
AppId.add_field(4,'ServiceParameters',7679)
AppId.add_field(5,'DllSurrogate',7679)
AppId.add_field(6,'ActivateAtStorage',5378)
AppId.add_field(7,'RunAsInteractiveUser',5378)
AppSearch = Table('AppSearch')
AppSearch.add_field(1,'Property',11592)
AppSearch.add_field(2,'Signature_',11592)
Property = Table('Property')
Property.add_field(1,'Property',11592)
Property.add_field(2,'Value',3840)
BBControl = Table('BBControl')
BBControl.add_field(1,'Billboard_',11570)
BBControl.add_field(2,'BBControl',11570)
BBControl.add_field(3,'Type',3378)
BBControl.add_field(4,'X',1282)
BBControl.add_field(5,'Y',1282)
BBControl.add_field(6,'Width',1282)
BBControl.add_field(7,'Height',1282)
BBControl.add_field(8,'Attributes',4356)
BBControl.add_field(9,'Text',7986)
Billboard = Table('Billboard')
Billboard.add_field(1,'Billboard',11570)
Billboard.add_field(2,'Feature_',3366)
Billboard.add_field(3,'Action',7474)
Billboard.add_field(4,'Ordering',5378)
Feature = Table('Feature')
Feature.add_field(1,'Feature',11558)
Feature.add_field(2,'Feature_Parent',7462)
Feature.add_field(3,'Title',8000)
Feature.add_field(4,'Description',8191)
Feature.add_field(5,'Display',5378)
Feature.add_field(6,'Level',1282)
Feature.add_field(7,'Directory_',7496)
Feature.add_field(8,'Attributes',1282)
Binary = Table('Binary')
Binary.add_field(1,'Name',11592)
Binary.add_field(2,'Data',2304)
BindImage = Table('BindImage')
BindImage.add_field(1,'File_',11592)
BindImage.add_field(2,'Path',7679)
File = Table('File')
File.add_field(1,'File',11592)
File.add_field(2,'Component_',3400)
File.add_field(3,'FileName',4095)
File.add_field(4,'FileSize',260)
File.add_field(5,'Version',7496)
File.add_field(6,'Language',7444)
File.add_field(7,'Attributes',5378)
File.add_field(8,'Sequence',1282)
CCPSearch = Table('CCPSearch')
CCPSearch.add_field(1,'Signature_',11592)
CheckBox = Table('CheckBox')
CheckBox.add_field(1,'Property',11592)
CheckBox.add_field(2,'Value',7488)
Class = Table('Class')
Class.add_field(1,'CLSID',11558)
Class.add_field(2,'Context',11552)
Class.add_field(3,'Component_',11592)
Class.add_field(4,'ProgId_Default',7679)
Class.add_field(5,'Description',8191)
Class.add_field(6,'AppId_',7462)
Class.add_field(7,'FileTypeMask',7679)
Class.add_field(8,'Icon_',7496)
Class.add_field(9,'IconIndex',5378)
Class.add_field(10,'DefInprocHandler',7456)
Class.add_field(11,'Argument',7679)
Class.add_field(12,'Feature_',3366)
Class.add_field(13,'Attributes',5378)
Component = Table('Component')
Component.add_field(1,'Component',11592)
Component.add_field(2,'ComponentId',7462)
Component.add_field(3,'Directory_',3400)
Component.add_field(4,'Attributes',1282)
Component.add_field(5,'Condition',7679)
Component.add_field(6,'KeyPath',7496)
Icon = Table('Icon')
Icon.add_field(1,'Name',11592)
Icon.add_field(2,'Data',2304)
ProgId = Table('ProgId')
ProgId.add_field(1,'ProgId',11775)
ProgId.add_field(2,'ProgId_Parent',7679)
ProgId.add_field(3,'Class_',7462)
ProgId.add_field(4,'Description',8191)
ProgId.add_field(5,'Icon_',7496)
ProgId.add_field(6,'IconIndex',5378)
ComboBox = Table('ComboBox')
ComboBox.add_field(1,'Property',11592)
ComboBox.add_field(2,'Order',9474)
ComboBox.add_field(3,'Value',3392)
ComboBox.add_field(4,'Text',8000)
CompLocator = Table('CompLocator')
CompLocator.add_field(1,'Signature_',11592)
CompLocator.add_field(2,'ComponentId',3366)
CompLocator.add_field(3,'Type',5378)
Complus = Table('Complus')
Complus.add_field(1,'Component_',11592)
Complus.add_field(2,'ExpType',13570)
Directory = Table('Directory')
Directory.add_field(1,'Directory',11592)
Directory.add_field(2,'Directory_Parent',7496)
Directory.add_field(3,'DefaultDir',4095)
Control = Table('Control')
Control.add_field(1,'Dialog_',11592)
Control.add_field(2,'Control',11570)
Control.add_field(3,'Type',3348)
Control.add_field(4,'X',1282)
Control.add_field(5,'Y',1282)
Control.add_field(6,'Width',1282)
Control.add_field(7,'Height',1282)
Control.add_field(8,'Attributes',4356)
Control.add_field(9,'Property',7474)
Control.add_field(10,'Text',7936)
Control.add_field(11,'Control_Next',7474)
Control.add_field(12,'Help',7986)
Dialog = Table('Dialog')
Dialog.add_field(1,'Dialog',11592)
Dialog.add_field(2,'HCentering',1282)
Dialog.add_field(3,'VCentering',1282)
Dialog.add_field(4,'Width',1282)
Dialog.add_field(5,'Height',1282)
Dialog.add_field(6,'Attributes',4356)
Dialog.add_field(7,'Title',8064)
Dialog.add_field(8,'Control_First',3378)
Dialog.add_field(9,'Control_Default',7474)
Dialog.add_field(10,'Control_Cancel',7474)
ControlCondition = Table('ControlCondition')
ControlCondition.add_field(1,'Dialog_',11592)
ControlCondition.add_field(2,'Control_',11570)
ControlCondition.add_field(3,'Action',11570)
ControlCondition.add_field(4,'Condition',11775)
ControlEvent = Table('ControlEvent')
ControlEvent.add_field(1,'Dialog_',11592)
ControlEvent.add_field(2,'Control_',11570)
ControlEvent.add_field(3,'Event',11570)
ControlEvent.add_field(4,'Argument',11775)
ControlEvent.add_field(5,'Condition',15871)
ControlEvent.add_field(6,'Ordering',5378)
CreateFolder = Table('CreateFolder')
CreateFolder.add_field(1,'Directory_',11592)
CreateFolder.add_field(2,'Component_',11592)
CustomAction = Table('CustomAction')
CustomAction.add_field(1,'Action',11592)
CustomAction.add_field(2,'Type',1282)
CustomAction.add_field(3,'Source',7496)
CustomAction.add_field(4,'Target',7679)
DrLocator = Table('DrLocator')
DrLocator.add_field(1,'Signature_',11592)
DrLocator.add_field(2,'Parent',15688)
DrLocator.add_field(3,'Path',15871)
DrLocator.add_field(4,'Depth',5378)
DuplicateFile = Table('DuplicateFile')
DuplicateFile.add_field(1,'FileKey',11592)
DuplicateFile.add_field(2,'Component_',3400)
DuplicateFile.add_field(3,'File_',3400)
DuplicateFile.add_field(4,'DestName',8191)
DuplicateFile.add_field(5,'DestFolder',7496)
Environment = Table('Environment')
Environment.add_field(1,'Environment',11592)
Environment.add_field(2,'Name',4095)
Environment.add_field(3,'Value',8191)
Environment.add_field(4,'Component_',3400)
Error = Table('Error')
Error.add_field(1,'Error',9474)
Error.add_field(2,'Message',7936)
EventMapping = Table('EventMapping')
EventMapping.add_field(1,'Dialog_',11592)
EventMapping.add_field(2,'Control_',11570)
EventMapping.add_field(3,'Event',11570)
EventMapping.add_field(4,'Attribute',3378)
Extension = Table('Extension')
Extension.add_field(1,'Extension',11775)
Extension.add_field(2,'Component_',11592)
Extension.add_field(3,'ProgId_',7679)
Extension.add_field(4,'MIME_',7488)
Extension.add_field(5,'Feature_',3366)
MIME = Table('MIME')
MIME.add_field(1,'ContentType',11584)
MIME.add_field(2,'Extension_',3583)
MIME.add_field(3,'CLSID',7462)
FeatureComponents = Table('FeatureComponents')
FeatureComponents.add_field(1,'Feature_',11558)
FeatureComponents.add_field(2,'Component_',11592)
FileSFPCatalog = Table('FileSFPCatalog')
FileSFPCatalog.add_field(1,'File_',11592)
FileSFPCatalog.add_field(2,'SFPCatalog_',11775)
SFPCatalog = Table('SFPCatalog')
SFPCatalog.add_field(1,'SFPCatalog',11775)
SFPCatalog.add_field(2,'Catalog',2304)
SFPCatalog.add_field(3,'Dependency',7424)
Font = Table('Font')
Font.add_field(1,'File_',11592)
Font.add_field(2,'FontTitle',7552)
IniFile = Table('IniFile')
IniFile.add_field(1,'IniFile',11592)
IniFile.add_field(2,'FileName',4095)
IniFile.add_field(3,'DirProperty',7496)
IniFile.add_field(4,'Section',3936)
IniFile.add_field(5,'Key',3968)
IniFile.add_field(6,'Value',4095)
IniFile.add_field(7,'Action',1282)
IniFile.add_field(8,'Component_',3400)
IniLocator = Table('IniLocator')
IniLocator.add_field(1,'Signature_',11592)
IniLocator.add_field(2,'FileName',3583)
IniLocator.add_field(3,'Section',3424)
IniLocator.add_field(4,'Key',3456)
IniLocator.add_field(5,'Field',5378)
IniLocator.add_field(6,'Type',5378)
InstallExecuteSequence = Table('InstallExecuteSequence')
InstallExecuteSequence.add_field(1,'Action',11592)
InstallExecuteSequence.add_field(2,'Condition',7679)
InstallExecuteSequence.add_field(3,'Sequence',5378)
InstallUISequence = Table('InstallUISequence')
InstallUISequence.add_field(1,'Action',11592)
InstallUISequence.add_field(2,'Condition',7679)
InstallUISequence.add_field(3,'Sequence',5378)
IsolatedComponent = Table('IsolatedComponent')
IsolatedComponent.add_field(1,'Component_Shared',11592)
IsolatedComponent.add_field(2,'Component_Application',11592)
LaunchCondition = Table('LaunchCondition')
LaunchCondition.add_field(1,'Condition',11775)
LaunchCondition.add_field(2,'Description',4095)
ListBox = Table('ListBox')
ListBox.add_field(1,'Property',11592)
ListBox.add_field(2,'Order',9474)
ListBox.add_field(3,'Value',3392)
ListBox.add_field(4,'Text',8000)
ListView = Table('ListView')
ListView.add_field(1,'Property',11592)
ListView.add_field(2,'Order',9474)
ListView.add_field(3,'Value',3392)
ListView.add_field(4,'Text',8000)
ListView.add_field(5,'Binary_',7496)
LockPermissions = Table('LockPermissions')
LockPermissions.add_field(1,'LockObject',11592)
LockPermissions.add_field(2,'Table',11552)
LockPermissions.add_field(3,'Domain',15871)
LockPermissions.add_field(4,'User',11775)
LockPermissions.add_field(5,'Permission',4356)
Media = Table('Media')
Media.add_field(1,'DiskId',9474)
Media.add_field(2,'LastSequence',1282)
Media.add_field(3,'DiskPrompt',8000)
Media.add_field(4,'Cabinet',7679)
Media.add_field(5,'VolumeLabel',7456)
Media.add_field(6,'Source',7496)
MoveFile = Table('MoveFile')
MoveFile.add_field(1,'FileKey',11592)
MoveFile.add_field(2,'Component_',3400)
MoveFile.add_field(3,'SourceName',8191)
MoveFile.add_field(4,'DestName',8191)
MoveFile.add_field(5,'SourceFolder',7496)
MoveFile.add_field(6,'DestFolder',3400)
MoveFile.add_field(7,'Options',1282)
MsiAssembly = Table('MsiAssembly')
MsiAssembly.add_field(1,'Component_',11592)
MsiAssembly.add_field(2,'Feature_',3366)
MsiAssembly.add_field(3,'File_Manifest',7496)
MsiAssembly.add_field(4,'File_Application',7496)
MsiAssembly.add_field(5,'Attributes',5378)
MsiAssemblyName = Table('MsiAssemblyName')
MsiAssemblyName.add_field(1,'Component_',11592)
MsiAssemblyName.add_field(2,'Name',11775)
MsiAssemblyName.add_field(3,'Value',3583)
MsiDigitalCertificate = Table('MsiDigitalCertificate')
MsiDigitalCertificate.add_field(1,'DigitalCertificate',11592)
MsiDigitalCertificate.add_field(2,'CertData',2304)
MsiDigitalSignature = Table('MsiDigitalSignature')
MsiDigitalSignature.add_field(1,'Table',11552)
MsiDigitalSignature.add_field(2,'SignObject',11592)
MsiDigitalSignature.add_field(3,'DigitalCertificate_',3400)
MsiDigitalSignature.add_field(4,'Hash',6400)
MsiFileHash = Table('MsiFileHash')
MsiFileHash.add_field(1,'File_',11592)
MsiFileHash.add_field(2,'Options',1282)
MsiFileHash.add_field(3,'HashPart1',260)
MsiFileHash.add_field(4,'HashPart2',260)
MsiFileHash.add_field(5,'HashPart3',260)
MsiFileHash.add_field(6,'HashPart4',260)
MsiPatchHeaders = Table('MsiPatchHeaders')
MsiPatchHeaders.add_field(1,'StreamRef',11558)
MsiPatchHeaders.add_field(2,'Header',2304)
ODBCAttribute = Table('ODBCAttribute')
ODBCAttribute.add_field(1,'Driver_',11592)
ODBCAttribute.add_field(2,'Attribute',11560)
ODBCAttribute.add_field(3,'Value',8191)
ODBCDriver = Table('ODBCDriver')
ODBCDriver.add_field(1,'Driver',11592)
ODBCDriver.add_field(2,'Component_',3400)
ODBCDriver.add_field(3,'Description',3583)
ODBCDriver.add_field(4,'File_',3400)
ODBCDriver.add_field(5,'File_Setup',7496)
ODBCDataSource = Table('ODBCDataSource')
ODBCDataSource.add_field(1,'DataSource',11592)
ODBCDataSource.add_field(2,'Component_',3400)
ODBCDataSource.add_field(3,'Description',3583)
ODBCDataSource.add_field(4,'DriverDescription',3583)
ODBCDataSource.add_field(5,'Registration',1282)
ODBCSourceAttribute = Table('ODBCSourceAttribute')
ODBCSourceAttribute.add_field(1,'DataSource_',11592)
ODBCSourceAttribute.add_field(2,'Attribute',11552)
ODBCSourceAttribute.add_field(3,'Value',8191)
ODBCTranslator = Table('ODBCTranslator')
ODBCTranslator.add_field(1,'Translator',11592)
ODBCTranslator.add_field(2,'Component_',3400)
ODBCTranslator.add_field(3,'Description',3583)
ODBCTranslator.add_field(4,'File_',3400)
ODBCTranslator.add_field(5,'File_Setup',7496)
Patch = Table('Patch')
Patch.add_field(1,'File_',11592)
Patch.add_field(2,'Sequence',9474)
Patch.add_field(3,'PatchSize',260)
Patch.add_field(4,'Attributes',1282)
Patch.add_field(5,'Header',6400)
Patch.add_field(6,'StreamRef_',7462)
PatchPackage = Table('PatchPackage')
PatchPackage.add_field(1,'PatchId',11558)
PatchPackage.add_field(2,'Media_',1282)
PublishComponent = Table('PublishComponent')
PublishComponent.add_field(1,'ComponentId',11558)
PublishComponent.add_field(2,'Qualifier',11775)
PublishComponent.add_field(3,'Component_',11592)
PublishComponent.add_field(4,'AppData',8191)
PublishComponent.add_field(5,'Feature_',3366)
RadioButton = Table('RadioButton')
RadioButton.add_field(1,'Property',11592)
RadioButton.add_field(2,'Order',9474)
RadioButton.add_field(3,'Value',3392)
RadioButton.add_field(4,'X',1282)
RadioButton.add_field(5,'Y',1282)
RadioButton.add_field(6,'Width',1282)
RadioButton.add_field(7,'Height',1282)
RadioButton.add_field(8,'Text',8000)
RadioButton.add_field(9,'Help',7986)
Registry = Table('Registry')
Registry.add_field(1,'Registry',11592)
Registry.add_field(2,'Root',1282)
Registry.add_field(3,'Key',4095)
Registry.add_field(4,'Name',8191)
Registry.add_field(5,'Value',7936)
Registry.add_field(6,'Component_',3400)
RegLocator = Table('RegLocator')
RegLocator.add_field(1,'Signature_',11592)
RegLocator.add_field(2,'Root',1282)
RegLocator.add_field(3,'Key',3583)
RegLocator.add_field(4,'Name',7679)
RegLocator.add_field(5,'Type',5378)
RemoveFile = Table('RemoveFile')
RemoveFile.add_field(1,'FileKey',11592)
RemoveFile.add_field(2,'Component_',3400)
RemoveFile.add_field(3,'FileName',8191)
RemoveFile.add_field(4,'DirProperty',3400)
RemoveFile.add_field(5,'InstallMode',1282)
RemoveIniFile = Table('RemoveIniFile')
RemoveIniFile.add_field(1,'RemoveIniFile',11592)
RemoveIniFile.add_field(2,'FileName',4095)
RemoveIniFile.add_field(3,'DirProperty',7496)
RemoveIniFile.add_field(4,'Section',3936)
RemoveIniFile.add_field(5,'Key',3968)
RemoveIniFile.add_field(6,'Value',8191)
RemoveIniFile.add_field(7,'Action',1282)
RemoveIniFile.add_field(8,'Component_',3400)
RemoveRegistry = Table('RemoveRegistry')
RemoveRegistry.add_field(1,'RemoveRegistry',11592)
RemoveRegistry.add_field(2,'Root',1282)
RemoveRegistry.add_field(3,'Key',4095)
RemoveRegistry.add_field(4,'Name',8191)
RemoveRegistry.add_field(5,'Component_',3400)
ReserveCost = Table('ReserveCost')
ReserveCost.add_field(1,'ReserveKey',11592)
ReserveCost.add_field(2,'Component_',3400)
ReserveCost.add_field(3,'ReserveFolder',7496)
ReserveCost.add_field(4,'ReserveLocal',260)
ReserveCost.add_field(5,'ReserveSource',260)
SelfReg = Table('SelfReg')
SelfReg.add_field(1,'File_',11592)
SelfReg.add_field(2,'Cost',5378)
ServiceControl = Table('ServiceControl')
ServiceControl.add_field(1,'ServiceControl',11592)
ServiceControl.add_field(2,'Name',4095)
ServiceControl.add_field(3,'Event',1282)
ServiceControl.add_field(4,'Arguments',8191)
ServiceControl.add_field(5,'Wait',5378)
ServiceControl.add_field(6,'Component_',3400)
ServiceInstall = Table('ServiceInstall')
ServiceInstall.add_field(1,'ServiceInstall',11592)
ServiceInstall.add_field(2,'Name',3583)
ServiceInstall.add_field(3,'DisplayName',8191)
ServiceInstall.add_field(4,'ServiceType',260)
ServiceInstall.add_field(5,'StartType',260)
ServiceInstall.add_field(6,'ErrorControl',260)
ServiceInstall.add_field(7,'LoadOrderGroup',7679)
ServiceInstall.add_field(8,'Dependencies',7679)
ServiceInstall.add_field(9,'StartName',7679)
ServiceInstall.add_field(10,'Password',7679)
ServiceInstall.add_field(11,'Arguments',7679)
ServiceInstall.add_field(12,'Component_',3400)
ServiceInstall.add_field(13,'Description',8191)
Shortcut = Table('Shortcut')
Shortcut.add_field(1,'Shortcut',11592)
Shortcut.add_field(2,'Directory_',3400)
Shortcut.add_field(3,'Name',3968)
Shortcut.add_field(4,'Component_',3400)
Shortcut.add_field(5,'Target',3400)
Shortcut.add_field(6,'Arguments',7679)
Shortcut.add_field(7,'Description',8191)
Shortcut.add_field(8,'Hotkey',5378)
Shortcut.add_field(9,'Icon_',7496)
Shortcut.add_field(10,'IconIndex',5378)
Shortcut.add_field(11,'ShowCmd',5378)
Shortcut.add_field(12,'WkDir',7496)
Signature = Table('Signature')
Signature.add_field(1,'Signature',11592)
Signature.add_field(2,'FileName',3583)
Signature.add_field(3,'MinVersion',7444)
Signature.add_field(4,'MaxVersion',7444)
Signature.add_field(5,'MinSize',4356)
Signature.add_field(6,'MaxSize',4356)
Signature.add_field(7,'MinDate',4356)
Signature.add_field(8,'MaxDate',4356)
Signature.add_field(9,'Languages',7679)
TextStyle = Table('TextStyle')
TextStyle.add_field(1,'TextStyle',11592)
TextStyle.add_field(2,'FaceName',3360)
TextStyle.add_field(3,'Size',1282)
TextStyle.add_field(4,'Color',4356)
TextStyle.add_field(5,'StyleBits',5378)
TypeLib = Table('TypeLib')
TypeLib.add_field(1,'LibID',11558)
TypeLib.add_field(2,'Language',9474)
TypeLib.add_field(3,'Component_',11592)
TypeLib.add_field(4,'Version',4356)
TypeLib.add_field(5,'Description',8064)
TypeLib.add_field(6,'Directory_',7496)
TypeLib.add_field(7,'Feature_',3366)
TypeLib.add_field(8,'Cost',4356)
UIText = Table('UIText')
UIText.add_field(1,'Key',11592)
UIText.add_field(2,'Text',8191)
Upgrade = Table('Upgrade')
Upgrade.add_field(1,'UpgradeCode',11558)
Upgrade.add_field(2,'VersionMin',15636)
Upgrade.add_field(3,'VersionMax',15636)
Upgrade.add_field(4,'Language',15871)
Upgrade.add_field(5,'Attributes',8452)
Upgrade.add_field(6,'Remove',7679)
Upgrade.add_field(7,'ActionProperty',3400)
Verb = Table('Verb')
Verb.add_field(1,'Extension_',11775)
Verb.add_field(2,'Verb',11552)
Verb.add_field(3,'Sequence',5378)
Verb.add_field(4,'Command',8191)
Verb.add_field(5,'Argument',8191)
tables=[_Validation, ActionText, AdminExecuteSequence, Condition, AdminUISequence, AdvtExecuteSequence, AdvtUISequence, AppId, AppSearch, Property, BBControl, Billboard, Feature, Binary, BindImage, File, CCPSearch, CheckBox, Class, Component, Icon, ProgId, ComboBox, CompLocator, Complus, Directory, Control, Dialog, ControlCondition, ControlEvent, CreateFolder, CustomAction, DrLocator, DuplicateFile, Environment, Error, EventMapping, Extension, MIME, FeatureComponents, FileSFPCatalog, SFPCatalog, Font, IniFile, IniLocator, InstallExecuteSequence, InstallUISequence, IsolatedComponent, LaunchCondition, ListBox, ListView, LockPermissions, Media, MoveFile, MsiAssembly, MsiAssemblyName, MsiDigitalCertificate, MsiDigitalSignature, MsiFileHash, MsiPatchHeaders, ODBCAttribute, ODBCDriver, ODBCDataSource, ODBCSourceAttribute, ODBCTranslator, Patch, PatchPackage, PublishComponent, RadioButton, Registry, RegLocator, RemoveFile, RemoveIniFile, RemoveRegistry, ReserveCost, SelfReg, ServiceControl, ServiceInstall, Shortcut, Signature, TextStyle, TypeLib, UIText, Upgrade, Verb]
_Validation_records = [
('_Validation','Table','N',None, None, None, None, 'Identifier',None, 'Name of table',),
('_Validation','Column','N',None, None, None, None, 'Identifier',None, 'Name of column',),
('_Validation','Description','Y',None, None, None, None, 'Text',None, 'Description of column',),
('_Validation','Set','Y',None, None, None, None, 'Text',None, 'Set of values that are permitted',),
('_Validation','Category','Y',None, None, None, None, None, 'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL','String category',),
('_Validation','KeyColumn','Y',1,32,None, None, None, None, 'Column to which foreign key connects',),
('_Validation','KeyTable','Y',None, None, None, None, 'Identifier',None, 'For foreign key, Name of table to which data must link',),
('_Validation','MaxValue','Y',-2147483647,2147483647,None, None, None, None, 'Maximum value allowed',),
('_Validation','MinValue','Y',-2147483647,2147483647,None, None, None, None, 'Minimum value allowed',),
('_Validation','Nullable','N',None, None, None, None, None, 'Y;N;@','Whether the column is nullable',),
('ActionText','Description','Y',None, None, None, None, 'Text',None, 'Localized description displayed in progress dialog and log when action is executing.',),
('ActionText','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to be described.',),
('ActionText','Template','Y',None, None, None, None, 'Template',None, 'Optional localized format template used to format action data records for display during action execution.',),
('AdminExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdminExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdminExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('Condition','Condition','Y',None, None, None, None, 'Condition',None, 'Expression evaluated to determine if Level in the Feature table is to change.',),
('Condition','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Reference to a Feature entry in Feature table.',),
('Condition','Level','N',0,32767,None, None, None, None, 'New selection Level to set in Feature table if Condition evaluates to TRUE.',),
('AdminUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdminUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdminUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AdvtExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdvtExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdvtExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AdvtUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdvtUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdvtUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AppId','AppId','N',None, None, None, None, 'Guid',None, None, ),
('AppId','ActivateAtStorage','Y',0,1,None, None, None, None, None, ),
('AppId','DllSurrogate','Y',None, None, None, None, 'Text',None, None, ),
('AppId','LocalService','Y',None, None, None, None, 'Text',None, None, ),
('AppId','RemoteServerName','Y',None, None, None, None, 'Formatted',None, None, ),
('AppId','RunAsInteractiveUser','Y',0,1,None, None, None, None, None, ),
('AppId','ServiceParameters','Y',None, None, None, None, 'Text',None, None, ),
('AppSearch','Property','N',None, None, None, None, 'Identifier',None, 'The property associated with a Signature',),
('AppSearch','Signature_','N',None, None, 'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
('Property','Property','N',None, None, None, None, 'Identifier',None, 'Name of property, uppercase if settable by launcher or loader.',),
('Property','Value','N',None, None, None, None, 'Text',None, 'String value for property. Never null or empty.',),
('BBControl','Type','N',None, None, None, None, 'Identifier',None, 'The type of the control.',),
('BBControl','Y','N',0,32767,None, None, None, None, 'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
('BBControl','Text','Y',None, None, None, None, 'Text',None, 'A string used to set the initial text contained within a control (if appropriate).',),
('BBControl','BBControl','N',None, None, None, None, 'Identifier',None, 'Name of the control. This name must be unique within a billboard, but can repeat on different billboard.',),
('BBControl','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this control.',),
('BBControl','Billboard_','N',None, None, 'Billboard',1,'Identifier',None, 'External key to the Billboard table, name of the billboard.',),
('BBControl','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the control.',),
('BBControl','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the control.',),
('BBControl','X','N',0,32767,None, None, None, None, 'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
('Billboard','Action','Y',None, None, None, None, 'Identifier',None, 'The name of an action. The billboard is displayed during the progress messages received from this action.',),
('Billboard','Billboard','N',None, None, None, None, 'Identifier',None, 'Name of the billboard.',),
('Billboard','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'An external key to the Feature Table. The billboard is shown only if this feature is being installed.',),
('Billboard','Ordering','Y',0,32767,None, None, None, None, 'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.',),
('Feature','Description','Y',None, None, None, None, 'Text',None, 'Longer descriptive text describing a visible feature item.',),
('Feature','Attributes','N',None, None, None, None, None, '0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54','Feature attributes',),
('Feature','Feature','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular feature record.',),
('Feature','Directory_','Y',None, None, 'Directory',1,'UpperCase',None, 'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.',),
('Feature','Level','N',0,32767,None, None, None, None, 'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.',),
('Feature','Title','Y',None, None, None, None, 'Text',None, 'Short text identifying a visible feature item.',),
('Feature','Display','Y',0,32767,None, None, None, None, 'Numeric sort order, used to force a specific display ordering.',),
('Feature','Feature_Parent','Y',None, None, 'Feature',1,'Identifier',None, 'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.',),
('Binary','Name','N',None, None, None, None, 'Identifier',None, 'Unique key identifying the binary data.',),
('Binary','Data','N',None, None, None, None, 'Binary',None, 'The unformatted binary data.',),
('BindImage','File_','N',None, None, 'File',1,'Identifier',None, 'The index into the File table. This must be an executable file.',),
('BindImage','Path','Y',None, None, None, None, 'Paths',None, 'A list of ; delimited paths that represent the paths to be searched for the import DLLS. The list is usually a list of properties each enclosed within square brackets [] .',),
('File','Sequence','N',1,32767,None, None, None, None, 'Sequence with respect to the media images; order must track cabinet order.',),
('File','Attributes','Y',0,32767,None, None, None, None, 'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)',),
('File','File','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.',),
('File','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the file.',),
('File','FileName','N',None, None, None, None, 'Filename',None, 'File name used for installation, may be localized. This may contain a "short name|long name" pair.',),
('File','FileSize','N',0,2147483647,None, None, None, None, 'Size of file in bytes (integer).',),
('File','Language','Y',None, None, None, None, 'Language',None, 'List of decimal language Ids, comma-separated if more than one.',),
('File','Version','Y',None, None, 'File',1,'Version',None, 'Version string for versioned files; Blank for unversioned files.',),
('CCPSearch','Signature_','N',None, None, 'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
('CheckBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to the item.',),
('CheckBox','Value','Y',None, None, None, None, 'Formatted',None, 'The value string associated with the item.',),
('Class','Description','Y',None, None, None, None, 'Text',None, 'Localized description for the Class.',),
('Class','Attributes','Y',None, 32767,None, None, None, None, 'Class registration attributes.',),
('Class','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
('Class','AppId_','Y',None, None, 'AppId',1,'Guid',None, 'Optional AppID containing DCOM information for associated application (string GUID).',),
('Class','Argument','Y',None, None, None, None, 'Formatted',None, 'optional argument for LocalServers.',),
('Class','CLSID','N',None, None, None, None, 'Guid',None, 'The CLSID of an OLE factory.',),
('Class','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Class','Context','N',None, None, None, None, 'Identifier',None, 'The numeric server context for this server. CLSCTX_xxxx',),
('Class','DefInprocHandler','Y',None, None, None, None, 'Filename','1;2;3','Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"',),
('Class','FileTypeMask','Y',None, None, None, None, 'Text',None, 'Optional string containing information for the HKCRthis CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...',),
('Class','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.',),
('Class','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('Class','ProgId_Default','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this CLSID.',),
('Component','Condition','Y',None, None, None, None, 'Condition',None, "A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component.",),
('Component','Attributes','N',None, None, None, None, None, None, 'Remote execution option, one of irsEnum',),
('Component','Component','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular component record.',),
('Component','ComponentId','Y',None, None, None, None, 'Guid',None, 'A string GUID unique to this component, version, and language.',),
('Component','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.',),
('Component','KeyPath','Y',None, None, 'File;Registry;ODBCDataSource',1,'Identifier',None, 'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.',),
('Icon','Name','N',None, None, None, None, 'Identifier',None, 'Primary key. Name of the icon file.',),
('Icon','Data','N',None, None, None, None, 'Binary',None, 'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.',),
('ProgId','Description','Y',None, None, None, None, 'Text',None, 'Localized description for the Program identifier.',),
('ProgId','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.',),
('ProgId','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('ProgId','ProgId','N',None, None, None, None, 'Text',None, 'The Program Identifier. Primary key.',),
('ProgId','Class_','Y',None, None, 'Class',1,'Guid',None, 'The CLSID of an OLE factory corresponding to the ProgId.',),
('ProgId','ProgId_Parent','Y',None, None, 'ProgId',1,'Text',None, 'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.',),
('ComboBox','Text','Y',None, None, None, None, 'Formatted',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ComboBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.',),
('ComboBox','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ComboBox','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list.\tThe integers do not have to be consecutive.',),
('CompLocator','Type','Y',0,1,None, None, None, None, 'A boolean value that determines if the registry value is a filename or a directory location.',),
('CompLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('CompLocator','ComponentId','N',None, None, None, None, 'Guid',None, 'A string GUID unique to this component, version, and language.',),
('Complus','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the ComPlus component.',),
('Complus','ExpType','Y',0,32767,None, None, None, None, 'ComPlus component attributes.',),
('Directory','Directory','N',None, None, None, None, 'Identifier',None, 'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.',),
('Directory','DefaultDir','N',None, None, None, None, 'DefaultDir',None, "The default sub-path under parent's path.",),
('Directory','Directory_Parent','Y',None, None, 'Directory',1,'Identifier',None, 'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.',),
('Control','Type','N',None, None, None, None, 'Identifier',None, 'The type of the control.',),
('Control','Y','N',0,32767,None, None, None, None, 'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
('Control','Text','Y',None, None, None, None, 'Formatted',None, 'A string used to set the initial text contained within a control (if appropriate).',),
('Control','Property','Y',None, None, None, None, 'Identifier',None, 'The name of a defined property to be linked to this control. ',),
('Control','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this control.',),
('Control','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the control.',),
('Control','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the control.',),
('Control','X','N',0,32767,None, None, None, None, 'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
('Control','Control','N',None, None, None, None, 'Identifier',None, 'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. ',),
('Control','Control_Next','Y',None, None, 'Control',2,'Identifier',None, 'The name of an other control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!',),
('Control','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'External key to the Dialog table, name of the dialog.',),
('Control','Help','Y',None, None, None, None, 'Text',None, 'The help strings used with the button. The text is optional. ',),
('Dialog','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this dialog.',),
('Dialog','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the dialog.',),
('Dialog','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the dialog.',),
('Dialog','Dialog','N',None, None, None, None, 'Identifier',None, 'Name of the dialog.',),
('Dialog','Control_Cancel','Y',None, None, 'Control',2,'Identifier',None, 'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.',),
('Dialog','Control_Default','Y',None, None, 'Control',2,'Identifier',None, 'Defines the default control. Hitting return is equivalent to pushing this button.',),
('Dialog','Control_First','N',None, None, 'Control',2,'Identifier',None, 'Defines the control that has the focus when the dialog is created.',),
('Dialog','HCentering','N',0,100,None, None, None, None, 'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.',),
('Dialog','Title','Y',None, None, None, None, 'Formatted',None, "A text string specifying the title to be displayed in the title bar of the dialog's window.",),
('Dialog','VCentering','N',0,100,None, None, None, None, 'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.',),
('ControlCondition','Action','N',None, None, None, None, None, 'Default;Disable;Enable;Hide;Show','The desired action to be taken on the specified control.',),
('ControlCondition','Condition','N',None, None, None, None, 'Condition',None, 'A standard conditional statement that specifies under which conditions the action should be triggered.',),
('ControlCondition','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the dialog.',),
('ControlCondition','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control.',),
('ControlEvent','Condition','Y',None, None, None, None, 'Condition',None, 'A standard conditional statement that specifies under which conditions an event should be triggered.',),
('ControlEvent','Ordering','Y',0,2147483647,None, None, None, None, 'An integer used to order several events tied to the same control. Can be left blank.',),
('ControlEvent','Argument','N',None, None, None, None, 'Formatted',None, 'A value to be used as a modifier when triggering a particular event.',),
('ControlEvent','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the dialog.',),
('ControlEvent','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control',),
('ControlEvent','Event','N',None, None, None, None, 'Formatted',None, 'An identifier that specifies the type of the event that should take place when the user interacts with control specified by the first two entries.',),
('CreateFolder','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table.',),
('CreateFolder','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Primary key, could be foreign key into the Directory table.',),
('CustomAction','Type','N',1,16383,None, None, None, None, 'The numeric custom action type, consisting of source location, code type, entry, option flags.',),
('CustomAction','Action','N',None, None, None, None, 'Identifier',None, 'Primary key, name of action, normally appears in sequence table unless private use.',),
('CustomAction','Source','Y',None, None, None, None, 'CustomSource',None, 'The table reference of the source of the code.',),
('CustomAction','Target','Y',None, None, None, None, 'Formatted',None, 'Excecution parameter, depends on the type of custom action',),
('DrLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('DrLocator','Path','Y',None, None, None, None, 'AnyPath',None, 'The path on the user system. This is a either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.',),
('DrLocator','Depth','Y',0,32767,None, None, None, None, 'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.',),
('DrLocator','Parent','Y',None, None, None, None, 'Identifier',None, 'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.',),
('DuplicateFile','File_','N',None, None, 'File',1,'Identifier',None, 'Foreign key referencing the source file to be duplicated.',),
('DuplicateFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the duplicate file.',),
('DuplicateFile','DestFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.',),
('DuplicateFile','DestName','Y',None, None, None, None, 'Filename',None, 'Filename to be given to the duplicate file.',),
('DuplicateFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular file entry',),
('Environment','Name','N',None, None, None, None, 'Text',None, 'The name of the environmental value.',),
('Environment','Value','Y',None, None, None, None, 'Formatted',None, 'The value to set in the environmental settings.',),
('Environment','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the environmental value.',),
('Environment','Environment','N',None, None, None, None, 'Identifier',None, 'Unique identifier for the environmental variable setting',),
('Error','Error','N',0,32767,None, None, None, None, 'Integer error number, obtained from header file IError(...) macros.',),
('Error','Message','Y',None, None, None, None, 'Template',None, 'Error formatting template, obtained from user ed. or localizers.',),
('EventMapping','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the Dialog.',),
('EventMapping','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control.',),
('EventMapping','Event','N',None, None, None, None, 'Identifier',None, 'An identifier that specifies the type of the event that the control subscribes to.',),
('EventMapping','Attribute','N',None, None, None, None, 'Identifier',None, 'The name of the control attribute, that is set when this event is received.',),
('Extension','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
('Extension','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Extension','Extension','N',None, None, None, None, 'Text',None, 'The extension associated with the table row.',),
('Extension','MIME_','Y',None, None, 'MIME',1,'Text',None, 'Optional Context identifier, typically "type/format" associated with the extension',),
('Extension','ProgId_','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this extension.',),
('MIME','CLSID','Y',None, None, None, None, 'Guid',None, 'Optional associated CLSID.',),
('MIME','ContentType','N',None, None, None, None, 'Text',None, 'Primary key. Context identifier, typically "type/format".',),
('MIME','Extension_','N',None, None, 'Extension',1,'Text',None, 'Optional associated extension (without dot)',),
('FeatureComponents','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into Feature table.',),
('FeatureComponents','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('FileSFPCatalog','File_','N',None, None, 'File',1,'Identifier',None, 'File associated with the catalog',),
('FileSFPCatalog','SFPCatalog_','N',None, None, 'SFPCatalog',1,'Filename',None, 'Catalog associated with the file',),
('SFPCatalog','SFPCatalog','N',None, None, None, None, 'Filename',None, 'File name for the catalog.',),
('SFPCatalog','Catalog','N',None, None, None, None, 'Binary',None, 'SFP Catalog',),
('SFPCatalog','Dependency','Y',None, None, None, None, 'Formatted',None, 'Parent catalog - only used by SFP',),
('Font','File_','N',None, None, 'File',1,'Identifier',None, 'Primary key, foreign key into File table referencing font file.',),
('Font','FontTitle','Y',None, None, None, None, 'Text',None, 'Font name.',),
('IniFile','Action','N',None, None, None, None, None, '0;1;3','The type of modification to be made, one of iifEnum',),
('IniFile','Value','N',None, None, None, None, 'Formatted',None, 'The value to be written.',),
('IniFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the .INI value.',),
('IniFile','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name in which to write the information',),
('IniFile','IniFile','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('IniFile','DirProperty','Y',None, None, None, None, 'Identifier',None, 'Foreign key into the Directory table denoting the directory where the .INI file is.',),
('IniFile','Key','N',None, None, None, None, 'Formatted',None, 'The .INI file key below Section.',),
('IniFile','Section','N',None, None, None, None, 'Formatted',None, 'The .INI file Section.',),
('IniLocator','Type','Y',0,2,None, None, None, None, 'An integer value that determines if the .INI value read is a filename or a directory location or to be used as is w/o interpretation.',),
('IniLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('IniLocator','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name.',),
('IniLocator','Key','N',None, None, None, None, 'Text',None, 'Key value (followed by an equals sign in INI file).',),
('IniLocator','Section','N',None, None, None, None, 'Text',None, 'Section name within in file (within square brackets in INI file).',),
('IniLocator','Field','Y',0,32767,None, None, None, None, 'The field in the .INI line. If Field is null or 0 the entire line is read.',),
('InstallExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('InstallExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('InstallExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('InstallUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('InstallUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('InstallUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('IsolatedComponent','Component_Application','N',None, None, 'Component',1,'Identifier',None, 'Key to Component table item for application',),
('IsolatedComponent','Component_Shared','N',None, None, 'Component',1,'Identifier',None, 'Key to Component table item to be isolated',),
('LaunchCondition','Description','N',None, None, None, None, 'Formatted',None, 'Localizable text to display when condition fails and install must abort.',),
('LaunchCondition','Condition','N',None, None, None, None, 'Condition',None, 'Expression which must evaluate to TRUE in order for install to commence.',),
('ListBox','Text','Y',None, None, None, None, 'Text',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ListBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.',),
('ListBox','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ListBox','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('ListView','Text','Y',None, None, None, None, 'Text',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ListView','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same listview.',),
('ListView','Value','N',None, None, None, None, 'Identifier',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ListView','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('ListView','Binary_','Y',None, None, 'Binary',1,'Identifier',None, 'The name of the icon to be displayed with the icon. The binary information is looked up from the Binary Table.',),
('LockPermissions','Table','N',None, None, None, None, 'Identifier','Directory;File;Registry','Reference to another table name',),
('LockPermissions','Domain','Y',None, None, None, None, 'Formatted',None, 'Domain name for user whose permissions are being set. (usually a property)',),
('LockPermissions','LockObject','N',None, None, None, None, 'Identifier',None, 'Foreign key into Registry or File table',),
('LockPermissions','Permission','Y',-2147483647,2147483647,None, None, None, None, 'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)',),
('LockPermissions','User','N',None, None, None, None, 'Formatted',None, 'User for permissions to be set. (usually a property)',),
('Media','Source','Y',None, None, None, None, 'Property',None, 'The property defining the location of the cabinet file.',),
('Media','Cabinet','Y',None, None, None, None, 'Cabinet',None, 'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.',),
('Media','DiskId','N',1,32767,None, None, None, None, 'Primary key, integer to determine sort order for table.',),
('Media','DiskPrompt','Y',None, None, None, None, 'Text',None, 'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.',),
('Media','LastSequence','N',0,32767,None, None, None, None, 'File sequence number for the last file for this media.',),
('Media','VolumeLabel','Y',None, None, None, None, 'Text',None, 'The label attributed to the volume.',),
('ModuleComponents','Component','N',None, None, 'Component',1,'Identifier',None, 'Component contained in the module.',),
('ModuleComponents','Language','N',None, None, 'ModuleSignature',2,None, None, 'Default language ID for module (may be changed by transform).',),
('ModuleComponents','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'Module containing the component.',),
('ModuleSignature','Language','N',None, None, None, None, None, None, 'Default decimal language of module.',),
('ModuleSignature','Version','N',None, None, None, None, 'Version',None, 'Version of the module.',),
('ModuleSignature','ModuleID','N',None, None, None, None, 'Identifier',None, 'Module identifier (String.GUID).',),
('ModuleDependency','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'Module requiring the dependency.',),
('ModuleDependency','ModuleLanguage','N',None, None, 'ModuleSignature',2,None, None, 'Language of module requiring the dependency.',),
('ModuleDependency','RequiredID','N',None, None, None, None, None, None, 'String.GUID of required module.',),
('ModuleDependency','RequiredLanguage','N',None, None, None, None, None, None, 'LanguageID of the required module.',),
('ModuleDependency','RequiredVersion','Y',None, None, None, None, 'Version',None, 'Version of the required version.',),
('ModuleExclusion','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'String.GUID of module with exclusion requirement.',),
('ModuleExclusion','ModuleLanguage','N',None, None, 'ModuleSignature',2,None, None, 'LanguageID of module with exclusion requirement.',),
('ModuleExclusion','ExcludedID','N',None, None, None, None, None, None, 'String.GUID of excluded module.',),
('ModuleExclusion','ExcludedLanguage','N',None, None, None, None, None, None, 'Language of excluded module.',),
('ModuleExclusion','ExcludedMaxVersion','Y',None, None, None, None, 'Version',None, 'Maximum version of excluded module.',),
('ModuleExclusion','ExcludedMinVersion','Y',None, None, None, None, 'Version',None, 'Minimum version of excluded module.',),
('MoveFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry',),
('MoveFile','DestFolder','N',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
('MoveFile','DestName','Y',None, None, None, None, 'Filename',None, 'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file',),
('MoveFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key that uniquely identifies a particular MoveFile record',),
('MoveFile','Options','N',0,1,None, None, None, None, 'Integer value specifying the MoveFile operating mode, one of imfoEnum',),
('MoveFile','SourceFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the source directory',),
('MoveFile','SourceName','Y',None, None, None, None, 'Text',None, "Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards.",),
('MsiAssembly','Attributes','Y',None, None, None, None, None, None, 'Assembly attributes',),
('MsiAssembly','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into Feature table.',),
('MsiAssembly','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('MsiAssembly','File_Application','Y',None, None, 'File',1,'Identifier',None, 'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.',),
('MsiAssembly','File_Manifest','Y',None, None, 'File',1,'Identifier',None, 'Foreign key into the File table denoting the manifest file for the assembly.',),
('MsiAssemblyName','Name','N',None, None, None, None, 'Text',None, 'The name part of the name-value pairs for the assembly name.',),
('MsiAssemblyName','Value','N',None, None, None, None, 'Text',None, 'The value part of the name-value pairs for the assembly name.',),
('MsiAssemblyName','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('MsiDigitalCertificate','CertData','N',None, None, None, None, 'Binary',None, 'A certificate context blob for a signer certificate',),
('MsiDigitalCertificate','DigitalCertificate','N',None, None, None, None, 'Identifier',None, 'A unique identifier for the row',),
('MsiDigitalSignature','Table','N',None, None, None, None, None, 'Media','Reference to another table name (only Media table is supported)',),
('MsiDigitalSignature','DigitalCertificate_','N',None, None, 'MsiDigitalCertificate',1,'Identifier',None, 'Foreign key to MsiDigitalCertificate table identifying the signer certificate',),
('MsiDigitalSignature','Hash','Y',None, None, None, None, 'Binary',None, 'The encoded hash blob from the digital signature',),
('MsiDigitalSignature','SignObject','N',None, None, None, None, 'Text',None, 'Foreign key to Media table',),
('MsiFileHash','File_','N',None, None, 'File',1,'Identifier',None, 'Primary key, foreign key into File table referencing file with this hash',),
('MsiFileHash','Options','N',0,32767,None, None, None, None, 'Various options and attributes for this hash.',),
('MsiFileHash','HashPart1','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart2','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart3','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart4','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiPatchHeaders','StreamRef','N',None, None, None, None, 'Identifier',None, 'Primary key. A unique identifier for the row.',),
('MsiPatchHeaders','Header','N',None, None, None, None, 'Binary',None, 'Binary stream. The patch header, used for patch validation.',),
('ODBCAttribute','Value','Y',None, None, None, None, 'Text',None, 'Value for ODBC driver attribute',),
('ODBCAttribute','Attribute','N',None, None, None, None, 'Text',None, 'Name of ODBC driver attribute',),
('ODBCAttribute','Driver_','N',None, None, 'ODBCDriver',1,'Identifier',None, 'Reference to ODBC driver in ODBCDriver table',),
('ODBCDriver','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for driver, non-localized',),
('ODBCDriver','File_','N',None, None, 'File',1,'Identifier',None, 'Reference to key driver file',),
('ODBCDriver','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCDriver','Driver','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for driver',),
('ODBCDriver','File_Setup','Y',None, None, 'File',1,'Identifier',None, 'Optional reference to key driver setup DLL',),
('ODBCDataSource','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for data source',),
('ODBCDataSource','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCDataSource','DataSource','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for data source',),
('ODBCDataSource','DriverDescription','N',None, None, None, None, 'Text',None, 'Reference to driver description, may be existing driver',),
('ODBCDataSource','Registration','N',0,1,None, None, None, None, 'Registration option: 0=machine, 1=user, others t.b.d.',),
('ODBCSourceAttribute','Value','Y',None, None, None, None, 'Text',None, 'Value for ODBC data source attribute',),
('ODBCSourceAttribute','Attribute','N',None, None, None, None, 'Text',None, 'Name of ODBC data source attribute',),
('ODBCSourceAttribute','DataSource_','N',None, None, 'ODBCDataSource',1,'Identifier',None, 'Reference to ODBC data source in ODBCDataSource table',),
('ODBCTranslator','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for translator',),
('ODBCTranslator','File_','N',None, None, 'File',1,'Identifier',None, 'Reference to key translator file',),
('ODBCTranslator','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCTranslator','File_Setup','Y',None, None, 'File',1,'Identifier',None, 'Optional reference to key translator setup DLL',),
('ODBCTranslator','Translator','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for translator',),
('Patch','Sequence','N',0,32767,None, None, None, None, 'Primary key, sequence with respect to the media images; order must track cabinet order.',),
('Patch','Attributes','N',0,32767,None, None, None, None, 'Integer containing bit flags representing patch attributes',),
('Patch','File_','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.',),
('Patch','Header','Y',None, None, None, None, 'Binary',None, 'Binary stream. The patch header, used for patch validation.',),
('Patch','PatchSize','N',0,2147483647,None, None, None, None, 'Size of patch in bytes (integer).',),
('Patch','StreamRef_','Y',None, None, None, None, 'Identifier',None, 'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.',),
('PatchPackage','Media_','N',0,32767,None, None, None, None, 'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.',),
('PatchPackage','PatchId','N',None, None, None, None, 'Guid',None, 'A unique string GUID representing this patch.',),
('PublishComponent','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into the Feature table.',),
('PublishComponent','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table.',),
('PublishComponent','ComponentId','N',None, None, None, None, 'Guid',None, 'A string GUID that represents the component id that will be requested by the alien product.',),
('PublishComponent','AppData','Y',None, None, None, None, 'Text',None, 'This is localisable Application specific data that can be associated with a Qualified Component.',),
('PublishComponent','Qualifier','N',None, None, None, None, 'Text',None, 'This is defined only when the ComponentId column is an Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.',),
('RadioButton','Y','N',0,32767,None, None, None, None, 'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.',),
('RadioButton','Text','Y',None, None, None, None, 'Text',None, 'The visible title to be assigned to the radio button.',),
('RadioButton','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.',),
('RadioButton','Height','N',0,32767,None, None, None, None, 'The height of the button.',),
('RadioButton','Width','N',0,32767,None, None, None, None, 'The width of the button.',),
('RadioButton','X','N',0,32767,None, None, None, None, 'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.',),
('RadioButton','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this button. Selecting the button will set the associated property to this value.',),
('RadioButton','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('RadioButton','Help','Y',None, None, None, None, 'Text',None, 'The help strings used with the button. The text is optional.',),
('Registry','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('Registry','Value','Y',None, None, None, None, 'Formatted',None, 'The registry value.',),
('Registry','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the registry value.',),
('Registry','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('Registry','Registry','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('Registry','Root','N',-1,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum.',),
('RegLocator','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('RegLocator','Type','Y',0,18,None, None, None, None, 'An integer value that determines if the registry value is a filename or a directory location or to be used as is w/o interpretation.',),
('RegLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry values refers a directory, and _Signature is not a foreign key.',),
('RegLocator','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('RegLocator','Root','N',0,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum.',),
('RemoveFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the file to be removed.',),
('RemoveFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular file entry',),
('RemoveFile','FileName','Y',None, None, None, None, 'WildCardFilename',None, 'Name of the file to be removed.',),
('RemoveFile','DirProperty','N',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.',),
('RemoveFile','InstallMode','N',None, None, None, None, None, '1;2;3','Installation option, one of iimEnum.',),
('RemoveIniFile','Action','N',None, None, None, None, None, '2;4','The type of modification to be made, one of iifEnum.',),
('RemoveIniFile','Value','Y',None, None, None, None, 'Formatted',None, 'The value to be deleted. The value is required when Action is iifIniRemoveTag',),
('RemoveIniFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the deletion of the .INI value.',),
('RemoveIniFile','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name in which to delete the information',),
('RemoveIniFile','DirProperty','Y',None, None, None, None, 'Identifier',None, 'Foreign key into the Directory table denoting the directory where the .INI file is.',),
('RemoveIniFile','Key','N',None, None, None, None, 'Formatted',None, 'The .INI file key below Section.',),
('RemoveIniFile','Section','N',None, None, None, None, 'Formatted',None, 'The .INI file Section.',),
('RemoveIniFile','RemoveIniFile','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('RemoveRegistry','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('RemoveRegistry','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the deletion of the registry value.',),
('RemoveRegistry','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('RemoveRegistry','Root','N',-1,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum',),
('RemoveRegistry','RemoveRegistry','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ReserveCost','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reserve a specified amount of space if this component is to be installed.',),
('ReserveCost','ReserveFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
('ReserveCost','ReserveKey','N',None, None, None, None, 'Identifier',None, 'Primary key that uniquely identifies a particular ReserveCost record',),
('ReserveCost','ReserveLocal','N',0,2147483647,None, None, None, None, 'Disk space to reserve if linked component is installed locally.',),
('ReserveCost','ReserveSource','N',0,2147483647,None, None, None, None, 'Disk space to reserve if linked component is installed to run from the source location.',),
('SelfReg','File_','N',None, None, 'File',1,'Identifier',None, 'Foreign key into the File table denoting the module that needs to be registered.',),
('SelfReg','Cost','Y',0,32767,None, None, None, None, 'The cost of registering the module.',),
('ServiceControl','Name','N',None, None, None, None, 'Formatted',None, 'Name of a service. /, \\, comma and space are invalid',),
('ServiceControl','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table that controls the startup of the service',),
('ServiceControl','Event','N',0,187,None, None, None, None, 'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete',),
('ServiceControl','ServiceControl','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ServiceControl','Arguments','Y',None, None, None, None, 'Formatted',None, 'Arguments for the service. Separate by [~].',),
('ServiceControl','Wait','Y',0,1,None, None, None, None, 'Boolean for whether to wait for the service to fully start',),
('ServiceInstall','Name','N',None, None, None, None, 'Formatted',None, 'Internal Name of the Service',),
('ServiceInstall','Description','Y',None, None, None, None, 'Text',None, 'Description of service.',),
('ServiceInstall','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table that controls the startup of the service',),
('ServiceInstall','Arguments','Y',None, None, None, None, 'Formatted',None, 'Arguments to include in every start of the service, passed to WinMain',),
('ServiceInstall','ServiceInstall','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ServiceInstall','Dependencies','Y',None, None, None, None, 'Formatted',None, 'Other services this depends on to start. Separate by [~], and end with [~][~]',),
('ServiceInstall','DisplayName','Y',None, None, None, None, 'Formatted',None, 'External Name of the Service',),
('ServiceInstall','ErrorControl','N',-2147483647,2147483647,None, None, None, None, 'Severity of error if service fails to start',),
('ServiceInstall','LoadOrderGroup','Y',None, None, None, None, 'Formatted',None, 'LoadOrderGroup',),
('ServiceInstall','Password','Y',None, None, None, None, 'Formatted',None, 'password to run service with. (with StartName)',),
('ServiceInstall','ServiceType','N',-2147483647,2147483647,None, None, None, None, 'Type of the service',),
('ServiceInstall','StartName','Y',None, None, None, None, 'Formatted',None, 'User or object name to run service as',),
('ServiceInstall','StartType','N',0,4,None, None, None, None, 'Type of the service',),
('Shortcut','Name','N',None, None, None, None, 'Filename',None, 'The name of the shortcut to be created.',),
('Shortcut','Description','Y',None, None, None, None, 'Text',None, 'The description for the shortcut.',),
('Shortcut','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table denoting the component whose selection gates the the shortcut creation/deletion.',),
('Shortcut','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Foreign key into the File table denoting the external icon file for the shortcut.',),
('Shortcut','IconIndex','Y',-32767,32767,None, None, None, None, 'The icon index for the shortcut.',),
('Shortcut','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Foreign key into the Directory table denoting the directory where the shortcut file is created.',),
('Shortcut','Target','N',None, None, None, None, 'Shortcut',None, 'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.',),
('Shortcut','Arguments','Y',None, None, None, None, 'Formatted',None, 'The command-line arguments for the shortcut.',),
('Shortcut','Shortcut','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('Shortcut','Hotkey','Y',0,32767,None, None, None, None, 'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. ',),
('Shortcut','ShowCmd','Y',None, None, None, None, None, '1;3;7','The show command for the application window.The following values may be used.',),
('Shortcut','WkDir','Y',None, None, None, None, 'Identifier',None, 'Name of property defining location of working directory.',),
('Signature','FileName','N',None, None, None, None, 'Filename',None, 'The name of the file. This may contain a "short name|long name" pair.',),
('Signature','Signature','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature represents a unique file signature.',),
('Signature','Languages','Y',None, None, None, None, 'Language',None, 'The languages supported by the file.',),
('Signature','MaxDate','Y',0,2147483647,None, None, None, None, 'The maximum creation date of the file.',),
('Signature','MaxSize','Y',0,2147483647,None, None, None, None, 'The maximum size of the file. ',),
('Signature','MaxVersion','Y',None, None, None, None, 'Text',None, 'The maximum version of the file.',),
('Signature','MinDate','Y',0,2147483647,None, None, None, None, 'The minimum creation date of the file.',),
('Signature','MinSize','Y',0,2147483647,None, None, None, None, 'The minimum size of the file.',),
('Signature','MinVersion','Y',None, None, None, None, 'Text',None, 'The minimum version of the file.',),
('TextStyle','TextStyle','N',None, None, None, None, 'Identifier',None, 'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.',),
('TextStyle','Color','Y',0,16777215,None, None, None, None, 'An integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).',),
('TextStyle','FaceName','N',None, None, None, None, 'Text',None, 'A string indicating the name of the font used. Required. The string must be at most 31 characters long.',),
('TextStyle','Size','N',0,32767,None, None, None, None, 'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.',),
('TextStyle','StyleBits','Y',0,15,None, None, None, None, 'A combination of style bits.',),
('TypeLib','Description','Y',None, None, None, None, 'Text',None, None, ),
('TypeLib','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.',),
('TypeLib','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('TypeLib','Directory_','Y',None, None, 'Directory',1,'Identifier',None, 'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.',),
('TypeLib','Language','N',0,32767,None, None, None, None, 'The language of the library.',),
('TypeLib','Version','Y',0,16777215,None, None, None, None, 'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. ',),
('TypeLib','Cost','Y',0,2147483647,None, None, None, None, 'The cost associated with the registration of the typelib. This column is currently optional.',),
('TypeLib','LibID','N',None, None, None, None, 'Guid',None, 'The GUID that represents the library.',),
('UIText','Text','Y',None, None, None, None, 'Text',None, 'The localized version of the string.',),
('UIText','Key','N',None, None, None, None, 'Identifier',None, 'A unique key that identifies the particular string.',),
('Upgrade','Attributes','N',0,2147483647,None, None, None, None, 'The attributes of this product set.',),
('Upgrade','Language','Y',None, None, None, None, 'Language',None, 'A comma-separated list of languages for either products in this set or products not in this set.',),
('Upgrade','ActionProperty','N',None, None, None, None, 'UpperCase',None, 'The property to set when a product in this set is found.',),
('Upgrade','Remove','Y',None, None, None, None, 'Formatted',None, 'The list of features to remove when uninstalling a product from this set. The default is "ALL".',),
('Upgrade','UpgradeCode','N',None, None, None, None, 'Guid',None, 'The UpgradeCode GUID belonging to the products in this set.',),
('Upgrade','VersionMax','Y',None, None, None, None, 'Text',None, 'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
('Upgrade','VersionMin','Y',None, None, None, None, 'Text',None, 'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
('Verb','Sequence','Y',0,32767,None, None, None, None, 'Order within the verbs for a particular extension. Also used simply to specify the default verb.',),
('Verb','Argument','Y',None, None, None, None, 'Formatted',None, 'Optional value for the command arguments.',),
('Verb','Extension_','N',None, None, 'Extension',1,'Text',None, 'The extension associated with the table row.',),
('Verb','Verb','N',None, None, None, None, 'Text',None, 'The verb for the command.',),
('Verb','Command','Y',None, None, None, None, 'Formatted',None, 'The command text.',),
]
| apache-2.0 |
fiji-flo/servo | tests/wpt/web-platform-tests/tools/third_party/pluggy/testing/benchmark.py | 43 | 1328 | """
Benchmarking and performance tests.
"""
import pytest
from pluggy import (_multicall, _legacymulticall, HookImpl, HookspecMarker,
HookimplMarker)
# Spec/impl markers sharing the project name "example"; @hookimpl below
# attaches the "example_impl" opts attribute that MC() reads.
hookspec = HookspecMarker("example")
hookimpl = HookimplMarker("example")
def MC(methods, kwargs, callertype, firstresult=False):
    """Wrap each method in a ``HookImpl`` and invoke them via *callertype*.

    Args:
        methods: callables previously decorated with ``@hookimpl`` (each
            carries an ``example_impl`` opts attribute).
        kwargs: keyword arguments forwarded to the hook call.
        callertype: a multicall implementation (``_multicall`` or
            ``_legacymulticall``).
        firstresult: whether the call should stop at the first non-None result.

    Returns:
        Whatever the chosen multicall implementation returns.
    """
    impls = [
        HookImpl(None, "<temp>", method, method.example_impl)
        for method in methods
    ]
    return callertype(impls, kwargs, {"firstresult": firstresult})
@hookimpl
def hook(arg1, arg2, arg3):
    """Plain hook implementation; simply echoes its arguments as a tuple."""
    return arg1, arg2, arg3
@hookimpl(hookwrapper=True)
def wrapper(arg1, arg2, arg3):
    """Hook wrapper that adds no work around the wrapped implementations."""
    yield
@pytest.fixture(
    params=[10, 100],
    ids="hooks={}".format,
)
def hooks(request):
    """Return ``request.param`` references to the plain ``hook`` impl."""
    return [hook] * request.param
@pytest.fixture(
    params=[10, 100],
    ids="wrappers={}".format,
)
def wrappers(request):
    """Return ``request.param`` references to the ``wrapper`` hook wrapper."""
    return [wrapper] * request.param
@pytest.fixture(
    params=[_multicall, _legacymulticall],
    ids=lambda item: item.__name__
)
def callertype(request):
    """Parametrize tests over both multicall implementations."""
    return request.param
def inner_exec(methods, callertype):
    """Run one multicall over *methods* with a fixed set of hook arguments."""
    call_kwargs = dict(arg1=1, arg2=2, arg3=3)
    return MC(methods, call_kwargs, callertype)
def test_hook_and_wrappers_speed(benchmark, hooks, wrappers, callertype):
    """Benchmark one multicall per hooks/wrappers/callertype combination."""
    benchmark(inner_exec, hooks + wrappers, callertype)
| mpl-2.0 |
3dfxmadscientist/odoo_vi | addons/sale_mrp/__init__.py | 445 | 1062 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_mrp
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
odejesush/tensorflow | tensorflow/contrib/graph_editor/tests/util_test.py | 154 | 6110 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.contrib.graph_editor."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import graph_editor as ge
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class UtilTest(test.TestCase):
def test_list_view(self):
"""Test for ge.util.ListView."""
l = [0, 1, 2]
lv = ge.util.ListView(l)
# Should not be the same id.
self.assertIsNot(l, lv)
# Should behave the same way than the original list.
self.assertTrue(len(lv) == 3 and lv[0] == 0 and lv[1] == 1 and lv[2] == 2)
# Should be read only.
with self.assertRaises(TypeError):
lv[0] = 0
def test_is_iterable(self):
"""Test for ge.util.is_iterable."""
self.assertTrue(ge.util.is_iterable([0, 1, 2]))
self.assertFalse(ge.util.is_iterable(3))
def test_unique_graph(self):
"""Test for ge.util.check_graphs and ge.util.get_unique_graph."""
g0 = ops.Graph()
with g0.as_default():
a0 = constant_op.constant(1)
b0 = constant_op.constant(2)
g1 = ops.Graph()
with g1.as_default():
a1 = constant_op.constant(1)
b1 = constant_op.constant(2)
# Same graph, should be fine.
self.assertIsNone(ge.util.check_graphs(a0, b0))
# Two different graphs, should assert.
with self.assertRaises(ValueError):
ge.util.check_graphs(a0, b0, a1, b1)
# a0 and b0 belongs to the same graph, should be fine.
self.assertEqual(ge.util.get_unique_graph([a0, b0]), g0)
# Different graph, should raise an error.
with self.assertRaises(ValueError):
ge.util.get_unique_graph([a0, b0, a1, b1])
def test_make_list_of_op(self):
"""Test for ge.util.make_list_of_op."""
g0 = ops.Graph()
with g0.as_default():
a0 = constant_op.constant(1)
b0 = constant_op.constant(2)
# Should extract the ops from the graph.
self.assertEqual(len(ge.util.make_list_of_op(g0)), 2)
# Should extract the ops from the tuple.
self.assertEqual(len(ge.util.make_list_of_op((a0.op, b0.op))), 2)
def test_make_list_of_t(self):
"""Test for ge.util.make_list_of_t."""
g0 = ops.Graph()
with g0.as_default():
a0 = constant_op.constant(1)
b0 = constant_op.constant(2)
c0 = math_ops.add(a0, b0) # pylint: disable=unused-variable
# Should extract the tensors from tre graph.
self.assertEqual(len(ge.util.make_list_of_t(g0)), 3)
# Should extract the tensors from the tuple
self.assertEqual(len(ge.util.make_list_of_t((a0, b0))), 2)
# Should extract the tensors and ignore the ops.
self.assertEqual(
len(ge.util.make_list_of_t(
(a0, a0.op, b0), ignore_ops=True)), 2)
def test_get_generating_consuming(self):
"""Test for ge.util.get_generating_ops and ge.util.get_generating_ops."""
g0 = ops.Graph()
with g0.as_default():
a0 = constant_op.constant(1)
b0 = constant_op.constant(2)
c0 = math_ops.add(a0, b0)
self.assertEqual(len(ge.util.get_generating_ops([a0, b0])), 2)
self.assertEqual(len(ge.util.get_consuming_ops([a0, b0])), 1)
self.assertEqual(len(ge.util.get_generating_ops([c0])), 1)
self.assertEqual(ge.util.get_consuming_ops([c0]), [])
def test_control_outputs(self):
"""Test for the ge.util.ControlOutputs class."""
g0 = ops.Graph()
with g0.as_default():
a0 = constant_op.constant(1)
b0 = constant_op.constant(2)
x0 = constant_op.constant(3)
with ops.control_dependencies([x0.op]):
c0 = math_ops.add(a0, b0) # pylint: disable=unused-variable
control_outputs = ge.util.ControlOutputs(g0).get_all()
self.assertEqual(len(control_outputs), 1)
self.assertEqual(len(control_outputs[x0.op]), 1)
self.assertIs(list(control_outputs[x0.op])[0], c0.op)
def test_scope(self):
"""Test simple path scope functionalities."""
self.assertEqual(ge.util.scope_finalize("foo/bar"), "foo/bar/")
self.assertEqual(ge.util.scope_dirname("foo/bar/op"), "foo/bar/")
self.assertEqual(ge.util.scope_basename("foo/bar/op"), "op")
  def test_placeholder(self):
    """Test placeholder functionalities."""
    g0 = ops.Graph()
    with g0.as_default():
      a0 = constant_op.constant(1, name="foo")
    # Test placeholder name generation: "geph" is the graph-editor
    # placeholder prefix; the tensor name and output index are appended.
    self.assertEqual(ge.util.placeholder_name(a0), "geph__foo_0")
    self.assertEqual(ge.util.placeholder_name(None), "geph")
    # A scope prefix is honored whether or not it ends with a slash.
    self.assertEqual(
        ge.util.placeholder_name(
            a0, scope="foo/"), "foo/geph__foo_0")
    self.assertEqual(
        ge.util.placeholder_name(
            a0, scope="foo"), "foo/geph__foo_0")
    self.assertEqual(ge.util.placeholder_name(None, scope="foo/"), "foo/geph")
    self.assertEqual(ge.util.placeholder_name(None, scope="foo"), "foo/geph")
    # Test placeholder creation, both from a tensor and from an explicit
    # dtype/shape; the generated op names follow the scheme checked above.
    g0 = ops.Graph()
    with g0.as_default():
      a0 = constant_op.constant(1, dtype=dtypes.float32, name="a0")
      c0 = math_ops.add(
          ge.util.make_placeholder_from_tensor(a0),
          ge.util.make_placeholder_from_dtype_and_shape(dtype=dtypes.float32))
      self.assertEqual(c0.op.inputs[0].op.name, "geph__a0_0")
      self.assertEqual(c0.op.inputs[1].op.name, "geph")
if __name__ == "__main__":
test.main()
| apache-2.0 |
benfinke/ns_python | build/lib/nssrc/com/citrix/netscaler/nitro/resource/config/vpn/vpnepaprofile.py | 3 | 7146 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class vpnepaprofile(base_resource) :
    """ Configuration for Epa profile resource.

    Auto-generated NITRO SDK wrapper for the vpnepaprofile REST resource:
    instances carry the writable fields (name, filename, data) and the
    classmethods issue add/delete/get/count requests through a
    nitro_service client.  This module follows the generated-SDK layout;
    keep edits consistent with the sibling resource classes.
    """
    def __init__(self) :
        # Writable resource fields; empty string means "unset".
        self._name = ""
        self._filename = ""
        self._data = ""
        # Record count populated by count()/count_filtered() responses.
        self.___count = 0

    @property
    def name(self) :
        ur"""name of device profile.<br/>Minimum length = 1.
        """
        try :
            return self._name
        except Exception as e:
            raise e

    @name.setter
    def name(self, name) :
        ur"""name of device profile.<br/>Minimum length = 1
        """
        try :
            self._name = name
        except Exception as e:
            raise e

    @property
    def filename(self) :
        ur"""filename of the deviceprofile data xml.<br/>Minimum length = 1.
        """
        try :
            return self._filename
        except Exception as e:
            raise e

    @filename.setter
    def filename(self, filename) :
        ur"""filename of the deviceprofile data xml.<br/>Minimum length = 1
        """
        try :
            self._filename = filename
        except Exception as e:
            raise e

    @property
    def data(self) :
        ur"""deviceprofile data xml.<br/>Minimum length = 1.
        """
        try :
            return self._data
        except Exception as e:
            raise e

    @data.setter
    def data(self, data) :
        ur"""deviceprofile data xml.<br/>Minimum length = 1
        """
        try :
            self._data = data
        except Exception as e:
            raise e

    def _get_nitro_response(self, service, response) :
        ur""" Converts a nitro response into an object and returns the object array in case of get request.
        """
        try :
            result = service.payload_formatter.string_to_resource(vpnepaprofile_response, response, self.__class__.__name__)
            if(result.errorcode != 0) :
                # Error code 444 means the session expired; clear it so the
                # next call re-authenticates.
                if (result.errorcode == 444) :
                    service.clear_session(self)
                if result.severity :
                    if (result.severity == "ERROR") :
                        raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
                else :
                    raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
            return result.vpnepaprofile
        except Exception as e :
            raise e

    def _get_object_name(self) :
        ur""" Returns the value of the object identifier argument (the profile name).
        """
        try :
            if self.name is not None :
                return str(self.name)
            return None
        except Exception as e :
            raise e

    @classmethod
    def add(cls, client, resource) :
        ur""" Use this API to add vpnepaprofile.
        """
        try :
            if type(resource) is not list :
                addresource = vpnepaprofile()
                addresource.name = resource.name
                addresource.filename = resource.filename
                addresource.data = resource.data
                return addresource.add_resource(client)
            else :
                # NOTE(review): an empty list leaves `addresources` unbound
                # and the call below raises UnboundLocalError -- confirm
                # whether the upstream SDK generator guards against this.
                if (resource and len(resource) > 0) :
                    addresources = [ vpnepaprofile() for _ in range(len(resource))]
                    for i in range(len(resource)) :
                        addresources[i].name = resource[i].name
                        addresources[i].filename = resource[i].filename
                        addresources[i].data = resource[i].data
                result = cls.add_bulk_request(client, addresources)
                return result
        except Exception as e :
            raise e

    @classmethod
    def delete(cls, client, resource) :
        ur""" Use this API to delete vpnepaprofile.
        """
        try :
            if type(resource) is not list :
                deleteresource = vpnepaprofile()
                # Accept either a plain name string or a resource object.
                if type(resource) != type(deleteresource):
                    deleteresource.name = resource
                else :
                    deleteresource.name = resource.name
                return deleteresource.delete_resource(client)
            else :
                # NOTE(review): as in add(), an empty list leaves
                # `deleteresources` unbound before the bulk request.
                if type(resource[0]) != cls :
                    if (resource and len(resource) > 0) :
                        deleteresources = [ vpnepaprofile() for _ in range(len(resource))]
                        for i in range(len(resource)) :
                            deleteresources[i].name = resource[i]
                else :
                    if (resource and len(resource) > 0) :
                        deleteresources = [ vpnepaprofile() for _ in range(len(resource))]
                        for i in range(len(resource)) :
                            deleteresources[i].name = resource[i].name
                result = cls.delete_bulk_request(client, deleteresources)
                return result
        except Exception as e :
            raise e

    @classmethod
    def get(cls, client, name="", option_="") :
        ur""" Use this API to fetch all the vpnepaprofile resources that are configured on netscaler.
        """
        try :
            if not name :
                # No name given: fetch every configured resource.
                obj = vpnepaprofile()
                response = obj.get_resources(client, option_)
            else :
                if type(name) != cls :
                    if type(name) is not list :
                        # Single name: fetch one resource.
                        obj = vpnepaprofile()
                        obj.name = name
                        response = obj.get_resource(client, option_)
                    else :
                        # List of names: fetch each resource individually.
                        if name and len(name) > 0 :
                            response = [vpnepaprofile() for _ in range(len(name))]
                            obj = [vpnepaprofile() for _ in range(len(name))]
                            for i in range(len(name)) :
                                obj[i] = vpnepaprofile()
                                obj[i].name = name[i]
                                response[i] = obj[i].get_resource(client, option_)
            return response
        except Exception as e :
            raise e

    @classmethod
    def get_filtered(cls, client, filter_) :
        ur""" Use this API to fetch a filtered set of vpnepaprofile resources.
        Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
        """
        try :
            obj = vpnepaprofile()
            option_ = options()
            option_.filter = filter_
            response = obj.getfiltered(client, option_)
            return response
        except Exception as e :
            raise e

    @classmethod
    def count(cls, client) :
        ur""" Use this API to count the vpnepaprofile resources configured on NetScaler.
        """
        try :
            obj = vpnepaprofile()
            option_ = options()
            option_.count = True
            response = obj.get_resources(client, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e :
            raise e

    @classmethod
    def count_filtered(cls, client, filter_) :
        ur""" Use this API to count the filtered set of vpnepaprofile resources.
        Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
        """
        try :
            obj = vpnepaprofile()
            option_ = options()
            option_.count = True
            option_.filter = filter_
            response = obj.getfiltered(client, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e :
            raise e
class vpnepaprofile_response(base_response) :
    """ NITRO response envelope for vpnepaprofile requests.

    Carries the standard response fields (errorcode, message, severity,
    sessionid) plus the list of decoded vpnepaprofile resources.
    """
    def __init__(self, length=1) :
        self.errorcode = 0
        self.message = ""
        self.severity = ""
        self.sessionid = ""
        # Pre-allocate one empty resource slot per expected record so the
        # payload formatter can populate them in place.
        # (Fix: the original assigned self.vpnepaprofile = [] first and then
        # immediately rebound it below; the first binding was dead code.)
        self.vpnepaprofile = [vpnepaprofile() for _ in range(length)]
| apache-2.0 |
rrampage/rethinkdb | external/v8_3.30.33.16/testing/gmock/test/gmock_leak_test.py | 779 | 4384 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests that leaked mock objects can be caught be Google Mock."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import gmock_test_utils
PROGRAM_PATH = gmock_test_utils.GetTestExecutablePath('gmock_leak_test_')
TEST_WITH_EXPECT_CALL = [PROGRAM_PATH, '--gtest_filter=*ExpectCall*']
TEST_WITH_ON_CALL = [PROGRAM_PATH, '--gtest_filter=*OnCall*']
TEST_MULTIPLE_LEAKS = [PROGRAM_PATH, '--gtest_filter=*MultipleLeaked*']
environ = gmock_test_utils.environ
SetEnvVar = gmock_test_utils.SetEnvVar
# Tests in this file run a Google-Test-based test program and expect it
# to terminate prematurely. Therefore they are incompatible with
# the premature-exit-file protocol by design. Unset the
# premature-exit filepath to prevent Google Test from creating
# the file.
SetEnvVar(gmock_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)
class GMockLeakTest(gmock_test_utils.TestCase):
  """Checks that Google Mock reports (or skips reporting) leaked mocks.

  Each test runs the gmock_leak_test_ child binary, which deliberately
  leaks mock objects, and inspects its exit code: non-zero means the
  leak was caught, zero means leak checking did not trip.
  """

  def testCatchesLeakedMockByDefault(self):
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL,
                                    env=environ).exit_code)
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_ON_CALL,
                                    env=environ).exit_code)

  def testDoesNotCatchLeakedMockWhenDisabled(self):
    # Fix: use assertEqual instead of the deprecated assertEquals alias.
    self.assertEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                    ['--gmock_catch_leaked_mocks=0'],
                                    env=environ).exit_code)
    self.assertEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_ON_CALL +
                                    ['--gmock_catch_leaked_mocks=0'],
                                    env=environ).exit_code)

  def testCatchesLeakedMockWhenEnabled(self):
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                    ['--gmock_catch_leaked_mocks'],
                                    env=environ).exit_code)
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_ON_CALL +
                                    ['--gmock_catch_leaked_mocks'],
                                    env=environ).exit_code)

  def testCatchesLeakedMockWhenEnabledWithExplictFlagValue(self):
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_WITH_EXPECT_CALL +
                                    ['--gmock_catch_leaked_mocks=1'],
                                    env=environ).exit_code)

  def testCatchesMultipleLeakedMocks(self):
    self.assertNotEqual(
        0,
        gmock_test_utils.Subprocess(TEST_MULTIPLE_LEAKS +
                                    ['--gmock_catch_leaked_mocks'],
                                    env=environ).exit_code)
if __name__ == '__main__':
gmock_test_utils.Main()
| agpl-3.0 |
hujiajie/chromium-crosswalk | tools/telemetry/third_party/typ/typ/arg_parser.py | 33 | 13928 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import optparse
from typ.host import Host
class _Bailout(Exception):
    """Internal control-flow exception raised by ArgumentParser.exit().

    Lets error()/exit() unwind back to parse_args() (which then returns
    None) instead of terminating the process like stock argparse does.
    """
    pass
# Globs excluded from coverage reports by default.
DEFAULT_COVERAGE_OMIT = ['*/typ/*', '*/site-packages/*']
# Ninja-style progress format (finished/total); overridable via $NINJA_STATUS.
DEFAULT_STATUS_FORMAT = '[%f/%t] '
# Filename globs used for test discovery.
DEFAULT_SUFFIXES = ['*_test.py', '*_unittest.py']
class ArgumentParser(argparse.ArgumentParser):
    """Argument parser for typ with discovery/running/reporting option sets.

    Each option group can be toggled off so host programs can embed only
    the subset they need; optparse_options() bridges the options back into
    an optparse-based parser.  error()/exit() record an exit status on the
    parser instead of terminating the process.
    """

    @staticmethod
    def add_option_group(parser, title, discovery=False,
                         running=False, reporting=False, skip=None):
        """Add typ's options to an *optparse* parser as one option group."""
        # TODO: Get rid of this when telemetry upgrades to argparse.
        ap = ArgumentParser(add_help=False, version=False, discovery=discovery,
                            running=running, reporting=reporting)
        optlist = ap.optparse_options(skip=skip)
        group = optparse.OptionGroup(parser, title)
        group.add_options(optlist)
        parser.add_option_group(group)

    def __init__(self, host=None, add_help=True, version=True, discovery=True,
                 reporting=True, running=True):
        super(ArgumentParser, self).__init__(prog='typ', add_help=add_help)
        self._host = host or Host()
        # Set by error()/exit() instead of calling sys.exit().
        self.exit_status = None

        self.usage = '%(prog)s [options] [tests...]'

        if version:
            self.add_argument('-V', '--version', action='store_true',
                              help='Print the typ version and exit.')

        if discovery:
            self.add_argument('-f', '--file-list', metavar='FILENAME',
                              action='store',
                              help=('Takes the list of tests from the file '
                                    '(use "-" for stdin).'))
            self.add_argument('--all', action='store_true',
                              help=('Run all the tests, including the ones '
                                    'normally skipped.'))
            self.add_argument('--isolate', metavar='glob', default=[],
                              action='append',
                              help=('Globs of tests to run in isolation '
                                    '(serially).'))
            self.add_argument('--skip', metavar='glob', default=[],
                              action='append',
                              help=('Globs of test names to skip ('
                                    'defaults to %(default)s).'))
            self.add_argument('--suffixes', metavar='glob', default=[],
                              action='append',
                              help=('Globs of test filenames to look for ('
                                    'can specify multiple times; defaults '
                                    'to %s).' % DEFAULT_SUFFIXES))

        if reporting:
            self.add_argument('--builder-name',
                              help=('Builder name to include in the '
                                    'uploaded data.'))
            self.add_argument('-c', '--coverage', action='store_true',
                              help='Reports coverage information.')
            self.add_argument('--coverage-source', action='append',
                              default=[],
                              help=('Directories to include when running and '
                                    'reporting coverage (defaults to '
                                    '--top-level-dir plus --path)'))
            self.add_argument('--coverage-omit', action='append',
                              default=[],
                              help=('Globs to omit when reporting coverage '
                                    '(defaults to %s).' %
                                    DEFAULT_COVERAGE_OMIT))
            self.add_argument('--coverage-annotate', action='store_true',
                              help=('Produce an annotate source report.'))
            self.add_argument('--coverage-show-missing', action='store_true',
                              help=('Show missing line ranges in coverage '
                                    'report.'))
            self.add_argument('--master-name',
                              help=('Buildbot master name to include in the '
                                    'uploaded data.'))
            self.add_argument('--metadata', action='append', default=[],
                              help=('Optional key=value metadata that will '
                                    'be included in the results.'))
            self.add_argument('--test-results-server',
                              help=('If specified, uploads the full results '
                                    'to this server.'))
            self.add_argument('--test-type',
                              help=('Name of test type to include in the '
                                    'uploaded data (e.g., '
                                    '"telemetry_unittests").'))
            self.add_argument('--write-full-results-to', metavar='FILENAME',
                              action='store',
                              help=('If specified, writes the full results to '
                                    'that path.'))
            self.add_argument('--write-trace-to', metavar='FILENAME',
                              action='store',
                              help=('If specified, writes the trace to '
                                    'that path.'))
            # NOTE(review): the 'tests' positional is registered under the
            # reporting group -- confirm this grouping is intentional.
            self.add_argument('tests', nargs='*', default=[],
                              help=argparse.SUPPRESS)

        if running:
            self.add_argument('-d', '--debugger', action='store_true',
                              help='Runs the tests under the debugger.')
            self.add_argument('-j', '--jobs', metavar='N', type=int,
                              default=self._host.cpu_count(),
                              help=('Runs N jobs in parallel '
                                    '(defaults to %(default)s).'))
            self.add_argument('-l', '--list-only', action='store_true',
                              help='Lists all the test names found and exits.')
            self.add_argument('-n', '--dry-run', action='store_true',
                              help=argparse.SUPPRESS)
            self.add_argument('-q', '--quiet', action='store_true',
                              default=False,
                              help=('Runs as quietly as possible '
                                    '(only prints errors).'))
            self.add_argument('-s', '--status-format',
                              default=self._host.getenv('NINJA_STATUS',
                                                        DEFAULT_STATUS_FORMAT),
                              help=argparse.SUPPRESS)
            self.add_argument('-t', '--timing', action='store_true',
                              help='Prints timing info.')
            self.add_argument('-v', '--verbose', action='count', default=0,
                              help=('Prints more stuff (can specify multiple '
                                    'times for more output).'))
            self.add_argument('--passthrough', action='store_true',
                              default=False,
                              help='Prints all output while running.')
            self.add_argument('--retry-limit', type=int, default=0,
                              help='Retries each failure up to N times.')
            self.add_argument('--terminal-width', type=int,
                              default=self._host.terminal_width(),
                              help=argparse.SUPPRESS)
            self.add_argument('--overwrite', action='store_true',
                              default=None,
                              help=argparse.SUPPRESS)
            self.add_argument('--no-overwrite', action='store_false',
                              dest='overwrite', default=None,
                              help=argparse.SUPPRESS)

        if discovery or running:
            self.add_argument('-P', '--path', action='append', default=[],
                              help=('Adds dir to sys.path (can specify '
                                    'multiple times).'))
            self.add_argument('--top-level-dir', default=None,
                              help=('Sets the top directory of project '
                                    '(used when running subdirs).'))

    def parse_args(self, args=None, namespace=None):
        """Parse args; returns None if error()/exit() bailed out.

        Also validates --metadata / --test-results-server combinations
        (recording exit_status=2 on failure) and fills in defaults for
        suffixes, coverage omissions, debugger mode and --overwrite.
        """
        try:
            rargs = super(ArgumentParser, self).parse_args(args=args,
                                                           namespace=namespace)
        except _Bailout:
            return None

        for val in rargs.metadata:
            if '=' not in val:
                self._print_message('Error: malformed --metadata "%s"' % val)
                self.exit_status = 2

        if rargs.test_results_server:
            # Uploading results requires the full builder identification.
            if not rargs.builder_name:
                self._print_message('Error: --builder-name must be specified '
                                    'along with --test-result-server')
                self.exit_status = 2
            if not rargs.master_name:
                self._print_message('Error: --master-name must be specified '
                                    'along with --test-result-server')
                self.exit_status = 2
            if not rargs.test_type:
                self._print_message('Error: --test-type must be specified '
                                    'along with --test-result-server')
                self.exit_status = 2

        if not rargs.suffixes:
            rargs.suffixes = DEFAULT_SUFFIXES

        if not rargs.coverage_omit:
            rargs.coverage_omit = DEFAULT_COVERAGE_OMIT

        if rargs.debugger:  # pragma: no cover
            # The debugger needs a single, unbuffered job.
            rargs.jobs = 1
            rargs.passthrough = True

        if rargs.overwrite is None:
            # Overwrite status lines only on an interactive, non-verbose run.
            rargs.overwrite = self._host.stdout.isatty() and not rargs.verbose

        return rargs

    # Redefining built-in 'file' pylint: disable=W0622

    def _print_message(self, msg, file=None):
        """Write msg through the host abstraction (testable I/O)."""
        self._host.print_(msg=msg, stream=file, end='\n')

    def print_help(self, file=None):
        self._print_message(msg=self.format_help(), file=file)

    def error(self, message, bailout=True):  # pylint: disable=W0221
        """Report a usage error; raises _Bailout unless bailout=False."""
        self.exit(2, '%s: error: %s\n' % (self.prog, message), bailout=bailout)

    def exit(self, status=0, message=None,  # pylint: disable=W0221
             bailout=True):
        """Record the exit status instead of exiting the process."""
        self.exit_status = status
        if message:
            self._print_message(message, file=self._host.stderr)
        if bailout:
            raise _Bailout()

    def optparse_options(self, skip=None):
        """Return this parser's options as a list of optparse options."""
        skip = skip or []
        options = []
        for action in self._actions:
            args = [flag for flag in action.option_strings if flag not in skip]
            if not args or action.help == '==SUPPRESS==':
                # must either be a positional argument like 'tests'
                # or an option we want to skip altogether.
                continue

            kwargs = {
                'default': action.default,
                'dest': action.dest,
                'help': action.help,
                'metavar': action.metavar,
                'type': action.type,
                'action': _action_str(action)
            }
            options.append(optparse.make_option(*args, **kwargs))
        return options

    def argv_from_args(self, args):
        """Reconstruct a command-line argv from a parsed args namespace.

        Only values that differ from the parser's defaults are emitted;
        positional tests are appended last.
        """
        default_parser = ArgumentParser(host=self._host)
        default_args = default_parser.parse_args([])
        argv = []
        tests = []
        d = vars(args)
        for k in sorted(d.keys()):
            v = d[k]
            argname = _argname_from_key(k)
            action = self._action_for_key(k)
            action_str = _action_str(action)
            if k == 'tests':
                tests = v
                continue
            if getattr(default_args, k) == v:
                # this arg has the default value, so skip it.
                continue

            assert action_str in ['append', 'count', 'store', 'store_true']
            if action_str == 'append':
                for el in v:
                    argv.append(argname)
                    argv.append(el)
            elif action_str == 'count':
                for _ in range(v):
                    argv.append(argname)
            elif action_str == 'store':
                argv.append(argname)
                argv.append(str(v))
            else:
                # action_str == 'store_true'
                argv.append(argname)

        return argv + tests

    def _action_for_key(self, key):
        """Return the argparse action whose dest matches key."""
        for action in self._actions:
            if action.dest == key:
                return action

        assert False, ('Could not find an action for %s'  # pragma: no cover
                       % key)
def _action_str(action):
# Access to a protected member pylint: disable=W0212
assert action.__class__ in (
argparse._AppendAction,
argparse._CountAction,
argparse._StoreAction,
argparse._StoreTrueAction
)
if isinstance(action, argparse._AppendAction):
return 'append'
if isinstance(action, argparse._CountAction):
return 'count'
if isinstance(action, argparse._StoreAction):
return 'store'
if isinstance(action, argparse._StoreTrueAction):
return 'store_true'
def _argname_from_key(key):
return '--' + key.replace('_', '-')
| bsd-3-clause |
yangming85/lettuce | tests/integration/lib/Django-1.3/django/contrib/localflavor/fr/fr_department.py | 314 | 3326 | # -*- coding: utf-8 -*-
DEPARTMENT_ASCII_CHOICES = (
('01', '01 - Ain'),
('02', '02 - Aisne'),
('03', '03 - Allier'),
('04', '04 - Alpes-de-Haute-Provence'),
('05', '05 - Hautes-Alpes'),
('06', '06 - Alpes-Maritimes'),
('07', '07 - Ardeche'),
('08', '08 - Ardennes'),
('09', '09 - Ariege'),
('10', '10 - Aube'),
('11', '11 - Aude'),
('12', '12 - Aveyron'),
('13', '13 - Bouches-du-Rhone'),
('14', '14 - Calvados'),
('15', '15 - Cantal'),
('16', '16 - Charente'),
('17', '17 - Charente-Maritime'),
('18', '18 - Cher'),
('19', '19 - Correze'),
('21', '21 - Cote-d\'Or'),
('22', '22 - Cotes-d\'Armor'),
('23', '23 - Creuse'),
('24', '24 - Dordogne'),
('25', '25 - Doubs'),
('26', '26 - Drome'),
('27', '27 - Eure'),
('28', '28 - Eure-et-Loire'),
('29', '29 - Finistere'),
('2A', '2A - Corse-du-Sud'),
('2B', '2B - Haute-Corse'),
('30', '30 - Gard'),
('31', '31 - Haute-Garonne'),
('32', '32 - Gers'),
('33', '33 - Gironde'),
('34', '34 - Herault'),
('35', '35 - Ille-et-Vilaine'),
('36', '36 - Indre'),
('37', '37 - Indre-et-Loire'),
('38', '38 - Isere'),
('39', '39 - Jura'),
('40', '40 - Landes'),
('41', '41 - Loir-et-Cher'),
('42', '42 - Loire'),
('43', '43 - Haute-Loire'),
('44', '44 - Loire-Atlantique'),
('45', '45 - Loiret'),
('46', '46 - Lot'),
('47', '47 - Lot-et-Garonne'),
('48', '48 - Lozere'),
('49', '49 - Maine-et-Loire'),
('50', '50 - Manche'),
('51', '51 - Marne'),
('52', '52 - Haute-Marne'),
('53', '53 - Mayenne'),
('54', '54 - Meurthe-et-Moselle'),
('55', '55 - Meuse'),
('56', '56 - Morbihan'),
('57', '57 - Moselle'),
('58', '58 - Nievre'),
('59', '59 - Nord'),
('60', '60 - Oise'),
('61', '61 - Orne'),
('62', '62 - Pas-de-Calais'),
('63', '63 - Puy-de-Dome'),
('64', '64 - Pyrenees-Atlantiques'),
('65', '65 - Hautes-Pyrenees'),
('66', '66 - Pyrenees-Orientales'),
('67', '67 - Bas-Rhin'),
('68', '68 - Haut-Rhin'),
('69', '69 - Rhone'),
('70', '70 - Haute-Saone'),
('71', '71 - Saone-et-Loire'),
('72', '72 - Sarthe'),
('73', '73 - Savoie'),
('74', '74 - Haute-Savoie'),
('75', '75 - Paris'),
('76', '76 - Seine-Maritime'),
('77', '77 - Seine-et-Marne'),
('78', '78 - Yvelines'),
('79', '79 - Deux-Sevres'),
('80', '80 - Somme'),
('81', '81 - Tarn'),
('82', '82 - Tarn-et-Garonne'),
('83', '83 - Var'),
('84', '84 - Vaucluse'),
('85', '85 - Vendee'),
('86', '86 - Vienne'),
('87', '87 - Haute-Vienne'),
('88', '88 - Vosges'),
('89', '89 - Yonne'),
('90', '90 - Territoire de Belfort'),
('91', '91 - Essonne'),
('92', '92 - Hauts-de-Seine'),
('93', '93 - Seine-Saint-Denis'),
('94', '94 - Val-de-Marne'),
('95', '95 - Val-d\'Oise'),
('971', '971 - Guadeloupe'),
('972', '972 - Martinique'),
('973', '973 - Guyane'),
('974', '974 - La Reunion'),
('975', '975 - Saint-Pierre-et-Miquelon'),
('976', '976 - Mayotte'),
('984', '984 - Terres Australes et Antarctiques'),
('986', '986 - Wallis et Futuna'),
('987', '987 - Polynesie Francaise'),
('988', '988 - Nouvelle-Caledonie'),
)
| gpl-3.0 |
gae-init/gae-init-debug | main/auth/microsoft.py | 8 | 1734 | # coding: utf-8
import flask
import auth
import config
import model
import util
from main import app
# OAuth2 endpoints and app credentials for Microsoft Live Connect.
# The wl.emails scope is requested so the profile includes email addresses.
microsoft_config = dict(
  access_token_method='POST',
  access_token_url='https://login.live.com/oauth20_token.srf',
  authorize_url='https://login.live.com/oauth20_authorize.srf',
  base_url='https://apis.live.net/v5.0/',
  consumer_key=config.CONFIG_DB.microsoft_client_id,
  consumer_secret=config.CONFIG_DB.microsoft_client_secret,
  request_token_params={'scope': 'wl.emails'},
)

microsoft = auth.create_oauth_app(microsoft_config, 'microsoft')
@app.route('/api/auth/callback/microsoft/')
def microsoft_authorized():
  """OAuth2 callback: store the access token, fetch the Microsoft profile
  and sign the user in (creating the account on first login)."""
  response = microsoft.authorized_response()
  if response is None:
    flask.flash('You denied the request to sign in.')
    return flask.redirect(util.get_next_url())

  flask.session['oauth_token'] = (response['access_token'], '')
  me = microsoft.get('me')
  if me.data.get('error', {}):
    # Bug fix: the error payload lives on me.data, not on the response
    # object itself -- indexing `me[...]` raised a TypeError and masked
    # the real API error.
    return 'Unknown error: error:%s error_description:%s' % (
      me.data['error']['code'],
      me.data['error']['message'],
    )

  user_db = retrieve_user_from_microsoft(me.data)
  return auth.signin_user_db(user_db)
@microsoft.tokengetter
def get_microsoft_oauth_token():
  """Token getter used by the OAuth client on authenticated API calls."""
  return flask.session.get('oauth_token')
@app.route('/signin/microsoft/')
def signin_microsoft():
  """Start the Microsoft (Live Connect) OAuth sign-in flow."""
  return auth.signin_oauth(microsoft)
def retrieve_user_from_microsoft(response):
  """Return the User for this Microsoft profile, creating one if needed."""
  auth_id = 'microsoft_%s' % response['id']
  existing_user = model.User.get_by('auth_ids', auth_id)
  if existing_user:
    return existing_user
  # Prefer the user's preferred address, falling back to the account one.
  emails = response['emails']
  email = emails['preferred'] or emails['account']
  return auth.create_user_db(
      auth_id=auth_id,
      name=response.get('name', ''),
      username=email,
      email=email,
      verified=bool(email),
    )
| mit |
tangyiyong/odoo | addons/l10n_th/__init__.py | 893 | 1045 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
pricingassistant/mrq | tests/test_subpool.py | 1 | 3033 | from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import range
from bson import ObjectId
import urllib.request, urllib.error, urllib.parse
import json
import time
import os
import pytest
@pytest.mark.parametrize(["use_worker"], [[False], [True]])
def test_subpool_simple(worker, use_worker):
    # Check that a subpool can be used both in and outside of a Job context.

    if use_worker:
        worker.start()
    else:
        from tests.tasks.general import SubPool

    def run(params):
        # Dispatch either through a worker job or as a direct call.
        if use_worker:
            return worker.send_task("tests.tasks.general.SubPool", params)
        else:
            return SubPool().run(params)

    # Check that sequential sleeps work: a pool of 1 serializes two
    # one-second inner sleeps, so the elapsed time must exceed 2 seconds.
    start_time = time.time()
    result = run({
        "pool_size": 1, "inner_params": [1, 1]
    })
    total_time = time.time() - start_time
    assert result == [1, 1]
    assert total_time > 2

    # py.test doesn't use gevent so we don't get the benefits of the hub
    if use_worker:
        # Parallel sleeps: 20 greenlets sleeping 1s each must complete in
        # under 2 seconds if the pool really runs them concurrently.
        start_time = time.time()
        result = run({
            "pool_size": 20, "inner_params": [1] * 20
        })
        total_time = time.time() - start_time
        assert result == [1] * 20
        assert total_time < 2
@pytest.mark.parametrize(["p_imap"], [
    [True],
    [False]
])
def test_subpool_exception(worker, p_imap):
    # An exception raised inside the subpool must propagate out of the pool
    # and mark the job as failed.
    worker.send_task("tests.tasks.general.SubPool", {
        "pool_size": 20, "inner_params": ["exception"], "imap": p_imap
    }, accept_statuses=["failed"])

    job = worker.mongodb_jobs.mrq_jobs.find_one()
    assert job
    assert job["status"] == "failed"
    # The stored traceback should point at the line inside the subpool task.
    assert "__INNER_EXCEPTION_LINE__" in job["traceback"]
@pytest.mark.parametrize(["p_size"], [
    [0],
    [1],
    [2],
    [100]
])
def test_subpool_import(worker, p_size):
    """ This tests that the patch_import() function does its job of preventing a gevent crash
    like explained in https://code.google.com/p/gevent/issues/detail?id=108 """

    # Each of the p_size greenlets imports a large file concurrently; the
    # job must still finish with status "success".
    worker.send_task("tests.tasks.general.SubPool", {
        "pool_size": p_size, "inner_params": ["import-large-file"] * p_size
    }, accept_statuses=["success"])
def test_subpool_imap():
    """Exception propagation and clean completion for subpool_imap."""
    from mrq.context import subpool_imap

    def make_numbers(count):
        """Yield 0..count-1, raising once the value 5 is reached."""
        for value in range(0, count):
            if value == 5:
                raise Exception("Iterator exception!")
            yield value

    def slow_double(value):
        """Sleep, log, then double the value; raises on input 4."""
        time.sleep(1)
        print("inner_func: %s" % value)
        if value == 4:
            raise Exception("Inner exception!")
        return value * 2

    # An exception raised by the inner function propagates out of the pool.
    with pytest.raises(Exception):
        for item in subpool_imap(10, slow_double, make_numbers(10)):
            print("Got %s" % item)

    # A short, clean iteration completes without raising.
    for item in subpool_imap(2, slow_double, make_numbers(1)):
        print("Got %s" % item)

    # An exception raised by the iterator itself also propagates.
    with pytest.raises(Exception):
        for item in subpool_imap(2, slow_double, make_numbers(5)):
            print("Got %s" % item)
| mit |
willingc/oh-mainline | vendor/packages/twisted/doc/core/examples/wxacceptance.py | 19 | 3217 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Acceptance tests for wxreactor.
Please test on Linux, Win32 and OS X:
1. Startup event is called at startup.
2. Scheduled event is called after 2 seconds.
3. Shutdown takes 3 seconds, both when quiting from menu and when closing
window (e.g. Alt-F4 in metacity). This tests reactor.stop() and
wxApp.ExitEventLoop().
4. 'hello, world' continues to be printed even when modal dialog is open
(use dialog menu item), when menus are held down, when window is being
dragged.
"""
import sys, time
try:
from wx import Frame as wxFrame, DefaultPosition as wxDefaultPosition, \
Size as wxSize, Menu as wxMenu, MenuBar as wxMenuBar, \
EVT_MENU, MessageDialog as wxMessageDialog, App as wxApp
except ImportError, e:
from wxPython.wx import *
from twisted.python import log
from twisted.internet import wxreactor
wxreactor.install()
from twisted.internet import reactor, defer
# set up so that "hello, world" is printed continuously
dc = None


def helloWorld():
    """Print a heartbeat line and reschedule itself every 100 ms.

    The DelayedCall handle is kept in the module-level `dc` so that
    shutdown() can cancel the loop.
    """
    global dc
    print "hello, world", time.time()
    dc = reactor.callLater(0.1, helloWorld)

dc = reactor.callLater(0.1, helloWorld)
def twoSecondsPassed():
    """Callback scheduled from startup(); confirms callLater fires (test 2)."""
    print "two seconds passed"
def printer(s):
    """Write *s* to stdout (used for the shutdown countdown)."""
    print(s)
def shutdown():
    """'before shutdown' trigger: delay reactor shutdown by 3 seconds.

    Cancels the helloWorld heartbeat, prints a countdown, and returns a
    Deferred that fires after 3 seconds; Twisted waits on it before
    actually stopping (acceptance test 3).
    """
    print "shutting down in 3 seconds"
    if dc.active():
        dc.cancel()
    reactor.callLater(1, printer, "2...")
    reactor.callLater(2, printer, "1...")
    reactor.callLater(3, printer, "0...")
    d = defer.Deferred()
    # Shutdown proceeds once this Deferred fires, 3 seconds from now.
    reactor.callLater(3, d.callback, 1)
    return d
def startup():
    """'after startup' trigger: schedule twoSecondsPassed (tests 1 and 2)."""
    print "Start up event!"
    reactor.callLater(2, twoSecondsPassed)
# Hook the startup banner and the delayed shutdown into the reactor.
reactor.addSystemEventTrigger("after", "startup", startup)
reactor.addSystemEventTrigger("before", "shutdown", shutdown)

# wx menu item ids.
ID_EXIT = 101
ID_DIALOG = 102
class MyFrame(wxFrame):
    """Main window with a File menu exposing Dialog and Exit items."""

    def __init__(self, parent, ID, title):
        wxFrame.__init__(self, parent, ID, title, wxDefaultPosition, wxSize(300, 200))
        menu = wxMenu()
        menu.Append(ID_DIALOG, "D&ialog", "Show dialog")
        menu.Append(ID_EXIT, "E&xit", "Terminate the program")
        menuBar = wxMenuBar()
        menuBar.Append(menu, "&File")
        self.SetMenuBar(menuBar)
        EVT_MENU(self, ID_EXIT, self.DoExit)
        EVT_MENU(self, ID_DIALOG, self.DoDialog)
        # you really ought to do this instead of reactor.stop() in
        # DoExit, but for the sake of testing we'll let closing the
        # window shutdown wx without reactor.stop(), to make sure that
        # still does the right thing.
        #EVT_CLOSE(self, lambda evt: reactor.stop())

    def DoDialog(self, event):
        """Open a modal dialog; Twisted timers should keep firing (test 4)."""
        dl = wxMessageDialog(self, "Check terminal to see if messages are still being "
                             "printed by Twisted.")
        dl.ShowModal()
        dl.Destroy()

    def DoExit(self, event):
        """File > Exit handler: stop the reactor (tests shutdown path)."""
        reactor.stop()
class MyApp(wxApp):
    """wx application object; creates and shows the main frame."""
    def OnInit(self):
        frame = MyFrame(None, -1, "Hello, world")
        frame.Show(True)
        self.SetTopWindow(frame)
        # Returning True tells wx that initialisation succeeded.
        return True
def demo():
    """Wire Twisted logging and the wx app into the reactor, then run."""
    log.startLogging(sys.stdout)
    app = MyApp(0)
    # wxreactor needs to know about the wx application object so it can
    # drive the wx event loop from the reactor.
    reactor.registerWxApp(app)
    reactor.run()
if __name__ == '__main__':
demo()
| agpl-3.0 |
takeshineshiro/swift | test/unit/common/test_daemon.py | 12 | 3492 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO(clayg): Test kill_children signal handlers
import os
from six import StringIO
import unittest
from getpass import getuser
import logging
from test.unit import tmpfile
from mock import patch
from swift.common import daemon, utils
class MyDaemon(daemon.Daemon):
    """Test double for daemon.Daemon that records which run method ran."""
    def __init__(self, conf):
        self.conf = conf
        self.logger = utils.get_logger(None, 'server', log_route='server')
        # Reset the class-level flags so each instantiation starts clean;
        # tests assert on these after calling run()/run_daemon().
        MyDaemon.forever_called = False
        MyDaemon.once_called = False
    def run_forever(self):
        MyDaemon.forever_called = True
    def run_once(self):
        MyDaemon.once_called = True
    # Extra run-style methods that tests patch in to exercise the
    # error-handling paths of daemon.run_daemon().
    def run_raise(self):
        raise OSError
    def run_quit(self):
        raise KeyboardInterrupt
class TestDaemon(unittest.TestCase):
    """Unit tests for the daemon.Daemon base class."""
    def test_create(self):
        d = daemon.Daemon({})
        # assertEqual/assertIsInstance instead of the deprecated
        # assertEquals alias and the weaker assertTrue(isinstance(...)):
        # same checks, clearer failure messages.
        self.assertEqual(d.conf, {})
        self.assertIsInstance(d.logger, utils.LogAdapter)
    def test_stubs(self):
        # The base class must force subclasses to implement both hooks.
        d = daemon.Daemon({})
        self.assertRaises(NotImplementedError, d.run_once)
        self.assertRaises(NotImplementedError, d.run_forever)
class TestRunDaemon(unittest.TestCase):
    """Tests for Daemon.run() and the daemon.run_daemon() helper."""
    def setUp(self):
        utils.HASH_PATH_SUFFIX = 'endcap'
        utils.HASH_PATH_PREFIX = 'startcap'
        # Neuter the privilege-dropping / stdio-capturing helpers so the
        # daemon can "run" inside the test process.
        utils.drop_privileges = lambda *args: None
        utils.capture_stdio = lambda *args: None
    def tearDown(self):
        # Restore the monkey-patched module attributes.
        reload(utils)
    def test_run(self):
        d = MyDaemon({})
        self.assertFalse(MyDaemon.forever_called)
        self.assertFalse(MyDaemon.once_called)
        # test default
        d.run()
        self.assertEqual(d.forever_called, True)
        # test once
        d.run(once=True)
        self.assertEqual(d.once_called, True)
    def test_run_daemon(self):
        sample_conf = "[my-daemon]\nuser = %s\n" % getuser()
        with tmpfile(sample_conf) as conf_file:
            with patch.dict('os.environ', {'TZ': ''}):
                daemon.run_daemon(MyDaemon, conf_file)
                self.assertEqual(MyDaemon.forever_called, True)
                # run_daemon() must have set a TZ; the original check used
                # `is not ''`, an identity test on a string literal, which
                # does not reliably test inequality.
                self.assertNotEqual(os.environ['TZ'], '')
            daemon.run_daemon(MyDaemon, conf_file, once=True)
            self.assertEqual(MyDaemon.once_called, True)
            # test raise in daemon code
            MyDaemon.run_once = MyDaemon.run_raise
            self.assertRaises(OSError, daemon.run_daemon, MyDaemon,
                              conf_file, once=True)
            # test user quit
            MyDaemon.run_forever = MyDaemon.run_quit
            sio = StringIO()
            logger = logging.getLogger('server')
            logger.addHandler(logging.StreamHandler(sio))
            logger = utils.get_logger(None, 'server', log_route='server')
            daemon.run_daemon(MyDaemon, conf_file, logger=logger)
            self.assertTrue('user quit' in sio.getvalue().lower())
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
redraw/freshbook | freshbook/cli.py | 1 | 2178 | #!/usr/bin/env python
"""
Freshbooks logger tool.
Usage:
freshbook init
freshbook commit [-d DATE] [--hours HOURS] [-m MESSAGE]
freshbook list [--since SINCE] [--until UNTIL]
freshbook -h | --help
Commands:
list List time entries. (defaults today)
commit Commit a new time entry.
Options:
-h --help Show this screen.
-d DATE --date DATE Date in ISO format, i.e. 2015-09-10 (defaults today)
-m MESSAGE --message=MESSAGE Message to attach on the time entry.
    --hours HOURS                     Hours spent on the task (defaults to config task hours)
--since SINCE Since date in ISO format
--until UNTIL Until date in ISO format
"""
from __future__ import print_function
from docopt import docopt
import os
import sys
import json
from datetime import date
from six.moves import input
from . import Freshbook
from .utils import SetupConfig
def main():
    """Entry point for the ``freshbook`` command line tool."""
    args = docopt(__doc__)

    # First-run setup: write the config file and stop.
    if args['init']:
        SetupConfig().run()
        sys.exit(0)

    if not os.path.exists(SetupConfig.PATH):
        sys.exit("Run `freshbook init` first.")

    with open(SetupConfig.PATH) as config_file:
        config = json.load(config_file)

    account = config['account']
    project = config['project']
    task = project['task']
    api = Freshbook(account['url'], account['token'])

    if args['commit']:
        # Log a new time entry; hours and date fall back to the config
        # defaults / today when not given on the command line.
        print(api.commit(
            project_id=project['id'],
            task_id=task['id'],
            hours=args['--hours'] or task['hours'],
            date=args['--date'] or date.today().isoformat(),
            notes=args['--message'],
        ))

    if args['list']:
        # List entries in the requested window (defaults to today only).
        entries = api.list(
            project_id=project['id'],
            task_id=task['id'],
            date_from=args['--since'] or date.today().isoformat(),
            date_to=args['--until'] or date.today().isoformat()
        )
        for entry in entries:
            print("[%s]" % entry.date)
            print(entry.notes)
            print()

    sys.stdout.flush()
if __name__ == '__main__':
main() | mit |
kaustubhhiware/coala-bears | bears/coffee_script/CoffeeLintBear.py | 16 | 15115 | import json
from coalib.bearlib import deprecate_settings
from coalib.bearlib.abstractions.Linter import linter
from dependency_management.requirements.NpmRequirement import NpmRequirement
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.results.Result import Result
from coala_utils.param_conversion import negate
@linter(executable='coffeelint',
        use_stdin=True)
class CoffeeLintBear:
    """
    Check CoffeeScript code for a clean and consistent style.
    For more information about coffeelint, visit <http://www.coffeelint.org/>.
    """
    LANGUAGES = {'CoffeeScript'}
    REQUIREMENTS = {NpmRequirement('coffeelint', '1')}
    AUTHORS = {'The coala developers'}
    AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
    LICENSE = 'AGPL-3.0'
    CAN_DETECT = {'Syntax', 'Formatting', 'Smell', 'Complexity', 'Duplication'}
    # Map coffeelint issue levels onto coala result severities.
    severity_map = {'warn': RESULT_SEVERITY.NORMAL,
                    'error': RESULT_SEVERITY.MAJOR,
                    'ignore': RESULT_SEVERITY.INFO}
    @staticmethod
    def create_arguments(filename, file, config_file):
        # Source is fed via stdin; the JSON config generated below is
        # passed with -f; "raw" makes coffeelint emit machine-readable JSON.
        return '--reporter=raw', '--stdin', '-f', config_file
    @staticmethod
    @deprecate_settings(indent_size='tab_width',
                        allow_increment=(
                            'no_decr_or_incrementation_operators', negate),
                        allow_no_parameters=(
                            'no_empty_parameter_list', negate),
                        allow_empty_functions=('no_empty_functions', negate),
                        allow_this_statements=('no_this', negate),
                        allow_implicit_parentheses=(
                            'no_implicit_parentheses', negate),
                        allow_interpolation_in_single_quotes=(
                            'no_interpolation_in_single_quotes', negate),
                        allow_stand_alone_at_sign=(
                            'no_stand_alone_at_sign', negate),
                        allow_throwing_strings=(
                            'disable_throwing_strings', negate),
                        allow_unnecessary_double_quotes=(
                            'no_unnecessary_double_quotes', negate),
                        allow_bitwise_operators=(
                            'use_english_operator', negate),
                        force_braces='no_implicit_braces')
    def generate_config(filename, file,
                        max_line_length: int=79,
                        max_line_length_affect_comments: bool=True,
                        space_before_and_after_arrow: bool=True,
                        check_braces_spacing: bool=False,
                        braces_spacing_width: int=1,
                        spacing_in_empty_braces: int=0,
                        class_naming_camelCase: bool=True,
                        spaces_before_and_after_colon: bool=False,
                        spaces_before_colon: int=0,
                        spaces_after_colon: int=1,
                        enforce_newline_at_EOF: bool=True,
                        use_spaces: bool=True,
                        indent_size: int=2,
                        number_of_newlines_after_classes: int=2,
                        prohibit_embedding_javascript_snippet: bool=True,
                        force_braces: bool=False,
                        allow_implicit_parentheses: bool=True,
                        allow_interpolation_in_single_quotes: bool=True,
                        allow_stand_alone_at_sign: bool=False,
                        allow_throwing_strings: bool=False,
                        allow_trailing_semicolons: bool=False,
                        allow_trailing_whitespaces: bool=False,
                        allow_unnecessary_double_quotes: bool=True,
                        allow_bitwise_operators: bool=True,
                        spaces_around_operators: bool=True,
                        space_after_comma: bool=True,
                        cyclomatic_complexity: int=0,
                        prevent_duplicate_keys: bool=True,
                        consistent_line_endings_style: str='',
                        allow_this_statements: bool=True,
                        allow_increment: bool=True,
                        allow_no_parameters: bool=True,
                        allow_empty_functions: bool=False,
                        enforce_parentheses_on_non_empty_constructors:
                            bool=True
                        ):
        """
        :param max_line_length:
            Maximum number of characters per line.
        :param max_line_length_affect_comments:
            Determines if ``max_line_length`` should also affects comments or
            not.
        :param space_before_and_after_arrow:
            Determines if spaces should be used before and after the arrow.
        :param check_braces_spacing:
            Checks if proper spacing is used inside curly braces.
        :param braces_spacing_width:
            Determines the number of blank spaces after the opening ``{`` and
            before the closing brace ``}`` given that there is something within
            the braces.
        :param spacing_in_empty_braces:
            Determines the number of blank spaces after the opening ``{`` and
            before the closing brace ``}`` given empty content.
        :param class_naming_camelCase:
            Checks whether the classes name should be in camel-case or not.
        :param spaces_before_and_after_colon:
            Checks the number of spaces before and after colon.
        :param spaces_before_colon:
            Determines the number of blank spaces before colon when
            ``spaces_before_and_after_colon == True``.
        :param spaces_after_colon:
            Determines the number of space after colon when
            ``spaces_before_and_after_colon == True``.
        :param enforce_newline_at_EOF:
            Checks if the file ends with a single newline.
        :param use_spaces:
            Forbids tabs in indentation and applies two spaces for this
            purpose.
        :param indent_size:
            Number of spaces per indentation level.
        :param number_of_newlines_after_classes:
            Determines the number of newlines that separate the class
            definition and the rest of the code.
        :param prohibit_embedding_javascript_snippet:
            Prevents some JavaScript elements like ``eval`` to affect
            CoffeeScript.
        :param force_braces:
            Prohibits implicit braces when declaring object literals.
            Example: If ``force_braces = True`` then::
                1:2, 3:4
            is prohibited, whereas::
                {1:2, 3:4}
            is accepted.
        :param allow_implicit_parentheses:
            Allows implicit parentheses.
        :param allow_interpolation_in_single_quotes:
            Allows string interpolation in a single quoted string.
            Example: If ``allow_interpolation_in_single_quotes = False`` then::
                f = '#{bar}'
            is prohibited, whereas::
                f = "#{bar}"
            is correct.
        :param allow_stand_alone_at_sign:
            Allows the use of stand alone ``@``.
            Example: If ``allow_stand_alone_at_sign = False``::
                @ notok
                not(@).ok
                @::
            are prohibited, whereas::
                @alright
                @(fn)
                @ok()
                @[ok]
                @ok()
            are accepted.
        :param allow_throwing_strings:
            Allows throwing string literals or interpolation.
            Example: If ``allow_throwing_strings = False``::
                throw 'my error'
                throw "#{1234}"
            will not be permitted.
        :param allow_trailing_semicolons:
            Prohibits trailing semicolons when ``False`` since they are
            not useful. The semicolon is meaningful only if there's another
            instruction on the same line.
            Example: If ``allow_trailing_semicolon = False``::
                x = '1234'; console.log(x)
            Here the semicolon is meaningful::
                alert('end of line');
            This semicolon is redundant.
        :param allow_trailing_whitespaces:
            Checks whether to allow trailing whitespacess in the code or not.
        :param allow_unnecessary_double_quotes:
            Allows enclosing strings in double quotes.
        :param allow_bitwise_operators:
            Determines if ``and``, ``or``, ``is`` and ``isnt`` should be used
            instead of ``&&``, ``||``, ``==`` and ``!=``.
        :param spaces_around_operators:
            Enforces that operators have spaces around them.
        :param space_after_comma:
            Checks if there is a blank space after commas.
        :param cyclomatic_complexity:
            Maximum cyclomatic complexity of the file.
        :param prevent_duplicate_keys:
            Prevents defining duplicate keys in object literals and classes.
        :param enforce_parentheses_on_non_empty_constructors:
            Requires constructors with parameters to include parentheses.
            Example::
                class Foo
                    # Warn about missing parentheses here
                    a = new Foo
                    b = new bar.foo.Foo
                    # The parentheses make it clear no parameters are intended
                    c = new Foo()
                    d = new bar.foo.Foo()
                    e = new Foo 1, 2
                    f = new bar.foo.Foo 1, 2
        :param consistent_line_endings_style:
            The option to ``line_endings``, its value is either ``unix`` or
            ``windows``.
        :param allow_this_statements:
            Allows the use of ``this``. ``@`` should be used if ``False``.
        :param allow_increment:
            Allows the use of increment and decrement arithmetic operators.
        :param allow_no_parameters:
            Allows empty parameter lists in function definitions.
        :param allow_empty_functions:
            Allows declaring empty functions.
        """
        # Build the coffeelint rule dictionary.  Note two patterns below:
        # some rules are always emitted with level 'error' or 'ignore',
        # while others are only added when the corresponding setting
        # deviates from coffeelint's default behaviour.
        coffee_configs = {'max_line_length':
                          {'value': max_line_length,
                           'level': 'error',
                           'limitComments':
                               max_line_length_affect_comments}}
        coffee_configs['arrow_spacing'] = (
            {'level': 'error' if space_before_and_after_arrow else 'ignore'})
        if check_braces_spacing:
            coffee_configs['braces_spacing'] = (
                {'level': 'error',
                 'spaces': braces_spacing_width,
                 'empty_object_spaces': spacing_in_empty_braces})
        if class_naming_camelCase:
            coffee_configs['camel_case_classes'] = {'level': 'error'}
        if spaces_before_and_after_colon:
            coffee_configs['colon_assignment_spacing'] = (
                {'level': 'error',
                 'spacing': {'left': spaces_before_colon,
                             'right': spaces_after_colon}})
        coffee_configs['eol_last'] = (
            {'level': 'error' if enforce_newline_at_EOF else 'ignore'})
        coffee_configs['newlines_after_classes'] = (
            {'value': number_of_newlines_after_classes,
             'level': 'error'})
        coffee_configs['no_backticks'] = (
            {'level': 'error'
             if prohibit_embedding_javascript_snippet else 'ignore'})
        if force_braces:
            coffee_configs['no_implicit_braces'] = (
                {'level': 'error', 'strict': True})
        if not allow_implicit_parentheses:
            coffee_configs['no_implicit_parens'] = (
                {'strict': True, 'level': 'error'})
        coffee_configs['no_interpolation_in_single_quotes'] = (
            {'level': 'error'
             if not allow_interpolation_in_single_quotes else 'ignore'})
        if not allow_stand_alone_at_sign:
            coffee_configs['no_stand_alone_at'] = {'level': 'error'}
        if use_spaces:
            coffee_configs['no_tabs'] = {'level': 'error'}
            coffee_configs['indentation'] = (
                {'value': indent_size, 'level': 'error'})
        coffee_configs['no_throwing_strings'] = (
            {'level': 'error' if not allow_throwing_strings else 'ignore'})
        coffee_configs['no_trailing_semicolons'] = (
            {'level': 'error' if not allow_trailing_semicolons else 'ignore'})
        if not allow_trailing_whitespaces:
            coffee_configs['no_trailing_whitespace'] = (
                {'level': 'error',
                 'allowed_in_comments': True,
                 'allowed_in_empty_lines': True})
        if not allow_unnecessary_double_quotes:
            coffee_configs['no_unnecessary_double_quotes'] = {'level': 'error'}
        if not allow_bitwise_operators:
            coffee_configs['prefer_english_operator'] = (
                {'level': 'error', 'doubleNotLevel': 'ignore'})
        if spaces_around_operators:
            coffee_configs['space_operators'] = {'level': 'error'}
        if space_after_comma:
            coffee_configs['spacing_after_comma'] = {'level': 'warn'}
        coffee_configs['cyclomatic_complexity'] = (
            {'value': cyclomatic_complexity,
             'level': ('error' if cyclomatic_complexity else 'ignore')})
        coffee_configs['duplicate_key'] = (
            {'level': 'error' if prevent_duplicate_keys else 'ignore'})
        if enforce_parentheses_on_non_empty_constructors:
            coffee_configs['non_empty_constructor_needs_parens'] = (
                {'level': 'error'})
        if consistent_line_endings_style:
            coffee_configs['line_endings'] = (
                {'level': 'error', 'value': consistent_line_endings_style})
        if not allow_this_statements:
            coffee_configs['no_this'] = {'level': 'error'}
        if not allow_increment:
            coffee_configs['no_plusplus'] = {'level': 'error'}
        coffee_configs['no_empty_param_list'] = (
            {'level': 'error' if not allow_no_parameters else 'ignore'})
        coffee_configs['no_empty_functions'] = (
            {'level': 'error' if not allow_empty_functions else 'ignore'})
        return json.dumps(coffee_configs)
    def process_output(self, output, filename, file):
        """Convert coffeelint's raw JSON output into coala Results."""
        output = json.loads(output)
        # Since a single file was linted, the raw report must contain
        # exactly one entry.
        assert len(output) == 1, (
            'More than 1 file parsed, something went wrong')
        for item in tuple(output.values())[0]:
            yield Result.from_values(
                origin='{} ({})'.format(self.name, item['rule']),
                message=item['message'],
                file=filename,
                line=item.get('lineNumber', None),
                end_line=item.get('lineNumberEnd', None),
                severity=self.severity_map[item['level']],
                additional_info=item.get('description',
                                         item.get('context', '')))
| agpl-3.0 |
chengjunjian/vnpy | vn.strategy/strategydemo/demoApi.py | 88 | 35262 | # encoding: UTF-8
"""
该文件中包含的是交易平台的底层接口相关的部分,
主要对API进行了一定程度的简化封装,方便开发。
"""
import os
from vnctpmd import MdApi
from vnctptd import TdApi
from eventEngine import *
from ctp_data_type import defineDict
#----------------------------------------------------------------------
def print_dict(d):
"""打印API收到的字典,该函数主要用于开发时的debug"""
print '-'*60
l = d.keys()
l.sort()
for key in l:
print key, ':', d[key]
########################################################################
class DemoMdApi(MdApi):
    """
    Market-data API wrapper used by the demo.
    Every callback pushes its data into the event-driven engine, which then
    distributes it to whatever handlers are listening for that event type.
    Active (user-facing) methods:
    login     -- connect and log in to the market-data front end
    subscribe -- subscribe to an instrument's market data
    """
    #----------------------------------------------------------------------
    def __init__(self, eventEngine):
        """
        Initialise the API object.
        """
        super(DemoMdApi, self).__init__()
        # Event engine: all data is pushed into it and dispatched from there.
        self.__eventEngine = eventEngine
        # Request id, managed by this wrapper.
        self.__reqid = 0
        # Credentials kept so we can log in automatically on (re)connect.
        self.__userid = ''
        self.__password = ''
        self.__brokerid = ''
        # Set of subscribed instruments, used to re-subscribe automatically
        # after a reconnect; a set prevents duplicates.
        self.__setSubscribed = set()
        # The .con files are saved under \mdconnection; the directory must
        # already exist, otherwise the API raises an error.
        self.createFtdcMdApi(os.getcwd() + '\\mdconnection\\')
    #----------------------------------------------------------------------
    def onFrontConnected(self):
        """Front server connected."""
        event = Event(type_=EVENT_LOG)
        event.dict_['log'] = u'行情服务器连接成功'
        self.__eventEngine.put(event)
        # If credentials were already supplied, try to log in automatically.
        if self.__userid:
            req = {}
            req['UserID'] = self.__userid
            req['Password'] = self.__password
            req['BrokerID'] = self.__brokerid
            self.__reqid = self.__reqid + 1
            self.reqUserLogin(req, self.__reqid)
    #----------------------------------------------------------------------
    def onFrontDisconnected(self, n):
        """Front server disconnected."""
        event = Event(type_=EVENT_LOG)
        event.dict_['log'] = u'行情服务器连接断开'
        self.__eventEngine.put(event)
    #----------------------------------------------------------------------
    def onHeartBeatWarning(self, n):
        """Heartbeat warning."""
        # The heartbeat warning fires frequently and has little bearing on
        # the API's operation, so it is deliberately ignored.
        pass
    #----------------------------------------------------------------------
    def onRspError(self, error, n, last):
        """Error response."""
        event = Event(type_=EVENT_LOG)
        log = u'行情错误回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
        event.dict_['log'] = log
        self.__eventEngine.put(event)
    #----------------------------------------------------------------------
    def onRspUserLogin(self, data, error, n, last):
        """Login response."""
        event = Event(type_=EVENT_LOG)
        if error['ErrorID'] == 0:
            log = u'行情服务器登陆成功'
        else:
            log = u'登陆回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
        event.dict_['log'] = log
        self.__eventEngine.put(event)
        ## Re-subscribe to previously subscribed instruments after reconnect.
        #if self.__setSubscribed:
            #for instrument in self.__setSubscribed:
                #self.subscribe(instrument[0], instrument[1])
    #----------------------------------------------------------------------
    def onRspUserLogout(self, data, error, n, last):
        """Logout response."""
        event = Event(type_=EVENT_LOG)
        if error['ErrorID'] == 0:
            log = u'行情服务器登出成功'
        else:
            log = u'登出回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
        event.dict_['log'] = log
        self.__eventEngine.put(event)
    #----------------------------------------------------------------------
    def onRspSubMarketData(self, data, error, n, last):
        """Subscription response."""
        # Subscription errors are usually uninteresting; ignore them.
        pass
    #----------------------------------------------------------------------
    def onRspUnSubMarketData(self, data, error, n, last):
        """Unsubscription response."""
        # Same as above.
        pass
    #----------------------------------------------------------------------
    def onRtnDepthMarketData(self, data):
        """Market-data (tick) push."""
        # Each tick triggers both a generic market-data event and a
        # per-instrument event, so listeners of either kind are served.
        # Generic market-data event.
        event1 = Event(type_=EVENT_MARKETDATA)
        event1.dict_['data'] = data
        self.__eventEngine.put(event1)
        # Per-instrument market-data event.
        event2 = Event(type_=(EVENT_MARKETDATA_CONTRACT+data['InstrumentID']))
        event2.dict_['data'] = data
        self.__eventEngine.put(event2)
    #----------------------------------------------------------------------
    def onRspSubForQuoteRsp(self, data, error, n, last):
        """Option quote-request subscription response."""
        pass
    #----------------------------------------------------------------------
    def onRspUnSubForQuoteRsp(self, data, error, n, last):
        """Option quote-request unsubscription response."""
        pass
    #----------------------------------------------------------------------
    def onRtnForQuoteRsp(self, data):
        """Option quote-request push."""
        pass
    #----------------------------------------------------------------------
    def login(self, address, userid, password, brokerid):
        """Connect to the server (login happens in onFrontConnected)."""
        self.__userid = userid
        self.__password = password
        self.__brokerid = brokerid
        # Register the front server address.
        self.registerFront(address)
        # Initialise the connection; on success onFrontConnected is called.
        self.init()
    #----------------------------------------------------------------------
    def subscribe(self, instrumentid, exchangeid):
        """Subscribe to an instrument's market data."""
        self.subscribeMarketData(instrumentid)
        instrument = (instrumentid, exchangeid)
        self.__setSubscribed.add(instrument)
########################################################################
class DemoTdApi(TdApi):
"""
Demo中的交易API封装
主动函数包括:
login 登陆
getInstrument 查询合约信息
getAccount 查询账号资金
getInvestor 查询投资者
getPosition 查询持仓
sendOrder 发单
cancelOrder 撤单
"""
#----------------------------------------------------------------------
    def __init__(self, eventEngine):
        """Initialise the API object."""
        super(DemoTdApi, self).__init__()
        # Event engine: all data is pushed into it and dispatched from there.
        self.__eventEngine = eventEngine
        # Request id, managed by this wrapper.
        self.__reqid = 0
        # Order reference, managed by this wrapper.
        self.__orderref = 0
        # Credentials kept so we can log in automatically on (re)connect.
        self.__userid = ''
        self.__password = ''
        self.__brokerid = ''
        # Instrument dictionary (caches instrument query results).
        self.__dictInstrument = {}
        # The .con files are saved under \tdconnection.
        self.createFtdcTraderApi(os.getcwd() + '\\tdconnection\\')
#----------------------------------------------------------------------
def onFrontConnected(self):
"""服务器连接"""
event = Event(type_=EVENT_LOG)
event.dict_['log'] = u'交易服务器连接成功'
self.__eventEngine.put(event)
# 如果用户已经填入了用户名等等,则自动尝试连接
if self.__userid:
req = {}
req['UserID'] = self.__userid
req['Password'] = self.__password
req['BrokerID'] = self.__brokerid
self.__reqid = self.__reqid + 1
self.reqUserLogin(req, self.__reqid)
#----------------------------------------------------------------------
def onFrontDisconnected(self, n):
"""服务器断开"""
event = Event(type_=EVENT_LOG)
event.dict_['log'] = u'交易服务器连接断开'
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onHeartBeatWarning(self, n):
""""""
pass
#----------------------------------------------------------------------
def onRspAuthenticate(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
    def onRspUserLogin(self, data, error, n, last):
        """Login response."""
        event = Event(type_=EVENT_LOG)
        if error['ErrorID'] == 0:
            log = u'交易服务器登陆成功'
        else:
            log = u'登陆回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
        event.dict_['log'] = log
        self.__eventEngine.put(event)
        self.getSettlement() # query settlement info right after login
#----------------------------------------------------------------------
def onRspUserLogout(self, data, error, n, last):
"""登出回报"""
event = Event(type_=EVENT_LOG)
if error['ErrorID'] == 0:
log = u'交易服务器登出成功'
else:
log = u'登出回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
event.dict_['log'] = log
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onRspUserPasswordUpdate(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspTradingAccountPasswordUpdate(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspOrderInsert(self, data, error, n, last):
"""发单错误(柜台)"""
event = Event(type_=EVENT_LOG)
log = u' 发单错误回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
event.dict_['log'] = log
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onRspParkedOrderInsert(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspParkedOrderAction(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspOrderAction(self, data, error, n, last):
"""撤单错误(柜台)"""
event = Event(type_=EVENT_LOG)
log = u'撤单错误回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
event.dict_['log'] = log
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onRspQueryMaxOrderVolume(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspSettlementInfoConfirm(self, data, error, n, last):
"""确认结算信息回报"""
event = Event(type_=EVENT_LOG)
log = u'结算信息确认完成'
event.dict_['log'] = log
self.__eventEngine.put(event)
event = Event(type_=EVENT_TDLOGIN)
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onRspRemoveParkedOrder(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspRemoveParkedOrderAction(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspExecOrderInsert(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspExecOrderAction(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspForQuoteInsert(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQuoteInsert(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQuoteAction(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryOrder(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryTrade(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryInvestorPosition(self, data, error, n, last):
"""持仓查询回报"""
if error['ErrorID'] == 0:
event = Event(type_=EVENT_POSITION)
event.dict_['data'] = data
self.__eventEngine.put(event)
else:
event = Event(type_=EVENT_LOG)
log = u'持仓查询回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
event.dict_['log'] = log
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onRspQryTradingAccount(self, data, error, n, last):
"""资金账户查询回报"""
if error['ErrorID'] == 0:
event = Event(type_=EVENT_ACCOUNT)
event.dict_['data'] = data
self.__eventEngine.put(event)
else:
event = Event(type_=EVENT_LOG)
log = u'账户查询回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
event.dict_['log'] = log
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onRspQryInvestor(self, data, error, n, last):
"""投资者查询回报"""
if error['ErrorID'] == 0:
event = Event(type_=EVENT_INVESTOR)
event.dict_['data'] = data
self.__eventEngine.put(event)
else:
event = Event(type_=EVENT_LOG)
log = u'合约投资者回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
event.dict_['log'] = log
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onRspQryTradingCode(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryInstrumentMarginRate(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryInstrumentCommissionRate(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryExchange(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryProduct(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryInstrument(self, data, error, n, last):
"""
合约查询回报
由于该回报的推送速度极快,因此不适合全部存入队列中处理,
选择先储存在一个本地字典中,全部收集完毕后再推送到队列中
(由于耗时过长目前使用其他进程读取)
"""
if error['ErrorID'] == 0:
event = Event(type_=EVENT_INSTRUMENT)
event.dict_['data'] = data
event.dict_['last'] = last
self.__eventEngine.put(event)
else:
event = Event(type_=EVENT_LOG)
log = u'合约投资者回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
event.dict_['log'] = log
self.__eventEngine.put(event)
#----------------------------------------------------------------------
def onRspQryDepthMarketData(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQrySettlementInfo(self, data, error, n, last):
"""查询结算信息回报"""
if last:
event = Event(type_=EVENT_LOG)
log = u'结算信息查询完成'
event.dict_['log'] = log
self.__eventEngine.put(event)
self.confirmSettlement() # 查询完成后立即确认结算信息
#----------------------------------------------------------------------
def onRspQryTransferBank(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryInvestorPositionDetail(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
def onRspQryNotice(self, data, error, n, last):
""""""
pass
#----------------------------------------------------------------------
    # ------------------------------------------------------------------
    # Unused CTP query-response callbacks: required overrides of the API
    # wrapper, deliberately left as no-ops by this gateway.
    # ------------------------------------------------------------------
    def onRspQrySettlementInfoConfirm(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryInvestorPositionCombineDetail(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryCFMMCTradingAccountKey(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryEWarrantOffset(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryInvestorProductGroupMargin(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryExchangeMarginRate(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryExchangeMarginRateAdjust(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryExchangeRate(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQrySecAgentACIDMap(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryOptionInstrTradeCost(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryOptionInstrCommRate(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryExecOrder(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryForQuote(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryQuote(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryTransferSerial(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryAccountregister(self, data, error, n, last):
        """"""
        pass
#----------------------------------------------------------------------
    def onRspError(self, error, n, last):
        """Trade error response: forward the broker error as a log event."""
        event = Event(type_=EVENT_LOG)
        # CTP error messages are GBK-encoded; decode before display.
        log = u'交易错误回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
        event.dict_['log'] = log
        self.__eventEngine.put(event)
#----------------------------------------------------------------------
    def onRtnOrder(self, data):
        """Order status update."""
        # Track the highest order reference seen so locally generated
        # references never collide with ones already used.
        newref = data['OrderRef']
        self.__orderref = max(self.__orderref, int(newref))
        # Generic order event for all listeners.
        event1 = Event(type_=EVENT_ORDER)
        event1.dict_['data'] = data
        self.__eventEngine.put(event1)
        # Per-order event, keyed by this order's reference.
        event2 = Event(type_=(EVENT_ORDER_ORDERREF+data['OrderRef']))
        event2.dict_['data'] = data
        self.__eventEngine.put(event2)
#----------------------------------------------------------------------
    def onRtnTrade(self, data):
        """Trade (fill) notification."""
        # Generic trade event for all listeners.
        event1 = Event(type_=EVENT_TRADE)
        event1.dict_['data'] = data
        self.__eventEngine.put(event1)
        # Per-instrument trade event, keyed by the instrument ID.
        event2 = Event(type_=(EVENT_TRADE_CONTRACT+data['InstrumentID']))
        event2.dict_['data'] = data
        self.__eventEngine.put(event2)
#----------------------------------------------------------------------
    def onErrRtnOrderInsert(self, data, error):
        """Order insertion error (from the exchange): log it."""
        event = Event(type_=EVENT_LOG)
        # CTP error messages are GBK-encoded; decode before display.
        log = u'发单错误回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
        event.dict_['log'] = log
        self.__eventEngine.put(event)
    #----------------------------------------------------------------------
    def onErrRtnOrderAction(self, data, error):
        """Order cancellation error (from the exchange): log it."""
        event = Event(type_=EVENT_LOG)
        # CTP error messages are GBK-encoded; decode before display.
        log = u'撤单错误回报,错误代码:' + unicode(error['ErrorID']) + u',' + u'错误信息:' + error['ErrorMsg'].decode('gbk')
        event.dict_['log'] = log
        self.__eventEngine.put(event)
#----------------------------------------------------------------------
    # ------------------------------------------------------------------
    # Unused CTP notification / bank-transfer callbacks: required
    # overrides of the API wrapper, deliberately left as no-ops.
    # ------------------------------------------------------------------
    def onRtnInstrumentStatus(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnTradingNotice(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnErrorConditionalOrder(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnExecOrder(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnExecOrderInsert(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnExecOrderAction(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnForQuoteInsert(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnQuote(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnQuoteInsert(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnQuoteAction(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnForQuoteRsp(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryContractBank(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryParkedOrder(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryParkedOrderAction(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryTradingNotice(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryBrokerTradingParams(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQryBrokerTradingAlgos(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnFromBankToFutureByBank(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnFromFutureToBankByBank(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnRepealFromBankToFutureByBank(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnRepealFromFutureToBankByBank(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnFromBankToFutureByFuture(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnFromFutureToBankByFuture(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnRepealFromBankToFutureByFutureManual(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnRepealFromFutureToBankByFutureManual(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnQueryBankBalanceByFuture(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnBankToFutureByFuture(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnFutureToBankByFuture(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnRepealBankToFutureByFutureManual(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnRepealFutureToBankByFutureManual(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onErrRtnQueryBankBalanceByFuture(self, data, error):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnRepealFromBankToFutureByFuture(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnRepealFromFutureToBankByFuture(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspFromBankToFutureByFuture(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspFromFutureToBankByFuture(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRspQueryBankAccountMoneyByFuture(self, data, error, n, last):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnOpenAccountByBank(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnCancelAccountByBank(self, data):
        """"""
        pass
    #----------------------------------------------------------------------
    def onRtnChangeAccountByBank(self, data):
        """"""
        pass
#----------------------------------------------------------------------
    def login(self, address, userid, password, brokerid):
        """Connect to the trade server and log in."""
        self.__userid = userid
        self.__password = password
        self.__brokerid = brokerid
        # Replay private/public data streams from the start of today.
        self.subscribePrivateTopic(0)
        self.subscribePublicTopic(0)
        # Register the front server address.
        self.registerFront(address)
        # Initialise the connection; on success onFrontConnected is called.
        self.init()
#----------------------------------------------------------------------
    def getInstrument(self):
        """Query all tradable instruments."""
        self.__reqid = self.__reqid + 1
        self.reqQryInstrument({}, self.__reqid)
    #----------------------------------------------------------------------
    def getAccount(self):
        """Query account funds."""
        self.__reqid = self.__reqid + 1
        self.reqQryTradingAccount({}, self.__reqid)
    #----------------------------------------------------------------------
    def getInvestor(self):
        """Query investor information."""
        self.__reqid = self.__reqid + 1
        self.reqQryInvestor({}, self.__reqid)
    #----------------------------------------------------------------------
    def getPosition(self):
        """Query open positions for this investor."""
        self.__reqid = self.__reqid + 1
        req = {}
        req['BrokerID'] = self.__brokerid
        req['InvestorID'] = self.__userid
        self.reqQryInvestorPosition(req, self.__reqid)
#----------------------------------------------------------------------
    def sendOrder(self, instrumentid, exchangeid, price, pricetype, volume, direction, offset):
        """Submit a new order and return its order reference.

        NOTE(review): exchangeid is accepted but never placed in the
        request dict -- confirm whether this is intended.
        """
        self.__reqid = self.__reqid + 1
        req = {}
        req['InstrumentID'] = instrumentid
        req['OrderPriceType'] = pricetype
        req['LimitPrice'] = price
        req['VolumeTotalOriginal'] = volume
        req['Direction'] = direction
        req['CombOffsetFlag'] = offset
        self.__orderref = self.__orderref + 1
        req['OrderRef'] = str(self.__orderref)
        req['InvestorID'] = self.__userid
        req['UserID'] = self.__userid
        req['BrokerID'] = self.__brokerid
        req['CombHedgeFlag'] = defineDict['THOST_FTDC_HF_Speculation'] # speculative order
        req['ContingentCondition'] = defineDict['THOST_FTDC_CC_Immediately'] # send immediately
        req['ForceCloseReason'] = defineDict['THOST_FTDC_FCC_NotForceClose'] # not a forced close
        req['IsAutoSuspend'] = 0 # no auto-suspend
        req['TimeCondition'] = defineDict['THOST_FTDC_TC_GFD'] # good for the day
        req['VolumeCondition'] = defineDict['THOST_FTDC_VC_AV'] # any fill volume
        req['MinVolume'] = 1 # minimum fill volume is 1
        self.reqOrderInsert(req, self.__reqid)
        # Return the order reference so algorithms can manage the order.
        return self.__orderref
#----------------------------------------------------------------------
    def cancelOrder(self, instrumentid, exchangeid, orderref, frontid, sessionid):
        """Cancel an order, identified by (orderref, frontid, sessionid)."""
        self.__reqid = self.__reqid + 1
        req = {}
        req['InstrumentID'] = instrumentid
        req['ExchangeID'] = exchangeid
        req['OrderRef'] = orderref
        req['FrontID'] = frontid
        req['SessionID'] = sessionid
        # Delete (cancel) the order rather than modify it.
        req['ActionFlag'] = defineDict['THOST_FTDC_AF_Delete']
        req['BrokerID'] = self.__brokerid
        req['InvestorID'] = self.__userid
        self.reqOrderAction(req, self.__reqid)
#----------------------------------------------------------------------
    def getSettlement(self):
        """Query the settlement information."""
        self.__reqid = self.__reqid + 1
        req = {}
        req['BrokerID'] = self.__brokerid
        req['InvestorID'] = self.__userid
        self.reqQrySettlementInfo(req, self.__reqid)
    #----------------------------------------------------------------------
    def confirmSettlement(self):
        """Confirm the settlement information."""
        self.__reqid = self.__reqid + 1
        req = {}
        req['BrokerID'] = self.__brokerid
        req['InvestorID'] = self.__userid
        self.reqSettlementInfoConfirm(req, self.__reqid)
michalliu/chromium-depot_tools | third_party/pylint/checkers/similar.py | 19 | 12641 | # pylint: disable=W0622
# Copyright (c) 2004-2006 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""a similarities / code duplication command line tool and pylint checker
"""
from __future__ import generators
import sys
from itertools import izip
from logilab.common.ureports import Table
from pylint.interfaces import IRawChecker
from pylint.checkers import BaseChecker, table_lines_from_stats
class Similar:
    """finds copy-pasted lines of code in a project"""
    def __init__(self, min_lines=4, ignore_comments=False,
                 ignore_docstrings=False):
        # min_lines: minimum number of successive equal lines to report.
        self.min_lines = min_lines
        self.ignore_comments = ignore_comments
        self.ignore_docstrings = ignore_docstrings
        self.linesets = []
    def append_stream(self, streamid, stream):
        """append a file to search for similarities"""
        stream.seek(0) # XXX may be removed with astng > 0.23
        self.linesets.append(LineSet(streamid,
                                     stream.readlines(),
                                     self.ignore_comments,
                                     self.ignore_docstrings))
    def run(self):
        """start looking for similarities and display results on stdout"""
        self._display_sims(self._compute_sims())
    def _compute_sims(self):
        """compute similarities in appended files

        Returns a list of (num_lines, set of (lineset, start_index))
        sorted by decreasing num_lines. Overlapping matches of the same
        length are merged into a single couple set.
        """
        no_duplicates = {}
        for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
            duplicate = no_duplicates.setdefault(num, [])
            # Merge into an existing group if either endpoint is already
            # part of it; otherwise start a new group.
            for couples in duplicate:
                if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
                    couples.add( (lineset1, idx1) )
                    couples.add( (lineset2, idx2) )
                    break
            else:
                duplicate.append( set([(lineset1, idx1), (lineset2, idx2)]) )
        sims = []
        for num, ensembles in no_duplicates.iteritems():
            for couples in ensembles:
                sims.append( (num, couples) )
        sims.sort()
        sims.reverse()
        return sims
    def _display_sims(self, sims):
        """display computed similarities on stdout"""
        nb_lignes_dupliquees = 0
        for num, couples in sims:
            print
            print num, "similar lines in", len(couples), "files"
            couples = sorted(couples)
            for lineset, idx in couples:
                print "==%s:%s" % (lineset.name, idx)
            # pylint: disable=W0631
            # Deliberately reuses lineset/idx from the last loop iteration
            # to print one representative copy of the duplicated lines.
            for line in lineset._real_lines[idx:idx+num]:
                print "  ", line,
            nb_lignes_dupliquees += num * (len(couples)-1)
        nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
        print "TOTAL lines=%s duplicates=%s percent=%.2f" \
            % (nb_total_lignes, nb_lignes_dupliquees,
               nb_lignes_dupliquees*100. / nb_total_lignes)
    def _find_common(self, lineset1, lineset2):
        """find similarities in the two given linesets

        Yields (num_matching_lines, lineset1, index1, lineset2, index2)
        for each run of more than min_lines matching non-blank lines.
        """
        lines1 = lineset1.enumerate_stripped
        lines2 = lineset2.enumerate_stripped
        find = lineset2.find
        index1 = 0
        min_lines = self.min_lines
        while index1 < len(lineset1):
            skip = 1
            num = 0
            # For every occurrence of the current line in lineset2, walk
            # both sets in lockstep to measure the matching window.
            for index2 in find( lineset1[index1] ):
                non_blank = 0
                for num, ((_, line1), (_, line2)) in enumerate(
                    izip(lines1(index1), lines2(index2))):
                    if line1 != line2:
                        if non_blank > min_lines:
                            yield num, lineset1, index1, lineset2, index2
                            skip = max(skip, num)
                        break
                    if line1:
                        non_blank += 1
                else:
                    # we may have reach the end
                    num += 1
                    if non_blank > min_lines:
                        yield num, lineset1, index1, lineset2, index2
                        skip = max(skip, num)
            index1 += skip
    def _iter_sims(self):
        """iterate on similarities among all files, by making a cartesian
        product
        """
        for idx, lineset in enumerate(self.linesets[:-1]):
            for lineset2 in self.linesets[idx+1:]:
                for sim in self._find_common(lineset, lineset2):
                    yield sim
def stripped_lines(lines, ignore_comments, ignore_docstrings):
    """Return the lines stripped of surrounding whitespace.

    When ignore_docstrings is true, lines belonging to a triple-quoted
    docstring are replaced by empty strings; when ignore_comments is
    true, everything from the first '#' onwards is dropped.
    """
    result = []
    open_quote = None  # quote style of the docstring currently open, if any
    for raw in lines:
        text = raw.strip()
        if ignore_docstrings:
            if open_quote is None and text[:3] in ('"""', "'''"):
                open_quote = text[:3]
                text = text[3:]
            if open_quote is not None:
                if text.endswith(open_quote):
                    open_quote = None
                text = ''
        if ignore_comments:
            # XXX should use regex in checkers/format to avoid cutting
            # at a "#" in a string
            text = text.split('#', 1)[0].strip()
        result.append(text)
    return result
class LineSet:
    """Holds and indexes all the lines of a single source file."""

    def __init__(self, name, lines, ignore_comments=False,
                 ignore_docstrings=False):
        self.name = name
        self._real_lines = lines
        self._stripped_lines = stripped_lines(lines, ignore_comments,
                                              ignore_docstrings)
        self._index = self._mk_index()

    def __str__(self):
        return '<Lineset for %s>' % self.name

    def __len__(self):
        return len(self._real_lines)

    def __getitem__(self, index):
        return self._stripped_lines[index]

    def __lt__(self, other):
        return self.name < other.name

    def __hash__(self):
        return id(self)

    def enumerate_stripped(self, start_at=0):
        """Yield (index, stripped_line) pairs, starting from start_at."""
        tail = self._stripped_lines[start_at:] if start_at else self._stripped_lines
        for offset, line in enumerate(tail):
            yield start_at + offset, line

    def find(self, stripped_line):
        """Return positions of the given stripped line in this set."""
        return self._index.get(stripped_line, ())

    def _mk_index(self):
        """Build a mapping of non-empty stripped line -> list of positions."""
        mapping = {}
        for position, text in enumerate(self._stripped_lines):
            if not text:
                continue
            mapping.setdefault(text, []).append(position)
        return mapping
# Message definitions registered by SimilarChecker (R0801 = duplicate code).
MSGS = {'R0801': ('Similar lines in %s files\n%s',
                  'Indicates that a set of similar lines has been detected \
among multiple file. This usually means that the code should \
be refactored to avoid this duplication.')}
def report_similarities(sect, stats, old_stats):
    """Make a layout (table) with some stats about duplication."""
    cells = ['', 'now', 'previous', 'difference']
    cells.extend(table_lines_from_stats(stats, old_stats,
                                        ('nb_duplicated_lines',
                                         'percent_duplicated_lines')))
    sect.append(Table(children=cells, cols=4, rheaders=1, cheaders=1))
# wrapper to get a pylint checker from the similar class
class SimilarChecker(BaseChecker, Similar):
    """checks for similarities and duplicated code. This computation may be
    memory / CPU intensive, so you should disable it if you experiment some
    problems.
    """
    __implements__ = (IRawChecker,)
    # configuration section name
    name = 'similarities'
    # messages
    msgs = MSGS
    # configuration options
    # for available dict keys/values see the optik parser 'add_option' method
    options = (('min-similarity-lines',
                {'default' : 4, 'type' : "int", 'metavar' : '<int>',
                 'help' : 'Minimum lines number of a similarity.'}),
               ('ignore-comments',
                {'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
                 'help': 'Ignore comments when computing similarities.'}
                ),
               ('ignore-docstrings',
                {'default' : True, 'type' : 'yn', 'metavar' : '<y or n>',
                 'help': 'Ignore docstrings when computing similarities.'}
                ),
               )
    # reports
    reports = ( ('R0801', 'Duplication', report_similarities), ) # XXX actually a Refactoring message
    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # Defaults here are overwritten by set_option() when the linter
        # applies the configuration.
        Similar.__init__(self, min_lines=4,
                         ignore_comments=True, ignore_docstrings=True)
        self.stats = None
    def set_option(self, optname, value, action=None, optdict=None):
        """method called to set an option (registered in the options list)

        overridden to report options setting to Similar
        """
        BaseChecker.set_option(self, optname, value, action, optdict)
        if optname == 'min-similarity-lines':
            self.min_lines = self.config.min_similarity_lines
        elif optname == 'ignore-comments':
            self.ignore_comments = self.config.ignore_comments
        elif optname == 'ignore-docstrings':
            self.ignore_docstrings = self.config.ignore_docstrings
    def open(self):
        """init the checkers: reset linesets and statistics information"""
        self.linesets = []
        self.stats = self.linter.add_stats(nb_duplicated_lines=0,
                                           percent_duplicated_lines=0)
    def process_module(self, node):
        """process a module

        the module's content is accessible via the stream object

        stream must implement the readlines method
        """
        self.append_stream(self.linter.current_name, node.file_stream)
    def close(self):
        """compute and display similarities on closing (i.e. end of parsing)"""
        total = sum([len(lineset) for lineset in self.linesets])
        duplicated = 0
        stats = self.stats
        for num, couples in self._compute_sims():
            msg = []
            for lineset, idx in couples:
                msg.append("==%s:%s" % (lineset.name, idx))
            msg.sort()
            # pylint: disable=W0631
            # Deliberately reuses lineset/idx from the last loop iteration
            # to include one representative copy of the duplicated lines.
            for line in lineset._real_lines[idx:idx+num]:
                msg.append(line.rstrip())
            self.add_message('R0801', args=(len(couples), '\n'.join(msg)))
            duplicated += num * (len(couples) - 1)
        stats['nb_duplicated_lines'] = duplicated
        # 'total and ...' guards against division by zero on empty input.
        stats['percent_duplicated_lines'] = total and duplicated * 100. / total
def register(linter):
    """required method to auto register this checker with the linter"""
    linter.register_checker(SimilarChecker(linter))
def usage(status=0):
    """display command line usage information and exit with `status`"""
    print "finds copy pasted blocks in a set of files"
    print
    print 'Usage: symilar [-d|--duplicates min_duplicated_lines] \
[-i|--ignore-comments] file1...'
    sys.exit(status)
def run(argv=None):
    """standalone command line access point"""
    if argv is None:
        argv = sys.argv[1:]
    from getopt import getopt
    s_opts = 'hdi'
    l_opts = ('help', 'duplicates=', 'ignore-comments')
    min_lines = 4
    ignore_comments = False
    opts, args = getopt(argv, s_opts, l_opts)
    for opt, val in opts:
        if opt in ('-d', '--duplicates'):
            min_lines = int(val)
        elif opt in ('-h', '--help'):
            usage()
        elif opt in ('-i', '--ignore-comments'):
            ignore_comments = True
    if not args:
        usage(1)
    sim = Similar(min_lines, ignore_comments)
    # NOTE(review): streams opened here are never closed explicitly;
    # acceptable for a short-lived CLI process.
    for filename in args:
        sim.append_stream(filename, open(filename))
    sim.run()
if __name__ == '__main__':
    run()
| bsd-3-clause |
hexlism/xx_net | gae_proxy/server/lib/yaml/composer.py | 120 | 4226 |
__all__ = ['Composer', 'ComposerError']
from error import MarkedYAMLError
from events import *
from nodes import *
class ComposerError(MarkedYAMLError):
    """Raised when the event stream cannot be composed into a node graph
    (e.g. undefined alias or duplicate anchor)."""
    pass
class Composer(object):
    """Builds the representation graph (nodes) from parser events.

    Anchors defined in the current document are recorded in self.anchors
    so aliases can be resolved; the mapping is reset after each document.
    """
    def __init__(self):
        self.anchors = {}
    def check_node(self):
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()
        # If there are more documents available?
        return not self.check_event(StreamEndEvent)
    def get_node(self):
        # Get the root node of the next document.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()
    def compose_document(self):
        # Drop the DOCUMENT-START event.
        self.get_event()
        # Compose the root node.
        node = self.compose_node(None, None)
        # Drop the DOCUMENT-END event.
        self.get_event()
        self.anchors = {}
        return node
    def compose_node(self, parent, index):
        """Compose one node; aliases resolve to previously anchored nodes."""
        if self.check_event(AliasEvent):
            event = self.get_event()
            anchor = event.anchor
            if anchor not in self.anchors:
                raise ComposerError(None, None, "found undefined alias %r"
                        % anchor.encode('utf-8'), event.start_mark)
            return self.anchors[anchor]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None:
            if anchor in self.anchors:
                raise ComposerError("found duplicate anchor %r; first occurence"
                        % anchor.encode('utf-8'), self.anchors[anchor].start_mark,
                        "second occurence", event.start_mark)
        # Let the resolver know where we are so implicit tags resolve
        # relative to the current position in the graph.
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node
    def compose_scalar_node(self, anchor):
        event = self.get_event()
        tag = event.tag
        # Resolve missing or non-specific ('!') tags via the resolver.
        if tag is None or tag == u'!':
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(tag, event.value,
                event.start_mark, event.end_mark, style=event.style)
        if anchor is not None:
            self.anchors[anchor] = node
        return node
    def compose_sequence_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        # Register the anchor before composing children so the sequence
        # can contain aliases to itself.
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node
    def compose_mapping_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        if anchor is not None:
            self.anchors[anchor] = node
        while not self.check_event(MappingEndEvent):
            #key_event = self.peek_event()
            item_key = self.compose_node(node, None)
            #if item_key in node.value:
            #    raise ComposerError("while composing a mapping", start_event.start_mark,
            #            "found duplicate key", key_event.start_mark)
            item_value = self.compose_node(node, item_key)
            #node.value[item_key] = item_value
            node.value.append((item_key, item_value))
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node
| bsd-2-clause |
grandamp/certificate-transparency | python/ct/serialization/tls_message.py | 35 | 10820 | """TLS serialization."""
import math
from ct.proto import tls_options_pb2 as options
from google.protobuf import descriptor
class Error(Exception):
    """Base class for exceptions raised by this module."""
    pass
class TLSDecodingError(Error):
    """Decoding failed."""
    pass
class TLSEncodingError(Error):
    """Encoding failed."""
    pass
class TLSReader(object):
    """Read serialized TLS messages into a protocol buffer.

    Wire decisions (integer widths, length prefixes, field selection) are
    driven by the tls_opts / tls_enum_opts options attached to each
    protobuf field descriptor.
    """

    def __init__(self, serialized_buffer):
        # It would be nice to use BytesIO but it has no efficient way of
        # testing whether it's empty without advancing the position, so
        # we have to keep track of the position manually.
        self._buf = serialized_buffer
        self._pos = 0

    def _read_fixed_bytes(self, num_bytes):
        """Consume and return exactly num_bytes bytes.

        Raises:
            TLSDecodingError: if fewer than num_bytes bytes remain.
        """
        if self._pos + num_bytes > len(self._buf):
            raise TLSDecodingError("Buffer underrun: need %d bytes, have "
                                   "%d bytes" % (num_bytes,
                                                 len(self._buf) - self._pos))
        ret = self._buf[self._pos:self._pos + num_bytes]
        self._pos += num_bytes
        return ret

    def finished(self):
        """Return True if the entire buffer has been consumed."""
        return self._pos >= len(self._buf)

    def verify_finished(self):
        """Raise TLSDecodingError if any bytes remain unconsumed."""
        if not self.finished():
            raise TLSDecodingError("Bytes remaining in the buffer")

    def _read_uint(self, num_bytes):
        """Read a big-endian unsigned integer occupying num_bytes bytes."""
        int_bytes = bytearray(self._read_fixed_bytes(num_bytes))
        ret = 0
        for b in int_bytes:
            ret <<= 8
            ret += b
        return ret

    def _read_bounded_uint(self, min_value, max_value):
        """Read an integer in [min_value, max_value].

        The wire width is the minimal number of bytes able to represent
        max_value. Computed with integer arithmetic (bit_length) rather
        than math.ceil(math.log(...)) so that exact powers of 256 cannot
        be mis-sized by floating-point rounding.
        """
        length_of_value = (max_value.bit_length() + 7) // 8
        value = self._read_uint(length_of_value)
        if value < min_value or value > max_value:
            raise TLSDecodingError("Value %d is out of range ([%d, %d])" %
                                   (value, min_value, max_value))
        return value

    def _read_uint32(self, opts):
        """Read a uint32 field; width may be narrowed via bytes_in_use."""
        return self._read_uint(opts.bytes_in_use or 4)

    def _read_uint64(self, opts):
        """Read a uint64 field; width may be narrowed via bytes_in_use."""
        return self._read_uint(opts.bytes_in_use or 8)

    def _read_enum(self, opts):
        """Read an enum value bounded by the enum's declared max_value."""
        if not opts.max_value:
            raise TypeError("Enum field has no maximum value")
        return self._read_bounded_uint(0, opts.max_value)

    def _read_var_bytes(self, min_length, max_length):
        """Read a length-prefixed byte string with the given length bounds."""
        length = self._read_bounded_uint(min_length, max_length)
        return self._read_fixed_bytes(length)

    def _read_bytes(self, opts):
        """Read a bytes field, either fixed-length or length-prefixed."""
        if opts.fixed_length:
            return self._read_fixed_bytes(opts.fixed_length)
        elif opts.max_length:
            return self._read_var_bytes(opts.min_length, opts.max_length)
        else:
            raise TypeError("Byte field has no length limit")

    def _get_read_method(self, field):
        """Map a protobuf field descriptor to the matching _read_* method."""
        if field.type == descriptor.FieldDescriptor.TYPE_UINT32:
            return self._read_uint32
        elif field.type == descriptor.FieldDescriptor.TYPE_UINT64:
            return self._read_uint64
        elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
            return self._read_enum
        elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
            return self._read_bytes
        else:
            raise TypeError("Field %s of type %d not supported" %
                            (field.name, field.type))

    def _read_repeated(self, message, field, opts):
        """Read a repeated field (a length-prefixed vector of elements)."""
        if not opts.max_total_length:
            raise TypeError("Repeated field %s has no length limit" %
                            field.name)
        # Recursive, naive: parse the vector with a nested reader so the
        # total-length bound is enforced by construction.
        reader = TLSReader(self._read_var_bytes(opts.min_total_length,
                                                opts.max_total_length))
        target = getattr(message, field.name)
        if field.type == field.TYPE_MESSAGE:
            while not reader.finished():
                new_message = target.add()
                reader.read(new_message)
        else:
            # Enum elements are bounded by the enum-level options.
            if field.type == field.TYPE_ENUM:
                opts = field.enum_type.GetOptions().Extensions[
                    options.tls_enum_opts]
            # |reader| is another member of this class.
            # pylint: disable=protected-access
            read_method = reader._get_read_method(field)
            while not reader.finished():
                target.append(read_method(opts))

    def read(self, message):
        """Read from the buffer into the protocol buffer message.

        Fields are processed in ascending field-number order. Fields
        marked skip, or whose select_field/select_value guard does not
        match the message, are left untouched.
        """
        # TODO(ekasper): probably better not to modify the
        # original message until we're guaranteed to succeed?
        fields = message.DESCRIPTOR.fields_by_number
        for i in sorted(fields):
            field = fields[i]
            opts = field.GetOptions().Extensions[options.tls_opts]
            if opts.skip:
                continue
            if opts.select_field:
                value = getattr(message, opts.select_field)
                if value != opts.select_value:
                    continue
            if field.label == field.LABEL_REPEATED:
                self._read_repeated(message, field, opts)
            elif field.type == field.TYPE_MESSAGE:
                self.read(getattr(message, field.name))
            else:
                if field.type == field.TYPE_ENUM:
                    opts = field.enum_type.GetOptions().Extensions[
                        options.tls_enum_opts]
                setattr(message, field.name,
                        self._get_read_method(field)(opts))

    @classmethod
    def decode(cls, buf, message):
        """Decode buf into message, requiring full consumption of buf."""
        reader = cls(buf)
        reader.read(message)
        reader.verify_finished()
class TLSWriter(object):
    """Serialize protocol buffers into TLS wire format.

    Mirrors TLSReader: wire decisions are driven by the tls_opts /
    tls_enum_opts options attached to each protobuf field descriptor.
    """

    def __init__(self):
        self._buf = bytearray()

    def __len__(self):
        """Current length of the result."""
        return len(self._buf)

    def get_serialized_result(self):
        """Get the serialized contents."""
        return str(self._buf)

    def _write_uint(self, value, num_bytes):
        """Append value as a big-endian unsigned int of num_bytes bytes.

        Raises:
            TLSEncodingError: if value does not fit in num_bytes bytes.
        """
        original_value = value
        int_bytes = bytearray()
        for _ in range(num_bytes):
            int_bytes.append(value & 0xff)
            value >>= 8
        if value:
            # Report the original value, not the remaining high bits
            # (the previous message printed the shifted leftover).
            raise TLSEncodingError("Value %d is too large to fit in %d bytes" %
                                   (original_value, num_bytes))
        int_bytes.reverse()
        self._buf.extend(int_bytes)

    def _write_bounded_uint(self, value, min_value, max_value):
        """Write an integer in [min_value, max_value].

        The wire width is the minimal number of bytes able to represent
        max_value, computed with integer arithmetic (bit_length) rather
        than math.ceil(math.log(...)) so that exact powers of 256 cannot
        be mis-sized by floating-point rounding.
        """
        if value < min_value or value > max_value:
            raise TLSEncodingError("Value %d out of range ([%d, %d])" %
                                   (value, min_value, max_value))
        length_of_value = (max_value.bit_length() + 7) // 8
        self._write_uint(value, length_of_value)

    def _write_uint32(self, value, opts):
        """Write a uint32 field; width may be narrowed via bytes_in_use."""
        self._write_uint(value, opts.bytes_in_use or 4)

    def _write_uint64(self, value, opts):
        """Write a uint64 field; width may be narrowed via bytes_in_use."""
        self._write_uint(value, opts.bytes_in_use or 8)

    def _write_enum(self, value, opts):
        """Write an enum value bounded by the enum's declared max_value."""
        if not opts.max_value:
            raise TypeError("Enum field has no maximum value")
        self._write_bounded_uint(value, 0, opts.max_value)

    def _write_fixed_bytes(self, value, length):
        """Write a byte string that must be exactly `length` bytes long."""
        if len(value) != length:
            raise TLSEncodingError("Invalid value %s of length %d: required "
                                   "length is %d" % (value, len(value), length))
        self._buf.extend(value)

    def _write_var_bytes(self, value, min_length, max_length):
        """Write a length-prefixed byte string with the given bounds."""
        self._write_bounded_uint(len(value), min_length, max_length)
        self._buf.extend(value)

    def _write_bytes(self, value, opts):
        """Write a bytes field, either fixed-length or length-prefixed."""
        if opts.fixed_length:
            return self._write_fixed_bytes(value, opts.fixed_length)
        elif opts.max_length:
            return self._write_var_bytes(value, opts.min_length,
                                         opts.max_length)
        else:
            raise TypeError("Byte field has no length limit")

    def _get_write_method(self, field):
        """Map a protobuf field descriptor to the matching _write_* method."""
        if field.type == descriptor.FieldDescriptor.TYPE_UINT32:
            return self._write_uint32
        elif field.type == descriptor.FieldDescriptor.TYPE_UINT64:
            return self._write_uint64
        elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
            return self._write_enum
        elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
            return self._write_bytes
        else:
            raise TypeError("Field %s of type %d not supported" %
                            (field.name, field.type))

    def _write_repeated(self, message, field, opts):
        """Write a repeated field (a length-prefixed vector of elements)."""
        writer = TLSWriter()
        if field.type == field.TYPE_MESSAGE:
            # Recursive, naive: could instead read ahead to determine
            # the total length first.
            for elem in getattr(message, field.name):
                writer.write(elem)
        else:
            # Use a separate name for the element-level options: the old
            # code overwrote |opts| with the enum options, so the total
            # length prefix below was bounded by the WRONG options for
            # repeated enum fields.
            elem_opts = opts
            if field.type == field.TYPE_ENUM:
                elem_opts = field.enum_type.GetOptions().Extensions[
                    options.tls_enum_opts]
            # pylint: disable=protected-access
            write_method = writer._get_write_method(field)
            for elem in getattr(message, field.name):
                write_method(elem, elem_opts)
        length = len(writer)
        self._write_bounded_uint(length, opts.min_total_length,
                                 opts.max_total_length)
        self._buf.extend(writer._buf)  # pylint: disable=protected-access

    def write(self, message):
        """Append a serialized message to the writer's buffer.

        Fields are processed in ascending field-number order. Fields
        marked skip, or whose select_field/select_value guard does not
        match the message, are not written.
        """
        fields = message.DESCRIPTOR.fields_by_number
        for i in sorted(fields):
            field = fields[i]
            opts = field.GetOptions().Extensions[options.tls_opts]
            if opts.skip:
                continue
            if opts.select_field:
                value = getattr(message, opts.select_field)
                if value != opts.select_value:
                    continue
            if field.label == field.LABEL_REPEATED:
                self._write_repeated(message, field, opts)
            elif field.type == field.TYPE_MESSAGE:
                self.write(getattr(message, field.name))
            else:
                if field.type == field.TYPE_ENUM:
                    opts = field.enum_type.GetOptions().Extensions[
                        options.tls_enum_opts]
                self._get_write_method(field)(getattr(message, field.name),
                                              opts)

    @classmethod
    def encode(cls, message):
        """Serialize message and return the resulting byte string."""
        writer = cls()
        writer.write(message)
        return writer.get_serialized_result()
def encode(message):
  """Module-level convenience wrapper: TLS-encode *message* and return bytes."""
  serializer = TLSWriter()
  serializer.write(message)
  return serializer.get_serialized_result()
def decode(buf, message):
  """Deserialize TLS-encoded *buf* into *message* in place."""
  TLSReader.decode(buf, message)
| apache-2.0 |
JulienMcJay/eclock | windows/Python27/Lib/site-packages/pywin32-218-py2.7-win32.egg/pywin/Demos/ocx/ocxtest.py | 34 | 5473 | # OCX Tester for Pythonwin
#
# This file _is_ ready to run. All that is required is that the OCXs being tested
# are installed on your machine.
#
# The .py files behind the OCXs will be automatically generated and imported.
from pywin.mfc import dialog, window, activex
import win32ui, win32uiole
import win32con
import os, sys, win32api, glob
from win32com.client import gencache
def MakeDlgTemplate():
    """Build the in-memory dialog template that hosts the demo OCX controls.

    Returns a win32ui dialog-template list: header item followed by the
    "About" (IDOK) and "Close" (IDCANCEL) buttons. The OCX itself is created
    at runtime in OnInitDialog, not from this template.
    """
    style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
    cs = win32con.WS_CHILD | win32con.WS_VISIBLE
    dlg = [ ["OCX Demos", (0, 0, 350, 350), style, None, (8, "MS Sans Serif")], ]
    s = win32con.WS_TABSTOP | cs
#    dlg.append([131, None, 130, (5, 40, 110, 48),
#        s | win32con.LBS_NOTIFY | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_BORDER])
#    dlg.append(["{8E27C92B-1264-101C-8A2F-040224009C02}", None, 131, (5, 40, 110, 48),win32con.WS_TABSTOP])
    dlg.append([128, "About", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON])
    s = win32con.BS_PUSHBUTTON | s
    dlg.append([128, "Close", win32con.IDCANCEL, (124, 22, 50, 14), s])
    return dlg
####################################
#
# Calendar test code
#
def GetTestCalendarClass():
    """Generate COM support for the MSAccess Calendar OCX and return a
    dialog class that hosts it, or None if the control is not installed."""
    global calendarParentModule
    win32ui.DoWaitCursor(1)
    calendarParentModule = gencache.EnsureModule("{8E27C92E-1264-101C-8A2F-040224009C02}", 0, 7, 0)
    win32ui.DoWaitCursor(0)
    if calendarParentModule is None:
        return None
    class TestCalDialog(dialog.Dialog):
        def OnInitDialog(self):
            # Event-sink subclass: each handler simply logs the event to stdout.
            class MyCal(activex.Control, calendarParentModule.Calendar):
                def OnAfterUpdate(self):
                    print "OnAfterUpdate"
                def OnClick(self):
                    print "OnClick"
                def OnDblClick(self):
                    print "OnDblClick"
                def OnKeyDown(self, KeyCode, Shift):
                    print "OnKeyDown", KeyCode, Shift
                def OnKeyPress(self, KeyAscii):
                    print "OnKeyPress", KeyAscii
                def OnKeyUp(self, KeyCode, Shift):
                    print "OnKeyUp", KeyCode, Shift
                def OnBeforeUpdate(self, Cancel):
                    print "OnBeforeUpdate", Cancel
                def OnNewMonth(self):
                    print "OnNewMonth"
                def OnNewYear(self):
                    print "OnNewYear"
            rc = dialog.Dialog.OnInitDialog(self)
            self.olectl = MyCal()
            try:
                self.olectl.CreateControl("OCX", win32con.WS_TABSTOP | win32con.WS_VISIBLE, (7,43,500,300), self._obj_, 131)
            except win32ui.error:
                self.MessageBox("The Calendar Control could not be created")
                self.olectl = None
                self.EndDialog(win32con.IDCANCEL)
            return rc
        def OnOK(self):
            # The "About" (IDOK) button shows the control's About box
            # rather than closing the dialog.
            self.olectl.AboutBox()
    return TestCalDialog
####################################
#
# Video Control
#
def GetTestVideoModule():
    """Generate the ActiveMovie COM wrapper and pick a sample AVI to play.

    Returns the generated module, or None if the control is unavailable or
    no .avi file exists in the Windows directory. Side effect: sets the
    module globals videoControlModule and videoControlFileName.
    """
    global videoControlModule, videoControlFileName
    win32ui.DoWaitCursor(1)
    videoControlModule = gencache.EnsureModule("{05589FA0-C356-11CE-BF01-00AA0055595A}", 0, 2, 0)
    win32ui.DoWaitCursor(0)
    if videoControlModule is None:
        return None
    fnames = glob.glob(os.path.join(win32api.GetWindowsDirectory(), "*.avi"))
    if not fnames:
        print "No AVI files available in system directory"
        return None
    videoControlFileName = fnames[0]
    return videoControlModule
def GetTestVideoDialogClass():
    """Return a dialog class hosting the ActiveMovie control, or None."""
    if GetTestVideoModule() is None:
        return None
    class TestVideoDialog(dialog.Dialog):
        def OnInitDialog(self):
            rc = dialog.Dialog.OnInitDialog(self)
            try:
                self.olectl = activex.MakeControlInstance(videoControlModule.ActiveMovie)
                self.olectl.CreateControl("", win32con.WS_TABSTOP | win32con.WS_VISIBLE, (7,43,500,300), self._obj_, 131)
            except win32ui.error:
                self.MessageBox("The Video Control could not be created")
                self.olectl = None
                self.EndDialog(win32con.IDCANCEL)
                return
            # Load the AVI selected by GetTestVideoModule(); playback is not
            # auto-started (Run() is left commented out).
            self.olectl.FileName = videoControlFileName
#            self.olectl.Run()
            return rc
        def OnOK(self):
            # "OK" shows the control's About box instead of closing.
            self.olectl.AboutBox()
    return TestVideoDialog
###############
#
# An OCX in an MDI Frame
#
class OCXFrame(window.MDIChildWnd):
    """MDI child frame that hosts a single OCX filling its client area."""
    def __init__(self):
        pass # Dont call base class doc/view version...
    def Create(self, controlClass, title, rect = None, parent = None):
        """Create the MDI child window, then create controlClass sized to it."""
        style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW
        self._obj_ = win32ui.CreateMDIChild()
        self._obj_.AttachObject(self)
        self._obj_.CreateWindow(None, title, style, rect, parent)
        # Normalize the client rect to a (0, 0, width, height) tuple.
        rect = self.GetClientRect()
        rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
        self.ocx = controlClass()
        self.ocx.CreateControl("", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000)
def MDITest():
    """Host the Calendar OCX directly in an MDI child frame (no dialog)."""
    calendarParentModule = gencache.EnsureModule("{8E27C92E-1264-101C-8A2F-040224009C02}", 0, 7, 0)
    # Minimal event sink: only two events are traced in this variant.
    class MyCal(activex.Control, calendarParentModule.Calendar):
        def OnAfterUpdate(self):
            print "OnAfterUpdate"
        def OnClick(self):
            print "OnClick"
    f = OCXFrame()
    f.Create(MyCal, "Calendar Test")
def test1():
    """Run the MSAccess Calendar OCX demo dialog."""
    klass = GetTestCalendarClass()
    if klass is None:
        print "Can not test the MSAccess Calendar control - it does not appear to be installed"
        return
    d = klass(MakeDlgTemplate() )
    d.DoModal()
def test2():
    """Run the ActiveMovie (video) OCX demo dialog."""
    klass = GetTestVideoDialogClass()
    if klass is None:
        print "Can not test the Video OCX - it does not appear to be installed,"
        print "or no AVI files can be found."
        return
    d = klass(MakeDlgTemplate() )
    d.DoModal()
    d = None
def test3():
    """Run the COMM control demo dialog."""
    # NOTE(review): TestCOMMDialog is not defined anywhere in this file, so
    # calling test3() raises NameError; it is also excluded from testall().
    d = TestCOMMDialog(MakeDlgTemplate() )
    d.DoModal()
    d = None
def testall():
    """Run the demos expected to work (test3 is intentionally excluded)."""
    test1()
    test2()
def demo():
    """Entry point used by the Pythonwin demo framework."""
    testall()
# Allow running stand-alone from Pythonwin; demoutils verifies a GUI exists.
if __name__=='__main__':
    import demoutils
    if demoutils.NeedGoodGUI():
        testall()
| gpl-2.0 |
resmo/ansible | lib/ansible/modules/cloud/cloudstack/cs_physical_network.py | 24 | 14589 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2017, Netservers Ltd. <support@netservers.co.uk>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Plugin metadata consumed by ansible-doc and the Ansible plugin loader.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_physical_network
short_description: Manages physical networks on Apache CloudStack based clouds.
description:
- Create, update and remove networks.
    - Enable and disable Network Service Providers
- Enables Internal LoadBalancer and VPC/VirtualRouter elements as required
version_added: "2.8"
author:
- Netservers Ltd. (@netservers)
- Patryk Cichy (@PatTheSilent)
options:
name:
description:
- Name of the physical network.
required: true
aliases:
- physical_network
type: str
zone:
description:
- Name of the zone in which the network belongs.
- If not set, default zone is used.
type: str
broadcast_domain_range:
description:
- broadcast domain range for the physical network[Pod or Zone].
choices: [ POD, ZONE ]
type: str
domain:
description:
- Domain the network is owned by.
type: str
isolation_method:
description:
- Isolation method for the physical network.
choices: [ VLAN, GRE, L3 ]
type: str
network_speed:
description:
- The speed for the physical network.
choices: [1G, 10G]
type: str
tags:
description:
- A tag to identify this network.
- Physical networks support only one tag.
- To remove an existing tag pass an empty string.
aliases:
- tag
type: str
vlan:
description:
- The VLAN/VNI Ranges of the physical network.
type: str
nsps_enabled:
description:
- List of Network Service Providers to enable.
type: list
nsps_disabled:
description:
- List of Network Service Providers to disable.
type: list
state:
description:
- State of the physical network.
default: present
type: str
choices: [ present, absent, disabled, enabled ]
poll_async:
description:
- Poll async jobs until job has finished.
default: yes
type: bool
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Ensure a network is present
cs_physical_network:
name: net01
zone: zone01
isolation_method: VLAN
broadcast_domain_range: ZONE
delegate_to: localhost
- name: Set a tag on a network
cs_physical_network:
name: net01
tag: overlay
delegate_to: localhost
- name: Remove tag on a network
cs_physical_network:
name: net01
tag: ""
delegate_to: localhost
- name: Ensure a network is enabled with specific nsps enabled
cs_physical_network:
name: net01
zone: zone01
isolation_method: VLAN
vlan: 100-200,300-400
broadcast_domain_range: ZONE
state: enabled
nsps_enabled:
- virtualrouter
- internallbvm
- vpcvirtualrouter
delegate_to: localhost
- name: Ensure a network is disabled
cs_physical_network:
name: net01
zone: zone01
state: disabled
delegate_to: localhost
- name: Ensure a network is enabled
cs_physical_network:
name: net01
zone: zone01
state: enabled
delegate_to: localhost
- name: Ensure a network is absent
cs_physical_network:
name: net01
zone: zone01
state: absent
delegate_to: localhost
'''
RETURN = '''
---
id:
description: UUID of the network.
returned: success
type: str
sample: 3f8f25cd-c498-443f-9058-438cfbcbff50
name:
description: Name of the network.
returned: success
type: str
sample: net01
state:
description: State of the network [Enabled/Disabled].
returned: success
type: str
sample: Enabled
broadcast_domain_range:
description: broadcastdomainrange of the network [POD / ZONE].
returned: success
type: str
sample: ZONE
isolation_method:
description: isolationmethod of the network [VLAN/GRE/L3].
returned: success
type: str
sample: VLAN
network_speed:
description: networkspeed of the network [1G/10G].
returned: success
type: str
sample: 1G
zone:
description: Name of zone the physical network is in.
returned: success
type: str
sample: ch-gva-2
domain:
description: Name of domain the network is in.
returned: success
type: str
sample: domain1
nsps:
description: list of enabled or disabled Network Service Providers
type: complex
returned: on enabling/disabling of Network Service Providers
contains:
enabled:
description: list of Network Service Providers that were enabled
returned: on Network Service Provider enabling
type: list
sample:
- virtualrouter
disabled:
description: list of Network Service Providers that were disabled
returned: on Network Service Provider disabling
type: list
sample:
- internallbvm
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together,
)
class AnsibleCloudStackPhysicalNetwork(AnsibleCloudStack):
    """Manages a CloudStack physical network and its Network Service Providers."""

    def __init__(self, module):
        super(AnsibleCloudStackPhysicalNetwork, self).__init__(module)
        # Map API result keys to the snake_case keys returned to the user.
        self.returns = {
            'isolationmethods': 'isolation_method',
            'broadcastdomainrange': 'broadcast_domain_range',
            'networkspeed': 'network_speed',
            'vlan': 'vlan',
            'tags': 'tags',
        }
        # Lazily populated caches. physical_network is initialized explicitly
        # here because get_physical_network() reads it before any assignment.
        self.physical_network = None
        self.nsps = []
        self.vrouters = None
        self.loadbalancers = None

    def _get_common_args(self):
        """Return the argument dict shared by the create and update API calls."""
        args = {
            'name': self.module.params.get('name'),
            'isolationmethods': self.module.params.get('isolation_method'),
            'broadcastdomainrange': self.module.params.get('broadcast_domain_range'),
            'networkspeed': self.module.params.get('network_speed'),
            'tags': self.module.params.get('tags'),
            'vlan': self.module.params.get('vlan'),
        }
        state = self.module.params.get('state')
        if state in ['enabled', 'disabled']:
            # The API expects 'Enabled'/'Disabled'; present/absent pass no state.
            args['state'] = state.capitalize()
        return args

    def get_physical_network(self, key=None):
        """Look up the physical network by name or id, caching the result."""
        physical_network = self.module.params.get('name')
        if self.physical_network:
            return self._get_by_key(key, self.physical_network)
        args = {
            'zoneid': self.get_zone(key='id')
        }
        physical_networks = self.query_api('listPhysicalNetworks', **args)
        if physical_networks:
            for net in physical_networks['physicalnetwork']:
                if physical_network.lower() in [net['name'].lower(), net['id']]:
                    self.physical_network = net
                    self.result['physical_network'] = net['name']
                    break
        return self._get_by_key(key, self.physical_network)

    def get_nsp(self, name=None):
        """Return the Network Service Provider matching *name* or fail the module."""
        if not self.nsps:
            args = {
                'physicalnetworkid': self.get_physical_network(key='id')
            }
            res = self.query_api('listNetworkServiceProviders', **args)
            self.nsps = res['networkserviceprovider']
        names = []
        for nsp in self.nsps:
            names.append(nsp['name'])
            if nsp['name'].lower() == name.lower():
                return nsp
        self.module.fail_json(msg="Failed: '{0}' not in network service providers list '[{1}]'".format(name, names))

    def update_nsp(self, name=None, state=None, service_list=None):
        """Set the state and/or service list of the named NSP, if changed."""
        nsp = self.get_nsp(name)
        if not service_list and nsp['state'] == state:
            return nsp
        args = {
            'id': nsp['id'],
            'servicelist': service_list,
            'state': state
        }
        if not self.module.check_mode:
            res = self.query_api('updateNetworkServiceProvider', **args)
            poll_async = self.module.params.get('poll_async')
            if poll_async:
                nsp = self.poll_job(res, 'networkserviceprovider')
        self.result['changed'] = True
        return nsp

    def get_vrouter_element(self, nsp_name='virtualrouter'):
        """Return the VirtualRouterElement backing the given NSP."""
        nsp = self.get_nsp(nsp_name)
        nspid = nsp['id']
        if self.vrouters is None:
            # Build a one-time nspid -> element index.
            self.vrouters = dict()
            res = self.query_api('listVirtualRouterElements', )
            for vrouter in res['virtualrouterelement']:
                self.vrouters[vrouter['nspid']] = vrouter
        if nspid not in self.vrouters:
            self.module.fail_json(msg="Failed: No VirtualRouterElement found for nsp '%s'" % nsp_name)
        return self.vrouters[nspid]

    def get_loadbalancer_element(self, nsp_name='internallbvm'):
        """Return the InternalLoadBalancerElement backing the given NSP."""
        nsp = self.get_nsp(nsp_name)
        nspid = nsp['id']
        if self.loadbalancers is None:
            # Build a one-time nspid -> element index.
            self.loadbalancers = dict()
            res = self.query_api('listInternalLoadBalancerElements', )
            for loadbalancer in res['internalloadbalancerelement']:
                self.loadbalancers[loadbalancer['nspid']] = loadbalancer
            if nspid not in self.loadbalancers:
                self.module.fail_json(msg="Failed: No Loadbalancer found for nsp '%s'" % nsp_name)
        return self.loadbalancers[nspid]

    def set_vrouter_element_state(self, enabled, nsp_name='virtualrouter'):
        """Enable or disable the virtual router element of the given NSP."""
        vrouter = self.get_vrouter_element(nsp_name)
        if vrouter['enabled'] == enabled:
            return vrouter
        args = {
            'id': vrouter['id'],
            'enabled': enabled
        }
        if not self.module.check_mode:
            res = self.query_api('configureVirtualRouterElement', **args)
            poll_async = self.module.params.get('poll_async')
            if poll_async:
                vrouter = self.poll_job(res, 'virtualrouterelement')
        self.result['changed'] = True
        return vrouter

    def set_loadbalancer_element_state(self, enabled, nsp_name='internallbvm'):
        """Enable or disable the internal load balancer element of the given NSP."""
        loadbalancer = self.get_loadbalancer_element(nsp_name=nsp_name)
        if loadbalancer['enabled'] == enabled:
            return loadbalancer
        args = {
            'id': loadbalancer['id'],
            'enabled': enabled
        }
        if not self.module.check_mode:
            res = self.query_api('configureInternalLoadBalancerElement', **args)
            poll_async = self.module.params.get('poll_async')
            if poll_async:
                loadbalancer = self.poll_job(res, 'internalloadbalancerelement')
        self.result['changed'] = True
        return loadbalancer

    def present_network(self):
        """Ensure the physical network exists, creating or updating it."""
        network = self.get_physical_network()
        if network:
            network = self._update_network()
        else:
            network = self._create_network()
        return network

    def _create_network(self):
        """Create the physical network and cache/return the API result."""
        self.result['changed'] = True
        args = dict(zoneid=self.get_zone(key='id'))
        args.update(self._get_common_args())
        if self.get_domain(key='id'):
            args['domainid'] = self.get_domain(key='id')
        if not self.module.check_mode:
            resource = self.query_api('createPhysicalNetwork', **args)
            poll_async = self.module.params.get('poll_async')
            if poll_async:
                # Fix: cache under self.physical_network (was self.network),
                # which is the attribute every reader of this class uses.
                self.physical_network = self.poll_job(resource, 'physicalnetwork')
        return self.physical_network

    def _update_network(self):
        """Update the existing physical network when any argument changed."""
        network = self.get_physical_network()
        args = dict(id=network['id'])
        args.update(self._get_common_args())
        if self.has_changed(args, network):
            self.result['changed'] = True
            if not self.module.check_mode:
                resource = self.query_api('updatePhysicalNetwork', **args)
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    self.physical_network = self.poll_job(resource, 'physicalnetwork')
        return self.physical_network

    def absent_network(self):
        """Remove the physical network if it exists."""
        physical_network = self.get_physical_network()
        if physical_network:
            self.result['changed'] = True
            args = {
                'id': physical_network['id'],
            }
            if not self.module.check_mode:
                resource = self.query_api('deletePhysicalNetwork', **args)
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    self.poll_job(resource, 'success')
        return physical_network
def main():
    """Module entry point: build the argument spec and apply the requested state."""
    argument_spec = cs_argument_spec()
    argument_spec.update(dict(
        name=dict(required=True, aliases=['physical_network']),
        zone=dict(),
        domain=dict(),
        vlan=dict(),
        nsps_disabled=dict(type='list'),
        nsps_enabled=dict(type='list'),
        network_speed=dict(choices=['1G', '10G']),
        broadcast_domain_range=dict(choices=['POD', 'ZONE']),
        isolation_method=dict(choices=['VLAN', 'GRE', 'L3']),
        state=dict(choices=['present', 'enabled', 'disabled', 'absent'], default='present'),
        tags=dict(aliases=['tag']),
        poll_async=dict(type='bool', default=True),
    ))
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=cs_required_together(),
        supports_check_mode=True
    )
    acs_network = AnsibleCloudStackPhysicalNetwork(module)
    state = module.params.get('state')
    nsps_disabled = module.params.get('nsps_disabled', [])
    nsps_enabled = module.params.get('nsps_enabled', [])
    if state in ['absent']:
        network = acs_network.absent_network()
    else:
        network = acs_network.present_network()
        if nsps_disabled is not None:
            for name in nsps_disabled:
                acs_network.update_nsp(name=name, state='Disabled')
        if nsps_enabled is not None:
            for nsp_name in nsps_enabled:
                # Router/LB providers need their element enabled before the
                # NSP itself can be enabled.
                if nsp_name.lower() in ['virtualrouter', 'vpcvirtualrouter']:
                    acs_network.set_vrouter_element_state(enabled=True, nsp_name=nsp_name)
                elif nsp_name.lower() == 'internallbvm':
                    acs_network.set_loadbalancer_element_state(enabled=True, nsp_name=nsp_name)
                acs_network.update_nsp(name=nsp_name, state='Enabled')
    result = acs_network.get_result(network)
    # Echo back the NSP lists that were acted upon.
    if nsps_enabled:
        result['nsps_enabled'] = nsps_enabled
    if nsps_disabled:
        result['nsps_disabled'] = nsps_disabled
    module.exit_json(**result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
jamesandariese/dd-agent | resources/processes.py | 25 | 3135 | # stdlib
from collections import namedtuple
import subprocess
# project
from resources import (
agg,
ResourcePlugin,
SnapshotDescriptor,
SnapshotField,
)
class Processes(ResourcePlugin):
    """Resource plugin that snapshots per-process CPU/memory usage from `ps`,
    grouped by process "family" (the executable's base name)."""
    RESOURCE_KEY = "processes"
    FLUSH_INTERVAL = 1 # in minutes
    def describe_snapshot(self):
        """Describe one snapshot row and how each field aggregates."""
        return SnapshotDescriptor(
            1,
            SnapshotField("user", 'str', aggregator=agg.append, temporal_aggregator=agg.append),
            SnapshotField("pct_cpu", 'float'),
            SnapshotField("pct_mem", 'float'),
            SnapshotField("vsz", 'int'),
            SnapshotField("rss", 'int'),
            SnapshotField("family", 'str', aggregator=None, temporal_aggregator=None,
                          group_on=True, temporal_group_on=True),
            SnapshotField("ps_count", 'int'))
    def _get_proc_list(self):
        """Run `ps aux`/`ps auxww` and return rows as lists of stripped fields,
        or False if ps could not be executed."""
        # Get output from ps
        try:
            process_exclude_args = self.config.get('exclude_process_args', False)
            if process_exclude_args:
                ps_arg = 'aux'
            else:
                ps_arg = 'auxww'
            ps = subprocess.Popen(['ps', ps_arg], stdout=subprocess.PIPE, close_fds=True).communicate()[0]
        except Exception, e:
            self.log.exception('Cannot get process list')
            return False
        # Split out each process
        processLines = ps.split('\n')
        del processLines[0] # Removes the headers
        processLines.pop() # Removes a trailing empty line
        processes = []
        for line in processLines:
            # Split on whitespace, keeping the command (11th field) intact.
            line = line.split(None, 10)
            processes.append(map(lambda s: s.strip(), line))
        return processes
    @staticmethod
    def group_by_family(o):
        # Group rows by the family field (index 5 of a snapshot row).
        return o[5]
    @staticmethod
    def filter_by_usage(o):
        # keep everything over 1% (cpu or ram)
        # NOTE(review): indexes 0/1 are user/pct_cpu in a raw snapshot row --
        # presumably aggregation reshapes rows before this filter; confirm.
        return o[0] > 1 or o[1] > 1
    def _parse_proc_list(self, processes):
        """Convert raw ps rows into snapshot entries, skipping malformed lines."""
        def _compute_family(command):
            # Kernel threads appear as "[name]"; otherwise use the binary's
            # base name (first token of the command, last path component).
            if command.startswith('['):
                return 'kernel'
            else:
                return (command.split()[0]).split('/')[-1]
        PSLine = namedtuple("PSLine", "user,pid,pct_cpu,pct_mem,vsz,rss,tty,stat,started,time,command")
        self.start_snapshot()
        for line in processes:
            try:
                psl = PSLine(*line)
                self.add_to_snapshot([psl.user,
                                      float(psl.pct_cpu),
                                      float(psl.pct_mem),
                                      int(psl.vsz),
                                      int(psl.rss),
                                      _compute_family(psl.command),
                                      1])
            except Exception:
                pass
        self.end_snapshot(group_by=self.group_by_family)
    def flush_snapshots(self, snapshot_group):
        """Flush accumulated snapshots, grouping by family and dropping idle rows."""
        self._flush_snapshots(snapshot_group=snapshot_group,
                              group_by=self.group_by_family,
                              filter_by=self.filter_by_usage)
    def check(self):
        """Collect and record one snapshot of the current process list."""
        self._parse_proc_list(self._get_proc_list())
| bsd-3-clause |
postlund/home-assistant | tests/components/minio/test_minio.py | 2 | 5241 | """Tests for Minio Hass related code."""
import asyncio
import json
from unittest.mock import MagicMock
from asynctest import call, patch
import pytest
from homeassistant.components.minio import (
CONF_ACCESS_KEY,
CONF_HOST,
CONF_LISTEN,
CONF_LISTEN_BUCKET,
CONF_PORT,
CONF_SECRET_KEY,
CONF_SECURE,
DOMAIN,
QueueListener,
)
from homeassistant.core import callback
from homeassistant.setup import async_setup_component
from tests.components.minio.common import TEST_EVENT
@pytest.fixture(name="minio_client")
def minio_client_fixture():
    """Patch the Minio client class and yield the mocked client instance."""
    with patch("homeassistant.components.minio.minio_helper.Minio") as minio_mock:
        minio_client_mock = minio_mock.return_value
        yield minio_client_mock
@pytest.fixture(name="minio_client_event")
def minio_client_event_fixture():
    """Patch helper function for minio notification stream."""
    with patch("homeassistant.components.minio.minio_helper.Minio") as minio_mock:
        minio_client_mock = minio_mock.return_value
        response_mock = MagicMock()
        stream_mock = MagicMock()
        # Two empty keep-alive chunks, then one real notification payload.
        stream_mock.__next__.side_effect = [
            "",
            "",
            bytearray(json.dumps(TEST_EVENT), "utf-8"),
        ]
        response_mock.stream.return_value = stream_mock
        minio_client_mock._url_open.return_value = response_mock
        yield minio_client_mock
async def test_minio_services(hass, caplog, minio_client):
    """Test Minio put/get/remove services against the mocked client."""
    # Fix: use a set literal with the directory string. set("/test") built a
    # set of single characters ({'/', 't', 'e', 's'}), unintentionally
    # whitelisting "/" (everything) instead of just /test.
    hass.config.whitelist_external_dirs = {"/test"}
    await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_HOST: "localhost",
                CONF_PORT: "9000",
                CONF_ACCESS_KEY: "abcdef",
                CONF_SECRET_KEY: "0123456789",
                CONF_SECURE: "true",
            }
        },
    )
    await hass.async_start()
    await hass.async_block_till_done()
    assert "Setup of domain minio took" in caplog.text
    # Call services
    await hass.services.async_call(
        DOMAIN,
        "put",
        {"file_path": "/test/some_file", "key": "some_key", "bucket": "some_bucket"},
        blocking=True,
    )
    assert minio_client.fput_object.call_args == call(
        "some_bucket", "some_key", "/test/some_file"
    )
    minio_client.reset_mock()
    await hass.services.async_call(
        DOMAIN,
        "get",
        {"file_path": "/test/some_file", "key": "some_key", "bucket": "some_bucket"},
        blocking=True,
    )
    assert minio_client.fget_object.call_args == call(
        "some_bucket", "some_key", "/test/some_file"
    )
    minio_client.reset_mock()
    await hass.services.async_call(
        DOMAIN, "remove", {"key": "some_key", "bucket": "some_bucket"}, blocking=True
    )
    assert minio_client.remove_object.call_args == call("some_bucket", "some_key")
    minio_client.reset_mock()
async def test_minio_listen(hass, caplog, minio_client_event):
    """Test minio listen on notifications."""
    minio_client_event.presigned_get_object.return_value = "http://url"
    events = []
    @callback
    def event_callback(event):
        """Handle event callback."""
        events.append(event)
    hass.bus.async_listen("minio", event_callback)
    await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_HOST: "localhost",
                CONF_PORT: "9000",
                CONF_ACCESS_KEY: "abcdef",
                CONF_SECRET_KEY: "0123456789",
                CONF_SECURE: "true",
                CONF_LISTEN: [{CONF_LISTEN_BUCKET: "test"}],
            }
        },
    )
    await hass.async_start()
    await hass.async_block_till_done()
    assert "Setup of domain minio took" in caplog.text
    # The mocked notification stream delivers exactly one event; yield to the
    # event loop until the listener thread has fired it onto the bus.
    while not events:
        await asyncio.sleep(0)
    assert 1 == len(events)
    event = events[0]
    assert DOMAIN == event.event_type
    assert "s3:ObjectCreated:Put" == event.data["event_name"]
    assert "5jJkTAo.jpg" == event.data["file_name"]
    assert "test" == event.data["bucket"]
    assert "5jJkTAo.jpg" == event.data["key"]
    assert "http://url" == event.data["presigned_url"]
    assert 0 == len(event.data["metadata"])
async def test_queue_listener():
    """Tests QueueListener firing events on Home Assistant event bus."""
    hass = MagicMock()
    queue_listener = QueueListener(hass)
    queue_listener.start()
    queue_entry = {
        "event_name": "s3:ObjectCreated:Put",
        "bucket": "some_bucket",
        "key": "some_dir/some_file.jpg",
        "presigned_url": "http://host/url?signature=secret",
        "metadata": {},
    }
    queue_listener.queue.put(queue_entry)
    # stop() is expected to block until the queued entry has been processed,
    # so the bus.fire call has happened by the time we inspect the mock.
    queue_listener.stop()
    call_domain, call_event = hass.bus.fire.call_args[0]
    # The listener derives file_name from the last component of the key.
    expected_event = {
        "event_name": "s3:ObjectCreated:Put",
        "file_name": "some_file.jpg",
        "bucket": "some_bucket",
        "key": "some_dir/some_file.jpg",
        "presigned_url": "http://host/url?signature=secret",
        "metadata": {},
    }
    assert DOMAIN == call_domain
    assert json.dumps(expected_event, sort_keys=True) == json.dumps(
        call_event, sort_keys=True
    )
| apache-2.0 |
mtlchun/edx | lms/djangoapps/courseware/model_data.py | 8 | 16048 | """
Classes to provide the LMS runtime data storage to XBlocks
"""
import json
from collections import defaultdict
from itertools import chain
from .models import (
StudentModule,
XModuleUserStateSummaryField,
XModuleStudentPrefsField,
XModuleStudentInfoField
)
import logging
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.block_types import BlockTypeKeyV1
from opaque_keys.edx.asides import AsideUsageKeyV1
from django.db import DatabaseError
from xblock.runtime import KeyValueStore
from xblock.exceptions import KeyValueMultiSaveError, InvalidScopeError
from xblock.fields import Scope, UserScope
from xmodule.modulestore.django import modulestore
from xblock.core import XBlockAside
log = logging.getLogger(__name__)
class InvalidWriteError(Exception):
    """
    Raised to indicate that writing to a particular key
    in the KeyValueStore is disabled.
    """
def chunks(items, chunk_size):
    """
    Yields the values from items in chunks of size chunk_size

    The input iterable is materialized once so it can be sliced; each yielded
    chunk is a list of up to ``chunk_size`` consecutive values (the final
    chunk may be shorter).
    """
    items = list(items)
    # `range` (not the Python-2-only `xrange`) iterates identically on both
    # Python 2 and 3, keeping this helper portable with no behavior change.
    return (items[i:i + chunk_size] for i in range(0, len(items), chunk_size))
class FieldDataCache(object):
"""
A cache of django model objects needed to supply the data
for a module and its decendants
"""
def __init__(self, descriptors, course_id, user, select_for_update=False, asides=None):
'''
Find any courseware.models objects that are needed by any descriptor
in descriptors. Attempts to minimize the number of queries to the database.
Note: Only modules that have store_state = True or have shared
state will have a StudentModule.
Arguments
descriptors: A list of XModuleDescriptors.
course_id: The id of the current course
user: The user for which to cache data
select_for_update: True if rows should be locked until end of transaction
asides: The list of aside types to load, or None to prefetch no asides.
'''
self.cache = {}
self.descriptors = descriptors
self.select_for_update = select_for_update
if asides is None:
self.asides = []
else:
self.asides = asides
assert isinstance(course_id, CourseKey)
self.course_id = course_id
self.user = user
if user.is_authenticated():
for scope, fields in self._fields_to_cache().items():
for field_object in self._retrieve_fields(scope, fields):
self.cache[self._cache_key_from_field_object(scope, field_object)] = field_object
@classmethod
def cache_for_descriptor_descendents(cls, course_id, user, descriptor, depth=None,
descriptor_filter=lambda descriptor: True,
select_for_update=False, asides=None):
"""
course_id: the course in the context of which we want StudentModules.
user: the django user for whom to load modules.
descriptor: An XModuleDescriptor
depth is the number of levels of descendent modules to load StudentModules for, in addition to
the supplied descriptor. If depth is None, load all descendent StudentModules
descriptor_filter is a function that accepts a descriptor and return wether the StudentModule
should be cached
select_for_update: Flag indicating whether the rows should be locked until end of transaction
"""
def get_child_descriptors(descriptor, depth, descriptor_filter):
"""
Return a list of all child descriptors down to the specified depth
that match the descriptor filter. Includes `descriptor`
descriptor: The parent to search inside
depth: The number of levels to descend, or None for infinite depth
descriptor_filter(descriptor): A function that returns True
if descriptor should be included in the results
"""
if descriptor_filter(descriptor):
descriptors = [descriptor]
else:
descriptors = []
if depth is None or depth > 0:
new_depth = depth - 1 if depth is not None else depth
for child in descriptor.get_children() + descriptor.get_required_module_descriptors():
descriptors.extend(get_child_descriptors(child, new_depth, descriptor_filter))
return descriptors
with modulestore().bulk_operations(descriptor.location.course_key):
descriptors = get_child_descriptors(descriptor, depth, descriptor_filter)
return FieldDataCache(descriptors, course_id, user, select_for_update, asides=asides)
def _query(self, model_class, **kwargs):
"""
Queries model_class with **kwargs, optionally adding select_for_update if
self.select_for_update is set
"""
query = model_class.objects
if self.select_for_update:
query = query.select_for_update()
query = query.filter(**kwargs)
return query
def _chunked_query(self, model_class, chunk_field, items, chunk_size=500, **kwargs):
"""
Queries model_class with `chunk_field` set to chunks of size `chunk_size`,
and all other parameters from `**kwargs`
This works around a limitation in sqlite3 on the number of parameters
that can be put into a single query
"""
res = chain.from_iterable(
self._query(model_class, **dict([(chunk_field, chunk)] + kwargs.items()))
for chunk in chunks(items, chunk_size)
)
return res
@property
def _all_usage_ids(self):
"""
Return a set of all usage_ids for the descriptors that this FieldDataCache is caching
against, and well as all asides for those descriptors.
"""
usage_ids = set()
for descriptor in self.descriptors:
usage_ids.add(descriptor.scope_ids.usage_id)
for aside_type in self.asides:
usage_ids.add(AsideUsageKeyV1(descriptor.scope_ids.usage_id, aside_type))
return usage_ids
@property
def _all_block_types(self):
"""
Return a set of all block_types that are cached by this FieldDataCache.
"""
block_types = set()
for descriptor in self.descriptors:
block_types.add(BlockTypeKeyV1(descriptor.entry_point, descriptor.scope_ids.block_type))
for aside_type in self.asides:
block_types.add(BlockTypeKeyV1(XBlockAside.entry_point, aside_type))
return block_types
def _retrieve_fields(self, scope, fields):
"""
Queries the database for all of the fields in the specified scope
"""
if scope == Scope.user_state:
return self._chunked_query(
StudentModule,
'module_state_key__in',
self._all_usage_ids,
course_id=self.course_id,
student=self.user.pk,
)
elif scope == Scope.user_state_summary:
return self._chunked_query(
XModuleUserStateSummaryField,
'usage_id__in',
self._all_usage_ids,
field_name__in=set(field.name for field in fields),
)
elif scope == Scope.preferences:
return self._chunked_query(
XModuleStudentPrefsField,
'module_type__in',
self._all_block_types,
student=self.user.pk,
field_name__in=set(field.name for field in fields),
)
elif scope == Scope.user_info:
return self._query(
XModuleStudentInfoField,
student=self.user.pk,
field_name__in=set(field.name for field in fields),
)
else:
return []
def _fields_to_cache(self):
"""
Returns a map of scopes to fields in that scope that should be cached
"""
scope_map = defaultdict(set)
for descriptor in self.descriptors:
for field in descriptor.fields.values():
scope_map[field.scope].add(field)
return scope_map
def _cache_key_from_kvs_key(self, key):
"""
Return the key used in the FieldDataCache for the specified KeyValueStore key
"""
if key.scope == Scope.user_state:
return (key.scope, key.block_scope_id)
elif key.scope == Scope.user_state_summary:
return (key.scope, key.block_scope_id, key.field_name)
elif key.scope == Scope.preferences:
return (key.scope, BlockTypeKeyV1(key.block_family, key.block_scope_id), key.field_name)
elif key.scope == Scope.user_info:
return (key.scope, key.field_name)
def _cache_key_from_field_object(self, scope, field_object):
"""
Return the key used in the FieldDataCache for the specified scope and
field
"""
if scope == Scope.user_state:
return (scope, field_object.module_state_key.map_into_course(self.course_id))
elif scope == Scope.user_state_summary:
return (scope, field_object.usage_id.map_into_course(self.course_id), field_object.field_name)
elif scope == Scope.preferences:
return (scope, field_object.module_type, field_object.field_name)
elif scope == Scope.user_info:
return (scope, field_object.field_name)
def find(self, key):
    """
    Look for a model data object using a DjangoKeyValueStore.Key object.

    key: A `DjangoKeyValueStore.Key` object selecting the object to find

    returns the found object, or None if the object doesn't exist
    """
    if key.scope.user == UserScope.ONE and not self.user.is_anonymous():
        # If we're getting user data, we expect that the key matches the
        # user we were constructed for.
        assert key.user_id == self.user.id

    # Cache miss simply yields None; find_or_create handles creation.
    return self.cache.get(self._cache_key_from_kvs_key(key))
def find_or_create(self, key):
    '''
    Find a model data object in this cache, or create it if it doesn't
    exist
    '''
    field_object = self.find(key)
    if field_object is not None:
        return field_object

    # Not cached yet: create (or fetch) the backing database row for this
    # scope.
    if key.scope == Scope.user_state:
        # All user_state fields of one block share a single StudentModule
        # row; new rows start with an empty JSON state blob.
        field_object, __ = StudentModule.objects.get_or_create(
            course_id=self.course_id,
            student_id=key.user_id,
            module_state_key=key.block_scope_id,
            defaults={
                'state': json.dumps({}),
                'module_type': key.block_scope_id.block_type,
            },
        )
    elif key.scope == Scope.user_state_summary:
        field_object, __ = XModuleUserStateSummaryField.objects.get_or_create(
            field_name=key.field_name,
            usage_id=key.block_scope_id
        )
    elif key.scope == Scope.preferences:
        field_object, __ = XModuleStudentPrefsField.objects.get_or_create(
            field_name=key.field_name,
            module_type=BlockTypeKeyV1(key.block_family, key.block_scope_id),
            student_id=key.user_id,
        )
    elif key.scope == Scope.user_info:
        field_object, __ = XModuleStudentInfoField.objects.get_or_create(
            field_name=key.field_name,
            student_id=key.user_id,
        )
    # NOTE(review): for any scope other than the four above, field_object
    # stays None and None is cached/returned — confirm callers only ever
    # pass these four scopes.

    cache_key = self._cache_key_from_kvs_key(key)
    self.cache[cache_key] = field_object
    return field_object
class DjangoKeyValueStore(KeyValueStore):
    """
    This KeyValueStore will read and write data in the following scopes to django models
        Scope.user_state_summary
        Scope.user_state
        Scope.preferences
        Scope.user_info

    Access to any other scopes will raise an InvalidScopeError

    Data for Scope.user_state is stored as StudentModule objects via the django orm.

    Data for the other scopes is stored in individual objects that are named for the
    scope involved and have the field name as a key

    If the key isn't found in the expected table during a read or a delete, then a KeyError will be raised
    """

    # Scopes this store accepts; anything else raises InvalidScopeError.
    _allowed_scopes = (
        Scope.user_state_summary,
        Scope.user_state,
        Scope.preferences,
        Scope.user_info,
    )

    def __init__(self, field_data_cache):
        # All reads and writes go through the shared FieldDataCache.
        self._field_data_cache = field_data_cache

    def get(self, key):
        """Return the stored value for `key`; raise KeyError if absent."""
        if key.scope not in self._allowed_scopes:
            raise InvalidScopeError(key)

        field_object = self._field_data_cache.find(key)
        if field_object is None:
            raise KeyError(key.field_name)

        if key.scope == Scope.user_state:
            # user_state keeps all fields of a block in one JSON blob;
            # the field lookup itself may also raise KeyError, as documented.
            return json.loads(field_object.state)[key.field_name]
        else:
            return json.loads(field_object.value)

    def set(self, key, value):
        """
        Set a single value in the KeyValueStore
        """
        self.set_many({key: value})

    def set_many(self, kv_dict):
        """
        Provide a bulk save mechanism.

        `kv_dict`: A dictionary of dirty fields that maps
          xblock.KvsFieldData._key : value
        """
        saved_fields = []
        # field_objects maps a field_object to a list of associated fields
        field_objects = dict()
        for field in kv_dict:
            # Check field for validity
            if field.scope not in self._allowed_scopes:
                raise InvalidScopeError(field)

            # If the field is valid and isn't already in the dictionary, add it.
            field_object = self._field_data_cache.find_or_create(field)
            if field_object not in field_objects.keys():
                field_objects[field_object] = []
            # Update the list of associated fields
            field_objects[field_object].append(field)

            # Special case when scope is for the user state, because this scope saves fields in a single row
            if field.scope == Scope.user_state:
                state = json.loads(field_object.state)
                state[field.field_name] = kv_dict[field]
                field_object.state = json.dumps(state)
            else:
                # The remaining scopes save fields on different rows, so
                # we don't have to worry about conflicts
                field_object.value = json.dumps(kv_dict[field])

        # Persist each distinct row once, even if several fields map to it.
        for field_object in field_objects:
            try:
                # Save the field object that we made above
                field_object.save()
                # If save is successful on this scope, add the saved fields to
                # the list of successful saves
                saved_fields.extend([field.field_name for field in field_objects[field_object]])
            except DatabaseError:
                # Report which fields did make it so the caller can recover.
                log.exception('Error saving fields %r', field_objects[field_object])
                raise KeyValueMultiSaveError(saved_fields)

    def delete(self, key):
        """Remove the value stored for `key`; raise KeyError if absent."""
        if key.scope not in self._allowed_scopes:
            raise InvalidScopeError(key)

        field_object = self._field_data_cache.find(key)
        if field_object is None:
            raise KeyError(key.field_name)

        if key.scope == Scope.user_state:
            # Only remove this one field from the shared JSON state row.
            state = json.loads(field_object.state)
            del state[key.field_name]
            field_object.state = json.dumps(state)
            field_object.save()
        else:
            field_object.delete()

    def has(self, key):
        """Return True when a value is stored for `key`, False otherwise."""
        if key.scope not in self._allowed_scopes:
            raise InvalidScopeError(key)

        field_object = self._field_data_cache.find(key)
        if field_object is None:
            return False

        if key.scope == Scope.user_state:
            return key.field_name in json.loads(field_object.state)
        else:
            return True
| agpl-3.0 |
denfromufa/mipt-course | pysandbox/math_expr_evaluation_test.py | 1 | 19448 | # Copyright (c) 2012 Timur Iskhodzhanov and MIPT students. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import math
import operator
import unittest
import re
class Token(object):
    """Base class for all lexer tokens."""

    def to_test_string(self):
        """String form used by the unit tests; defaults to str(self)."""
        return str(self)


class Value(Token):
    """A numeric literal; always stores a float (see Evaluator's tokenizer)."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return str(self.value)


class Bracket(Token):
    """An opening or closing parenthesis."""

    def __init__(self, is_opening):
        self.is_opening = is_opening

    def __str__(self):
        return '(' if self.is_opening else ')'


class Operation(Token):
    """Common base class for unary and binary operations."""
    pass


class UnaryOperation(Operation):
    """A prefix/postfix operator or a named function such as sqrt."""

    def __init__(self, name, is_postfix, func):
        """Create UnaryOperation Token.

        Args:
          name: operation name (e.g. '+' or '-')
          is_postfix: True if this is postfix operation, False if prefix
          func: the function itself
            (a callable object which evaluates this operation)
        """
        self.name = name
        self.is_postfix = is_postfix
        self.func = func

    def evaluate(self, arg):
        return self.func(arg)

    def to_test_string(self):
        # Unary +/- print as 'u+'/'u-' so tests can tell them apart from
        # their binary namesakes.
        if self.name in ['+', '-']:
            return 'u%s' % self.name
        else:
            return str(self)

    def __str__(self):
        return self.name


class BinaryOperation(Operation):
    """An infix binary operator with a priority and associativity."""

    def __init__(self, name, priority, is_right_assoc, func):
        """Create BinaryOperation Token.

        Args:
          name: operation name
          priority: operation priority
          is_right_assoc: True if right associative, False otherwise
            Left-associativity is like this: a-b-c = (a-b)-c.
            Right-associativity is like this: a^b^c = a^(b^c).
          func: the function itself
        """
        self.name = name
        self.priority = priority
        self.is_right_assoc = is_right_assoc
        self.func = func

    def evaluate(self, arg1, arg2):
        return self.func(arg1, arg2)

    def __str__(self):
        return self.name


class InvalidExpressionError(Exception):
    """Raised when an expression cannot be tokenized, parsed or evaluated."""
    pass


class Evaluator(object):
    """Evaluates arithmetic expressions given as strings.

    The expression is tokenized, converted to postfix notation with the
    shunting-yard algorithm and then evaluated by a stack machine.
    """

    #: Named unary functions available in expressions.
    functions = {
        'sqrt': math.sqrt,
        'sin': math.sin,
        'cos': math.cos,
        'abs': abs,
    }

    # 4-tuples: (name, priority, right associativity, func)
    # operator.truediv replaces the Python-2-only operator.div: operands
    # are always floats here (the tokenizer builds Value(float(...))), so
    # results are identical, and the module now also imports under Python 3.
    binary_ops = {
        '+': ('+', 4, False, operator.add),
        '-': ('-', 4, False, operator.sub),
        '*': ('*', 5, False, operator.mul),
        '/': ('/', 5, False, operator.truediv),
        '^': ('^', 6, True, operator.pow),
    }

    # 3-tuples: (name, is postfix, func)
    unary_ops = {
        '+': ('+', False, lambda x: x),
        '-': ('-', False, operator.neg),
    }

    def __convert_indexerror_to_invalidexpressionerror(func):
        # Decorator (evaluated at class-body time, hence no 'self'): a
        # stack underflow inside the wrapped method surfaces as IndexError,
        # which really means the expression was malformed.
        def wrapped(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except IndexError:
                raise InvalidExpressionError()
        return wrapped

    def __tokenize_to_str(self, expr_str):
        """Splits expr_str into tokens as strings.

        It's a helper function for __tokenize.

        A token is one of the following:
          * a continuous sequence of letters, may include underscores
          * a continuous sequence of decimal digits, may include dots
          * one of the following symbols: ()+-*/^

        Args:
          self: instance of the Evaluator class
          expr_str: arithmetic expression as a string

        Returns:
          A list of string tokens from given expr_str

        Raises:
          InvalidExpressionError: invalid expr_str, which is impossible
            to tokenize
        """
        token_exprs = [
            '[a-zA-Z_0-9.]+',  # numbers, functions, variables
            '[()+\\-*/^]',  # operators and brackets
        ]
        tokens_str = re.findall('|'.join(token_exprs), expr_str)
        # Apart from spaces, the token regexps must consume the whole input.
        if not ''.join(tokens_str) == expr_str.replace(' ', ''):
            raise InvalidExpressionError('expression contains unknown symbols')
        return tokens_str

    def __tokenize(self, expr_str):
        """Splits expr_str into tokens.

        Args:
          self: instance of the Evaluator class
          expr_str: arithmetic expression as a string

        Returns:
          A list of tokens from given expr_str

        Raises:
          InvalidExpressionError: invalid expr_str, which is impossible
            to tokenize or tokens themselves are invalid
            (e.g. unknown function)
          ValueError: invalid number format, e.g. 2.2.2
        """
        tokens_str = self.__tokenize_to_str(expr_str)
        tokens = []
        for i, token_str in enumerate(tokens_str):
            prev = tokens_str[i - 1] if i > 0 else None
            # '+'/'-' are unary when they start the expression or follow an
            # opening bracket, another operator or a function name.
            # list() is used because Python 3 dict views don't support '+'
            # (the previous ".keys() + .keys()" worked on Python 2 only).
            is_unary = (prev is None or
                        prev == '(' or
                        prev in (list(self.binary_ops) +
                                 list(self.unary_ops) +
                                 list(self.functions)))
            token = None
            if re.match('[0-9.]+', token_str):
                token = Value(float(token_str))
            elif token_str in self.functions:
                token = UnaryOperation(
                    token_str,
                    False,
                    self.functions[token_str])
            elif token_str in self.unary_ops and is_unary:
                token = UnaryOperation(*self.unary_ops[token_str])
            elif token_str in self.binary_ops:
                token = BinaryOperation(*self.binary_ops[token_str])
            elif token_str == '(':
                token = Bracket(True)
            elif token_str == ')':
                token = Bracket(False)
            else:
                raise InvalidExpressionError('invalid token "%s"' % token_str)
            tokens.append(token)
        return tokens

    @__convert_indexerror_to_invalidexpressionerror
    def __to_postfix(self, tokens):
        """Translates a string of tokens from infix form to postfix.

        Args:
          tokens: list of tokens representing expression in infix form

        Returns:
          List of the same tokens, reordered to form a postfix expression.

        Raises:
          InvalidExpressionError: if the expression is invalid and it's
            impossible to translate to postfix form.
        """
        # helper function
        def peek(stack):
            return stack[-1] if stack else None

        stack = []
        res = []
        for token in tokens:
            if isinstance(token, Value):
                # Got an operand - append it to the result.
                res.append(token)
            elif isinstance(token, UnaryOperation) and token.is_postfix:
                # Got an unary postfix operation - append it to the result.
                res.append(token)
            elif isinstance(token, UnaryOperation) and not token.is_postfix:
                # Got an unary prefix operation (or function) -
                # push it onto the stack.
                stack.append(token)
            elif isinstance(token, BinaryOperation):
                # Got a binary operation - pop off from stack and push it there.
                # Popping off while priority is higher or equal
                # for left-associative operations,
                # or while strictly higher for right-associative ones.
                # Unaries are always treated as higher-priority operations.
                if token.is_right_assoc:
                    priority_cond = operator.gt
                else:
                    priority_cond = operator.ge
                while (isinstance(peek(stack), UnaryOperation) or
                       (isinstance(peek(stack), BinaryOperation) and
                        priority_cond(peek(stack).priority, token.priority))):
                    # pop off the element and append to the result
                    res.append(stack.pop())
                # push the token onto the stack
                stack.append(token)
            elif isinstance(token, Bracket) and token.is_opening:
                # Got an opening bracket - push it onto the stack.
                stack.append(token)
            elif isinstance(token, Bracket) and not token.is_opening:
                # Got a closing bracket -
                # pop operators off the stack and append to the result
                # while the top of the stack isn't an opening bracket.
                while (stack and
                       not (isinstance(peek(stack), Bracket) and
                            peek(stack).is_opening)):
                    res.append(stack.pop())
                # then pop the opening bracket
                stack.pop()
                # If the top of the stack is an unary prefix op,
                # pop it and append to the result.
                if (isinstance(peek(stack), UnaryOperation) and
                        not peek(stack).is_postfix):
                    res.append(stack.pop())
        while stack:
            res.append(stack.pop())
        return res

    @__convert_indexerror_to_invalidexpressionerror
    def __evaluate_postfix(self, tokens):
        """Evaluates the list of tokens as a postfix expression.

        Args:
          tokens: list of tokens representing expression in postfix form

        Returns:
          Evaluation result of the expression.

        Raises:
          InvalidExpressionError: if the postfix expression is invalid.
        """
        stack = []
        for token in tokens:
            if isinstance(token, Value):
                stack.append(token.value)
            elif isinstance(token, UnaryOperation):
                arg = stack.pop()
                stack.append(token.evaluate(arg))
            elif isinstance(token, BinaryOperation):
                arg2 = stack.pop()
                arg1 = stack.pop()
                stack.append(token.evaluate(arg1, arg2))
            else:
                raise AssertionError(
                    "error: can't evaluate unknown token '%s'" %
                    token.to_test_string())
        return stack[0]

    def evaluate(self, expr_str):
        """Evaluates expression given as a string.

        Supported operators: + (binary and unary), - (binary and unary), *, /, ^
        Supported functions: sqrt, sin, cos, abs

        Args:
          expr_str: string representing the expression to evaluate

        Returns:
          Result of the expression evaluation as a float.

        Raises:
          InvalidExpressionError: if the expression is invalid
          ValueError: if the given mathematical operations can't be performed
            (e.g. division by zero) or a number has an invalid format
            (e.g. 2.2.2)
        """
        tokens = self.__tokenize(expr_str)
        tokens_postfix = self.__to_postfix(tokens)
        return self.__evaluate_postfix(tokens_postfix)
class EvaluatorTest(unittest.TestCase):
    """Unit tests for Evaluator, covering the public evaluate() API and,
    via name-mangled access, the private tokenizer/postfix helpers.

    NOTE(review): the suite relies on Python 2 behaviour in places
    (``map`` returning a list, ``assertRaisesRegexp``) — confirm before
    running it under Python 3.
    """

    def setUp(self):
        self.evaluator = Evaluator()
        self.eval = self.evaluator.evaluate

    def test_constants(self):
        self.assertEqual(self.eval('0'), 0)
        self.assertEqual(self.eval('1'), 1)
        self.assertEqual(self.eval('10.32423'), 10.32423)

    def test_unary(self):
        self.assertEqual(self.eval('-1.23'), -1.23)
        self.assertEqual(self.eval('+1.23'), 1.23)

    def test_binary(self):
        self.assertEqual(self.eval('0+1'), 0 + 1)
        self.assertEqual(self.eval('11-7'), 11 - 7)
        self.assertEqual(self.eval('11*7'), 11 * 7)
        self.assertAlmostEqual(self.eval('11/7'), 11.0 / 7)
        self.assertEqual(self.eval('25^3'), 25 ** 3)

    def test_functions(self):
        self.assertAlmostEqual(self.eval('sqrt(2)'), math.sqrt(2))
        self.assertAlmostEqual(self.eval('sin(3.333)'), math.sin(3.333))
        self.assertAlmostEqual(self.eval('cos(3.333)'), math.cos(3.333))
        self.assertAlmostEqual(self.eval('abs(-3.333)'), 3.333)

    def test_multiple(self):
        self.assertEqual(self.eval('7*45+10'), 7 * 45 + 10)
        self.assertAlmostEqual(self.eval('113/41-5*11'), 113.0 / 41 - 5 * 11)

    def test_brackets(self):
        self.assertEqual(self.eval('7*(45+10)'), 7 * (45 + 10))
        self.assertAlmostEqual(
            self.eval('(13/(-11+145))^1.3'),
            (13.0 / (-11 + 145)) ** 1.3)
        self.assertAlmostEqual(self.eval('2.44 ^ (-1.3)'), 2.44 ** (-1.3))

    def test_space_ignorance(self):
        self.assertAlmostEqual(
            self.eval(' - 11 +sqrt( sin( ( 13 /(- 11 + 145)) ^1.3))'),
            -11 + math.sqrt(math.sin((13.0 / (-11 + 145)) ** 1.3)))

    def test_errors(self):
        self.assertRaises(InvalidExpressionError, self.eval, '*')
        self.assertRaises(InvalidExpressionError, self.eval, '1/')
        self.assertRaises(InvalidExpressionError, self.eval, ')')
        self.assertRaises(InvalidExpressionError, self.eval, '/(2-3)')
        self.assertRaisesRegexp(
            ValueError,
            'invalid literal for float()',
            self.eval,
            '2.2.2')
        self.assertRaisesRegexp(
            ValueError,
            'invalid literal for float()',
            self.eval,
            '5x')
        self.assertRaisesRegexp(
            InvalidExpressionError,
            'invalid token',
            self.eval,
            'foobar 5 + 3')
        self.assertRaisesRegexp(
            InvalidExpressionError,
            'invalid token',
            self.eval,
            'sinx')
        self.assertRaisesRegexp(
            InvalidExpressionError,
            'invalid token',
            self.eval,
            'sin41')
        self.assertRaisesRegexp(
            InvalidExpressionError,
            'invalid token',
            self.eval,
            'foobar 5 + 3')
        self.assertRaisesRegexp(
            ValueError,
            'negative number cannot be raised to a fractional power',
            self.eval, '(-2.44) ^ (-1.3)')
        self.assertRaisesRegexp(
            ValueError,
            'math domain error',
            self.eval, 'sqrt(-1)')

    def test_postfix(self):
        # Helper: tokenize + shunting-yard, then render tokens as strings.
        # The ".0$" substitution strips the trailing ".0" that every float
        # renders with (the unescaped dot is safe only because of that).
        def to_postfix(expr_str):
            r = self.evaluator._Evaluator__to_postfix(
                self.evaluator._Evaluator__tokenize(expr_str))
            return map(lambda t: re.sub('.0$', '', t.to_test_string()), r)
        self.assertEqual(to_postfix('4'), ['4'])
        self.assertEqual(to_postfix('--4'), ['4', 'u-', 'u-'])
        self.assertEqual(to_postfix('+-4'), ['4', 'u-', 'u+'])
        self.assertEqual(to_postfix('-+4'), ['4', 'u+', 'u-'])
        self.assertEqual(to_postfix('-4'), ['4', 'u-'])
        self.assertEqual(to_postfix('+4'), ['4', 'u+'])
        self.assertEqual(
            to_postfix('+(5+8)-4'),
            ['5', '8', '+', 'u+', '4', '-'])
        self.assertEqual(
            to_postfix('-(+1-2)+4'),
            ['1', 'u+', '2', '-', 'u-', '4', '+'])
        self.assertEqual(
            to_postfix('3+4*5/6'),
            ['3', '4', '5', '*', '6', '/', '+'])
        self.assertEqual(to_postfix(
            '(300+23)*(43-21)/(84+7)'),
            ['300', '23', '+', '43', '21', '-', '*', '84', '7', '+', '/'])
        self.assertEqual(to_postfix(
            '(4+8)*(6-5)/((3-2)*(2+2))'),
            ['4', '8', '+', '6', '5', '-', '*', '3', '2', '-', '2', '2', '+',
             '*', '/'])
        self.assertEqual(to_postfix(
            '3+4*2/(1-5)^5^10'),
            ['3', '4', '2', '*', '1', '5', '-', '5', '10', '^', '^', '/', '+'])
        self.assertEqual(to_postfix('-5-4'), ['5', 'u-', '4', '-'])
        self.assertEqual(to_postfix('-5+4'), ['5', 'u-', '4', '+'])
        self.assertEqual(to_postfix('1^(2^3)'), ['1', '2', '3', '^', '^'])
        self.assertEqual(to_postfix('(1^2)^3'), ['1', '2', '^', '3', '^'])
        self.assertEqual(to_postfix('1^2^3'), ['1', '2', '3', '^', '^'])
        self.assertEqual(to_postfix('sin 2'), ['2', 'sin'])
        self.assertEqual(to_postfix('sin(2)'), ['2', 'sin'])
        self.assertEqual(
            to_postfix('sin(45)+cos 15'),
            ['45', 'sin', '15', 'cos', '+'])
        self.assertEqual(
            to_postfix('2+sin 30'),
            ['2', '30', 'sin', '+'])
        self.assertEqual(
            to_postfix('2+sin 30'),
            ['2', '30', 'sin', '+'])
        self.assertEqual(
            to_postfix('sin 30 + 2'),
            ['30', 'sin', '2', '+'])
        self.assertEqual(
            to_postfix('sin(30+2)'),
            ['30', '2', '+', 'sin'])
        self.assertEqual(to_postfix('abs -4'), ['4', 'u-', 'abs'])
        self.assertEqual(
            to_postfix('sin 10 + cos 20 + abs -5'),
            ['10', 'sin', '20', 'cos', '+', '5', 'u-', 'abs', '+'])
        self.assertEqual(
            to_postfix('sin(25)+cos(15+sin(45))+3'),
            ['25', 'sin', '15', '45', 'sin', '+', 'cos', '+', '3', '+'])

    def test_tokenize(self):
        # Helper: tokenize into Token objects and render them; the raw
        # string tokenizer is exercised alongside it with the same inputs.
        def tokenize(expr_str):
            r = self.evaluator._Evaluator__tokenize(expr_str)
            return map(lambda t: re.sub('.0$', '', str(t)), r)
        tokenize_to_str = self.evaluator._Evaluator__tokenize_to_str
        for tok_func in [tokenize_to_str, tokenize]:
            self.assertEqual(tok_func('0'), ['0'])
            self.assertEqual(tok_func('0'), ['0'])
            self.assertEqual(tok_func('10.32423'), ['10.32423'])
            self.assertEqual(tok_func('-11'), ['-', '11'])
            self.assertEqual(tok_func('+11'), ['+', '11'])
            self.assertEqual(
                tok_func('abs(-3.333)'),
                ['abs', '(', '-', '3.333', ')'])
            self.assertEqual(
                tok_func('113/41-5*11'),
                ['113', '/', '41', '-', '5', '*', '11'])
            self.assertEqual(
                tok_func(' - 11 +sqrt( sin( ( 13 /(- 11 + 145)) ^1.3))'),
                ['-', '11', '+', 'sqrt', '(', 'sin', '(', '(', '13', '/', '(',
                 '-', '11', '+', '145', ')', ')', '^', '1.3', ')', ')'])
            self.assertRaisesRegexp(
                InvalidExpressionError,
                'unknown symbols',
                tok_func, '!";%:?=#@$&')
            self.assertRaisesRegexp(
                InvalidExpressionError,
                'unknown symbols',
                tok_func, '2 " 2')
        # these tests are for tokenize function only
        self.assertRaisesRegexp(
            ValueError,
            'invalid literal for float()',
            tokenize,
            '2.2.2')
        self.assertRaisesRegexp(
            ValueError,
            'invalid literal for float()',
            tokenize,
            '5x')
        self.assertRaisesRegexp(
            InvalidExpressionError,
            'invalid token',
            tokenize,
            'foobar 5 + 3')
        self.assertRaisesRegexp(
            InvalidExpressionError,
            'invalid token',
            tokenize,
            'sinx')
        self.assertRaisesRegexp(
            InvalidExpressionError,
            'invalid token',
            tokenize,
            'sin41')
        self.assertRaisesRegexp(
            InvalidExpressionError,
            'invalid token',
            tokenize,
            'foobar 5 + 3')
if __name__ == '__main__':
    # Verbose runner: report each test method on its own line.
    unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
| bsd-3-clause |
mcsosa121/cafa | cafaenv/lib/python2.7/site-packages/django/contrib/auth/checks.py | 374 | 2098 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import apps
from django.conf import settings
from django.core import checks
def check_user_model(**kwargs):
    """System check validating the configured custom user model.

    Inspects the model named by ``settings.AUTH_USER_MODEL`` and returns a
    list of ``checks.Error``/``checks.Warning`` instances.
    """
    errors = []
    cls = apps.get_model(settings.AUTH_USER_MODEL)

    # REQUIRED_FIELDS must be a sequence (list or tuple).
    if not isinstance(cls.REQUIRED_FIELDS, (list, tuple)):
        errors.append(
            checks.Error(
                "'REQUIRED_FIELDS' must be a list or tuple.",
                hint=None,
                obj=cls,
                id='auth.E001',
            )
        )

    # The USERNAME_FIELD is implicitly required, so listing it in
    # REQUIRED_FIELDS as well is a configuration error.
    if cls.USERNAME_FIELD in cls.REQUIRED_FIELDS:
        errors.append(
            checks.Error(
                ("The field named as the 'USERNAME_FIELD' "
                 "for a custom user model must not be included in 'REQUIRED_FIELDS'."),
                hint=None,
                obj=cls,
                id='auth.E002',
            )
        )

    # With the stock ModelBackend a non-unique username cannot work at all
    # (error); a custom backend may be able to cope (warning).
    username_is_unique = cls._meta.get_field(cls.USERNAME_FIELD).unique
    if not username_is_unique:
        only_default_backend = (settings.AUTHENTICATION_BACKENDS ==
                                ['django.contrib.auth.backends.ModelBackend'])
        if only_default_backend:
            errors.append(
                checks.Error(
                    "'%s.%s' must be unique because it is named as the 'USERNAME_FIELD'." % (
                        cls._meta.object_name, cls.USERNAME_FIELD
                    ),
                    hint=None,
                    obj=cls,
                    id='auth.E003',
                )
            )
        else:
            errors.append(
                checks.Warning(
                    "'%s.%s' is named as the 'USERNAME_FIELD', but it is not unique." % (
                        cls._meta.object_name, cls.USERNAME_FIELD
                    ),
                    hint=('Ensure that your authentication backend(s) can handle '
                          'non-unique usernames.'),
                    obj=cls,
                    id='auth.W004',
                )
            )

    return errors
| mit |
oVirt/ovirt-host-deploy | src/plugins/ovirt-host-deploy/vdsm/software.py | 1 | 2249 | #
# ovirt-host-deploy -- ovirt host deployer
# Copyright (C) 2012-2013 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
"""vdsm software prerequisites plugin."""
import gettext
import platform
from distutils.version import LooseVersion
from otopi import plugin
from otopi import util
def _(m):
    """Translate *m* via the ovirt-host-deploy gettext domain."""
    return gettext.dgettext('ovirt-host-deploy', m)
@util.export
class Plugin(plugin.PluginBase):
    """Software prerequisites plugin.

    During the otopi VALIDATION stage, checks that the host runs a
    supported distribution/version combination.
    """

    # Minimum supported versions per distribution family; a host is
    # accepted if its distribution id appears in an entry and its version
    # compares >= the entry's minimum.
    _SUPPORTED = [
        {
            'distro': ('redhat', 'centos'),
            'version': '7.5',
        },
        {
            'distro': ('fedora', ),
            'version': '24',
        },
    ]

    def __init__(self, context):
        super(Plugin, self).__init__(context=context)

    @plugin.event(
        stage=plugin.Stages.STAGE_VALIDATION,
    )
    def _validation(self):
        """Raise RuntimeError when the detected distribution is unsupported."""
        # full_distribution_name=0 yields the short id (e.g. 'centos').
        dist, ver = platform.linux_distribution(full_distribution_name=0)[:2]
        supported = False
        for entry in self._SUPPORTED:
            if (
                dist in entry['distro'] and
                LooseVersion(ver) >= LooseVersion(entry['version'])
            ):
                supported = True
                break
        if not supported:
            raise RuntimeError(
                _(
                    'Distribution {distribution} version {version} '
                    'is not supported'
                ).format(
                    distribution=dist,
                    version=ver,
                )
            )
# vim: expandtab tabstop=4 shiftwidth=4
| lgpl-2.1 |
vladimir-ipatov/ganeti | lib/utils/x509.py | 6 | 12570 | #
#
# Copyright (C) 2006, 2007, 2010, 2011, 2012 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Utility functions for X509.
"""
import time
import OpenSSL
import re
import datetime
import calendar
import errno
import logging
from ganeti import errors
from ganeti import constants
from ganeti import pathutils
from ganeti.utils import text as utils_text
from ganeti.utils import io as utils_io
from ganeti.utils import hash as utils_hash
# Character class used for salts and signatures.
# NOTE(review): despite the "HEX" name this accepts any alphanumeric
# character, not only hex digits — confirm it matches what Sha1Hmac emits.
HEX_CHAR_RE = r"[a-zA-Z0-9]"
# A valid salt consists solely of the characters above.
VALID_X509_SIGNATURE_SALT = re.compile("^%s+$" % HEX_CHAR_RE, re.S)
# Matches the "<header>: <salt>/<signature>" line written by
# SignX509Certificate in front of the PEM body.
X509_SIGNATURE = re.compile(r"^%s:\s*(?P<salt>%s+)/(?P<sign>%s+)$" %
                            (re.escape(constants.X509_CERT_SIGNATURE_HEADER),
                             HEX_CHAR_RE, HEX_CHAR_RE),
                            re.S | re.I)
# Certificate verification results
(CERT_WARNING,
 CERT_ERROR) = range(1, 3)


def _VerifyCertificateInner(expired, not_before, not_after, now,
                            warn_days, error_days):
    """Verifies certificate validity.

    @type expired: bool
    @param expired: Whether pyOpenSSL considers the certificate as expired
    @type not_before: number or None
    @param not_before: Unix timestamp before which certificate is not valid
    @type not_after: number or None
    @param not_after: Unix timestamp after which certificate is invalid
    @type now: number
    @param now: Current time as Unix timestamp
    @type warn_days: number or None
    @param warn_days: How many days before expiration a warning should be reported
    @type error_days: number or None
    @param error_days: How many days before expiration an error should be reported
    @rtype: tuple
    @return: (None, None) when the certificate is fine, otherwise
      (CERT_WARNING, message) or (CERT_ERROR, message)

    """
    if expired:
        # Attach whatever validity information is available to the message
        msg = "Certificate is expired"

        if not (not_before is None or not_after is None):
            msg += (" (valid from %s to %s)" %
                    (utils_text.FormatTime(not_before),
                     utils_text.FormatTime(not_after)))
        elif not_before is not None:
            msg += " (valid from %s)" % utils_text.FormatTime(not_before)
        elif not_after is not None:
            msg += " (valid until %s)" % utils_text.FormatTime(not_after)

        return (CERT_ERROR, msg)

    if not_before is not None and not_before > now:
        return (CERT_WARNING,
                "Certificate not yet valid (valid from %s)" %
                utils_text.FormatTime(not_before))

    if not_after is not None:
        remaining_days = int((not_after - now) / (24 * 3600))

        msg = "Certificate expires in about %d days" % remaining_days

        if error_days is not None and remaining_days <= error_days:
            return (CERT_ERROR, msg)

        if warn_days is not None and remaining_days <= warn_days:
            return (CERT_WARNING, msg)

    return (None, None)
def VerifyX509Certificate(cert, warn_days, error_days):
  """Verifies a certificate for LUClusterVerify.

  @type cert: OpenSSL.crypto.X509
  @param cert: X509 certificate object
  @type warn_days: number or None
  @param warn_days: How many days before expiration a warning should be reported
  @type error_days: number or None
  @param error_days: How many days before expiration an error should be reported
  @rtype: tuple
  @return: (None, None) when fine, otherwise (CERT_WARNING, msg) or
    (CERT_ERROR, msg)

  """
  # Depending on the pyOpenSSL version, this can just return (None, None)
  (not_before, not_after) = GetX509CertValidity(cert)

  # Compensate for clock differences between cluster nodes.
  now = time.time() + constants.NODE_MAX_CLOCK_SKEW

  return _VerifyCertificateInner(cert.has_expired(), not_before, not_after,
                                 now, warn_days, error_days)
def SignX509Certificate(cert, key, salt):
  """Sign a X509 certificate.

  An RFC822-like signature header is added in front of the certificate.

  @type cert: OpenSSL.crypto.X509
  @param cert: X509 certificate object
  @type key: string
  @param key: Key for HMAC
  @type salt: string
  @param salt: Salt for HMAC
  @rtype: string
  @return: Serialized and signed certificate in PEM format
  @raise errors.GenericError: if the salt contains characters outside the
    set accepted by L{VALID_X509_SIGNATURE_SALT}

  """
  if not VALID_X509_SIGNATURE_SALT.match(salt):
    raise errors.GenericError("Invalid salt: %r" % salt)

  # Dumping as PEM here ensures the certificate is in a sane format
  cert_pem = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)

  # Layout is "<header>: <salt>/<hmac>", a blank line, then the PEM body;
  # _ExtractX509CertificateSignature parses this format back.
  return ("%s: %s/%s\n\n%s" %
          (constants.X509_CERT_SIGNATURE_HEADER, salt,
           utils_hash.Sha1Hmac(key, cert_pem, salt=salt),
           cert_pem))
def _ExtractX509CertificateSignature(cert_pem):
  """Helper function to extract signature from X509 certificate.

  @type cert_pem: string
  @param cert_pem: Certificate in PEM format with a leading signature header
    (as produced by L{SignX509Certificate})
  @rtype: tuple; (string, string)
  @return: (salt, signature)
  @raise errors.GenericError: if no signature header is found

  """
  # Extract signature from original PEM data
  for line in cert_pem.splitlines():
    # Stop looking once the PEM body ("-----BEGIN ...") starts
    if line.startswith("---"):
      break

    m = X509_SIGNATURE.match(line.strip())
    if m:
      return (m.group("salt"), m.group("sign"))

  raise errors.GenericError("X509 certificate signature is missing")
def LoadSignedX509Certificate(cert_pem, key):
  """Verifies a signed X509 certificate.

  @type cert_pem: string
  @param cert_pem: Certificate in PEM format and with signature header
  @type key: string
  @param key: Key for HMAC
  @rtype: tuple; (OpenSSL.crypto.X509, string)
  @return: X509 certificate object and salt
  @raise errors.GenericError: if the signature header is missing or the
    HMAC doesn't match

  """
  (salt, signature) = _ExtractX509CertificateSignature(cert_pem)

  # Load and dump certificate to ensure it's in a sane format
  (cert, sane_pem) = ExtractX509Certificate(cert_pem)

  # The HMAC was computed over the normalized PEM dump, so verify against
  # the same representation.
  if not utils_hash.VerifySha1Hmac(key, sane_pem, signature, salt=salt):
    raise errors.GenericError("X509 certificate signature is invalid")

  return (cert, salt)
def GenerateSelfSignedX509Cert(common_name, validity):
  """Generates a self-signed X509 certificate.

  @type common_name: string
  @param common_name: commonName value; an empty/None value leaves the
    subject CN unset
  @type validity: int
  @param validity: Validity for certificate in seconds
  @rtype: tuple; (string, string)
  @return: a tuple of strings containing the PEM-encoded private key and
    certificate

  """
  # Create private and public key
  key = OpenSSL.crypto.PKey()
  key.generate_key(OpenSSL.crypto.TYPE_RSA, constants.RSA_KEY_BITS)

  # Create self-signed certificate
  cert = OpenSSL.crypto.X509()
  if common_name:
    cert.get_subject().CN = common_name
  cert.set_serial_number(1)
  cert.gmtime_adj_notBefore(0)
  cert.gmtime_adj_notAfter(validity)
  # Self-signed: issuer equals subject and the cert is signed with its own
  # key.
  cert.set_issuer(cert.get_subject())
  cert.set_pubkey(key)
  cert.sign(key, constants.X509_CERT_SIGN_DIGEST)

  key_pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
  cert_pem = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)

  return (key_pem, cert_pem)
def GenerateSelfSignedSslCert(filename, common_name=constants.X509_CERT_CN,
                              validity=constants.X509_CERT_DEFAULT_VALIDITY):
  """Legacy function to generate self-signed X509 certificate.

  @type filename: str
  @param filename: path to write certificate to
  @type common_name: string
  @param common_name: commonName value
  @type validity: int
  @param validity: validity of certificate in number of days
  @rtype: tuple; (string, string)
  @return: a tuple of strings containing the PEM-encoded private key and
    certificate

  """
  # TODO: Investigate using the cluster name instead of X505_CERT_CN for
  # common_name, as cluster-renames are very seldom, and it'd be nice if RAPI
  # and node daemon certificates have the proper Subject/Issuer.
  (key_pem, cert_pem) = GenerateSelfSignedX509Cert(common_name,
                                                   validity * 24 * 60 * 60)

  # Private key and certificate share one file; mode 0400 (Python 2 octal
  # literal) keeps it readable by the owner only.
  utils_io.WriteFile(filename, mode=0400, data=key_pem + cert_pem)

  return (key_pem, cert_pem)
def ExtractX509Certificate(pem):
  """Extracts the certificate from a PEM-formatted string.

  @type pem: string
  @rtype: tuple; (OpenSSL.X509 object, string)
  @return: Certificate object and PEM-formatted certificate

  """
  # Round-trip through pyOpenSSL so the returned PEM is normalized and any
  # leading signature header is stripped.
  cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pem)
  sane_pem = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)

  return (cert, sane_pem)
def PrepareX509CertKeyCheck(cert, key):
  """Build a callable that verifies a certificate matches a private key.

  @type key: OpenSSL.crypto.PKey
  @param key: Private key object
  @type cert: OpenSSL.crypto.X509
  @param cert: X509 certificate object
  @rtype: callable
  @return: Callable doing the actual check; will raise C{OpenSSL.SSL.Error}
    if certificate is not signed by given private key

  """
  # Load both into an SSL context; check_privatekey performs the match
  context = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_METHOD)
  context.use_privatekey(key)
  context.use_certificate(cert)
  return context.check_privatekey
def CheckNodeCertificate(cert, _noded_cert_file=pathutils.NODED_CERT_FILE):
  """Checks the local node daemon certificate against given certificate.

  Both certificates must be signed with the same key (as stored in the local
  L{pathutils.NODED_CERT_FILE} file). No error is raised if no local
  certificate can be found.

  @type cert: OpenSSL.crypto.X509
  @param cert: X509 certificate object
  @raise errors.X509CertError: When an error related to X509 occurred
  @raise errors.GenericError: When the verification failed

  """
  try:
    noded_pem = utils_io.ReadFile(_noded_cert_file)
  except EnvironmentError, err:
    if err.errno != errno.ENOENT:
      raise
    # A missing local certificate is not an error (e.g. fresh node)
    logging.debug("Node certificate file '%s' was not found", _noded_cert_file)
    return
  # The PEM file contains both the certificate and the private key
  try:
    noded_cert = \
      OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, noded_pem)
  except Exception, err:
    raise errors.X509CertError(_noded_cert_file,
                               "Unable to load certificate: %s" % err)
  try:
    noded_key = \
      OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, noded_pem)
  except Exception, err:
    raise errors.X509CertError(_noded_cert_file,
                               "Unable to load private key: %s" % err)
  # Check consistency of server.pem file
  check_fn = PrepareX509CertKeyCheck(noded_cert, noded_key)
  try:
    check_fn()
  except OpenSSL.SSL.Error:
    # This should never happen as it would mean the certificate in server.pem
    # is out of sync with the private key stored in the same file
    raise errors.X509CertError(_noded_cert_file,
                               "Certificate does not match with private key")
  # Check with supplied certificate with local key
  check_fn = PrepareX509CertKeyCheck(cert, noded_key)
  try:
    check_fn()
  except OpenSSL.SSL.Error:
    raise errors.GenericError("Given cluster certificate does not match"
                              " local key")
| gpl-2.0 |
lanfker/vPRKS | .waf-1.6.7-0a94702c61504c487a251b8d0a04ca9a/waflib/Tools/fc.py | 4 | 4398 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/svn/docs/wafbook/single.html#_obtaining_the_waf_file
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import re
from waflib import Utils,Task,TaskGen,Logs
from waflib.Tools import ccroot,fc_config,fc_scan
from waflib.TaskGen import feature,before_method,after_method,extension
from waflib.Configure import conf
ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES'])
ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS'])
ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS'])
def dummy(self):
	# No-op task generator method; registered (below) for the fortran link
	# features so task generators declaring them are considered valid.
	pass
def fc_hook(self,node):
	# Extension hook: compile a Fortran source file with an 'fc' task
	return self.create_compiled_task('fc',node)
def modfile(conf, name):
	"""Return the file name of the Fortran module for module *name*.

	The scheme is taken from ``conf.env.FC_MOD_CAPITALIZATION`` (default
	``'lower'``); the part of the scheme name before the dot gives the case
	of the module name, the part after the dot the case of the extension.

	Bug fix: the ``'lower.MOD'`` scheme previously upper-cased the module
	name as well (making it identical to ``'UPPER'``); it now lower-cases
	the name and upper-cases only the extension, matching later waf
	releases.
	"""
	schemes = {
		'lower': name.lower() + '.mod',
		'lower.MOD': name.lower() + '.MOD',
		'UPPER.mod': name.upper() + '.mod',
		'UPPER': name.upper() + '.MOD',
	}
	return schemes[conf.env.FC_MOD_CAPITALIZATION or 'lower']
def get_fortran_tasks(tsk):
	# Return the fortran compile tasks of the same build group as *tsk*,
	# excluding tasks flagged 'nomod' and tasks whose module dependency
	# wiring was already performed (mod_fortran_done set).
	bld=tsk.generator.bld
	tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator))
	return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)]
class fc(Task.Task):
	# Fortran compilation task.
	color='GREEN'
	run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}'
	vars=["FORTRANMODPATHFLAG"]
	def scan(self):
		# Run the fortran dependency scanner on the input file; returns
		# (resolved nodes, unresolved names such as 'MOD@x'/'USE@x' markers).
		tmp=fc_scan.fortran_parser(self.generator.includes_nodes)
		tmp.task=self
		tmp.start(self.inputs[0])
		if Logs.verbose:
			Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names))
		return(tmp.nodes,tmp.names)
	def runnable_status(self):
		# First call for the build group: wire the .mod file
		# producer/consumer relationships between all fortran tasks of the
		# group, then fall back to the default status computation.
		if getattr(self,'mod_fortran_done',None):
			return super(fc,self).runnable_status()
		bld=self.generator.bld
		lst=get_fortran_tasks(self)
		for tsk in lst:
			tsk.mod_fortran_done=True
		# If any task of the group is not ready, retry the whole group later
		for tsk in lst:
			ret=tsk.runnable_status()
			if ret==Task.ASK_LATER:
				for x in lst:
					x.mod_fortran_done=None
				return Task.ASK_LATER
		ins=Utils.defaultdict(set)
		outs=Utils.defaultdict(set)
		# 'MOD@name' markers: the task produces that module file
		for tsk in lst:
			key=tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('MOD@'):
					name=bld.modfile(x.replace('MOD@',''))
					node=bld.srcnode.find_or_declare(name)
					tsk.set_outputs(node)
					outs[id(node)].add(tsk)
		# 'USE@name' markers: the task consumes that module file
		for tsk in lst:
			key=tsk.uid()
			for x in bld.raw_deps[key]:
				if x.startswith('USE@'):
					name=bld.modfile(x.replace('USE@',''))
					node=bld.srcnode.find_resource(name)
					if node and node not in tsk.outputs:
						if not node in bld.node_deps[key]:
							bld.node_deps[key].append(node)
						ins[id(node)].add(tsk)
		# Consumers must run after the producers of the same module node
		for k in ins.keys():
			for a in ins[k]:
				a.run_after.update(outs[k])
				tmp=[]
				for t in outs[k]:
					tmp.extend(t.outputs)
				a.dep_nodes.extend(tmp)
				try:
					a.dep_nodes.sort(key=lambda x:x.abspath())
				except:
					# presumably a fallback for interpreters where sort(key=)
					# is unavailable -- TODO confirm why a bare except is used
					a.dep_nodes.sort(lambda x,y:cmp(x.abspath(),y.abspath()))
		# Invalidate cached signatures: the dependencies may have changed
		for tsk in lst:
			try:
				delattr(tsk,'cache_sig')
			except AttributeError:
				pass
		return super(fc,self).runnable_status()
class fcprogram(ccroot.link_task):
	# Link task producing a Fortran executable, installed to ${BINDIR}.
	color='YELLOW'
	run_str='${FC} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LINKFLAGS}'
	inst_to='${BINDIR}'
	chmod=Utils.O755
class fcshlib(fcprogram):
	# Shared library link: same command as fcprogram, installed to ${LIBDIR}.
	inst_to='${LIBDIR}'
class fcprogram_test(fcprogram):
	# Link task for Fortran configuration-test programs.
	def can_retrieve_cache(self):
		# Configuration tests must never come from the task cache
		return False
	def runnable_status(self):
		# Always (re-)run the test, even if nothing appears to have changed
		ret=super(fcprogram_test,self).runnable_status()
		if ret==Task.SKIP_ME:
			ret=Task.RUN_ME
		return ret
	def exec_command(self,cmd,**kw):
		# Execute the link command, capturing stdout/stderr on the build
		# context so configuration code can inspect them.
		bld=self.generator.bld
		kw['shell']=isinstance(cmd,str)
		kw['stdout']=kw['stderr']=Utils.subprocess.PIPE
		kw['cwd']=bld.variant_dir
		bld.out=bld.err=''
		bld.to_log('command: %s\n'%cmd)
		kw['output']=0
		try:
			(bld.out,bld.err)=bld.cmd_and_log(cmd,**kw)
		except Exception ,e:
			# A failing command simply means the configuration test failed
			return-1
		if bld.out:
			bld.to_log("out: %s\n"%bld.out)
		if bld.err:
			bld.to_log("err: %s\n"%bld.err)
class fcstlib(ccroot.stlink_task):
	# Static library creation for Fortran objects; behaviour fully inherited.
	pass
# Register the no-op method for the fortran link features and the compile
# hook for the usual Fortran source extensions.
feature('fcprogram','fcshlib','fcstlib','fcprogram_test')(dummy)
extension('.f','.f90','.F','.F90','.for','.FOR')(fc_hook)
conf(modfile) | gpl-2.0 |
q437634645/three.js | utils/converters/obj/split_obj.py | 369 | 12687 | """Split single OBJ model into mutliple OBJ files by materials
-------------------------------------
How to use
-------------------------------------
python split_obj.py -i infile.obj -o outfile
Will generate:
outfile_000.obj
outfile_001.obj
...
outfile_XXX.obj
-------------------------------------
Parser based on format description
-------------------------------------
http://en.wikipedia.org/wiki/Obj
------
Author
------
AlteredQualia http://alteredqualia.com
"""
import fileinput
import operator
import random
import os.path
import getopt
import sys
import struct
import math
import glob
# #####################################################
# Configuration
# #####################################################
TRUNCATE = False
SCALE = 1.0
# #####################################################
# Templates
# #####################################################
TEMPLATE_OBJ = u"""\
################################
# OBJ generated by split_obj.py
################################
# Faces: %(nfaces)d
# Vertices: %(nvertices)d
# Normals: %(nnormals)d
# UVs: %(nuvs)d
################################
# vertices
%(vertices)s
# normals
%(normals)s
# uvs
%(uvs)s
# faces
%(faces)s
"""
TEMPLATE_VERTEX = "v %f %f %f"
TEMPLATE_VERTEX_TRUNCATE = "v %d %d %d"
TEMPLATE_NORMAL = "vn %.5g %.5g %.5g"
TEMPLATE_UV = "vt %.5g %.5g"
TEMPLATE_FACE3_V = "f %d %d %d"
TEMPLATE_FACE4_V = "f %d %d %d %d"
TEMPLATE_FACE3_VT = "f %d/%d %d/%d %d/%d"
TEMPLATE_FACE4_VT = "f %d/%d %d/%d %d/%d %d/%d"
TEMPLATE_FACE3_VN = "f %d//%d %d//%d %d//%d"
TEMPLATE_FACE4_VN = "f %d//%d %d//%d %d//%d %d//%d"
TEMPLATE_FACE3_VTN = "f %d/%d/%d %d/%d/%d %d/%d/%d"
TEMPLATE_FACE4_VTN = "f %d/%d/%d %d/%d/%d %d/%d/%d %d/%d/%d"
# #####################################################
# Utils
# #####################################################
def file_exists(filename):
    """Return true if file exists and is accessible for reading.

    Should be safer than just testing for existence due to links and
    permissions magic on Unix filesystems.

    @rtype: boolean
    """
    try:
        handle = open(filename, 'r')
    except IOError:
        return False
    handle.close()
    return True
# #####################################################
# OBJ parser
# #####################################################
def parse_vertex(text):
    """Parse text chunk specifying single vertex.

    Possible formats:
        vertex index
        vertex index / texture index
        vertex index / texture index / normal index
        vertex index / / normal index

    Missing or empty components are reported as 0 (OBJ indices are 1-based,
    so 0 never collides with a real index).
    """
    chunks = text.split("/")
    indices = {'v': int(chunks[0]), 't': 0, 'n': 0}
    if len(chunks) > 1 and chunks[1]:
        indices['t'] = int(chunks[1])
    if len(chunks) > 2 and chunks[2]:
        indices['n'] = int(chunks[2])
    return indices
def parse_obj(fname):
    """Parse OBJ file.

    @param fname: path of the OBJ file to read
    @return: tuple (faces, vertices, uvs, normals, materials, mtllib) where
             faces is a list of dicts holding 1-based index lists plus the
             material/group/object/smooth state active when the face was read
    """
    vertices = []
    normals = []
    uvs = []
    faces = []
    materials = {}
    mcounter = 0
    mcurrent = 0
    mtllib = ""
    # current face state
    group = 0
    object = 0  # NOTE: shadows the builtin 'object' inside this function
    smooth = 0
    for line in fileinput.input(fname):
        chunks = line.split()
        if len(chunks) > 0:
            # Vertices as (x,y,z) coordinates
            # v 0.123 0.234 0.345
            if chunks[0] == "v" and len(chunks) == 4:
                x = float(chunks[1])
                y = float(chunks[2])
                z = float(chunks[3])
                vertices.append([x,y,z])
            # Normals in (x,y,z) form; normals might not be unit
            # vn 0.707 0.000 0.707
            if chunks[0] == "vn" and len(chunks) == 4:
                x = float(chunks[1])
                y = float(chunks[2])
                z = float(chunks[3])
                normals.append([x,y,z])
            # Texture coordinates in (u,v[,w]) coordinates, w is optional
            # vt 0.500 -1.352 [0.234]
            if chunks[0] == "vt" and len(chunks) >= 3:
                u = float(chunks[1])
                v = float(chunks[2])
                w = 0
                if len(chunks)>3:
                    w = float(chunks[3])
                uvs.append([u,v,w])
            # Face
            if chunks[0] == "f" and len(chunks) >= 4:
                vertex_index = []
                uv_index = []
                normal_index = []
                # each chunk is "v", "v/t", "v/t/n" or "v//n" (parse_vertex)
                for v in chunks[1:]:
                    vertex = parse_vertex(v)
                    if vertex['v']:
                        vertex_index.append(vertex['v'])
                    if vertex['t']:
                        uv_index.append(vertex['t'])
                    if vertex['n']:
                        normal_index.append(vertex['n'])
                faces.append({
                    'vertex':vertex_index,
                    'uv':uv_index,
                    'normal':normal_index,
                    'material':mcurrent,
                    'group':group,
                    'object':object,
                    'smooth':smooth,
                    })
            # Group
            if chunks[0] == "g" and len(chunks) == 2:
                group = chunks[1]
            # Object
            if chunks[0] == "o" and len(chunks) == 2:
                object = chunks[1]
            # Materials definition
            if chunks[0] == "mtllib" and len(chunks) == 2:
                mtllib = chunks[1]
            # Material (assigns a dense integer id per material name)
            if chunks[0] == "usemtl" and len(chunks) == 2:
                material = chunks[1]
                if not material in materials:
                    mcurrent = mcounter
                    materials[material] = mcounter
                    mcounter += 1
                else:
                    mcurrent = materials[material]
            # Smooth shading
            if chunks[0] == "s" and len(chunks) == 2:
                smooth = chunks[1]
    return faces, vertices, uvs, normals, materials, mtllib
# #############################################################################
# API - Breaker
# #############################################################################
def break_obj(infile, outfile):
    """Break infile.obj to outfile.obj

    One "outfile_XXX.obj" is written per material; each output file carries
    only the vertices/normals/uvs its faces use, with indices renumbered.
    """
    if not file_exists(infile):
        print "Couldn't find [%s]" % infile
        return
    faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
    # sort faces by materials
    chunks = {}
    for face in faces:
        material = face["material"]
        if not material in chunks:
            chunks[material] = {"faces": [], "vertices": set(), "normals": set(), "uvs": set()}
        chunks[material]["faces"].append(face)
    # extract unique vertex / normal / uv indices used per chunk
    for material in chunks:
        chunk = chunks[material]
        for face in chunk["faces"]:
            for i in face["vertex"]:
                chunk["vertices"].add(i)
            for i in face["normal"]:
                chunk["normals"].add(i)
            for i in face["uv"]:
                chunk["uvs"].add(i)
    # generate new OBJs
    for mi, material in enumerate(chunks):
        chunk = chunks[material]
        # generate separate vertex / normal / uv index lists for each chunk
        # (including mapping from original to new indices)
        # get well defined order
        new_vertices = list(chunk["vertices"])
        new_normals = list(chunk["normals"])
        new_uvs = list(chunk["uvs"])
        # map original => new indices (OBJ indices are 1-based, hence i + 1)
        vmap = {}
        for i, v in enumerate(new_vertices):
            vmap[v] = i + 1
        nmap = {}
        for i, n in enumerate(new_normals):
            nmap[n] = i + 1
        tmap = {}
        for i, t in enumerate(new_uvs):
            tmap[t] = i + 1
        # vertices
        pieces = []
        for i in new_vertices:
            vertex = vertices[i-1]
            txt = TEMPLATE_VERTEX % (vertex[0], vertex[1], vertex[2])
            pieces.append(txt)
        str_vertices = "\n".join(pieces)
        # normals
        pieces = []
        for i in new_normals:
            normal = normals[i-1]
            txt = TEMPLATE_NORMAL % (normal[0], normal[1], normal[2])
            pieces.append(txt)
        str_normals = "\n".join(pieces)
        # uvs
        pieces = []
        for i in new_uvs:
            uv = uvs[i-1]
            txt = TEMPLATE_UV % (uv[0], uv[1])
            pieces.append(txt)
        str_uvs = "\n".join(pieces)
        # faces: triangles and quads; pick the template matching which of
        # the uv / normal index lists are populated
        pieces = []
        for face in chunk["faces"]:
            txt = ""
            fv = face["vertex"]
            fn = face["normal"]
            ft = face["uv"]
            if len(fv) == 3:
                va = vmap[fv[0]]
                vb = vmap[fv[1]]
                vc = vmap[fv[2]]
                if len(fn) == 3 and len(ft) == 3:
                    na = nmap[fn[0]]
                    nb = nmap[fn[1]]
                    nc = nmap[fn[2]]
                    ta = tmap[ft[0]]
                    tb = tmap[ft[1]]
                    tc = tmap[ft[2]]
                    txt = TEMPLATE_FACE3_VTN % (va, ta, na, vb, tb, nb, vc, tc, nc)
                elif len(fn) == 3:
                    na = nmap[fn[0]]
                    nb = nmap[fn[1]]
                    nc = nmap[fn[2]]
                    txt = TEMPLATE_FACE3_VN % (va, na, vb, nb, vc, nc)
                elif len(ft) == 3:
                    ta = tmap[ft[0]]
                    tb = tmap[ft[1]]
                    tc = tmap[ft[2]]
                    txt = TEMPLATE_FACE3_VT % (va, ta, vb, tb, vc, tc)
                else:
                    txt = TEMPLATE_FACE3_V % (va, vb, vc)
            elif len(fv) == 4:
                va = vmap[fv[0]]
                vb = vmap[fv[1]]
                vc = vmap[fv[2]]
                vd = vmap[fv[3]]
                if len(fn) == 4 and len(ft) == 4:
                    na = nmap[fn[0]]
                    nb = nmap[fn[1]]
                    nc = nmap[fn[2]]
                    nd = nmap[fn[3]]
                    ta = tmap[ft[0]]
                    tb = tmap[ft[1]]
                    tc = tmap[ft[2]]
                    td = tmap[ft[3]]
                    txt = TEMPLATE_FACE4_VTN % (va, ta, na, vb, tb, nb, vc, tc, nc, vd, td, nd)
                elif len(fn) == 4:
                    na = nmap[fn[0]]
                    nb = nmap[fn[1]]
                    nc = nmap[fn[2]]
                    nd = nmap[fn[3]]
                    txt = TEMPLATE_FACE4_VN % (va, na, vb, nb, vc, nc, vd, nd)
                elif len(ft) == 4:
                    ta = tmap[ft[0]]
                    tb = tmap[ft[1]]
                    tc = tmap[ft[2]]
                    td = tmap[ft[3]]
                    txt = TEMPLATE_FACE4_VT % (va, ta, vb, tb, vc, tc, vd, td)
                else:
                    txt = TEMPLATE_FACE4_V % (va, vb, vc, vd)
            pieces.append(txt)
        str_faces = "\n".join(pieces)
        # generate OBJ string
        content = TEMPLATE_OBJ % {
            "nfaces" : len(chunk["faces"]),
            "nvertices" : len(new_vertices),
            "nnormals" : len(new_normals),
            "nuvs" : len(new_uvs),
            "vertices" : str_vertices,
            "normals" : str_normals,
            "uvs" : str_uvs,
            "faces" : str_faces
        }
        # write OBJ file
        outname = "%s_%03d.obj" % (outfile, mi)
        f = open(outname, "w")
        f.write(content)
        f.close()
# #############################################################################
# Helpers
# #############################################################################
def usage():
    """Print the command-line usage string (Python 2 print statement)."""
    print "Usage: %s -i filename.obj -o prefix" % os.path.basename(sys.argv[0])
# #####################################################
# Main
# #####################################################
if __name__ == "__main__":
    # get parameters from the command line
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:o:x:", ["help", "input=", "output=", "truncatescale="])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    infile = outfile = ""
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-i", "--input"):
            infile = a
        elif o in ("-o", "--output"):
            outfile = a
        elif o in ("-x", "--truncatescale"):
            # NOTE(review): TRUNCATE/SCALE are set here but break_obj() in
            # this file never reads them (TEMPLATE_VERTEX_TRUNCATE is also
            # unused), so -x appears to have no effect -- verify.
            TRUNCATE = True
            SCALE = float(a)
    if infile == "" or outfile == "":
        usage()
        sys.exit(2)
    print "Splitting [%s] into [%s_XXX.obj] ..." % (infile, outfile)
    break_obj(infile, outfile)
| mit |
Pluto-tv/chromium-crosswalk | tools/telemetry/third_party/webpagereplay/proxyshaper_test.py | 31 | 4988 | #!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for proxyshaper.
Usage:
$ ./proxyshaper_test.py
"""
import proxyshaper
import StringIO
import unittest
# pylint: disable=bad-whitespace
VALID_RATES = (
# input, expected_bps
( '384Kbit/s', 384000),
('1536Kbit/s', 1536000),
( '1Mbit/s', 1000000),
( '5Mbit/s', 5000000),
( '2MByte/s', 16000000),
( '0', 0),
( '5', 5),
( 384000, 384000),
)
ERROR_RATES = (
'1536KBit/s', # Older versions of dummynet used capital 'B' for bytes.
'1Mbyte/s', # Require capital 'B' for bytes.
'5bps',
)
class TimedTestCase(unittest.TestCase):
def assertValuesAlmostEqual(self, expected, actual, tolerance=0.05):
"""Like the following with nicer default message:
assertTrue(expected <= actual + tolerance &&
expected >= actual - tolerance)
"""
delta = tolerance * expected
if actual > expected + delta or actual < expected - delta:
self.fail('%s is not equal to expected %s +/- %s%%' % (
actual, expected, 100 * tolerance))
class RateLimitedFileTest(TimedTestCase):
  # Tests for proxyshaper.RateLimitedFile. Each test measures wall-clock
  # time and expects roughly 8 * num_bytes / (bps / request_count) seconds,
  # hence the relative comparison via assertValuesAlmostEqual.

  def testReadLimitedBasic(self):
    # A single request reading 1 KB at 384 Kbit/s
    num_bytes = 1024
    bps = 384000
    request_counter = lambda: 1
    f = StringIO.StringIO(' ' * num_bytes)
    limited_f = proxyshaper.RateLimitedFile(request_counter, f, bps)
    start = proxyshaper.TIMER()
    self.assertEqual(num_bytes, len(limited_f.read()))
    expected_ms = 8.0 * num_bytes / bps * 1000.0
    actual_ms = (proxyshaper.TIMER() - start) * 1000.0
    self.assertValuesAlmostEqual(expected_ms, actual_ms)

  def testReadlineLimitedBasic(self):
    # readline() must be limited just like read()
    num_bytes = 1024 * 8 + 512
    bps = 384000
    request_counter = lambda: 1
    f = StringIO.StringIO(' ' * num_bytes)
    limited_f = proxyshaper.RateLimitedFile(request_counter, f, bps)
    start = proxyshaper.TIMER()
    self.assertEqual(num_bytes, len(limited_f.readline()))
    expected_ms = 8.0 * num_bytes / bps * 1000.0
    actual_ms = (proxyshaper.TIMER() - start) * 1000.0
    self.assertValuesAlmostEqual(expected_ms, actual_ms)

  def testReadLimitedSlowedByMultipleRequests(self):
    # With two concurrent requests, the per-request bandwidth is halved
    num_bytes = 1024
    bps = 384000
    request_count = 2
    request_counter = lambda: request_count
    f = StringIO.StringIO(' ' * num_bytes)
    limited_f = proxyshaper.RateLimitedFile(request_counter, f, bps)
    start = proxyshaper.TIMER()
    num_read_bytes = limited_f.read()
    self.assertEqual(num_bytes, len(num_read_bytes))
    expected_ms = 8.0 * num_bytes / (bps / float(request_count)) * 1000.0
    actual_ms = (proxyshaper.TIMER() - start) * 1000.0
    self.assertValuesAlmostEqual(expected_ms, actual_ms)

  def testWriteLimitedBasic(self):
    # Writes are throttled the same way as reads
    num_bytes = 1024 * 10 + 350
    bps = 384000
    request_counter = lambda: 1
    f = StringIO.StringIO()
    limited_f = proxyshaper.RateLimitedFile(request_counter, f, bps)
    start = proxyshaper.TIMER()
    limited_f.write(' ' * num_bytes)
    self.assertEqual(num_bytes, len(limited_f.getvalue()))
    expected_ms = 8.0 * num_bytes / bps * 1000.0
    actual_ms = (proxyshaper.TIMER() - start) * 1000.0
    self.assertValuesAlmostEqual(expected_ms, actual_ms)

  def testWriteLimitedSlowedByMultipleRequests(self):
    # Two concurrent requests halve the per-request write bandwidth
    num_bytes = 1024 * 10
    bps = 384000
    request_count = 2
    request_counter = lambda: request_count
    f = StringIO.StringIO(' ' * num_bytes)
    limited_f = proxyshaper.RateLimitedFile(request_counter, f, bps)
    start = proxyshaper.TIMER()
    limited_f.write(' ' * num_bytes)
    self.assertEqual(num_bytes, len(limited_f.getvalue()))
    expected_ms = 8.0 * num_bytes / (bps / float(request_count)) * 1000.0
    actual_ms = (proxyshaper.TIMER() - start) * 1000.0
    self.assertValuesAlmostEqual(expected_ms, actual_ms)
class GetBitsPerSecondTest(unittest.TestCase):
  # GetBitsPerSecond converts dummynet-style rate strings to bits/second.

  def testConvertsValidValues(self):
    # Each supported spelling in VALID_RATES maps to its expected bps value
    for dummynet_option, expected_bps in VALID_RATES:
      bps = proxyshaper.GetBitsPerSecond(dummynet_option)
      self.assertEqual(
          expected_bps, bps, 'Unexpected result for %s: %s != %s' % (
              dummynet_option, expected_bps, bps))

  def testRaisesOnUnexpectedValues(self):
    # Unsupported spellings (e.g. capital 'B' for bytes) must raise
    for dummynet_option in ERROR_RATES:
      self.assertRaises(proxyshaper.BandwidthValueError,
                        proxyshaper.GetBitsPerSecond, dummynet_option)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
imclab/datashape | docs/source/conf.py | 1 | 8044 | # -*- coding: utf-8 -*-
#
# DataShape documentation build configuration file, created by
# sphinx-quickstart on Thu Dec 12 15:38:28 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'DataShape'
copyright = u'2013, Continuum Analytics'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1-dev'
# The full version, including alpha/beta/rc tags.
release = '0.0.1-dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'DataShapedoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'DataShape.tex', u'DataShape Documentation',
u'Continuum Analytics', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'datashape', u'DataShape Documentation',
[u'Continuum Analytics'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'DataShape', u'DataShape Documentation',
u'Continuum Analytics', 'DataShape', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| bsd-2-clause |
seanwestfall/django | tests/forms_tests/tests/test_error_messages.py | 169 | 11182 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import (
BooleanField, CharField, ChoiceField, DateField, DateTimeField,
DecimalField, EmailField, FileField, FloatField, Form,
GenericIPAddressField, IntegerField, ModelChoiceField,
ModelMultipleChoiceField, MultipleChoiceField, RegexField,
SplitDateTimeField, TimeField, URLField, ValidationError, utils,
)
from django.test import SimpleTestCase, TestCase
from django.utils.encoding import python_2_unicode_compatible
from django.utils.safestring import mark_safe
class AssertFormErrorsMixin(object):
    """Mixin asserting that a callable raises ValidationError with exactly
    the expected list of messages."""

    def assertFormErrors(self, expected, the_callable, *args, **kwargs):
        """Call the_callable(*args, **kwargs); assert it raises a
        ValidationError whose .messages equal *expected*.

        Fails if no ValidationError is raised or if the messages differ.
        """
        try:
            the_callable(*args, **kwargs)
            # Bug fix: the '%s' placeholder was previously never interpolated,
            # so failures printed a literal '%s' instead of the callable.
            self.fail("Testing the 'clean' method on %s failed to raise a ValidationError." % the_callable)
        except ValidationError as e:
            self.assertEqual(e.messages, expected)
class FormsErrorMessagesTestCase(SimpleTestCase, AssertFormErrorsMixin):
    def test_charfield(self):
        # CharField: custom required/min_length/max_length messages are used,
        # with %(show_value)s / %(limit_value)s interpolated.
        e = {
            'required': 'REQUIRED',
            'min_length': 'LENGTH %(show_value)s, MIN LENGTH %(limit_value)s',
            'max_length': 'LENGTH %(show_value)s, MAX LENGTH %(limit_value)s',
        }
        f = CharField(min_length=5, max_length=10, error_messages=e)
        self.assertFormErrors(['REQUIRED'], f.clean, '')
        self.assertFormErrors(['LENGTH 4, MIN LENGTH 5'], f.clean, '1234')
        self.assertFormErrors(['LENGTH 11, MAX LENGTH 10'], f.clean, '12345678901')
    def test_integerfield(self):
        # IntegerField: custom required/invalid/min_value/max_value messages.
        e = {
            'required': 'REQUIRED',
            'invalid': 'INVALID',
            'min_value': 'MIN VALUE IS %(limit_value)s',
            'max_value': 'MAX VALUE IS %(limit_value)s',
        }
        f = IntegerField(min_value=5, max_value=10, error_messages=e)
        self.assertFormErrors(['REQUIRED'], f.clean, '')
        self.assertFormErrors(['INVALID'], f.clean, 'abc')
        self.assertFormErrors(['MIN VALUE IS 5'], f.clean, '4')
        self.assertFormErrors(['MAX VALUE IS 10'], f.clean, '11')
    def test_floatfield(self):
        # FloatField: same custom message keys as IntegerField.
        e = {
            'required': 'REQUIRED',
            'invalid': 'INVALID',
            'min_value': 'MIN VALUE IS %(limit_value)s',
            'max_value': 'MAX VALUE IS %(limit_value)s',
        }
        f = FloatField(min_value=5, max_value=10, error_messages=e)
        self.assertFormErrors(['REQUIRED'], f.clean, '')
        self.assertFormErrors(['INVALID'], f.clean, 'abc')
        self.assertFormErrors(['MIN VALUE IS 5'], f.clean, '4')
        self.assertFormErrors(['MAX VALUE IS 10'], f.clean, '11')
    def test_decimalfield(self):
        # DecimalField: value-range messages plus digit/decimal-place limits
        # (%(max)s interpolation for the digit-count messages).
        e = {
            'required': 'REQUIRED',
            'invalid': 'INVALID',
            'min_value': 'MIN VALUE IS %(limit_value)s',
            'max_value': 'MAX VALUE IS %(limit_value)s',
            'max_digits': 'MAX DIGITS IS %(max)s',
            'max_decimal_places': 'MAX DP IS %(max)s',
            'max_whole_digits': 'MAX DIGITS BEFORE DP IS %(max)s',
        }
        f = DecimalField(min_value=5, max_value=10, error_messages=e)
        self.assertFormErrors(['REQUIRED'], f.clean, '')
        self.assertFormErrors(['INVALID'], f.clean, 'abc')
        self.assertFormErrors(['MIN VALUE IS 5'], f.clean, '4')
        self.assertFormErrors(['MAX VALUE IS 10'], f.clean, '11')
        f2 = DecimalField(max_digits=4, decimal_places=2, error_messages=e)
        self.assertFormErrors(['MAX DIGITS IS 4'], f2.clean, '123.45')
        self.assertFormErrors(['MAX DP IS 2'], f2.clean, '1.234')
        self.assertFormErrors(['MAX DIGITS BEFORE DP IS 2'], f2.clean, '123.4')
    def test_datefield(self):
        # DateField: custom required/invalid messages.
        e = {
            'required': 'REQUIRED',
            'invalid': 'INVALID',
        }
        f = DateField(error_messages=e)
        self.assertFormErrors(['REQUIRED'], f.clean, '')
        self.assertFormErrors(['INVALID'], f.clean, 'abc')
def test_timefield(self):
e = {
'required': 'REQUIRED',
'invalid': 'INVALID',
}
f = TimeField(error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['INVALID'], f.clean, 'abc')
def test_datetimefield(self):
e = {
'required': 'REQUIRED',
'invalid': 'INVALID',
}
f = DateTimeField(error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['INVALID'], f.clean, 'abc')
def test_regexfield(self):
e = {
'required': 'REQUIRED',
'invalid': 'INVALID',
'min_length': 'LENGTH %(show_value)s, MIN LENGTH %(limit_value)s',
'max_length': 'LENGTH %(show_value)s, MAX LENGTH %(limit_value)s',
}
f = RegexField(r'^[0-9]+$', min_length=5, max_length=10, error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['INVALID'], f.clean, 'abcde')
self.assertFormErrors(['LENGTH 4, MIN LENGTH 5'], f.clean, '1234')
self.assertFormErrors(['LENGTH 11, MAX LENGTH 10'], f.clean, '12345678901')
def test_emailfield(self):
e = {
'required': 'REQUIRED',
'invalid': 'INVALID',
'min_length': 'LENGTH %(show_value)s, MIN LENGTH %(limit_value)s',
'max_length': 'LENGTH %(show_value)s, MAX LENGTH %(limit_value)s',
}
f = EmailField(min_length=8, max_length=10, error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['INVALID'], f.clean, 'abcdefgh')
self.assertFormErrors(['LENGTH 7, MIN LENGTH 8'], f.clean, 'a@b.com')
self.assertFormErrors(['LENGTH 11, MAX LENGTH 10'], f.clean, 'aye@bee.com')
def test_filefield(self):
e = {
'required': 'REQUIRED',
'invalid': 'INVALID',
'missing': 'MISSING',
'empty': 'EMPTY FILE',
}
f = FileField(error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['INVALID'], f.clean, 'abc')
self.assertFormErrors(['EMPTY FILE'], f.clean, SimpleUploadedFile('name', None))
self.assertFormErrors(['EMPTY FILE'], f.clean, SimpleUploadedFile('name', ''))
def test_urlfield(self):
e = {
'required': 'REQUIRED',
'invalid': 'INVALID',
'max_length': '"%(value)s" has more than %(limit_value)d characters.',
}
f = URLField(error_messages=e, max_length=17)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['INVALID'], f.clean, 'abc.c')
self.assertFormErrors(['"http://djangoproject.com" has more than 17 characters.'], f.clean, 'djangoproject.com')
def test_booleanfield(self):
e = {
'required': 'REQUIRED',
}
f = BooleanField(error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
def test_choicefield(self):
e = {
'required': 'REQUIRED',
'invalid_choice': '%(value)s IS INVALID CHOICE',
}
f = ChoiceField(choices=[('a', 'aye')], error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['b IS INVALID CHOICE'], f.clean, 'b')
def test_multiplechoicefield(self):
e = {
'required': 'REQUIRED',
'invalid_choice': '%(value)s IS INVALID CHOICE',
'invalid_list': 'NOT A LIST',
}
f = MultipleChoiceField(choices=[('a', 'aye')], error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['NOT A LIST'], f.clean, 'b')
self.assertFormErrors(['b IS INVALID CHOICE'], f.clean, ['b'])
def test_splitdatetimefield(self):
e = {
'required': 'REQUIRED',
'invalid_date': 'INVALID DATE',
'invalid_time': 'INVALID TIME',
}
f = SplitDateTimeField(error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['INVALID DATE', 'INVALID TIME'], f.clean, ['a', 'b'])
def test_generic_ipaddressfield(self):
e = {
'required': 'REQUIRED',
'invalid': 'INVALID IP ADDRESS',
}
f = GenericIPAddressField(error_messages=e)
self.assertFormErrors(['REQUIRED'], f.clean, '')
self.assertFormErrors(['INVALID IP ADDRESS'], f.clean, '127.0.0')
    def test_subclassing_errorlist(self):
        """A custom error_class changes how field and non-field errors render."""
        class TestForm(Form):
            first_name = CharField()
            last_name = CharField()
            birthday = DateField()
            def clean(self):
                # Always raise so the bound form gains a non-field (__all__) error.
                raise ValidationError("I like to be awkward.")
        @python_2_unicode_compatible
        class CustomErrorList(utils.ErrorList):
            def __str__(self):
                return self.as_divs()
            def as_divs(self):
                # Render each error in a <p> inside one wrapping <div>; empty
                # lists render as the empty string.
                if not self:
                    return ''
                return mark_safe('<div class="error">%s</div>' % ''.join('<p>%s</p>' % e for e in self))
        # This form should print errors the default way.
        form1 = TestForm({'first_name': 'John'})
        self.assertHTMLEqual(str(form1['last_name'].errors), '<ul class="errorlist"><li>This field is required.</li></ul>')
        self.assertHTMLEqual(str(form1.errors['__all__']), '<ul class="errorlist nonfield"><li>I like to be awkward.</li></ul>')
        # This one should wrap error groups in the customized way.
        form2 = TestForm({'first_name': 'John'}, error_class=CustomErrorList)
        self.assertHTMLEqual(str(form2['last_name'].errors), '<div class="error"><p>This field is required.</p></div>')
        self.assertHTMLEqual(str(form2.errors['__all__']), '<div class="error"><p>I like to be awkward.</p></div>')
class ModelChoiceFieldErrorMessagesTestCase(TestCase, AssertFormErrorsMixin):
    def test_modelchoicefield(self):
        """Model choice fields honour custom 'required'/'invalid' messages."""
        # Create choices for the model choice field tests below.
        from forms_tests.models import ChoiceModel
        ChoiceModel.objects.create(pk=1, name='a')
        ChoiceModel.objects.create(pk=2, name='b')
        ChoiceModel.objects.create(pk=3, name='c')
        # ModelChoiceField: 'invalid_choice' fires for a pk not in the queryset.
        e = {
            'required': 'REQUIRED',
            'invalid_choice': 'INVALID CHOICE',
        }
        f = ModelChoiceField(queryset=ChoiceModel.objects.all(), error_messages=e)
        self.assertFormErrors(['REQUIRED'], f.clean, '')
        self.assertFormErrors(['INVALID CHOICE'], f.clean, '4')
        # ModelMultipleChoiceField: 'invalid_choice' interpolates the bad value,
        # and 'list' fires when a non-list is supplied.
        e = {
            'required': 'REQUIRED',
            'invalid_choice': '%(value)s IS INVALID CHOICE',
            'list': 'NOT A LIST OF VALUES',
        }
        f = ModelMultipleChoiceField(queryset=ChoiceModel.objects.all(), error_messages=e)
        self.assertFormErrors(['REQUIRED'], f.clean, '')
        self.assertFormErrors(['NOT A LIST OF VALUES'], f.clean, '3')
        self.assertFormErrors(['4 IS INVALID CHOICE'], f.clean, ['4'])
| bsd-3-clause |
HyperBaton/ansible | lib/ansible/modules/network/fortios/fortios_wireless_controller_hotspot20_h2qp_wan_metric.py | 7 | 11983 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_wireless_controller_hotspot20_h2qp_wan_metric
short_description: Configure WAN metrics in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify wireless_controller_hotspot20 feature and h2qp_wan_metric category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
wireless_controller_hotspot20_h2qp_wan_metric:
description:
- Configure WAN metrics.
default: null
type: dict
suboptions:
downlink_load:
description:
- Downlink load.
type: int
downlink_speed:
description:
- Downlink speed (in kilobits/s).
type: int
link_at_capacity:
description:
- Link at capacity.
type: str
choices:
- enable
- disable
link_status:
description:
- Link status.
type: str
choices:
- up
- down
- in-test
load_measurement_duration:
description:
- Load measurement duration (in tenths of a second).
type: int
name:
description:
- WAN metric name.
required: true
type: str
symmetric_wan_link:
description:
- WAN link symmetry.
type: str
choices:
- symmetric
- asymmetric
uplink_load:
description:
- Uplink load.
type: int
uplink_speed:
description:
- Uplink speed (in kilobits/s).
type: int
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure WAN metrics.
fortios_wireless_controller_hotspot20_h2qp_wan_metric:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
wireless_controller_hotspot20_h2qp_wan_metric:
downlink_load: "3"
downlink_speed: "4"
link_at_capacity: "enable"
link_status: "up"
load_measurement_duration: "7"
name: "default_name_8"
symmetric_wan_link: "symmetric"
uplink_load: "10"
uplink_speed: "11"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Authenticate against a FortiGate/FortiOS unit via legacy fortiosapi.

    ``data`` is the Ansible module parameter dict; ``fos`` is a FortiOSAPI
    instance.  Debug output is enabled, the HTTPS mode is configured before
    credentials are sent, and certificate verification follows ``ssl_verify``.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']
    fos.debug('on')
    # HTTPS defaults to 'on'; only an explicit https=False turns it off.
    if 'https' in data and not data['https']:
        fos.https('off')
    else:
        fos.https('on')
    fos.login(host, username, password, verify=ssl_verify)
def filter_wireless_controller_hotspot20_h2qp_wan_metric_data(json):
    """Restrict *json* to the module's known option keys.

    Keys that are absent or explicitly None are dropped so they are never
    pushed to the FortiOS API.  Returns a new dict; the input is untouched.
    """
    option_list = ['downlink_load', 'downlink_speed', 'link_at_capacity',
                   'link_status', 'load_measurement_duration', 'name',
                   'symmetric_wan_link', 'uplink_load', 'uplink_speed']
    return {option: json[option]
            for option in option_list
            if json.get(option) is not None}
def underscore_to_hyphen(data):
    """Recursively replace underscores with hyphens in every dict key.

    Lists are rewritten in place (the same list object is returned); dicts
    are rebuilt; every other value passes through unchanged.
    """
    if isinstance(data, list):
        data[:] = [underscore_to_hyphen(item) for item in data]
        return data
    if isinstance(data, dict):
        return {key.replace('_', '-'): underscore_to_hyphen(value)
                for key, value in data.items()}
    return data
def wireless_controller_hotspot20_h2qp_wan_metric(data, fos):
    """Create/update ('present') or delete ('absent') the WAN-metric entry.

    Returns the raw response dict from the FortiOS API call.
    NOTE(review): an unrecognised ``state`` falls through and returns None;
    the module argument spec restricts state to present/absent, so this is
    unreachable in practice.
    """
    vdom = data['vdom']
    state = data['state']
    wireless_controller_hotspot20_h2qp_wan_metric_data = data['wireless_controller_hotspot20_h2qp_wan_metric']
    # Strip unknown/None options and convert key style to the API's hyphens.
    filtered_data = underscore_to_hyphen(filter_wireless_controller_hotspot20_h2qp_wan_metric_data(wireless_controller_hotspot20_h2qp_wan_metric_data))
    if state == "present":
        return fos.set('wireless-controller.hotspot20',
                       'h2qp-wan-metric',
                       data=filtered_data,
                       vdom=vdom)
    elif state == "absent":
        return fos.delete('wireless-controller.hotspot20',
                          'h2qp-wan-metric',
                          mkey=filtered_data['name'],
                          vdom=vdom)
def is_successful_status(status):
    """Return True for a successful API response.

    A DELETE that came back 404 also counts as success: the object was
    already gone, which is the desired end state.
    """
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_wireless_controller_hotspot20(data, fos):
    """Dispatch to the h2qp-wan-metric handler and normalise its result.

    Returns an (is_error, has_changed, result) triple for the Ansible layer.
    NOTE(review): if the config dict is falsy, ``resp`` is never bound and
    the return statement raises NameError — the required argument spec keeps
    this from happening in practice.
    """
    if data['wireless_controller_hotspot20_h2qp_wan_metric']:
        resp = wireless_controller_hotspot20_h2qp_wan_metric(data, fos)
    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    """Module entry point: build the argument spec, connect, apply, report."""
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "wireless_controller_hotspot20_h2qp_wan_metric": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "downlink_load": {"required": False, "type": "int"},
                "downlink_speed": {"required": False, "type": "int"},
                "link_at_capacity": {"required": False, "type": "str",
                                     "choices": ["enable", "disable"]},
                "link_status": {"required": False, "type": "str",
                                "choices": ["up", "down", "in-test"]},
                "load_measurement_duration": {"required": False, "type": "int"},
                "name": {"required": True, "type": "str"},
                "symmetric_wan_link": {"required": False, "type": "str",
                                       "choices": ["symmetric", "asymmetric"]},
                "uplink_load": {"required": False, "type": "int"},
                "uplink_speed": {"required": False, "type": "int"}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None
    if not legacy_mode:
        # HTTPAPI path: reuse the persistent connection Ansible already opened.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)
            is_error, has_changed, result = fortios_wireless_controller_hotspot20(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: direct fortiosapi session opened (and closed) here.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")
        fos = FortiOSAPI()
        login(module.params, fos)
        is_error, has_changed, result = fortios_wireless_controller_hotspot20(module.params, fos)
        fos.logout()
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
shubhdev/edxOnBaadal | lms/djangoapps/courseware/tests/test_microsites.py | 42 | 10298 | """
Tests related to the Microsites feature
"""
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from nose.plugins.attrib import attr
from courseware.tests.helpers import LoginEnrollmentTestCase
from course_modes.models import CourseMode
from xmodule.course_module import (
CATALOG_VISIBILITY_CATALOG_AND_ABOUT, CATALOG_VISIBILITY_NONE)
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
@attr('shard_1')
class TestMicrosites(ModuleStoreTestCase, LoginEnrollmentTestCase):
    """
    This is testing of the Microsite feature
    """
    # (email, password) pairs for the two throwaway student accounts.
    STUDENT_INFO = [('view@test.com', 'foo'), ('view2@test.com', 'foo')]
    def setUp(self):
        super(TestMicrosites, self).setUp()
        # use a different hostname to test Microsites since they are
        # triggered on subdomain mappings
        #
        # NOTE: The Microsite Configuration is in lms/envs/test.py. The content for the Test Microsite is in
        # test_microsites/test_microsite.
        #
        # IMPORTANT: For these tests to work, this domain must be defined via
        # DNS configuration (either local or published)
        self.course = CourseFactory.create(display_name='Robot_Super_Course', org='TestMicrositeX')
        self.chapter0 = ItemFactory.create(parent_location=self.course.location,
                                           display_name='Overview')
        self.chapter9 = ItemFactory.create(parent_location=self.course.location,
                                           display_name='factory_chapter')
        self.section0 = ItemFactory.create(parent_location=self.chapter0.location,
                                           display_name='Welcome')
        self.section9 = ItemFactory.create(parent_location=self.chapter9.location,
                                           display_name='factory_section')
        self.course_outside_microsite = CourseFactory.create(display_name='Robot_Course_Outside_Microsite', org='FooX')
        # have a course which explicitly sets visibility in catalog to False
        self.course_hidden_visibility = CourseFactory.create(
            display_name='Hidden_course',
            org='TestMicrositeX',
            catalog_visibility=CATALOG_VISIBILITY_NONE,
        )
        # have a course which explicitly sets visibility in catalog and about to true
        self.course_with_visibility = CourseFactory.create(
            display_name='visible_course',
            org='TestMicrositeX',
            course="foo",
            catalog_visibility=CATALOG_VISIBILITY_CATALOG_AND_ABOUT,
        )
    def setup_users(self):
        # Create student accounts and activate them.
        for i in range(len(self.STUDENT_INFO)):
            email, password = self.STUDENT_INFO[i]
            username = 'u{0}'.format(i)
            self.create_account(username, email, password)
            self.activate_user(email)
    @override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
    def test_microsite_anonymous_homepage_content(self):
        """
        Verify that the homepage, when accessed via a Microsite domain, returns
        HTML that reflects the Microsite branding elements
        """
        resp = self.client.get('/', HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEqual(resp.status_code, 200)
        # assert various branding definitions on this Microsite
        # as per the configuration and Microsite overrides
        self.assertContains(resp, 'This is a Test Microsite Overlay')  # Overlay test message
        self.assertContains(resp, 'test_microsite/images/header-logo.png')  # logo swap
        self.assertContains(resp, 'test_microsite/css/test_microsite')  # css override
        self.assertContains(resp, 'Test Microsite')  # page title
        # assert that test course display name is visible
        self.assertContains(resp, 'Robot_Super_Course')
        # assert that test course with 'visible_in_catalog' to True is showing up
        self.assertContains(resp, 'visible_course')
        # assert that test course that is outside microsite is not visible
        self.assertNotContains(resp, 'Robot_Course_Outside_Microsite')
        # assert that a course that has visible_in_catalog=False is not visible
        self.assertNotContains(resp, 'Hidden_course')
        # assert that footer template has been properly overridden on homepage
        self.assertContains(resp, 'This is a Test Microsite footer')
        # assert that the edX partners section is not in the HTML
        self.assertNotContains(resp, '<section class="university-partners university-partners2x6">')
        # assert that the edX partners tag line is not in the HTML
        self.assertNotContains(resp, 'Explore free courses from')
    def test_not_microsite_anonymous_homepage_content(self):
        """
        Make sure we see the right content on the homepage if we are not in a microsite
        """
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 200)
        # assert various branding definitions on this Microsite ARE NOT VISIBLE
        self.assertNotContains(resp, 'This is a Test Microsite Overlay')  # Overlay test message
        self.assertNotContains(resp, 'test_microsite/images/header-logo.png')  # logo swap
        self.assertNotContains(resp, 'test_microsite/css/test_microsite')  # css override
        self.assertNotContains(resp, '<title>Test Microsite</title>')  # page title
        # assert that test course display name IS NOT VISIBLE, since that is a Microsite only course
        self.assertNotContains(resp, 'Robot_Super_Course')
        # assert that test course that is outside microsite IS VISIBLE
        self.assertContains(resp, 'Robot_Course_Outside_Microsite')
        # assert that footer template has been properly overridden on homepage
        self.assertNotContains(resp, 'This is a Test Microsite footer')
    def test_no_redirect_on_homepage_when_no_enrollments(self):
        """
        Verify that a user going to homepage will not redirect if he/she has no course enrollments
        """
        self.setup_users()
        email, password = self.STUDENT_INFO[0]
        self.login(email, password)
        resp = self.client.get(reverse('root'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEquals(resp.status_code, 200)
    def test_no_redirect_on_homepage_when_has_enrollments(self):
        """
        Verify that a user going to homepage will not redirect to dashboard if he/she has
        a course enrollment
        """
        self.setup_users()
        email, password = self.STUDENT_INFO[0]
        self.login(email, password)
        self.enroll(self.course, True)
        resp = self.client.get(reverse('root'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEquals(resp.status_code, 200)
    def test_microsite_course_enrollment(self):
        """
        Enroll user in a course scoped in a Microsite and one course outside of a Microsite
        and make sure that they are only visible in the right Dashboards
        """
        self.setup_users()
        email, password = self.STUDENT_INFO[1]
        self.login(email, password)
        self.enroll(self.course, True)
        self.enroll(self.course_outside_microsite, True)
        # Access the microsite dashboard and make sure the right courses appear
        resp = self.client.get(reverse('dashboard'), HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertContains(resp, 'Robot_Super_Course')
        self.assertNotContains(resp, 'Robot_Course_Outside_Microsite')
        # Now access the non-microsite dashboard and make sure the right courses appear
        resp = self.client.get(reverse('dashboard'))
        self.assertNotContains(resp, 'Robot_Super_Course')
        self.assertContains(resp, 'Robot_Course_Outside_Microsite')
    @override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
    def test_visible_about_page_settings(self):
        """
        Make sure the Microsite is honoring the visible_about_page permissions that is
        set in configuration
        """
        url = reverse('about_course', args=[self.course_with_visibility.id.to_deprecated_string()])
        resp = self.client.get(url, HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEqual(resp.status_code, 200)
        # hidden-catalog courses must 404 their about page on the microsite
        url = reverse('about_course', args=[self.course_hidden_visibility.id.to_deprecated_string()])
        resp = self.client.get(url, HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEqual(resp.status_code, 404)
    @override_settings(SITE_NAME=settings.MICROSITE_TEST_HOSTNAME)
    def test_paid_course_registration(self):
        """
        Make sure that Microsite overrides on the ENABLE_SHOPPING_CART and
        ENABLE_PAID_COURSE_ENROLLMENTS are honored
        """
        course_mode = CourseMode(
            course_id=self.course_with_visibility.id,
            mode_slug="honor",
            mode_display_name="honor cert",
            min_price=10,
        )
        course_mode.save()
        # first try on the non microsite, which
        # should pick up the global configuration (where ENABLE_PAID_COURSE_REGISTRATIONS = False)
        url = reverse('about_course', args=[self.course_with_visibility.id.to_deprecated_string()])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn("Register for {}".format(self.course_with_visibility.id.course), resp.content)
        self.assertNotIn("Add {} to Cart ($10)".format(self.course_with_visibility.id.course), resp.content)
        # now try on the microsite
        url = reverse('about_course', args=[self.course_with_visibility.id.to_deprecated_string()])
        resp = self.client.get(url, HTTP_HOST=settings.MICROSITE_TEST_HOSTNAME)
        self.assertEqual(resp.status_code, 200)
        self.assertNotIn("Register for {}".format(self.course_with_visibility.id.course), resp.content)
        self.assertIn("Add {} to Cart ($10)".format(self.course_with_visibility.id.course), resp.content)
        self.assertIn('$("#add_to_cart_post").click', resp.content)
| agpl-3.0 |
davjohnst/fundamentals | fundamentals/recursion/copy_paste.py | 1 | 2597 | #!/usr/bin/env python
from fundamentals.adv_python.context_management.cm_examples import time_print
"""
You have four keys at your disposal.
You can type keys 'n' times.
You want to maximize the number of characters in your 'text editor'
Keys:
- type an 'a'
- select all
- copy
- append paste buffer
"""
# recursive...
class CopyPaste(object):
    """Plain recursive solver for the four-key typing problem.

    Explores every sequence of n keystrokes ('a', select-all, copy, paste)
    and returns the longest text achievable.  Exponential in n; see
    CopyPasteDP for the memoized variant.
    """
    def __init__(self, n):
        self.n = n
    def max_len(self):
        """Return the maximum number of characters reachable in self.n moves."""
        if self.n < 1:
            raise ValueError("must be positive")
        return self._max_len(self.n, 0, 0, 0)
    def _max_len(self, moves_left, len_copy, len_sel, len_text):
        if moves_left == 0:
            return len_text
        remaining = moves_left - 1
        return max(
            # press 'a': text grows by one character
            self._max_len(remaining, len_copy, len_sel, len_text + 1),
            # select-all: the selection becomes the whole text
            self._max_len(remaining, len_copy, len_text, len_text),
            # copy: the paste buffer becomes the selection
            self._max_len(remaining, len_sel, len_sel, len_text),
            # paste: append the buffer to the text
            self._max_len(remaining, len_copy, len_sel, len_text + len_copy),
        )
class CopyPasteDP(object):
    """Memoized version of CopyPaste: identical answers, polynomial time."""
    def __init__(self, n):
        self.n = n
        self.memo_table = {}
    def max_len(self):
        """Return the maximum number of characters reachable in self.n moves."""
        if self.n < 1:
            raise ValueError("must be positive")
        return self._max_len(self.n, 0, 0, 0)
    def _max_len(self, moves_left, len_copy, len_sel, len_text):
        if moves_left == 0:
            return len_text
        key = (moves_left, len_copy, len_sel, len_text)
        if key in self.memo_table:
            return self.memo_table[key]
        remaining = moves_left - 1
        best = max(
            self._max_len(remaining, len_copy, len_sel, len_text + 1),    # 'a'
            self._max_len(remaining, len_copy, len_text, len_text),       # select-all
            self._max_len(remaining, len_sel, len_sel, len_text),         # copy
            self._max_len(remaining, len_copy, len_sel, len_text + len_copy),  # paste
        )
        self.memo_table[key] = best
        return best
def main():
    """Exercise both solvers and compare recursive vs. memoized timings."""
    # print() with a single argument parses identically under Python 2
    # (parenthesized expression) and Python 3, so the module stays
    # importable under either interpreter; the old `print x` statements
    # were a SyntaxError on Python 3.
    print(CopyPaste(1).max_len())
    print(CopyPaste(6).max_len())
    print(CopyPaste(7).max_len())
    print(CopyPaste(8).max_len())
    with time_print("recursive 11"):
        print(CopyPaste(11).max_len())
    with time_print("dynamic programming 11"):
        print(CopyPasteDP(11).max_len())
if __name__ == "__main__":
main() | apache-2.0 |
xuxiao19910803/edx-platform | common/test/acceptance/pages/studio/html_component_editor.py | 115 | 1139 | from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from component_editor import ComponentEditorView
class HtmlComponentEditorView(ComponentEditorView):
    """
    Represents the rendered view of an HTML component editor.
    """
    def set_content_and_save(self, content):
        """
        Types content into the html component and presses Save.
        """
        self.set_content(content)
        self.save()
    def set_content_and_cancel(self, content):
        """
        Types content into the html component and presses Cancel to abort the change.
        """
        self.set_content(content)
        self.cancel()
    def set_content(self, content):
        """
        Types content into the html component, leaving the component open.

        Switches the modal to the raw editor tab first, then replaces the
        existing text rather than appending to it.
        """
        self.q(css='.edit-xblock-modal .editor-modes .editor-button').click()
        editor = self.q(css=self._bounded_selector('.html-editor .mce-edit-area'))[0]
        # Click to focus the editor, CTRL+a to select everything, release
        # CTRL (so the content is not typed as control chords), then type
        # the new content over the selection.  The chain order is critical.
        ActionChains(self.browser).click(editor).\
            send_keys([Keys.CONTROL, 'a']).key_up(Keys.CONTROL).send_keys(content).perform()
| agpl-3.0 |
EmadMokhtar/Django | tests/auth_tests/test_decorators.py | 13 | 4143 | from django.conf import settings
from django.contrib.auth import models
from django.contrib.auth.decorators import login_required, permission_required
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from .test_views import AuthViewsTestCase
@override_settings(ROOT_URLCONF='auth_tests.urls')
class LoginRequiredTestCase(AuthViewsTestCase):
    """
    Tests the login_required decorators
    """
    # NOTE(review): camelCase method names are legacy style; left as-is so
    # test identifiers (used by CI selection) do not change.
    def testCallable(self):
        """
        login_required is assignable to callable objects.
        """
        class CallableView:
            def __call__(self, *args, **kwargs):
                pass
        login_required(CallableView())
    def testView(self):
        """
        login_required is assignable to normal views.
        """
        def normal_view(request):
            pass
        login_required(normal_view)
    def testLoginRequired(self, view_url='/login_required/', login_url=None):
        """
        login_required works on a simple view wrapped in a login_required
        decorator.

        Also reused by testLoginRequiredNextUrl with a custom login_url.
        """
        if login_url is None:
            login_url = settings.LOGIN_URL
        # Anonymous access redirects (302) to a URL containing login_url.
        response = self.client.get(view_url)
        self.assertEqual(response.status_code, 302)
        self.assertIn(login_url, response.url)
        # After logging in, the same view is served directly.
        self.login()
        response = self.client.get(view_url)
        self.assertEqual(response.status_code, 200)
    def testLoginRequiredNextUrl(self):
        """
        login_required works on a simple view wrapped in a login_required
        decorator with a login_url set.
        """
        self.testLoginRequired(view_url='/login_required_login_url/', login_url='/somewhere/')
class PermissionsRequiredDecoratorTest(TestCase):
    """
    Tests for the permission_required decorator
    """
    factory = RequestFactory()
    @classmethod
    def setUpTestData(cls):
        cls.user = models.User.objects.create(username='joe', password='qwerty')
        # Add permissions auth.add_customuser and auth.change_customuser
        perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
        cls.user.user_permissions.add(*perms)
    def test_many_permissions_pass(self):
        """A user holding every listed permission reaches the view (200)."""
        @permission_required(['auth_tests.add_customuser', 'auth_tests.change_customuser'])
        def a_view(request):
            return HttpResponse()
        request = self.factory.get('/rand')
        request.user = self.user
        resp = a_view(request)
        self.assertEqual(resp.status_code, 200)
    def test_many_permissions_in_set_pass(self):
        """Permissions may also be supplied as a set."""
        @permission_required({'auth_tests.add_customuser', 'auth_tests.change_customuser'})
        def a_view(request):
            return HttpResponse()
        request = self.factory.get('/rand')
        request.user = self.user
        resp = a_view(request)
        self.assertEqual(resp.status_code, 200)
    def test_single_permission_pass(self):
        """A single permission string works the same as a one-element list."""
        @permission_required('auth_tests.add_customuser')
        def a_view(request):
            return HttpResponse()
        request = self.factory.get('/rand')
        request.user = self.user
        resp = a_view(request)
        self.assertEqual(resp.status_code, 200)
    def test_permissioned_denied_redirect(self):
        """Missing any listed permission redirects (302) by default."""
        # NOTE(review): "permissioned" in the method name is a typo; kept so
        # the test identifier stays stable.
        @permission_required(['auth_tests.add_customuser', 'auth_tests.change_customuser', 'nonexistent-permission'])
        def a_view(request):
            return HttpResponse()
        request = self.factory.get('/rand')
        request.user = self.user
        resp = a_view(request)
        self.assertEqual(resp.status_code, 302)
    def test_permissioned_denied_exception_raised(self):
        """With raise_exception=True a missing permission raises PermissionDenied."""
        @permission_required([
            'auth_tests.add_customuser', 'auth_tests.change_customuser', 'nonexistent-permission'
        ], raise_exception=True)
        def a_view(request):
            return HttpResponse()
        request = self.factory.get('/rand')
        request.user = self.user
        with self.assertRaises(PermissionDenied):
            a_view(request)
| mit |
Krossom/python-for-android | python3-alpha/extra_modules/gdata/exif/__init__.py | 45 | 6980 | # -*-*- encoding: utf-8 -*-*-
#
# This is gdata.photos.exif, implementing the exif namespace in gdata
#
# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
#
# Copyright 2007 Håvard Gulldahl
# Portions copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module maps elements from the {EXIF} namespace[1] to GData objects.
These elements describe image data, using exif attributes[2].
Picasa Web Albums uses the exif namespace to represent Exif data encoded
in a photo [3].
Picasa Web Albums uses the following exif elements:
exif:distance
exif:exposure
exif:flash
exif:focallength
exif:fstop
exif:imageUniqueID
exif:iso
exif:make
exif:model
exif:tags
exif:time
[1]: http://schemas.google.com/photos/exif/2007.
[2]: http://en.wikipedia.org/wiki/Exif
[3]: http://code.google.com/apis/picasaweb/reference.html#exif_reference
"""
__author__ = 'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: pydoc chokes on non-ascii chars in __author__
__license__ = 'Apache License v2'
import atom
import gdata
EXIF_NAMESPACE = 'http://schemas.google.com/photos/exif/2007'
class ExifBaseElement(atom.AtomBase):
    """Base class for elements in the EXIF_NAMESPACE (%s). To add new elements, you only need to add the element tag name to self._tag
    """ % EXIF_NAMESPACE
    # Subclasses override _tag with their XML element name; the namespace is
    # shared.  _children/_attributes are copied from atom.AtomBase so that
    # subclass mutations do not leak back into the base class maps.
    _tag = ''
    _namespace = EXIF_NAMESPACE
    _children = atom.AtomBase._children.copy()
    _attributes = atom.AtomBase._attributes.copy()

    def __init__(self, name=None, extension_elements=None,
                 extension_attributes=None, text=None):
        # Mirrors atom.AtomBase's constructor contract: `text` is the element
        # body; extension_* collect XML children/attributes not in the maps.
        self.name = name
        self.text = text
        self.extension_elements = extension_elements or []
        self.extension_attributes = extension_attributes or {}
class Distance(ExifBaseElement):
    "(float) The distance to the subject, e.g. 0.0"
    _tag = 'distance'


def DistanceFromString(xml_string):
    # Deserialize an <exif:distance> element from its raw XML string form.
    return atom.CreateClassFromXMLString(Distance, xml_string)


class Exposure(ExifBaseElement):
    "(float) The exposure time used, e.g. 0.025 or 8.0E4"
    _tag = 'exposure'


def ExposureFromString(xml_string):
    # Deserialize an <exif:exposure> element from its raw XML string form.
    return atom.CreateClassFromXMLString(Exposure, xml_string)
class Flash(ExifBaseElement):
    """(string) Boolean value indicating whether the flash was used.
    The .text attribute will either be `true' or `false'
    As a convenience, this object's .bool method will return what you want,
    so you can say:
    flash_used = bool(Flash)
    """
    _tag = 'flash'

    def __bool__(self):
        # BUG FIX: the original fell off the end (returning None) for any
        # text other than 'true'/'false'; in Python 3, __bool__ returning a
        # non-bool raises TypeError.  Missing or unrecognized text now maps
        # to False ("flash not used").
        return bool(self.text) and self.text.lower() == 'true'


def FlashFromString(xml_string):
    """Deserialize an <exif:flash> element from its raw XML string form."""
    return atom.CreateClassFromXMLString(Flash, xml_string)
class Focallength(ExifBaseElement):
    "(float) The focal length used, e.g. 23.7"
    _tag = 'focallength'


def FocallengthFromString(xml_string):
    # Deserialize an <exif:focallength> element from raw XML.
    return atom.CreateClassFromXMLString(Focallength, xml_string)


class Fstop(ExifBaseElement):
    "(float) The fstop value used, e.g. 5.0"
    _tag = 'fstop'


def FstopFromString(xml_string):
    # Deserialize an <exif:fstop> element from raw XML.
    return atom.CreateClassFromXMLString(Fstop, xml_string)


class ImageUniqueID(ExifBaseElement):
    "(string) The unique image ID for the photo. Generated by Google Photo servers"
    _tag = 'imageUniqueID'


def ImageUniqueIDFromString(xml_string):
    # Deserialize an <exif:imageUniqueID> element from raw XML.
    return atom.CreateClassFromXMLString(ImageUniqueID, xml_string)


class Iso(ExifBaseElement):
    "(int) The iso equivalent value used, e.g. 200"
    _tag = 'iso'


def IsoFromString(xml_string):
    # Deserialize an <exif:iso> element from raw XML.
    return atom.CreateClassFromXMLString(Iso, xml_string)


class Make(ExifBaseElement):
    "(string) The make of the camera used, e.g. Fictitious Camera Company"
    _tag = 'make'


def MakeFromString(xml_string):
    # Deserialize an <exif:make> element from raw XML.
    return atom.CreateClassFromXMLString(Make, xml_string)


class Model(ExifBaseElement):
    "(string) The model of the camera used,e.g AMAZING-100D"
    _tag = 'model'


def ModelFromString(xml_string):
    # Deserialize an <exif:model> element from raw XML.
    return atom.CreateClassFromXMLString(Model, xml_string)
class Time(ExifBaseElement):
    """(int) The date/time the photo was taken, e.g. 1180294337000.
    Represented as the number of milliseconds since January 1st, 1970.
    The value of this element will always be identical to the value
    of the <gphoto:timestamp>.
    Look at this object's .isoformat() for a human friendly datetime string:
    photo_epoch = Time.text # 1180294337000
    photo_isostring = Time.isoformat() # '2007-05-27T19:32:17.000Z'
    Alternatively:
    photo_datetime = Time.datetime() # (requires python >= 2.3)
    """
    _tag = 'time'

    def isoformat(self):
        """(string) Return the timestamp as a ISO 8601 formatted string,
        e.g. '2007-05-27T19:32:17.000Z'
        """
        import time
        # .text holds milliseconds since the epoch; time.gmtime wants seconds.
        epoch = float(self.text) / 1000
        return time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(epoch))

    def datetime(self):
        """(datetime.datetime) Return the timestamp as a datetime.datetime object
        Requires python 2.3
        """
        import datetime
        # Same ms -> s conversion as isoformat(); note fromtimestamp uses
        # the local timezone, while isoformat() above uses UTC (gmtime).
        epoch = float(self.text) / 1000
        return datetime.datetime.fromtimestamp(epoch)


def TimeFromString(xml_string):
    # Deserialize an <exif:time> element from raw XML.
    return atom.CreateClassFromXMLString(Time, xml_string)
class Tags(ExifBaseElement):
    """The container for all exif elements.
    The <exif:tags> element can appear as a child of a photo entry.
    """
    _tag = 'tags'
    # Map each namespaced child tag to the (attribute name, class) pair the
    # atom XML machinery uses to deserialize nested elements.
    _children = atom.AtomBase._children.copy()
    _children['{%s}fstop' % EXIF_NAMESPACE] = ('fstop', Fstop)
    _children['{%s}make' % EXIF_NAMESPACE] = ('make', Make)
    _children['{%s}model' % EXIF_NAMESPACE] = ('model', Model)
    _children['{%s}distance' % EXIF_NAMESPACE] = ('distance', Distance)
    _children['{%s}exposure' % EXIF_NAMESPACE] = ('exposure', Exposure)
    _children['{%s}flash' % EXIF_NAMESPACE] = ('flash', Flash)
    _children['{%s}focallength' % EXIF_NAMESPACE] = ('focallength', Focallength)
    _children['{%s}iso' % EXIF_NAMESPACE] = ('iso', Iso)
    _children['{%s}time' % EXIF_NAMESPACE] = ('time', Time)
    _children['{%s}imageUniqueID' % EXIF_NAMESPACE] = ('imageUniqueID', ImageUniqueID)

    def __init__(self, extension_elements=None, extension_attributes=None, text=None):
        ExifBaseElement.__init__(self, extension_elements=extension_elements,
                                 extension_attributes=extension_attributes,
                                 text=text)
        # Child elements start out absent; the XML parser fills them in via
        # the _children map above when a matching tag is encountered.
        self.fstop = None
        self.make = None
        self.model = None
        self.distance = None
        self.exposure = None
        self.flash = None
        self.focallength = None
        self.iso = None
        self.time = None
        self.imageUniqueID = None


def TagsFromString(xml_string):
    # Deserialize an <exif:tags> container element from raw XML.
    return atom.CreateClassFromXMLString(Tags, xml_string)
| apache-2.0 |
mhugo/QGIS | python/plugins/processing/algs/qgis/voronoi.py | 4 | 29796 | # -*- coding: utf-8 -*-
"""
***************************************************************************
voronoi.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
#############################################################################
#
# Voronoi diagram calculator/ Delaunay triangulator
# Translated to Python by Bill Simons
# September, 2005
#
# Additional changes by Carson Farmer added November 2010
#
# Calculate Delaunay triangulation or the Voronoi polygons for a set of
# 2D input points.
#
# Derived from code bearing the following notice:
#
# The author of this software is Steven Fortune. Copyright (c) 1994 by AT&T
# Bell Laboratories.
# Permission to use, copy, modify, and distribute this software for any
# purpose without fee is hereby granted, provided that this entire notice
# is included in all copies of any software which is or includes a copy
# or modification of this software and in all copies of the supporting
# documentation for such software.
# THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
# WARRANTY. IN PARTICULAR, NEITHER THE AUTHORS NOR AT&T MAKE ANY
# REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
# OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
#
# Comments were incorporated from Shane O'Sullivan's translation of the
# original code into C++ (http://mapviewer.skynet.ie/voronoi.html)
#
# Steve Fortune's homepage: http://netlib.bell-labs.com/cm/cs/who/sjf/index.html
#
#############################################################################
def usage():
    """Print command-line usage/help text for running this module directly."""
    # fix_print_with_import
    print("""
voronoi - compute Voronoi diagram or Delaunay triangulation
voronoi [-t -p -d] [filename]
Voronoi reads from filename (or standard input if no filename given) for a set
of points in the plane and writes either the Voronoi diagram or the Delaunay
triangulation to the standard output. Each input line should consist of two
real numbers, separated by white space.
If option -t is present, the Delaunay triangulation is produced.
Each output line is a triple i j k, which are the indices of the three points
in a Delaunay triangle. Points are numbered starting at 0.
If option -t is not present, the Voronoi diagram is produced.
There are four output record types.
s a b indicates that an input point at coordinates a b was seen.
l a b c indicates a line with equation ax + by = c.
v a b indicates a vertex at a b.
e l v1 v2 indicates a Voronoi segment which is a subsegment of line number l
with endpoints numbered v1 and v2. If v1 or v2 is -1, the line
extends to infinity.
Other options include:
d Print debugging info
p Produce output suitable for input to plot (1), rather than the forms
described above.
On unsorted data uniformly distributed in the unit square, voronoi uses about
20n+140 bytes of storage.
AUTHOR
Steve J. Fortune (1987) A Sweepline Algorithm for Voronoi Diagrams,
Algorithmica 2, 153-174.
""")
#############################################################################
#
# For programmatic use two functions are available:
#
# computeVoronoiDiagram(points)
#
# Takes a list of point objects (which must have x and y fields).
# Returns a 3-tuple of:
#
# (1) a list of 2-tuples, which are the x,y coordinates of the
# Voronoi diagram vertices
# (2) a list of 3-tuples (a,b,c) which are the equations of the
# lines in the Voronoi diagram: a*x + b*y = c
# (3) a list of 3-tuples, (l, v1, v2) representing edges of the
# Voronoi diagram. l is the index of the line, v1 and v2 are
# the indices of the vetices at the end of the edge. If
# v1 or v2 is -1, the line extends to infinity.
#
# computeDelaunayTriangulation(points):
#
# Takes a list of point objects (which must have x and y fields).
# Returns a list of 3-tuples: the indices of the points that form a
# Delaunay triangle.
#
#############################################################################
import math
import sys
import getopt
TOLERANCE = 1e-9
BIG_FLOAT = 1e38
# ------------------------------------------------------------------
class Context(object):
    """Collects the output of the sweepline algorithm.

    voronoi() reports sites, vertices, bisecting lines, edges and Delaunay
    triples through the out* callbacks below; results accumulate in the
    public lists/dict so callers can read them afterwards.  The circle/
    clip_line/line methods are plotting hooks left as no-ops for overriding.
    """

    def __init__(self):
        self.doPrint = 0        # when set, out* methods also print text records
        self.debug = 0          # when set, print verbose debugging info instead
        self.plot = 0           # when set, emit plot primitives (circle/line)
        self.triangulate = False  # True -> produce Delaunay triangles
        self.vertices = []  # list of vertex 2-tuples: (x,y)
        self.lines = []  # equation of line 3-tuple (a b c), for the equation of the line a*x+b*y = c
        self.edges = []  # edge 3-tuple: (line index, vertex 1 index, vertex 2 index) if either vertex index is -1, the edge extends to infiinity
        self.triangles = []  # 3-tuple of vertex indices
        self.polygons = {}  # a dict of site:[edges] pairs

    def circle(self, x, y, rad):
        # Plot hook: draw a circle at (x, y).  No-op placeholder.
        pass

    def clip_line(self, edge):
        # Plot hook: clip and draw a (possibly infinite) edge.  No-op.
        pass

    def line(self, x0, y0, x1, y1):
        # Plot hook: draw a line segment.  No-op placeholder.
        pass

    def outSite(self, s):
        # Called once for every input site as the sweepline reaches it.
        if self.debug:
            # fix_print_with_import
            print("site (%d) at %f %f" % (s.sitenum, s.x, s.y))
        elif(self.triangulate):
            pass
        elif self.plot:
            self.circle(s.x, s.y, None)  # No radius?
        elif(self.doPrint):
            # fix_print_with_import
            print("s %f %f" % (s.x, s.y))

    def outVertex(self, s):
        # Called when a Voronoi vertex (circle-event center) is finalized.
        self.vertices.append((s.x, s.y))
        if(self.debug):
            # fix_print_with_import
            print("vertex(%d) at %f %f" % (s.sitenum, s.x, s.y))
        elif(self.triangulate):
            pass
        elif(self.doPrint and not self.plot):
            # fix_print_with_import
            print("v %f %f" % (s.x, s.y))

    def outTriple(self, s1, s2, s3):
        # Called for each Delaunay triangle (three sites on a common circle).
        self.triangles.append((s1.sitenum, s2.sitenum, s3.sitenum))
        if(self.debug):
            # fix_print_with_import
            print("circle through left=%d right=%d bottom=%d" % (s1.sitenum, s2.sitenum, s3.sitenum))
        elif(self.triangulate and self.doPrint and not self.plot):
            # fix_print_with_import
            print("%d %d %d" % (s1.sitenum, s2.sitenum, s3.sitenum))

    def outBisector(self, edge):
        # Called when a new bisector line a*x + b*y = c is created.
        self.lines.append((edge.a, edge.b, edge.c))
        if(self.debug):
            # fix_print_with_import
            print("line(%d) %gx+%gy=%g, bisecting %d %d" % (edge.edgenum, edge.a, edge.b, edge.c, edge.reg[0].sitenum, edge.reg[1].sitenum))
        elif(self.triangulate):
            if(self.plot):
                self.line(edge.reg[0].x, edge.reg[0].y, edge.reg[1].x, edge.reg[1].y)
        elif(self.doPrint and not self.plot):
            # fix_print_with_import
            print("l %f %f %f" % (edge.a, edge.b, edge.c))

    def outEdge(self, edge):
        # Called when an edge is complete; -1 endpoints mean "extends to
        # infinity".  Also indexes the edge under both bordering sites in
        # self.polygons.
        sitenumL = -1
        if edge.ep[Edge.LE] is not None:
            sitenumL = edge.ep[Edge.LE].sitenum
        sitenumR = -1
        if edge.ep[Edge.RE] is not None:
            sitenumR = edge.ep[Edge.RE].sitenum
        if edge.reg[0].sitenum not in self.polygons:
            self.polygons[edge.reg[0].sitenum] = []
        if edge.reg[1].sitenum not in self.polygons:
            self.polygons[edge.reg[1].sitenum] = []
        self.polygons[edge.reg[0].sitenum].append((edge.edgenum, sitenumL, sitenumR))
        self.polygons[edge.reg[1].sitenum].append((edge.edgenum, sitenumL, sitenumR))
        self.edges.append((edge.edgenum, sitenumL, sitenumR))
        if(not self.triangulate):
            if self.plot:
                self.clip_line(edge)
            elif(self.doPrint):
                # fix_print_with_import
                print("e %d %d %d" % (edge.edgenum, sitenumL, sitenumR))
# ------------------------------------------------------------------
def voronoi(siteList, context):
    """Run Fortune's sweepline algorithm over the sites in `siteList`.

    Sites are consumed in sweep order (bottom-up, as SiteList sorts them).
    The algorithm alternates between "site events" (the sweepline reaches a
    new input point) and "circle events" (three beach-line arcs meet at a
    Voronoi vertex), emitting every site, bisector, vertex, edge and
    Delaunay triple through the `context` callbacks.  Nothing is returned.
    """
    edgeList = EdgeList(siteList.xmin, siteList.xmax, len(siteList))
    priorityQ = PriorityQueue(siteList.ymin, siteList.ymax, len(siteList))
    siteIter = siteList.iterator()
    bottomsite = next(siteIter)
    context.outSite(bottomsite)
    newsite = next(siteIter)
    minpt = Site(-BIG_FLOAT, -BIG_FLOAT)
    while True:
        if not priorityQ.isEmpty():
            minpt = priorityQ.getMinPt()
        if (newsite and (priorityQ.isEmpty() or cmp(newsite, minpt) < 0)):
            # newsite is smallest - this is a site event
            context.outSite(newsite)
            # get first Halfedge to the LEFT and RIGHT of the new site
            lbnd = edgeList.leftbnd(newsite)
            rbnd = lbnd.right
            # if this halfedge has no edge, bot = bottom site (whatever that is)
            # create a new edge that bisects
            bot = lbnd.rightreg(bottomsite)
            edge = Edge.bisect(bot, newsite)
            context.outBisector(edge)
            # create a new Halfedge, setting its pm field to 0 and insert
            # this new bisector edge between the left and right vectors in
            # a linked list
            bisector = Halfedge(edge, Edge.LE)
            edgeList.insert(lbnd, bisector)
            # if the new bisector intersects with the left edge, remove
            # the left edge's vertex, and put in the new one
            p = lbnd.intersect(bisector)
            if p is not None:
                priorityQ.delete(lbnd)
                priorityQ.insert(lbnd, p, newsite.distance(p))
            # create a new Halfedge, setting its pm field to 1
            # insert the new Halfedge to the right of the original bisector
            lbnd = bisector
            bisector = Halfedge(edge, Edge.RE)
            edgeList.insert(lbnd, bisector)
            # if this new bisector intersects with the right Halfedge
            p = bisector.intersect(rbnd)
            if p is not None:
                # push the Halfedge into the ordered linked list of vertices
                priorityQ.insert(bisector, p, newsite.distance(p))
            newsite = next(siteIter)
        elif not priorityQ.isEmpty():
            # intersection is smallest - this is a vector (circle) event
            # pop the Halfedge with the lowest vector off the ordered list of
            # vectors.  Get the Halfedge to the left and right of the above HE
            # and also the Halfedge to the right of the right HE
            lbnd = priorityQ.popMinHalfedge()
            llbnd = lbnd.left
            rbnd = lbnd.right
            rrbnd = rbnd.right
            # get the Site to the left of the left HE and to the right of
            # the right HE which it bisects
            bot = lbnd.leftreg(bottomsite)
            top = rbnd.rightreg(bottomsite)
            # output the triple of sites, stating that a circle goes through them
            mid = lbnd.rightreg(bottomsite)
            context.outTriple(bot, top, mid)
            # get the vertex that caused this event and set the vertex number
            # couldn't do this earlier since we didn't know when it would be processed
            v = lbnd.vertex
            siteList.setSiteNumber(v)
            context.outVertex(v)
            # set the endpoint of the left and right Halfedge to be this vector
            if lbnd.edge.setEndpoint(lbnd.pm, v):
                context.outEdge(lbnd.edge)
            if rbnd.edge.setEndpoint(rbnd.pm, v):
                context.outEdge(rbnd.edge)
            # delete the lowest HE, remove all vertex events to do with the
            # right HE and delete the right HE
            edgeList.delete(lbnd)
            priorityQ.delete(rbnd)
            edgeList.delete(rbnd)
            # if the site to the left of the event is higher than the Site
            # to the right of it, then swap them and set 'pm' to RIGHT
            pm = Edge.LE
            if bot.y > top.y:
                bot, top = top, bot
                pm = Edge.RE
            # Create an Edge (or line) that is between the two Sites.  This
            # creates the formula of the line, and assigns a line number to it
            edge = Edge.bisect(bot, top)
            context.outBisector(edge)
            # create a HE from the edge
            bisector = Halfedge(edge, pm)
            # insert the new bisector to the right of the left HE
            # set one endpoint to the new edge to be the vector point 'v'
            # If the site to the left of this bisector is higher than the right
            # Site, then this endpoint is put in position 0; otherwise in pos 1
            edgeList.insert(llbnd, bisector)
            if edge.setEndpoint(Edge.RE - pm, v):
                context.outEdge(edge)
            # if left HE and the new bisector don't intersect, then delete
            # the left HE, and reinsert it
            p = llbnd.intersect(bisector)
            if p is not None:
                priorityQ.delete(llbnd)
                priorityQ.insert(llbnd, p, bot.distance(p))
            # if right HE and the new bisector don't intersect, then reinsert it
            p = bisector.intersect(rrbnd)
            if p is not None:
                priorityQ.insert(bisector, p, bot.distance(p))
        else:
            break
    # Flush the edges that never gained both endpoints (they run to infinity).
    he = edgeList.leftend.right
    while he is not edgeList.rightend:
        context.outEdge(he.edge)
        he = he.right
    # Reset the class-level edge counter so repeated calls number from 0.
    Edge.EDGE_NUM = 0
# ------------------------------------------------------------------
def isEqual(a, b, relativeError=TOLERANCE):
    """Approximate equality test for floats.

    Returns True when a and b differ by less than `relativeError` relative
    to the larger magnitude, or when both magnitudes are already below
    `relativeError` (absolute comparison near zero).
    """
    scale = max(abs(a), abs(b))
    if scale < relativeError:
        return True
    return abs(a - b) < relativeError * scale
# ------------------------------------------------------------------
class Site(object):
    """A 2-D point with an index: an input site or a generated Voronoi vertex."""

    def __init__(self, x=0.0, y=0.0, sitenum=0):
        self.x = x
        self.y = y
        self.sitenum = sitenum

    def dump(self):
        # Debugging helper: print this site's index and coordinates.
        # fix_print_with_import
        print("Site #%d (%g, %g)" % (self.sitenum, self.x, self.y))

    def __eq__(self, other):
        # Sites compare equal on coordinates only; sitenum is ignored.
        return (self.x, self.y) == (other.x, other.y)

    def __lt__(self, other):
        # Sweepline ordering: bottom-to-top by y, ties broken left-to-right by x.
        if self.y < other.y:
            return True
        if self.y > other.y:
            return False
        return self.x < other.x

    def distance(self, other):
        # Euclidean distance to another site.
        delta_x = self.x - other.x
        delta_y = self.y - other.y
        return math.sqrt(delta_x * delta_x + delta_y * delta_y)
# ------------------------------------------------------------------
class Edge(object):
    """A full bisector line between two sites, a*x + b*y = c, plus up to two
    Voronoi-vertex endpoints (None endpoint = extends to infinity)."""
    LE = 0         # index for the "left" endpoint / halfedge orientation
    RE = 1         # index for the "right" endpoint / halfedge orientation
    EDGE_NUM = 0   # class-level counter used to number edges as they appear
    DELETED = {}   # marker value (identity-compared sentinel, see EdgeList)

    def __init__(self):
        self.a = 0.0
        self.b = 0.0
        self.c = 0.0
        self.ep = [None, None]   # endpoint Sites; None means unbounded
        self.reg = [None, None]  # the two input sites this edge bisects
        self.edgenum = 0

    def dump(self):
        # Debugging helper: print line equation, endpoints and regions.
        # fix_print_with_import
        print("(#%d a=%g, b=%g, c=%g)" % (self.edgenum, self.a, self.b, self.c))
        # fix_print_with_import
        print("ep", self.ep)
        # fix_print_with_import
        print("reg", self.reg)

    def setEndpoint(self, lrFlag, site):
        """Record one endpoint; return True once BOTH endpoints are set
        (i.e. the edge is complete and may be emitted)."""
        self.ep[lrFlag] = site
        if self.ep[Edge.RE - lrFlag] is None:
            return False
        return True

    @staticmethod
    def bisect(s1, s2):
        """Create the perpendicular bisector Edge between sites s1 and s2."""
        newedge = Edge()
        newedge.reg[0] = s1  # store the sites that this edge is bisecting
        newedge.reg[1] = s2
        # to begin with, there are no endpoints on the bisector - it goes to infinity
        # ep[0] and ep[1] are None
        # get the difference in x dist between the sites
        dx = float(s2.x - s1.x)
        dy = float(s2.y - s1.y)
        adx = abs(dx)  # make sure that the difference in positive
        ady = abs(dy)
        # get the slope of the line
        newedge.c = float(s1.x * dx + s1.y * dy + (dx * dx + dy * dy) * 0.5)
        # Normalize on the dominant axis to keep coefficients well-conditioned.
        if adx > ady:
            # set formula of line, with x fixed to 1
            newedge.a = 1.0
            newedge.b = dy / dx
            newedge.c /= dx
        else:
            # set formula of line, with y fixed to 1
            newedge.b = 1.0
            newedge.a = dx / dy
            newedge.c /= dy
        newedge.edgenum = Edge.EDGE_NUM
        Edge.EDGE_NUM += 1
        return newedge
# ------------------------------------------------------------------
class Halfedge(object):
    """One side of an Edge, living in the beach-line linked list (left/right)
    and, when it carries a pending circle event, in the priority queue
    (qnext/vertex/ystar)."""

    def __init__(self, edge=None, pm=Edge.LE):
        self.left = None   # left Halfedge in the edge list
        self.right = None  # right Halfedge in the edge list
        self.qnext = None  # priority queue linked list pointer
        self.edge = edge   # edge list Edge
        self.pm = pm       # which side of the edge: Edge.LE or Edge.RE
        self.vertex = None  # Site()
        self.ystar = BIG_FLOAT  # event y-coordinate (vertex.y + distance)

    def dump(self):
        # Debugging helper: print all fields.
        # fix_print_with_import
        print("Halfedge--------------------------")
        # fix_print_with_import
        print("left: ", self.left)
        # fix_print_with_import
        print("right: ", self.right)
        # fix_print_with_import
        print("edge: ", self.edge)
        # fix_print_with_import
        print("pm: ", self.pm)
        # fix_print_with_import
        print("vertex:")
        if self.vertex:
            self.vertex.dump()
        else:
            # fix_print_with_import
            print("None")
        # fix_print_with_import
        print("ystar: ", self.ystar)

    def __eq__(self, other):
        # Priority-queue ordering key: (ystar, vertex.x).
        return (self.vertex.x == other.vertex.x) and (self.ystar == other.ystar)

    def __lt__(self, other):
        if self.ystar < other.ystar:
            return True
        elif self.ystar > other.ystar:
            return False
        elif self.vertex.x < other.vertex.x:
            return True
        else:
            return False

    def leftreg(self, default):
        # Site on the left of this halfedge; `default` (the bottom site)
        # when the halfedge has no edge (the list sentinels).
        if not self.edge:
            return default
        elif self.pm == Edge.LE:
            return self.edge.reg[Edge.LE]
        else:
            return self.edge.reg[Edge.RE]

    def rightreg(self, default):
        # Site on the right of this halfedge; `default` for the sentinels.
        if not self.edge:
            return default
        elif self.pm == Edge.LE:
            return self.edge.reg[Edge.RE]
        else:
            return self.edge.reg[Edge.LE]

    # returns True if p is to right of halfedge self
    def isPointRightOf(self, pt):
        e = self.edge
        topsite = e.reg[1]
        right_of_site = pt.x > topsite.x
        # Quick accept/reject on which side of the top site the point lies.
        if(right_of_site and self.pm == Edge.LE):
            return True
        if(not right_of_site and self.pm == Edge.RE):
            return False
        if(e.a == 1.0):
            # Edge was normalized with a == 1 (more horizontal separation).
            dyp = pt.y - topsite.y
            dxp = pt.x - topsite.x
            fast = 0
            if ((not right_of_site and e.b < 0.0) or (right_of_site and e.b >= 0.0)):
                above = dyp >= e.b * dxp
                fast = above
            else:
                above = pt.x + pt.y * e.b > e.c
                if(e.b < 0.0):
                    above = not above
                if (not above):
                    fast = 1
            if (not fast):
                # Full parabola comparison when the fast test is inconclusive.
                dxs = topsite.x - (e.reg[0]).x
                above = e.b * (dxp * dxp - dyp * dyp) < dxs * dyp * (1.0 + 2.0 * dxp / dxs + e.b * e.b)
                if(e.b < 0.0):
                    above = not above
        else:  # e.b == 1.0
            # Edge was normalized with b == 1 (more vertical separation).
            yl = e.c - e.a * pt.x
            t1 = pt.y - yl
            t2 = pt.x - topsite.x
            t3 = yl - topsite.y
            above = t1 * t1 > t2 * t2 + t3 * t3
        if(self.pm == Edge.LE):
            return above
        else:
            return not above

    # --------------------------
    # create a new site where the Halfedges el1 and el2 intersect
    def intersect(self, other):
        e1 = self.edge
        e2 = other.edge
        if (e1 is None) or (e2 is None):
            return None
        # if the two edges bisect the same parent return None
        if e1.reg[1] is e2.reg[1]:
            return None
        d = e1.a * e2.b - e1.b * e2.a
        # Parallel (or numerically indistinguishable) lines never intersect.
        if isEqual(d, 0.0):
            return None
        xint = (e1.c * e2.b - e2.c * e1.b) / d
        yint = (e2.c * e1.a - e1.c * e2.a) / d
        # Use the halfedge whose top site is lower in the sweep order.
        if(cmp(e1.reg[1], e2.reg[1]) < 0):
            he = self
            e = e1
        else:
            he = other
            e = e2
        rightOfSite = xint >= e.reg[1].x
        if((rightOfSite and he.pm == Edge.LE) or
           (not rightOfSite and he.pm == Edge.RE)):
            return None
        # create a new site at the point of intersection - this is a new
        # vector event waiting to happen
        return Site(xint, yint)
# ------------------------------------------------------------------
class EdgeList(object):
    """The beach line: a doubly linked list of Halfedges with sentinel ends,
    plus a hash table keyed on x-position for fast approximate lookup."""

    def __init__(self, xmin, xmax, nsites):
        if xmin > xmax:
            xmin, xmax = xmax, xmin
        self.hashsize = int(2 * math.sqrt(nsites + 4))
        self.xmin = xmin
        self.deltax = float(xmax - xmin)
        self.hash = [None] * self.hashsize
        self.leftend = Halfedge()    # sentinel: edge is None
        self.rightend = Halfedge()   # sentinel: edge is None
        self.leftend.right = self.rightend
        self.rightend.left = self.leftend
        self.hash[0] = self.leftend
        self.hash[-1] = self.rightend

    def insert(self, left, he):
        # Splice `he` into the list immediately to the right of `left`.
        he.left = left
        he.right = left.right
        left.right.left = he
        left.right = he

    def delete(self, he):
        # Unlink `he`; mark it DELETED rather than freeing, since the hash
        # table may still reference it (gethash prunes lazily).
        he.left.right = he.right
        he.right.left = he.left
        he.edge = Edge.DELETED

    # Get entry from hash table, pruning any deleted nodes
    def gethash(self, b):
        if(b < 0 or b >= self.hashsize):
            return None
        he = self.hash[b]
        if he is None or he.edge is not Edge.DELETED:
            return he
        # Hash table points to deleted half edge.  Patch as necessary.
        self.hash[b] = None
        return None

    def leftbnd(self, pt):
        """Return the Halfedge immediately to the left of point `pt`."""
        # Use hash table to get close to desired halfedge
        bucket = int(((pt.x - self.xmin) / self.deltax * self.hashsize))
        if(bucket < 0):
            bucket = 0
        if(bucket >= self.hashsize):
            bucket = self.hashsize - 1
        he = self.gethash(bucket)
        if(he is None):
            # Expand outward from the bucket until a live entry is found.
            i = 1
            while True:
                he = self.gethash(bucket - i)
                if (he is not None):
                    break
                he = self.gethash(bucket + i)
                if (he is not None):
                    break
                i += 1
        # Now search linear list of halfedges for the correct one
        if (he is self.leftend) or (he is not self.rightend and he.isPointRightOf(pt)):
            he = he.right
            while he is not self.rightend and he.isPointRightOf(pt):
                he = he.right
            he = he.left
        else:
            he = he.left
            while (he is not self.leftend and not he.isPointRightOf(pt)):
                he = he.left
        # Update hash table and reference counts
        if(bucket > 0 and bucket < self.hashsize - 1):
            self.hash[bucket] = he
        return he
# ------------------------------------------------------------------
class PriorityQueue(object):
    """Circle-event queue: bucketed linked lists of Halfedges ordered by
    (ystar, vertex.x), with `minidx` tracking the lowest non-empty bucket."""

    def __init__(self, ymin, ymax, nsites):
        self.ymin = ymin
        self.deltay = ymax - ymin
        self.hashsize = int(4 * math.sqrt(nsites))
        self.count = 0
        self.minidx = 0
        self.hash = []
        # Each bucket starts with a sentinel Halfedge; real entries chain
        # off it via qnext, kept sorted within the bucket.
        for i in range(self.hashsize):
            self.hash.append(Halfedge())

    def __len__(self):
        return self.count

    def isEmpty(self):
        return self.count == 0

    def insert(self, he, site, offset):
        # Schedule a circle event for `he` at y = site.y + offset.
        he.vertex = site
        he.ystar = site.y + offset
        last = self.hash[self.getBucket(he)]
        next = last.qnext
        # Walk the bucket to keep it sorted by (ystar, vertex.x).
        while((next is not None) and cmp(he, next) > 0):
            last = next
            next = last.qnext
        he.qnext = last.qnext
        last.qnext = he
        self.count += 1

    def delete(self, he):
        # Cancel a pending event; a None vertex marks "not queued".
        if (he.vertex is not None):
            last = self.hash[self.getBucket(he)]
            while last.qnext is not he:
                last = last.qnext
            last.qnext = he.qnext
            self.count -= 1
            he.vertex = None

    def getBucket(self, he):
        # Map ystar into a bucket index, clamped to range; also lowers
        # minidx so getMinPt starts its scan early enough.
        bucket = int(((he.ystar - self.ymin) / self.deltay) * self.hashsize)
        if bucket < 0:
            bucket = 0
        if bucket >= self.hashsize:
            bucket = self.hashsize - 1
        if bucket < self.minidx:
            self.minidx = bucket
        return bucket

    def getMinPt(self):
        # Peek at the location of the earliest pending event.
        while(self.hash[self.minidx].qnext is None):
            self.minidx += 1
        he = self.hash[self.minidx].qnext
        x = he.vertex.x
        y = he.ystar
        return Site(x, y)

    def popMinHalfedge(self):
        # Remove and return the earliest pending event's Halfedge.
        curr = self.hash[self.minidx].qnext
        self.hash[self.minidx].qnext = curr.qnext
        self.count -= 1
        return curr
# ------------------------------------------------------------------
class SiteList(object):
    """Sorted container of input Sites; also tracks their bounding box.

    Sites are copied from `pointList` (any objects with .x/.y), numbered by
    original position, then sorted into sweep order (Site.__lt__).
    """

    def __init__(self, pointList):
        self.__sites = []
        self.__sitenum = 0
        self.__xmin = pointList[0].x
        self.__ymin = pointList[0].y
        self.__xmax = pointList[0].x
        self.__ymax = pointList[0].y
        for i, pt in enumerate(pointList):
            self.__sites.append(Site(pt.x, pt.y, i))
            if pt.x < self.__xmin:
                self.__xmin = pt.x
            if pt.y < self.__ymin:
                self.__ymin = pt.y
            if pt.x > self.__xmax:
                self.__xmax = pt.x
            if pt.y > self.__ymax:
                self.__ymax = pt.y
        self.__sites.sort()

    def setSiteNumber(self, site):
        # Assign the next sequential number to a generated Voronoi vertex.
        site.sitenum = self.__sitenum
        self.__sitenum += 1

    class Iterator(object):
        # NOTE: deliberately returns None when exhausted instead of raising
        # StopIteration - voronoi() relies on that to detect end of sites.
        # (Uses `this` instead of the conventional `self`.)
        def __init__(this, lst):
            this.generator = (s for s in lst)

        def __iter__(this):
            return this

        def __next__(this):
            try:
                return next(this.generator)
            except StopIteration:
                return None

    def iterator(self):
        return SiteList.Iterator(self.__sites)

    def __iter__(self):
        return SiteList.Iterator(self.__sites)

    def __len__(self):
        return len(self.__sites)

    def _getxmin(self):
        return self.__xmin

    def _getymin(self):
        return self.__ymin

    def _getxmax(self):
        return self.__xmax

    def _getymax(self):
        return self.__ymax

    # Read-only bounding-box accessors.
    xmin = property(_getxmin)
    ymin = property(_getymin)
    xmax = property(_getxmax)
    ymax = property(_getymax)
# ------------------------------------------------------------------
def computeVoronoiDiagram(points):
    """ Takes a list of point objects (which must have x and y fields).
    Returns a 3-tuple of:
    (1) a list of 2-tuples, which are the x,y coordinates of the
    Voronoi diagram vertices
    (2) a list of 3-tuples (a,b,c) which are the equations of the
    lines in the Voronoi diagram: a*x + b*y = c
    (3) a list of 3-tuples, (l, v1, v2) representing edges of the
    Voronoi diagram. l is the index of the line, v1 and v2 are
    the indices of the vertices at the end of the edge. If
    v1 or v2 is -1, the line extends to infinity.
    """
    siteList = SiteList(points)
    context = Context()
    # Default Context: just accumulate results, no printing/plotting.
    voronoi(siteList, context)
    return (context.vertices, context.lines, context.edges)
# ------------------------------------------------------------------
def computeDelaunayTriangulation(points):
    """ Takes a list of point objects (which must have x and y fields).
    Returns a list of 3-tuples: the indices of the points that form a
    Delaunay triangle.
    """
    siteList = SiteList(points)
    context = Context()
    # Triangulation mode: the sweep emits Delaunay triples instead of edges.
    context.triangulate = True
    voronoi(siteList, context)
    return context.triangles
# -----------------------------------------------------------------------------
if __name__ == "__main__":
    # Command-line entry point: parse flags (-t triangulate, -p plot,
    # -d debug, -h help), read "x y" point pairs from a file or stdin,
    # then run the sweepline with printing enabled.
    try:
        optlist, args = getopt.getopt(sys.argv[1:], "thdp")
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    doHelp = 0
    c = Context()
    c.doPrint = 1
    for opt in optlist:
        if opt[0] == "-d":
            c.debug = 1
        if opt[0] == "-p":
            c.plot = 1
        if opt[0] == "-t":
            c.triangulate = 1
        if opt[0] == "-h":
            doHelp = 1
    if not doHelp:
        pts = []
        fp = sys.stdin
        if len(args) > 0:
            fp = open(args[0], 'r')
        for line in fp:
            fld = line.split()
            x = float(fld[0])
            y = float(fld[1])
            pts.append(Site(x, y))
        if len(args) > 0:
            fp.close()
    # Note: short-circuit keeps `pts` unreferenced when doHelp is set.
    if doHelp or len(pts) == 0:
        usage()
        sys.exit(2)
    sl = SiteList(pts)
    voronoi(sl, c)
def cmp(a, b):
    """Three-way comparison: -1 if a < b, 0 if they are equal, +1 if a > b.

    Python 3 dropped the builtin cmp(); this module-level replacement
    serves the ordering checks in the sweepline code above.
    """
    if a < b:
        return -1
    if b < a:
        return 1
    return 0
| gpl-2.0 |
google/airdialogue | airdialogue/prepro/standardize_data_lib.py | 1 | 4706 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""library to standardize data."""
from tensorflow.compat.v1.io import gfile
from tqdm import tqdm
import string
import json
printable = set(string.printable)
def add_dot(utt):
    """Strip *utt* and make sure it ends with terminal punctuation.

    A trailing '.' or '?' is kept as-is; anything else gets a '.' appended.
    Returns the empty string for empty/whitespace-only input (the original
    raised IndexError on `strip()[-1]` in that case).
    """
    stripped = utt.strip()  # hoisted: original called .strip() up to three times
    if not stripped:
        return stripped
    if stripped[-1] not in ('.', '?'):
        return stripped + '.'
    return stripped
def standardize_message(utterances, time_stamp=None):
    """this function combines adjacent utterances that belong to the same talker
    into one. Sometimes time_stamp could be None.
    For example
    <t1> how are you. <t2> I am good. <t2> And you? <eod> <t1>
    will be combined into
    <t1> how are you. <t2> I am good. And you? <eod> <t1>

    Each utterance is expected to look like "talker: text".  Empty
    utterances and utterances with empty text are dropped.  When a turn is
    merged into the previous one, its timestamp overwrites the previous
    turn's timestamp (the merged turn keeps the LAST timestamp).
    Returns (new_utterances, new_timestamps); new_timestamps is empty when
    time_stamp is falsy.
    """
    new_utterance = []
    new_time_stamp = []
    for i, utt in enumerate(utterances):
        if len(utt.strip()) == 0:
            continue
        # Split "talker: text"; re-join in case the text itself contains ':'.
        utts = utt.split(':')
        talker = utts[0]
        sentence = ':'.join(utts[1:]).strip()
        if len(sentence) == 0:
            continue
        if len(new_utterance) == 0 or talker != new_utterance[-1].split(':')[0]:
            # New speaker (or first turn): start a fresh combined utterance.
            new_utterance.append(add_dot(utt))
            if time_stamp:
                new_time_stamp.append(time_stamp[i])
        else:
            # Same speaker as previous turn: append the text only.
            new_utterance[-1] += ' ' + add_dot(sentence)
            if time_stamp:
                new_time_stamp[-1] = time_stamp[i]
    return new_utterance, new_time_stamp
def delete_non_ascii(s):
    """Return *s* with every character outside string.printable removed."""
    allowed = set(string.printable)
    return ''.join(ch for ch in s if ch in allowed)
def load_and_drop(data_file, kb_file, drop_incorrect=True, verbose=False):
    """ this function filter incorrect samples without standardization.

    Reads `data_file` and `kb_file` line-by-line in lockstep (one JSON
    object per line), strips non-ASCII characters, and keeps a sample only
    when drop_incorrect is False, or the record has no 'correct_sample'
    field, or that field is truthy.  Returns (loaded_data, loaded_kb),
    parallel lists of parsed dicts.
    """
    fin_data = gfile.GFile(data_file)
    fin_kb = gfile.GFile(kb_file)
    total_in_file = 0
    loaded_data = []
    loaded_kb = []
    for line1 in tqdm(fin_data, desc='loading data'):
        # Lines shorter than 10 chars are treated as blank/garbage.
        # NOTE(review): skipping a short data line without consuming the
        # matching kb line could desynchronize the two files - confirm the
        # inputs never contain such lines mid-file.
        if len(line1.strip()) < 10:
            continue
        line2 = fin_kb.readline()
        if len(line2.strip()) < 10:
            continue
        line1 = delete_non_ascii(line1)
        line2 = delete_non_ascii(line2)
        data_obj = json.loads(line1)
        kb_obj = json.loads(line2)
        if (not drop_incorrect) or (
                'correct_sample' not in data_obj) or data_obj['correct_sample']:
            loaded_data.append(data_obj)
            loaded_kb.append(kb_obj)
        total_in_file += 1
    if verbose:
        print(('loaded: ', len(loaded_data), '/', total_in_file, '=',
               len(loaded_data) * 1.0 / total_in_file))
    return loaded_data, loaded_kb
def load_and_drop_stream(data_file,
                         kb_file,
                         drop_incorrect=True,
                         verbose=False):
    """ this function filter incorrect samples without standardization.

    Generator variant of load_and_drop: yields (data_obj, kb_obj) pairs one
    at a time instead of materializing lists.  The kb file is optional -
    when it does not exist, kb_obj is yielded as None.
    """
    if verbose:
        print('loading stream')
    fin_data = gfile.GFile(data_file)
    if gfile.exists(kb_file):
        fin_kb = gfile.GFile(kb_file)
    else:
        fin_kb = None
    if verbose:
        print('gfile loaded: ', fin_data)
    for line1 in fin_data:
        if verbose:
            print(line1)
        # Lines shorter than 10 chars are treated as blank/garbage.
        if len(line1.strip()) < 10:
            continue
        line1 = delete_non_ascii(line1)
        data_obj = json.loads(line1)
        if fin_kb:
            line2 = fin_kb.readline()
            if len(line2.strip()) < 10:
                continue
            line2 = delete_non_ascii(line2)
            kb_obj = json.loads(line2)
        else:
            kb_obj = None
        # Same filtering rule as load_and_drop.
        if (not drop_incorrect) or (
                'correct_sample' not in data_obj) or data_obj['correct_sample']:
            yield data_obj, kb_obj
def standardize_and_drop(data_file,
                         kb_file,
                         drop_incorrect=True,
                         verbose=False):
    """Load sample/kb pairs, drop incorrect ones, and standardize dialogues.

    Each sample's dialogue (and matching timestamps, when present) is
    rewritten in place via standardize_message.
    """
    samples, kb_entries = load_and_drop(data_file, kb_file, drop_incorrect,
                                        verbose)
    for sample in tqdm(samples, desc='standardizing data'):
        timestamps = sample.get('timestamps')
        dialogue = sample.get('dialogue')
        if not dialogue:
            continue
        new_dialogue, new_timestamps = standardize_message(dialogue,
                                                           timestamps)
        sample['dialogue'] = new_dialogue
        if new_timestamps:
            sample['timestamps'] = new_timestamps
            assert len(sample['dialogue']) == len(sample['timestamps'])
    return samples, kb_entries
| apache-2.0 |
shaftoe/home-assistant | homeassistant/components/sensor/fitbit.py | 2 | 15144 | """
Support for the Fitbit API.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.fitbit/
"""
import os
import json
import logging
import datetime
import time
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.helpers.entity import Entity
from homeassistant.loader import get_component
from homeassistant.util import Throttle
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['fitbit==0.2.3']
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
ATTR_ACCESS_TOKEN = 'access_token'
ATTR_REFRESH_TOKEN = 'refresh_token'
ATTR_CLIENT_ID = 'client_id'
ATTR_CLIENT_SECRET = 'client_secret'
ATTR_LAST_SAVED_AT = 'last_saved_at'
CONF_MONITORED_RESOURCES = 'monitored_resources'
DEPENDENCIES = ['http']
FITBIT_AUTH_CALLBACK_PATH = '/auth/fitbit/callback'
FITBIT_AUTH_START = '/auth/fitbit'
FITBIT_CONFIG_FILE = 'fitbit.conf'
FITBIT_DEFAULT_RESOURCES = ['activities/steps']
ICON = 'mdi:walk'
MIN_TIME_BETWEEN_UPDATES = datetime.timedelta(minutes=30)
DEFAULT_CONFIG = {
'client_id': 'CLIENT_ID_HERE',
'client_secret': 'CLIENT_SECRET_HERE'
}
FITBIT_RESOURCES_LIST = {
'activities/activityCalories': 'cal',
'activities/calories': 'cal',
'activities/caloriesBMR': 'cal',
'activities/distance': '',
'activities/elevation': '',
'activities/floors': 'floors',
'activities/heart': 'bpm',
'activities/minutesFairlyActive': 'minutes',
'activities/minutesLightlyActive': 'minutes',
'activities/minutesSedentary': 'minutes',
'activities/minutesVeryActive': 'minutes',
'activities/steps': 'steps',
'activities/tracker/activityCalories': 'cal',
'activities/tracker/calories': 'cal',
'activities/tracker/distance': '',
'activities/tracker/elevation': '',
'activities/tracker/floors': 'floors',
'activities/tracker/minutesFairlyActive': 'minutes',
'activities/tracker/minutesLightlyActive': 'minutes',
'activities/tracker/minutesSedentary': 'minutes',
'activities/tracker/minutesVeryActive': 'minutes',
'activities/tracker/steps': 'steps',
'body/bmi': 'BMI',
'body/fat': '%',
'sleep/awakeningsCount': 'times awaken',
'sleep/efficiency': '%',
'sleep/minutesAfterWakeup': 'minutes',
'sleep/minutesAsleep': 'minutes',
'sleep/minutesAwake': 'minutes',
'sleep/minutesToFallAsleep': 'minutes',
'sleep/startTime': 'start time',
'sleep/timeInBed': 'time in bed',
'body/weight': ''
}
FITBIT_MEASUREMENTS = {
'en_US': {
'duration': 'ms',
'distance': 'mi',
'elevation': 'ft',
'height': 'in',
'weight': 'lbs',
'body': 'in',
'liquids': 'fl. oz.',
'blood glucose': 'mg/dL',
},
'en_GB': {
'duration': 'milliseconds',
'distance': 'kilometers',
'elevation': 'meters',
'height': 'centimeters',
'weight': 'stone',
'body': 'centimeters',
'liquids': 'milliliters',
'blood glucose': 'mmol/L'
},
'metric': {
'duration': 'milliseconds',
'distance': 'kilometers',
'elevation': 'meters',
'height': 'centimeters',
'weight': 'kilograms',
'body': 'centimeters',
'liquids': 'milliliters',
'blood glucose': 'mmol/L'
}
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_MONITORED_RESOURCES, default=FITBIT_DEFAULT_RESOURCES):
vol.All(cv.ensure_list, [vol.In(FITBIT_RESOURCES_LIST)]),
})
def config_from_file(filename, config=None):
    """Small configuration file management function."""
    if config:
        # Write mode: persist *config* as JSON and echo it back.
        try:
            with open(filename, 'w') as handle:
                handle.write(json.dumps(config))
        except IOError as error:
            _LOGGER.error("Saving config file failed: %s", error)
            return False
        return config
    # Read mode: missing file means "no config yet", not an error.
    if not os.path.isfile(filename):
        return {}
    try:
        with open(filename, 'r') as handle:
            return json.loads(handle.read())
    except IOError as error:
        _LOGGER.error("Reading config file failed: %s", error)
        # This won't work yet
        return False
def request_app_setup(hass, config, add_devices, config_path,
                      discovery_info=None):
    """Assist user with configuring the Fitbit dev application.

    Shows a configurator card that walks the user through creating a
    Fitbit developer app and saving its client id/secret into
    fitbit.conf; once the user confirms, platform setup is retried.
    """
    configurator = get_component('configurator')

    # pylint: disable=unused-argument
    def fitbit_configuration_callback(callback_data):
        """Handle configuration updates."""
        config_path = hass.config.path(FITBIT_CONFIG_FILE)
        if os.path.isfile(config_path):
            config_file = config_from_file(config_path)
            if config_file == DEFAULT_CONFIG:
                # The user confirmed without replacing the placeholder
                # credentials; surface the error on the configurator card.
                # Bug fix: this was previously a 2-tuple of strings (a
                # stray comma instead of implicit concatenation), not a
                # single message string.
                error_msg = ("You didn't correctly modify fitbit.conf,"
                             " please try again")
                configurator.notify_errors(_CONFIGURING['fitbit'], error_msg)
            else:
                setup_platform(hass, config, add_devices, discovery_info)
        else:
            setup_platform(hass, config, add_devices, discovery_info)

    start_url = "{}{}".format(hass.config.api.base_url,
                              FITBIT_AUTH_CALLBACK_PATH)

    description = """Please create a Fitbit developer app at
                       https://dev.fitbit.com/apps/new.
                       For the OAuth 2.0 Application Type choose Personal.
                       Set the Callback URL to {}.
                       They will provide you a Client ID and secret.
                       These need to be saved into the file located at: {}.
                       Then come back here and hit the below button.
                       """.format(start_url, config_path)

    submit = "I have saved my Client ID and Client Secret into fitbit.conf."

    _CONFIGURING['fitbit'] = configurator.request_config(
        hass, 'Fitbit', fitbit_configuration_callback,
        description=description, submit_caption=submit,
        description_image="/static/images/config_fitbit_app.png"
    )
def request_oauth_completion(hass):
    """Request user complete Fitbit OAuth2 flow.

    Shows a configurator card pointing at the local OAuth start URL; if a
    card is already pending, an error is displayed on it instead.
    """
    configurator = get_component('configurator')
    if "fitbit" in _CONFIGURING:
        # A previous attempt is still pending; flag it as failed.
        configurator.notify_errors(
            _CONFIGURING['fitbit'], "Failed to register, please try again.")
        return

    # pylint: disable=unused-argument
    def fitbit_configuration_callback(callback_data):
        """Handle configuration updates."""
        # Intentionally empty: the OAuth callback view finishes setup.

    start_url = '{}{}'.format(hass.config.api.base_url, FITBIT_AUTH_START)

    description = "Please authorize Fitbit by visiting {}".format(start_url)

    _CONFIGURING['fitbit'] = configurator.request_config(
        hass, 'Fitbit', fitbit_configuration_callback,
        description=description,
        submit_caption="I have authorized Fitbit."
    )
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Fitbit sensor.

    Flow: ensure fitbit.conf exists and holds real app credentials (else
    start the app-setup configurator); if OAuth tokens are already saved,
    create one FitbitSensor per monitored resource; otherwise start the
    OAuth2 authorization flow.
    """
    config_path = hass.config.path(FITBIT_CONFIG_FILE)
    if os.path.isfile(config_path):
        config_file = config_from_file(config_path)
        if config_file == DEFAULT_CONFIG:
            # File exists but still holds the placeholder credentials.
            request_app_setup(
                hass, config, add_devices, config_path, discovery_info=None)
            return False
    else:
        # First run: write the placeholder config and ask the user to fill it.
        config_file = config_from_file(config_path, DEFAULT_CONFIG)
        request_app_setup(
            hass, config, add_devices, config_path, discovery_info=None)
        return False

    if "fitbit" in _CONFIGURING:
        # We got this far, so any pending configurator card is done.
        get_component('configurator').request_done(_CONFIGURING.pop("fitbit"))

    import fitbit

    access_token = config_file.get(ATTR_ACCESS_TOKEN)
    refresh_token = config_file.get(ATTR_REFRESH_TOKEN)
    if None not in (access_token, refresh_token):
        authd_client = fitbit.Fitbit(config_file.get(ATTR_CLIENT_ID),
                                     config_file.get(ATTR_CLIENT_SECRET),
                                     access_token=access_token,
                                     refresh_token=refresh_token)

        # Tokens older than an hour are assumed expired; refresh eagerly.
        if int(time.time()) - config_file.get(ATTR_LAST_SAVED_AT, 0) > 3600:
            authd_client.client.refresh_token()

        # Pick a measurement locale; anything non-UK falls back to the
        # HA unit system (metric vs en_US).
        authd_client.system = authd_client.user_profile_get()["user"]["locale"]
        if authd_client.system != 'en_GB':
            if hass.config.units.is_metric:
                authd_client.system = 'metric'
            else:
                authd_client.system = 'en_US'

        dev = []
        for resource in config.get(CONF_MONITORED_RESOURCES):
            dev.append(FitbitSensor(
                authd_client, config_path, resource,
                hass.config.units.is_metric))
        add_devices(dev)

    else:
        # No tokens yet: register the OAuth redirect + callback view and
        # ask the user to authorize.
        oauth = fitbit.api.FitbitOauth2Client(
            config_file.get(ATTR_CLIENT_ID),
            config_file.get(ATTR_CLIENT_SECRET))

        redirect_uri = '{}{}'.format(hass.config.api.base_url,
                                     FITBIT_AUTH_CALLBACK_PATH)

        fitbit_auth_start_url, _ = oauth.authorize_token_url(
            redirect_uri=redirect_uri,
            scope=['activity', 'heartrate', 'nutrition', 'profile',
                   'settings', 'sleep', 'weight'])

        hass.http.register_redirect(FITBIT_AUTH_START, fitbit_auth_start_url)
        hass.http.register_view(FitbitAuthCallbackView(
            config, add_devices, oauth))

        request_oauth_completion(hass)
class FitbitAuthCallbackView(HomeAssistantView):
    """Handle OAuth finish callback requests."""

    # Fitbit redirects here before any HA auth exists, so no auth required.
    requires_auth = False
    url = '/auth/fitbit/callback'
    name = 'auth:fitbit:callback'

    def __init__(self, config, add_devices, oauth):
        """Initialize the OAuth callback view.

        config/add_devices are held so platform setup can be re-run once
        tokens are obtained; oauth is the FitbitOauth2Client mid-flow.
        """
        self.config = config
        self.add_devices = add_devices
        self.oauth = oauth

    @callback
    def get(self, request):
        """Finish OAuth callback request.

        Exchanges the ?code= for tokens, persists them to fitbit.conf,
        schedules platform setup, and returns a small HTML status page.
        """
        from oauthlib.oauth2.rfc6749.errors import MismatchingStateError
        from oauthlib.oauth2.rfc6749.errors import MissingTokenError

        hass = request.app['hass']
        data = request.GET

        response_message = """Fitbit has been successfully authorized!
        You can close this window now!"""

        if data.get('code') is not None:
            redirect_uri = '{}{}'.format(
                hass.config.api.base_url, FITBIT_AUTH_CALLBACK_PATH)

            try:
                self.oauth.fetch_access_token(data.get('code'), redirect_uri)
            except MissingTokenError as error:
                _LOGGER.error("Missing token: %s", error)
                response_message = """Something went wrong when
                attempting authenticating with Fitbit. The error
                encountered was {}. Please try again!""".format(error)
            except MismatchingStateError as error:
                _LOGGER.error("Mismatched state, CSRF error: %s", error)
                response_message = """Something went wrong when
                attempting authenticating with Fitbit. The error
                encountered was {}. Please try again!""".format(error)
        else:
            # Fitbit redirected back without an authorization code.
            _LOGGER.error("Unknown error when authing")
            response_message = """Something went wrong when
            attempting authenticating with Fitbit.
            An unknown error occurred. Please try again!
            """

        html_response = """<html><head><title>Fitbit Auth</title></head>
        <body><h1>{}</h1></body></html>""".format(response_message)

        # Persist the freshly fetched tokens so they survive restarts.
        config_contents = {
            ATTR_ACCESS_TOKEN: self.oauth.token['access_token'],
            ATTR_REFRESH_TOKEN: self.oauth.token['refresh_token'],
            ATTR_CLIENT_ID: self.oauth.client_id,
            ATTR_CLIENT_SECRET: self.oauth.client_secret
        }
        if not config_from_file(hass.config.path(FITBIT_CONFIG_FILE),
                                config_contents):
            _LOGGER.error("Failed to save config file")

        hass.async_add_job(setup_platform, hass, self.config, self.add_devices)

        return html_response
class FitbitSensor(Entity):
    """Implementation of a Fitbit sensor."""

    def __init__(self, client, config_path, resource_type, is_metric):
        """Initialize the Fitbit sensor.

        client: authenticated fitbit.Fitbit API client.
        config_path: path of fitbit.conf; rewritten on every update so
            refreshed OAuth tokens survive restarts.
        resource_type: Fitbit time-series resource, e.g. 'activities/steps'.
        is_metric: HA unit-system flag, used when the resource's unit
            depends on locale.
        """
        self.client = client
        self.config_path = config_path
        self.resource_type = resource_type
        # Derive a human-friendly entity name from the resource path.
        pretty_resource = self.resource_type.replace('activities/', '')
        pretty_resource = pretty_resource.replace('/', ' ')
        pretty_resource = pretty_resource.title()
        if pretty_resource == 'Body Bmi':
            pretty_resource = 'BMI'
        elif pretty_resource == 'Heart':
            pretty_resource = 'Resting Heart Rate'
        self._name = pretty_resource
        unit_type = FITBIT_RESOURCES_LIST[self.resource_type]
        if unit_type == "":
            # Empty unit means it is locale-dependent (distance, weight...).
            split_resource = self.resource_type.split('/')
            try:
                measurement_system = FITBIT_MEASUREMENTS[self.client.system]
            except KeyError:
                if is_metric:
                    measurement_system = FITBIT_MEASUREMENTS['metric']
                else:
                    measurement_system = FITBIT_MEASUREMENTS['en_US']
            unit_type = measurement_system[split_resource[-1]]
        self._unit_of_measurement = unit_type
        self._state = 0
        self.update()

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return ICON

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data from the Fitbit API and update the states."""
        # The API nests results under the resource path with '/' -> '-'.
        container = self.resource_type.replace("/", "-")
        response = self.client.time_series(self.resource_type, period='7d')
        self._state = response[container][-1].get('value')
        if self.resource_type == 'activities/heart':
            # Heart data nests the interesting number one level deeper.
            self._state = response[container][-1]. \
                get('value').get('restingHeartRate')
        # Persist possibly-refreshed OAuth tokens back to fitbit.conf.
        config_contents = {
            ATTR_ACCESS_TOKEN: self.client.client.token['access_token'],
            ATTR_REFRESH_TOKEN: self.client.client.token['refresh_token'],
            ATTR_CLIENT_ID: self.client.client.client_id,
            ATTR_CLIENT_SECRET: self.client.client.client_secret,
            ATTR_LAST_SAVED_AT: int(time.time())
        }
        if not config_from_file(self.config_path, config_contents):
            _LOGGER.error("Failed to save config file")
| apache-2.0 |
kawasaki2013/python-for-android-x86 | python-modules/twisted/twisted/python/compat.py | 59 | 5773 | # -*- test-case-name: twisted.test.test_compat -*-
#
# Copyright (c) 2001-2007 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Compatibility module to provide backwards compatibility for useful Python
features.
This is mainly for use of internal Twisted code. We encourage you to use
the latest version of Python directly from your code, if possible.
"""
import sys, string, socket, struct
def inet_pton(af, addr):
    """Convert a presentation-format IP address to packed binary form.

    Fallback for platforms whose socket module lacks inet_pton.  AF_INET
    delegates to inet_aton; AF_INET6 is parsed by hand, supporting '::'
    elision and a trailing dotted-quad IPv4 component.

    Raises ValueError for syntactically invalid addresses and
    socket.error(97) for unsupported address families.
    """
    if af == socket.AF_INET:
        return socket.inet_aton(addr)
    elif af == getattr(socket, 'AF_INET6', 'AF_INET6'):
        # Bug fix: the original joined the leaked list-comprehension
        # variable `x` in the error message, which reported only one
        # character on Python 2 and raises NameError on Python 3 (where
        # the comprehension variable does not leak).  Bind the offending
        # characters explicitly instead.
        illegal = [x for x in addr if x not in string.hexdigits + ':.']
        if illegal:
            raise ValueError("Illegal characters: %r" % (''.join(illegal),))

        parts = addr.split(':')
        elided = parts.count('')
        ipv4Component = '.' in parts[-1]

        if len(parts) > (8 - ipv4Component) or elided > 3:
            raise ValueError("Syntactically invalid address")

        if elided == 3:
            # The address is exactly '::' (all zeros).
            return '\x00' * 16

        if elided:
            # Expand the '::' run into the right number of zero groups.
            zeros = ['0'] * (8 - len(parts) - ipv4Component + elided)

            if addr.startswith('::'):
                parts[:2] = zeros
            elif addr.endswith('::'):
                parts[-2:] = zeros
            else:
                idx = parts.index('')
                parts[idx:idx+1] = zeros

            if len(parts) != 8 - ipv4Component:
                raise ValueError("Syntactically invalid address")
        else:
            if len(parts) != (8 - ipv4Component):
                raise ValueError("Syntactically invalid address")

        if ipv4Component:
            if parts[-1].count('.') != 3:
                raise ValueError("Syntactically invalid address")
            # Re-encode the dotted quad as two trailing 16-bit groups.
            rawipv4 = socket.inet_aton(parts[-1])
            unpackedipv4 = struct.unpack('!HH', rawipv4)
            parts[-1:] = [hex(x)[2:] for x in unpackedipv4]

        parts = [int(x, 16) for x in parts]
        return struct.pack('!8H', *parts)
    else:
        raise socket.error(97, 'Address family not supported by protocol')
def inet_ntop(af, addr):
    """Convert a packed binary IP address to presentation format.

    Pure-Python fallback for platforms without socket.inet_ntop.  For
    AF_INET6 the longest run of zero groups is collapsed to '::'.

    NOTE(review): a run of length 1 is also compressed, which RFC 5952
    discourages but matches this module's historical behaviour.
    """
    if af == socket.AF_INET:
        return socket.inet_ntoa(addr)
    elif af == socket.AF_INET6:
        if len(addr) != 16:
            raise ValueError("address length incorrect")
        parts = struct.unpack('!8H', addr)
        # Scan for the longest run of zero groups: curBase/curLen track
        # the run in progress, bestBase/bestLen the best one seen so far.
        curBase = bestBase = None
        for i in range(8):
            if not parts[i]:
                if curBase is None:
                    curBase = i
                    curLen = 0
                curLen += 1
            else:
                if curBase is not None:
                    if bestBase is None or curLen > bestLen:
                        bestBase = curBase
                        bestLen = curLen
                    curBase = None
        # Account for a zero run that extends to the end of the address.
        if curBase is not None and (bestBase is None or curLen > bestLen):
            bestBase = curBase
            bestLen = curLen
        parts = [hex(x)[2:] for x in parts]
        if bestBase is not None:
            # Collapse the zero run to a single empty slot so that the
            # join below produces '::'.
            parts[bestBase:bestBase + bestLen] = ['']
        # Leading/trailing runs need one extra empty slot for '::'.
        if parts[0] == '':
            parts.insert(0, '')
        if parts[-1] == '':
            parts.insert(len(parts) - 1, '')
        return ':'.join(parts)
    else:
        raise socket.error(97, 'Address family not supported by protocol')
# Probe for native IPv6 text<->binary support; if the platform socket
# module lacks it, install the pure-Python fallbacks above together with
# a string sentinel for AF_INET6.
try:
    socket.inet_pton(socket.AF_INET6, "::")
except (AttributeError, NameError, socket.error):
    socket.inet_pton = inet_pton
    socket.inet_ntop = inet_ntop
    socket.AF_INET6 = 'AF_INET6'

adict = dict  # backwards-compatibility alias kept for old importers
# OpenSSL/__init__.py imports OpenSSL.tsafe.  OpenSSL/tsafe.py imports
# threading.  threading imports thread.  All to make this stupid threadsafe
# version of its Connection class.  We don't even care about threadsafe
# Connections.  In the interest of not screwing over some crazy person
# calling into OpenSSL from another thread and trying to use Twisted's SSL
# support, we don't totally destroy OpenSSL.tsafe, but we will replace it
# with our own version which imports threading as late as possible.

# NOTE(review): Python 2-only code (`exec` statement, `apply`); each
# wrapped method is generated by the exec template below, which simply
# serializes calls on the underlying SSL connection with an RLock.
class tsafe(object):
    class Connection:
        """
        OpenSSL.tsafe.Connection, defined in such a way as to not blow.
        """
        __module__ = 'OpenSSL.tsafe'

        def __init__(self, *args):
            # Import lazily so merely importing this module does not pull
            # in OpenSSL or threading.
            from OpenSSL import SSL as _ssl
            self._ssl_conn = apply(_ssl.Connection, args)
            from threading import _RLock
            self._lock = _RLock()

        # Generate one lock-guarded delegating method per SSL API name.
        for f in ('get_context', 'pending', 'send', 'write', 'recv',
                  'read', 'renegotiate', 'bind', 'listen', 'connect',
                  'accept', 'setblocking', 'fileno', 'shutdown',
                  'close', 'get_cipher_list', 'getpeername',
                  'getsockname', 'getsockopt', 'setsockopt',
                  'makefile', 'get_app_data', 'set_app_data',
                  'state_string', 'sock_shutdown',
                  'get_peer_certificate', 'want_read', 'want_write',
                  'set_connect_state', 'set_accept_state',
                  'connect_ex', 'sendall'):
            exec """def %s(self, *args):
                self._lock.acquire()
                try:
                    return apply(self._ssl_conn.%s, args)
                finally:
                    self._lock.release()\n""" % (f, f)

# Shadow the real OpenSSL.tsafe module with the lazy version above.
sys.modules['OpenSSL.tsafe'] = tsafe
import operator

# operator.attrgetter appeared in Python 2.4; provide a minimal
# substitute (single dotted-name lookup only) on older interpreters.
try:
    operator.attrgetter
except AttributeError:
    class attrgetter(object):
        def __init__(self, name):
            self.name = name
        def __call__(self, obj):
            return getattr(obj, self.name)
    operator.attrgetter = attrgetter
# set/frozenset became builtins in Python 2.4; fall back to the old
# `sets` module on earlier interpreters.
try:
    set = set
except NameError:
    from sets import Set as set

try:
    frozenset = frozenset
except NameError:
    from sets import ImmutableSet as frozenset

# reduce moved into functools for Python 3; prefer that location.
try:
    from functools import reduce
except ImportError:
    reduce = reduce
| apache-2.0 |
Linkid/fofix | fofix/tests/game/test_menu.py | 3 | 1568 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# FoFiX
# Copyright (C) 2017 FoFiX team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import unittest
from fofix.core import Config
from fofix.core import Version
from fofix.core.GameEngine import GameEngine
from fofix.game.Menu import Menu
def my_callback():
    """Trivial callback used as a menu-choice handler in these tests."""
    message = "I'm a callback"
    print(message)
class MenuTest(unittest.TestCase):
    """Smoke tests for constructing a fofix Menu with one choice."""

    def setUp(self):
        # set config file
        config_file = Version.PROGRAM_UNIXSTYLE_NAME + ".ini"
        self.config = Config.load(config_file, setAsDefault=True)
        # set choices
        choices = [
            ("Choice 1", my_callback),
        ]
        # init the engine
        engine = GameEngine(self.config)
        # init the menu
        self.menu = Menu(engine, choices)

    def test_init(self):
        # The menu should keep the supplied choices and start on the
        # first entry.
        self.assertGreater(len(self.menu.choices), 0)
        self.assertEqual(self.menu.currentIndex, 0)
| gpl-2.0 |
Immortalin/python-for-android | python3-alpha/python3-src/Lib/xml/etree/ElementPath.py | 785 | 9477 | #
# ElementTree
# $Id: ElementPath.py 3375 2008-02-13 08:05:08Z fredrik $
#
# limited xpath support for element trees
#
# history:
# 2003-05-23 fl created
# 2003-05-28 fl added support for // etc
# 2003-08-27 fl fixed parsing of periods in element names
# 2007-09-10 fl new selection engine
# 2007-09-12 fl fixed parent selector
# 2007-09-13 fl added iterfind; changed findall to return a list
# 2007-11-30 fl added namespaces support
# 2009-10-30 fl added child element value filter
#
# Copyright (c) 2003-2009 by Fredrik Lundh. All rights reserved.
#
# fredrik@pythonware.com
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2009 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/psf/license for licensing details.
##
# Implementation module for XPath support. There's usually no reason
# to import this module directly; the <b>ElementTree</b> does this for
# you, if needed.
##
import re
# Tokenizer pattern: group 1 captures operators and quoted strings
# (quotes, '::', '/', '//', '..', '()', and single punctuation), group 2
# captures (possibly {uri}-qualified) names; whitespace matches neither
# group and is therefore skipped.
xpath_tokenizer_re = re.compile(
    "("
    "'[^']*'|\"[^\"]*\"|"
    "::|"
    "//?|"
    "\.\.|"
    "\(\)|"
    "[/.*:\[\]\(\)@=])|"
    "((?:\{[^}]+\})?[^/\[\]\(\)@=\s]+)|"
    "\s+"
    )
def xpath_tokenizer(pattern, namespaces=None):
    """Yield (operator, tag) tokens for *pattern*, expanding any
    'prefix:name' tags to '{uri}name' via the *namespaces* map.

    Raises SyntaxError when a prefix is not found in the map.
    """
    for op, tag in xpath_tokenizer_re.findall(pattern):
        if tag and tag[0] != "{" and ":" in tag:
            prefix, local = tag.split(":", 1)
            try:
                if not namespaces:
                    raise KeyError
                yield op, "{%s}%s" % (namespaces[prefix], local)
            except KeyError:
                raise SyntaxError("prefix %r not found in prefix map" % prefix)
        else:
            yield op, tag
def get_parent_map(context):
    """Return a child->parent map for the tree rooted at context.root,
    building and caching it on the context on first use."""
    if context.parent_map is None:
        context.parent_map = {
            child: parent
            for parent in context.root.iter()
            for child in parent
        }
    return context.parent_map
def prepare_child(next, token):
    """Compile a child step: select direct children with a matching tag."""
    tag = token[1]
    def select(context, result):
        for parent in result:
            for child in parent:
                if child.tag == tag:
                    yield child
    return select
def prepare_star(next, token):
    """Compile a '*' step: select every direct child of each node."""
    def select(context, result):
        for parent in result:
            for child in parent:
                yield child
    return select
def prepare_self(next, token):
    """Compile a '.' step: select the context nodes themselves."""
    def select(context, result):
        return iter(result)
    return select
def prepare_descendant(next, token):
    """Compile a '//' step: select all descendants matching the next
    token's tag ('*' selects every descendant).  Consumes one token."""
    op, value = next()
    if op == "*":
        tag = "*"
    elif not op:
        tag = value
    else:
        raise SyntaxError("invalid descendant")
    def select(context, result):
        for base in result:
            for node in base.iter(tag):
                # iter() includes the base element itself; exclude it.
                if node is not base:
                    yield node
    return select
def prepare_parent(next, token):
    """Compile a '..' step: select each context node's parent, once."""
    def select(context, result):
        # FIXME: raise error if .. is applied at toplevel?
        parent_map = get_parent_map(context)
        seen = {}
        for node in result:
            if node in parent_map:
                parent = parent_map[node]
                if parent not in seen:
                    seen[parent] = None
                    yield parent
    return select
def prepare_predicate(next, token):
    """Compile a '[...]' predicate step.

    Tokens up to the closing ']' are folded into a *signature* string
    ('@' for attribute, '-' for a name/number, "='" for a comparison,
    '()' for a call) which is then pattern-matched to pick one of the
    supported predicate forms: [@attr], [@attr='v'], [tag], [tag='v'],
    [index], [last()], [last()-index].
    """
    # FIXME: replace with real parser!!! refs:
    # http://effbot.org/zone/simple-iterator-parser.htm
    # http://javascript.crockford.com/tdop/tdop.html
    signature = []
    predicate = []
    while 1:
        token = next()
        if token[0] == "]":
            break
        if token[0] and token[0][:1] in "'\"":
            # Quoted literal: strip the quotes, keep a "'" marker.
            token = "'", token[0][1:-1]
        signature.append(token[0] or "-")
        predicate.append(token[1])
    signature = "".join(signature)
    # use signature to determine predicate type
    if signature == "@-":
        # [@attribute] predicate
        key = predicate[1]
        def select(context, result):
            for elem in result:
                if elem.get(key) is not None:
                    yield elem
        return select
    if signature == "@-='":
        # [@attribute='value']
        key = predicate[1]
        value = predicate[-1]
        def select(context, result):
            for elem in result:
                if elem.get(key) == value:
                    yield elem
        return select
    if signature == "-" and not re.match("\d+$", predicate[0]):
        # [tag]
        tag = predicate[0]
        def select(context, result):
            for elem in result:
                if elem.find(tag) is not None:
                    yield elem
        return select
    if signature == "-='" and not re.match("\d+$", predicate[0]):
        # [tag='value']
        tag = predicate[0]
        value = predicate[-1]
        def select(context, result):
            for elem in result:
                for e in elem.findall(tag):
                    if "".join(e.itertext()) == value:
                        yield elem
                        break
        return select
    if signature == "-" or signature == "-()" or signature == "-()-":
        # [index] or [last()] or [last()-index]
        if signature == "-":
            # XPath positions are 1-based; Python indexing is 0-based.
            index = int(predicate[0]) - 1
        else:
            if predicate[0] != "last":
                raise SyntaxError("unsupported function")
            if signature == "-()-":
                try:
                    index = int(predicate[2]) - 1
                except ValueError:
                    raise SyntaxError("unsupported expression")
            else:
                index = -1
        def select(context, result):
            parent_map = get_parent_map(context)
            for elem in result:
                try:
                    parent = parent_map[elem]
                    # FIXME: what if the selector is "*" ?
                    elems = list(parent.findall(elem.tag))
                    if elems[index] is elem:
                        yield elem
                except (IndexError, KeyError):
                    pass
        return select
    raise SyntaxError("invalid predicate")
# Dispatch table mapping the operator token that introduces a path step
# to the function that compiles that step into a select() callable.
ops = {
    "": prepare_child,
    "*": prepare_star,
    ".": prepare_self,
    "..": prepare_parent,
    "//": prepare_descendant,
    "[": prepare_predicate,
    }

# Cache of compiled path -> selector list (size-bounded in iterfind).
_cache = {}

class _SelectorContext:
    # Lazily-built child->parent map (see get_parent_map), shared by all
    # selector steps evaluated during one find call.
    parent_map = None
    def __init__(self, root):
        self.root = root
# --------------------------------------------------------------------
##
# Generate all matching objects.
def iterfind(elem, path, namespaces=None):
    """Compile *path* (with caching) and generate all elements below
    *elem* that match it.

    NOTE(review): the cache is keyed on *path* alone, so the same path
    string used with different *namespaces* maps would reuse the first
    compilation — confirm whether callers can hit this.
    """
    # compile selector pattern
    if path[-1:] == "/":
        path = path + "*" # implicit all (FIXME: keep this?)
    try:
        selector = _cache[path]
    except KeyError:
        # Keep the cache small; wholesale clearing is cheap enough here.
        if len(_cache) > 100:
            _cache.clear()
        if path[:1] == "/":
            raise SyntaxError("cannot use absolute path on element")
        next = iter(xpath_tokenizer(path, namespaces)).__next__
        token = next()
        selector = []
        while 1:
            try:
                selector.append(ops[token[0]](next, token))
            except StopIteration:
                raise SyntaxError("invalid path")
            try:
                token = next()
                if token[0] == "/":
                    # '/' only separates steps; skip to the next token.
                    token = next()
            except StopIteration:
                break
        _cache[path] = selector
    # execute selector pattern: each step filters/expands the result set.
    result = [elem]
    context = _SelectorContext(elem)
    for select in selector:
        result = select(context, result)
    return result
##
# Find first matching object.
def find(elem, path, namespaces=None):
    """Return the first element matching *path* below *elem*, or None."""
    for match in iterfind(elem, path, namespaces):
        return match
    return None
##
# Find all matching objects.
def findall(elem, path, namespaces=None):
    """Return a list of all elements matching *path* below *elem*."""
    matches = iterfind(elem, path, namespaces)
    return list(matches)
##
# Find text for first matching object.
def findtext(elem, path, default=None, namespaces=None):
    """Return the text of the first element matching *path* (empty
    string if it has no text), or *default* when nothing matches."""
    for match in iterfind(elem, path, namespaces):
        return match.text or ""
    return default
| apache-2.0 |
leiferikb/bitpop-private | chrome/test/functional/chromeos_onc.py | 4 | 4794 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import pyauto_functional # must come before pyauto.
import policy_base
import pyauto
class ChromeosONC(policy_base.PolicyTestBase):
    """
    Tests for Open Network Configuration (ONC).

    Open Network Configuration (ONC) files is a json dictionary
    that contains network configurations and is pulled via policies.
    These tests verify that ONC files that are formatted correctly
    add the network/certificate to the device.
    """
    # Directory holding the .onc fixture files used by these tests.
    ONC_PATH = os.path.join(pyauto.PyUITest.DataDir(), 'chromeos', 'cros')

    def setUp(self):
        self.CleanupFlimflamDirsOnChromeOS()
        policy_base.PolicyTestBase.setUp(self)
        self.LoginWithTestAccount()

    def _ReadONCFileAndSet(self, filename):
        """Reads the specified ONC file and sends it as a policy.

        Inputs:
          filename: The filename of the ONC file.  ONC files should
                    all be stored in the path defined by ONC_PATH.
        """
        with open(os.path.join(self.ONC_PATH, filename)) as fp:
            self.SetUserPolicy({'OpenNetworkConfiguration': fp.read()})

    def _VerifyRememberedWifiNetworks(self, wifi_expect):
        """Verify the list of remembered networks contains those in wifi_expect.

        Inputs:
          wifi_expect: A dictionary of wifi networks where the key is the ssid
                       and the value is the encryption type of the network.
        """
        # Sometimes there is a race condition where upon restarting chrome
        # NetworkScan has not populated the network lists yet.  We should
        # scan until the device is online.
        self.WaitUntil(lambda: not self.NetworkScan().get('offline_mode', True))
        networks = self.NetworkScan()

        # Temporary dictionary to keep track of which wifi networks
        # have been visited by removing them as we see them.
        wifi_expect_temp = dict(wifi_expect)

        for service, wifi_dict in networks['remembered_wifi'].iteritems():
            if isinstance(wifi_dict, dict) and \
               'encryption' in wifi_dict and \
               'name' in wifi_dict:
                msg = ('Wifi network %s was in the remembered_network list but '
                       'shouldn\'t be.' % wifi_dict['name'])
                # wifi_dict['encryption'] will always be a string and not None.
                self.assertTrue(wifi_expect.get(wifi_dict['name'], None) ==
                                wifi_dict['encryption'], msg)
                del wifi_expect_temp[wifi_dict['name']]

        # Error if wifi_expect_temp is not empty.
        self.assertFalse(wifi_expect_temp, 'The following networks '
                         'were not remembered: %s' %
                         self.pformat(wifi_expect_temp))

    def testONCAddOpenWifi(self):
        """Test adding open network."""
        wifi_networks = {
            'ssid-none': '',
        }

        self._ReadONCFileAndSet('network-wifi-none.onc')
        self._VerifyRememberedWifiNetworks(wifi_networks)

    def testONCAddWEPWifi(self):
        """Test adding WEP network."""
        wifi_networks = {
            'ssid-wep': 'WEP',
        }

        self._ReadONCFileAndSet('network-wifi-wep.onc')
        self._VerifyRememberedWifiNetworks(wifi_networks)

    def testONCAddPSKWifi(self):
        """Test adding WPA network."""
        wifi_networks = {
            'ssid-wpa': 'WPA',
        }
        self._ReadONCFileAndSet('network-wifi-wpa.onc')
        self._VerifyRememberedWifiNetworks(wifi_networks)

    def testAddBacktoBackONC(self):
        """Test adding three different ONC files one after the other."""
        test_dict = {
            'network-wifi-none.onc': { 'ssid-none': '' },
            'network-wifi-wep.onc': { 'ssid-wep': 'WEP' },
            'network-wifi-wpa.onc': { 'ssid-wpa': 'WPA' },
        }

        for onc, wifi_networks in test_dict.iteritems():
            self._ReadONCFileAndSet(onc)
            self._VerifyRememberedWifiNetworks(wifi_networks)

    def testAddBacktoBackONC2(self):
        """Test adding three different ONC files one after the other.

        Due to inconsistent behaviors as addressed in crosbug.com/27862
        this test does not perform a network scan/verification between
        the setting of policies.
        """
        wifi_networks = {
            'ssid-wpa': 'WPA',
        }

        self._ReadONCFileAndSet('network-wifi-none.onc')
        self._ReadONCFileAndSet('network-wifi-wep.onc')
        self._ReadONCFileAndSet('network-wifi-wpa.onc')

        # Verify that only the most recent onc is updated.
        self._VerifyRememberedWifiNetworks(wifi_networks)

    def testAddONCWithUnknownFields(self):
        """Test adding an ONC file with unknown fields."""
        wifi_networks = {
            'ssid-none': '',
            'ssid-wpa': 'WPA'
        }

        self._ReadONCFileAndSet('network-multiple-unknown.onc')
        self._VerifyRememberedWifiNetworks(wifi_networks)
| bsd-3-clause |
KaiRo-at/socorro | socorro/unittest/external/postgresql/test_server_status.py | 2 | 6792 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
import os
import socorro
from nose.tools import eq_
from socorro.external.postgresql import server_status
from socorrolib.lib import datetimeutil
from unittestbase import PostgreSQLTestCase
class IntegrationTestServerStatus(PostgreSQLTestCase):
    """Test socorro.external.postgresql.server_status.ServerStatus class. """

    def setUp(self):
        """Set up this test class by populating the database with fake data.
        """
        super(IntegrationTestServerStatus, self).setUp()

        # Create fake revision files.  Use ``with`` so the file handles
        # are closed (and the contents flushed to disk) deterministically;
        # the original ``open(...).write(...)`` pattern leaked the file
        # objects and relied on garbage collection to flush them.
        self.basedir = os.path.dirname(socorro.__file__)
        with open(os.path.join(self.basedir, 'socorro_revision.txt'), 'w') as f:
            f.write('42')
        with open(os.path.join(self.basedir, 'breakpad_revision.txt'), 'w') as f:
            f.write('43')

        cursor = self.connection.cursor()

        # Insert data: four server_status rows, 15 minutes apart, the
        # last one with NULL completion/queue dates.
        self.now = datetimeutil.utc_now()
        date1 = datetime.datetime(
            self.now.year, self.now.month, self.now.day, 12, 00, 00,
            tzinfo=self.now.tzinfo
        )
        date2 = date1 - datetime.timedelta(minutes=15)
        date3 = date2 - datetime.timedelta(minutes=15)
        date4 = date3 - datetime.timedelta(minutes=15)

        # Note: the %-interpolated values are test-controlled datetime
        # objects, not user input, so string-building this SQL is safe here.
        cursor.execute("""
        INSERT INTO server_status
        (id, date_recently_completed, date_oldest_job_queued,
         avg_process_sec, avg_wait_sec, waiting_job_count,
         processors_count, date_created)
        VALUES
        (
            1,
            '%(date1)s',
            '%(date1)s',
            2,
            5,
            3,
            2,
            '%(date1)s'
        ),
        (
            2,
            '%(date2)s',
            '%(date2)s',
            3,
            3.12,
            2,
            2,
            '%(date2)s'
        ),
        (
            3,
            '%(date3)s',
            '%(date3)s',
            1,
            2,
            4,
            1,
            '%(date3)s'
        ),
        (
            4,
            NULL,
            NULL,
            1,
            2,
            4,
            1,
            '%(date4)s'
        );
        """ % {"date1": date1, "date2": date2, "date3": date3, "date4": date4})

        # Prepare data for the schema revision
        # Clean up from init routine
        cursor.execute("TRUNCATE alembic_version CASCADE;")
        cursor.execute("""
        INSERT INTO alembic_version
        (version_num)
        VALUES
        (
            'aaaaaaaaaaaa'
        )
        """)

        self.connection.commit()

    def tearDown(self):
        """Clean up the database. """
        # Delete fake revision files
        os.remove(os.path.join(self.basedir, 'socorro_revision.txt'))
        os.remove(os.path.join(self.basedir, 'breakpad_revision.txt'))

        cursor = self.connection.cursor()
        cursor.execute("TRUNCATE server_status, alembic_version CASCADE;")
        self.connection.commit()
        super(IntegrationTestServerStatus, self).tearDown()

    def test_get(self):
        """get() returns all rows (newest first) plus the three revisions;
        the ``duration`` parameter limits how many rows come back."""
        status = server_status.ServerStatus(config=self.config)

        # Recompute the datetimes inserted by setUp, then convert them to
        # the string form the service returns.
        date1 = datetime.datetime(
            self.now.year, self.now.month, self.now.day, 12, 00, 00,
            tzinfo=self.now.tzinfo
        )
        date2 = date1 - datetime.timedelta(minutes=15)
        date3 = date2 - datetime.timedelta(minutes=15)
        date4 = date3 - datetime.timedelta(minutes=15)

        date1 = datetimeutil.date_to_string(date1)
        date2 = datetimeutil.date_to_string(date2)
        date3 = datetimeutil.date_to_string(date3)
        date4 = datetimeutil.date_to_string(date4)

        #......................................................................
        # Test 1: default behavior
        res = status.get()
        res_expected = {
            "hits": [
                {
                    "id": 1,
                    "date_recently_completed": date1,
                    "date_oldest_job_queued": date1,
                    "avg_process_sec": 2,
                    "avg_wait_sec": 5,
                    "waiting_job_count": 3,
                    "processors_count": 2,
                    "date_created": date1
                },
                {
                    "id": 2,
                    "date_recently_completed": date2,
                    "date_oldest_job_queued": date2,
                    "avg_process_sec": 3,
                    "avg_wait_sec": 3.12,
                    "waiting_job_count": 2,
                    "processors_count": 2,
                    "date_created": date2
                },
                {
                    "id": 3,
                    "date_recently_completed": date3,
                    "date_oldest_job_queued": date3,
                    "avg_process_sec": 1,
                    "avg_wait_sec": 2,
                    "waiting_job_count": 4,
                    "processors_count": 1,
                    "date_created": date3
                },
                {
                    "id": 4,
                    "date_recently_completed": None,
                    "date_oldest_job_queued": None,
                    "avg_process_sec": 1,
                    "avg_wait_sec": 2,
                    "waiting_job_count": 4,
                    "processors_count": 1,
                    "date_created": date4
                }
            ],
            "socorro_revision": "42",
            "breakpad_revision": "43",
            "schema_revision": "aaaaaaaaaaaa",
            "total": 4
        }
        eq_(res, res_expected)

        #......................................................................
        # Test 2: with duration
        params = {
            "duration": 1
        }
        res = status.get(**params)
        res_expected = {
            "hits": [
                {
                    "id": 1,
                    "date_recently_completed": date1,
                    "date_oldest_job_queued": date1,
                    "avg_process_sec": 2,
                    "avg_wait_sec": 5,
                    "waiting_job_count": 3,
                    "processors_count": 2,
                    "date_created": date1
                }
            ],
            "socorro_revision": "42",
            "breakpad_revision": "43",
            "schema_revision": "aaaaaaaaaaaa",
            "total": 1
        }
        eq_(res, res_expected)
| mpl-2.0 |
arifgursel/pyglet | pyglet/canvas/win32.py | 26 | 3425 | #!/usr/bin/python
# $Id:$
from base import Display, Screen, ScreenMode, Canvas
from pyglet.libs.win32 import _kernel32, _user32, types, constants
from pyglet.libs.win32.constants import *
from pyglet.libs.win32.types import *
class Win32Display(Display):
    """Display implementation backed by the Win32 monitor-enumeration API."""

    def get_screens(self):
        """Return a ``Win32Screen`` for every attached monitor."""
        found = []

        def _on_monitor(hMonitor, hdcMonitor, lprcMonitor, dwData):
            # lprcMonitor points at the monitor's bounding RECT in
            # virtual-screen coordinates.
            rect = lprcMonitor.contents
            found.append(Win32Screen(self, hMonitor,
                                     rect.left, rect.top,
                                     rect.right - rect.left,
                                     rect.bottom - rect.top))
            return True  # non-zero return keeps the enumeration going

        callback = MONITORENUMPROC(_on_monitor)
        _user32.EnumDisplayMonitors(None, None, callback, 0)
        return found
class Win32Screen(Screen):
    """A physical monitor, identified by its Win32 HMONITOR handle."""

    # Saved on the first set_mode() call so restore_mode() can undo it.
    _initial_mode = None

    def __init__(self, display, handle, x, y, width, height):
        super(Win32Screen, self).__init__(display, x, y, width, height)
        self._handle = handle

    def get_matching_configs(self, template):
        # Match the template against the desktop device context;
        # GetDC(0) returns the DC for the entire screen.
        canvas = Win32Canvas(self.display, 0, _user32.GetDC(0))
        configs = template.match(canvas)
        # XXX deprecate config's being screen-specific
        for config in configs:
            config.screen = self
        return configs

    def get_device_name(self):
        """Return the GDI device name for this monitor."""
        info = MONITORINFOEX()
        info.cbSize = sizeof(MONITORINFOEX)
        _user32.GetMonitorInfoW(self._handle, byref(info))
        return info.szDevice

    def get_modes(self):
        """Enumerate every display setting supported by this device."""
        device_name = self.get_device_name()
        i = 0
        modes = []
        while True:
            mode = DEVMODE()
            mode.dmSize = sizeof(DEVMODE)
            # EnumDisplaySettingsW returns 0 once the index is past the
            # last supported mode.
            r = _user32.EnumDisplaySettingsW(device_name, i, byref(mode))
            if not r:
                break
            modes.append(Win32ScreenMode(self, mode))
            i += 1
        return modes

    def get_mode(self):
        """Return the currently active display mode for this screen."""
        mode = DEVMODE()
        mode.dmSize = sizeof(DEVMODE)
        _user32.EnumDisplaySettingsW(self.get_device_name(),
                                     ENUM_CURRENT_SETTINGS,
                                     byref(mode))
        return Win32ScreenMode(self, mode)

    def set_mode(self, mode):
        """Switch this screen to *mode* (a Win32ScreenMode of this screen)."""
        assert mode.screen is self
        if not self._initial_mode:
            # Remember the mode in effect before the first change so
            # restore_mode() can put it back.
            self._initial_mode = self.get_mode()
        r = _user32.ChangeDisplaySettingsExW(self.get_device_name(),
                                             byref(mode._mode),
                                             None,
                                             CDS_FULLSCREEN,
                                             None)
        if r == DISP_CHANGE_SUCCESSFUL:
            # Only update our dimensions when the switch actually happened.
            self.width = mode.width
            self.height = mode.height

    def restore_mode(self):
        """Restore the mode that was active before the first set_mode()."""
        if self._initial_mode:
            self.set_mode(self._initial_mode)
class Win32ScreenMode(ScreenMode):
    """Wraps a DEVMODE structure describing one display setting."""

    def __init__(self, screen, mode):
        super(Win32ScreenMode, self).__init__(screen)
        # Keep the raw DEVMODE so Win32Screen.set_mode() can pass it
        # straight back to ChangeDisplaySettingsExW.
        self._mode = mode
        self.width = mode.dmPelsWidth
        self.height = mode.dmPelsHeight
        self.depth = mode.dmBitsPerPel
        self.rate = mode.dmDisplayFrequency
class Win32Canvas(Canvas):
    """A drawable surface: a window handle plus its GDI device context."""

    def __init__(self, display, hwnd, hdc):
        super(Win32Canvas, self).__init__(display)
        self.hwnd = hwnd
        self.hdc = hdc
117111302/jenkinsapi | jenkinsapi_tests/unittests/test_node.py | 6 | 2352 | import mock
# To run unittests on python 2.6 please use unittest2 library
try:
import unittest2 as unittest
except ImportError:
import unittest
from jenkinsapi.node import Node
class TestNode(unittest.TestCase):
    """Unit tests for jenkinsapi.node.Node with network polling mocked out."""

    # Canned payload as returned by the Jenkins "computer" JSON API for
    # one slave node; fed to Node via the patched _poll below.
    DATA = {"actions": [],
            "displayName": "bobnit",
            "executors": [{}],
            "icon": "computer.png",
            "idle": True,
            "jnlpAgent": False,
            "launchSupported": True,
            "loadStatistics": {},
            "manualLaunchAllowed": True,
            "monitorData": {"hudson.node_monitors.SwapSpaceMonitor": {"availablePhysicalMemory": 7681417216,
                                                                      "availableSwapSpace": 12195983360,
                                                                      "totalPhysicalMemory": 8374497280,
                                                                      "totalSwapSpace": 12195983360},
                            "hudson.node_monitors.ArchitectureMonitor": "Linux (amd64)",
                            "hudson.node_monitors.ResponseTimeMonitor": {"average": 64},
                            "hudson.node_monitors.TemporarySpaceMonitor": {"path": "/tmp", "size": 250172776448},
                            "hudson.node_monitors.DiskSpaceMonitor": {"path": "/home/sal/jenkins", "size": 170472026112},
                            "hudson.node_monitors.ClockMonitor": {"diff": 6736}},
            "numExecutors": 1,
            "offline": False,
            "offlineCause": None,
            "oneOffExecutors": [],
            "temporarilyOffline": False}

    @mock.patch.object(Node, '_poll')
    def setUp(self, _poll):
        # Patch _poll so constructing a Node never touches the network.
        _poll.return_value = self.DATA
        # def __init__(self, baseurl, nodename, jenkins_obj):
        self.J = mock.MagicMock()  # Jenkins object
        self.n = Node('http://', 'bobnit', self.J)

    def testRepr(self):
        # Can we produce a repr string for this object
        repr(self.n)

    def testName(self):
        # Nodes expose a .name attribute, not an id() method.
        with self.assertRaises(AttributeError):
            self.n.id()
        self.assertEquals(self.n.name, 'bobnit')

    @mock.patch.object(Node, '_poll')
    def test_online(self, _poll):
        # "offline": False in DATA means the node reports as online.
        _poll.return_value = self.DATA
        return self.assertEquals(self.n.is_online(), True)
if __name__ == '__main__':
unittest.main()
| mit |
Khan/git-bigfile | vendor/boto/rds/optiongroup.py | 180 | 15665 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an OptionGroup
"""
from boto.rds.dbsecuritygroup import DBSecurityGroup
from boto.resultset import ResultSet
class OptionGroup(object):
    """
    Represents an RDS option group

    Properties reference available from the AWS documentation at
    http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_OptionGroup.html

    :ivar connection: :py:class:`boto.rds.RDSConnection` associated with the
                      current object
    :ivar name: Name of the option group
    :ivar description: The description of the option group
    :ivar engine_name: The name of the database engine to use
    :ivar major_engine_version: The major version number of the engine to use
    :ivar allow_both_vpc_and_nonvpc: Indicates whether this option group can be
                                     applied to both VPC and non-VPC instances.
                                     The value ``True`` indicates the option
                                     group can be applied to both VPC and
                                     non-VPC instances.
    :ivar vpc_id: If AllowsVpcAndNonVpcInstanceMemberships is 'false', this
                  field is blank. If AllowsVpcAndNonVpcInstanceMemberships is
                  ``True`` and this field is blank, then this option group can
                  be applied to both VPC and non-VPC instances. If this field
                  contains a value, then this option group can only be applied
                  to instances that are in the VPC indicated by this field.
    :ivar options: The list of :py:class:`boto.rds.optiongroup.Option` objects
                   associated with the group
    """
    def __init__(self, connection=None, name=None, engine_name=None,
                 major_engine_version=None, description=None,
                 allow_both_vpc_and_nonvpc=False, vpc_id=None):
        # Bug fix: the connection argument used to be silently discarded,
        # which made delete() fail with AttributeError since it relies on
        # self.connection.
        self.connection = connection
        self.name = name
        self.engine_name = engine_name
        self.major_engine_version = major_engine_version
        self.description = description
        self.allow_both_vpc_and_nonvpc = allow_both_vpc_and_nonvpc
        self.vpc_id = vpc_id
        self.options = []

    def __repr__(self):
        return 'OptionGroup:%s' % self.name

    def startElement(self, name, attrs, connection):
        # SAX-style hook: create a container for nested <Options> elements.
        if name == 'Options':
            self.options = ResultSet([
                ('Options', Option)
            ])
        else:
            return None

    def endElement(self, name, value, connection):
        # SAX-style hook: map leaf XML elements onto attributes.
        if name == 'OptionGroupName':
            self.name = value
        elif name == 'EngineName':
            self.engine_name = value
        elif name == 'MajorEngineVersion':
            self.major_engine_version = value
        elif name == 'OptionGroupDescription':
            self.description = value
        elif name == 'AllowsVpcAndNonVpcInstanceMemberships':
            self.allow_both_vpc_and_nonvpc = (value.lower() == 'true')
        elif name == 'VpcId':
            self.vpc_id = value
        else:
            setattr(self, name, value)

    def delete(self):
        """Delete this option group via the associated connection."""
        return self.connection.delete_option_group(self.name)
class Option(object):
    """
    Describes an Option for use in an OptionGroup

    :ivar name: The name of the option
    :ivar description: The description of the option.
    :ivar permanent: Indicate if this option is permanent.
    :ivar persistent: Indicate if this option is persistent.
    :ivar port: If required, the port configured for this option to use.
    :ivar settings: The option settings for this option.
    :ivar db_security_groups: If the option requires access to a port, then
                              this DB Security Group allows access to the port.
    :ivar vpc_security_groups: If the option requires access to a port, then
                               this VPC Security Group allows access to the
                               port.
    """
    def __init__(self, name=None, description=None, permanent=False,
                 persistent=False, port=None, settings=None,
                 db_security_groups=None, vpc_security_groups=None):
        self.name = name
        self.description = description
        self.permanent = permanent
        self.persistent = persistent
        self.port = port
        self.settings = settings
        self.db_security_groups = db_security_groups
        self.vpc_security_groups = vpc_security_groups
        # Create fresh lists here instead of using mutable default args.
        if self.settings is None:
            self.settings = []
        if self.db_security_groups is None:
            self.db_security_groups = []
        if self.vpc_security_groups is None:
            self.vpc_security_groups = []

    def __repr__(self):
        return 'Option:%s' % self.name

    def startElement(self, name, attrs, connection):
        # SAX-style hook: containers for the nested membership lists.
        if name == 'OptionSettings':
            self.settings = ResultSet([
                ('OptionSettings', OptionSetting)
            ])
        elif name == 'DBSecurityGroupMemberships':
            self.db_security_groups = ResultSet([
                ('DBSecurityGroupMemberships', DBSecurityGroup)
            ])
        elif name == 'VpcSecurityGroupMemberships':
            self.vpc_security_groups = ResultSet([
                ('VpcSecurityGroupMemberships', VpcSecurityGroup)
            ])
        else:
            return None

    def endElement(self, name, value, connection):
        if name == 'OptionName':
            self.name = value
        elif name == 'OptionDescription':
            self.description = value
        elif name == 'Permanent':
            # Bug fix: this used to assign a misspelled ``self.permenant``
            # attribute, so ``permanent`` was never updated from XML.
            self.permanent = (value.lower() == 'true')
        elif name == 'Persistent':
            self.persistent = (value.lower() == 'true')
        elif name == 'Port':
            self.port = int(value)
        else:
            setattr(self, name, value)
class OptionSetting(object):
    """
    Describes an OptionSetting for use in an Option

    :ivar name: The name of the option that has settings that you can set.
    :ivar description: The description of the option setting.
    :ivar value: The current value of the option setting.
    :ivar default_value: The default value of the option setting.
    :ivar allowed_values: The allowed values of the option setting.
    :ivar data_type: The data type of the option setting.
    :ivar apply_type: The DB engine specific parameter type.
    :ivar is_modifiable: A Boolean value that, when true, indicates the option
                         setting can be modified from the default.
    :ivar is_collection: Indicates if the option setting is part of a
                         collection.
    """

    # Leaf XML tags that map straight onto string attributes.
    _STRING_TAGS = {
        'Name': 'name',
        'Description': 'description',
        'Value': 'value',
        'DefaultValue': 'default_value',
        'AllowedValues': 'allowed_values',
        'DataType': 'data_type',
        'ApplyType': 'apply_type',
    }
    # Leaf XML tags carrying 'true'/'false' payloads.
    _BOOL_TAGS = {
        'IsModifiable': 'is_modifiable',
        'IsCollection': 'is_collection',
    }

    def __init__(self, name=None, description=None, value=None,
                 default_value=False, allowed_values=None, data_type=None,
                 apply_type=None, is_modifiable=False, is_collection=False):
        self.name = name
        self.description = description
        self.value = value
        self.default_value = default_value
        self.allowed_values = allowed_values
        self.data_type = data_type
        self.apply_type = apply_type
        self.is_modifiable = is_modifiable
        self.is_collection = is_collection

    def __repr__(self):
        return 'OptionSetting:%s' % self.name

    def startElement(self, name, attrs, connection):
        # No nested elements to handle.
        return None

    def endElement(self, name, value, connection):
        # Map each leaf element onto the corresponding attribute; unknown
        # tags are stored verbatim, mirroring the other RDS model objects.
        if name in self._BOOL_TAGS:
            setattr(self, self._BOOL_TAGS[name], value.lower() == 'true')
        else:
            setattr(self, self._STRING_TAGS.get(name, name), value)
class VpcSecurityGroup(object):
    """
    Describes a VPC security group for use in an OptionGroup
    """

    def __init__(self, vpc_id=None, status=None):
        self.vpc_id = vpc_id
        self.status = status

    def __repr__(self):
        return 'VpcSecurityGroup:%s' % self.vpc_id

    def startElement(self, name, attrs, connection):
        # No nested elements to handle.
        pass

    def endElement(self, name, value, connection):
        # Translate the two known XML tags; store anything else verbatim.
        tag_map = {'VpcSecurityGroupId': 'vpc_id', 'Status': 'status'}
        setattr(self, tag_map.get(name, name), value)
class OptionGroupOption(object):
    """
    Describes an OptionGroupOption for use in an OptionGroup

    :ivar name: The name of the option
    :ivar description: The description of the option.
    :ivar engine_name: Engine name that this option can be applied to.
    :ivar major_engine_version: Indicates the major engine version that the
                                option is available for.
    :ivar min_minor_engine_version: The minimum required engine version for the
                                    option to be applied.
    :ivar permanent: Indicate if this option is permanent.
    :ivar persistent: Indicate if this option is persistent.
    :ivar port_required: Specifies whether the option requires a port.
    :ivar default_port: If the option requires a port, specifies the default
                        port for the option.
    :ivar settings: The option settings for this option.
    :ivar depends_on: List of all options that are prerequisites for this
                      option.
    """
    def __init__(self, name=None, description=None, engine_name=None,
                 major_engine_version=None, min_minor_engine_version=None,
                 permanent=False, persistent=False, port_required=False,
                 default_port=None, settings=None, depends_on=None):
        self.name = name
        self.description = description
        self.engine_name = engine_name
        self.major_engine_version = major_engine_version
        self.min_minor_engine_version = min_minor_engine_version
        self.permanent = permanent
        self.persistent = persistent
        self.port_required = port_required
        self.default_port = default_port
        self.settings = settings
        self.depends_on = depends_on
        # Create fresh lists here instead of using mutable default args.
        if self.settings is None:
            self.settings = []
        if self.depends_on is None:
            self.depends_on = []

    def __repr__(self):
        return 'OptionGroupOption:%s' % self.name

    def startElement(self, name, attrs, connection):
        # SAX-style hook: containers for nested settings / dependencies.
        if name == 'OptionGroupOptionSettings':
            self.settings = ResultSet([
                ('OptionGroupOptionSettings', OptionGroupOptionSetting)
            ])
        elif name == 'OptionsDependedOn':
            self.depends_on = []
        else:
            return None

    def endElement(self, name, value, connection):
        if name == 'Name':
            self.name = value
        elif name == 'Description':
            self.description = value
        elif name == 'EngineName':
            self.engine_name = value
        elif name == 'MajorEngineVersion':
            self.major_engine_version = value
        elif name == 'MinimumRequiredMinorEngineVersion':
            self.min_minor_engine_version = value
        elif name == 'Permanent':
            # Bug fix: this used to assign a misspelled ``self.permenant``
            # attribute, so ``permanent`` was never updated from XML.
            self.permanent = (value.lower() == 'true')
        elif name == 'Persistent':
            self.persistent = (value.lower() == 'true')
        elif name == 'PortRequired':
            self.port_required = (value.lower() == 'true')
        elif name == 'DefaultPort':
            self.default_port = int(value)
        else:
            setattr(self, name, value)
class OptionGroupOptionSetting(object):
    """
    Describes an OptionGroupOptionSetting for use in an OptionGroupOption.

    :ivar name: The name of the option that has settings that you can set.
    :ivar description: The description of the option setting.
    :ivar default_value: The default value of the option setting.
    :ivar allowed_values: The allowed values of the option setting.
    :ivar apply_type: The DB engine specific parameter type.
    :ivar is_modifiable: A Boolean value that, when true, indicates the option
                         setting can be modified from the default.
    """

    # Leaf XML tags that map straight onto string attributes.
    _STRING_TAGS = {
        'SettingName': 'name',
        'SettingDescription': 'description',
        'DefaultValue': 'default_value',
        'AllowedValues': 'allowed_values',
        'ApplyType': 'apply_type',
    }

    def __init__(self, name=None, description=None, default_value=False,
                 allowed_values=None, apply_type=None, is_modifiable=False):
        self.name = name
        self.description = description
        self.default_value = default_value
        self.allowed_values = allowed_values
        self.apply_type = apply_type
        self.is_modifiable = is_modifiable

    def __repr__(self):
        return 'OptionGroupOptionSetting:%s' % self.name

    def startElement(self, name, attrs, connection):
        # No nested elements to handle.
        return None

    def endElement(self, name, value, connection):
        # 'IsModifiable' carries a boolean payload; every other known tag
        # is a plain string, and unknown tags are stored verbatim.
        if name == 'IsModifiable':
            self.is_modifiable = (value.lower() == 'true')
        else:
            setattr(self, self._STRING_TAGS.get(name, name), value)
| mit |
damonkohler/sl4a | python/src/Demo/rpc/xdr.py | 42 | 5047 | # Implement (a subset of) Sun XDR -- RFC1014.
try:
import struct
except ImportError:
struct = None
Long = type(0L)
class Packer:
    """Pack Python values into XDR (RFC 1014) wire format.

    XDR integers are 4-byte big-endian ("network order") and all items
    are padded to 4-byte alignment; the output accumulates in self.buf.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        # Start with an empty output buffer.
        self.buf = ''

    def get_buf(self):
        return self.buf

    def pack_uint(self, x):
        # Portable path: emit the 4 bytes most-significant first.
        self.buf = self.buf + \
                (chr(int(x>>24 & 0xff)) + chr(int(x>>16 & 0xff)) + \
                 chr(int(x>>8 & 0xff)) + chr(int(x & 0xff)))

    # Fast path: if the native 'l' format already is a big-endian 4-byte
    # long, struct.pack produces exactly the XDR encoding.
    if struct and struct.pack('l', 1) == '\0\0\0\1':
        def pack_uint(self, x):
            if type(x) == Long:
                # Fold an unsigned long into the signed range accepted by
                # struct.pack while keeping the same 32-bit byte pattern.
                x = int((x + 0x80000000L) % 0x100000000L \
                        - 0x80000000L)
            self.buf = self.buf + struct.pack('l', x)

    pack_int = pack_uint
    pack_enum = pack_int

    def pack_bool(self, x):
        # XDR bool is a 4-byte 0 or 1.
        if x: self.buf = self.buf + '\0\0\0\1'
        else: self.buf = self.buf + '\0\0\0\0'

    def pack_uhyper(self, x):
        # 64-bit value as two 32-bit halves, high word first.
        self.pack_uint(int(x>>32 & 0xffffffff))
        self.pack_uint(int(x & 0xffffffff))

    pack_hyper = pack_uhyper

    def pack_float(self, x):
        # XXX uses the native float format, not guaranteed IEEE big-endian
        self.buf = self.buf + struct.pack('f', x)

    def pack_double(self, x):
        # XXX uses the native double format, not guaranteed IEEE big-endian
        self.buf = self.buf + struct.pack('d', x)

    def pack_fstring(self, n, s):
        if n < 0:
            raise ValueError, 'fstring size must be nonnegative'
        # Round the size up to a multiple of 4 (XDR alignment) and pad
        # with NUL bytes.
        n = ((n + 3)//4)*4
        data = s[:n]
        data = data + (n - len(data)) * '\0'
        self.buf = self.buf + data

    pack_fopaque = pack_fstring

    def pack_string(self, s):
        # Variable-length string: 4-byte length, then padded bytes.
        n = len(s)
        self.pack_uint(n)
        self.pack_fstring(n, s)

    pack_opaque = pack_string

    def pack_list(self, list, pack_item):
        # Optional-data encoding: a 1 marker before each item, 0 at the end.
        for item in list:
            self.pack_uint(1)
            pack_item(item)
        self.pack_uint(0)

    def pack_farray(self, n, list, pack_item):
        # Fixed-size array: the length is implicit, so it must match.
        if len(list) <> n:
            raise ValueError, 'wrong array size'
        for item in list:
            pack_item(item)

    def pack_array(self, list, pack_item):
        # Variable-size array: 4-byte count, then the items.
        n = len(list)
        self.pack_uint(n)
        self.pack_farray(n, list, pack_item)
class Unpacker:
def __init__(self, data):
self.reset(data)
def reset(self, data):
self.buf = data
self.pos = 0
def done(self):
if self.pos < len(self.buf):
raise RuntimeError, 'unextracted data remains'
def unpack_uint(self):
i = self.pos
self.pos = j = i+4
data = self.buf[i:j]
if len(data) < 4:
raise EOFError
x = long(ord(data[0]))<<24 | ord(data[1])<<16 | \
ord(data[2])<<8 | ord(data[3])
# Return a Python long only if the value is not representable
# as a nonnegative Python int
if x < 0x80000000L: x = int(x)
return x
if struct and struct.unpack('l', '\0\0\0\1') == 1:
def unpack_uint(self):
i = self.pos
self.pos = j = i+4
data = self.buf[i:j]
if len(data) < 4:
raise EOFError
return struct.unpack('l', data)
def unpack_int(self):
x = self.unpack_uint()
if x >= 0x80000000L: x = x - 0x100000000L
return int(x)
unpack_enum = unpack_int
unpack_bool = unpack_int
def unpack_uhyper(self):
hi = self.unpack_uint()
lo = self.unpack_uint()
return long(hi)<<32 | lo
def unpack_hyper(self):
x = self.unpack_uhyper()
if x >= 0x8000000000000000L: x = x - 0x10000000000000000L
return x
def unpack_float(self):
# XXX
i = self.pos
self.pos = j = i+4
data = self.buf[i:j]
if len(data) < 4:
raise EOFError
return struct.unpack('f', data)[0]
def unpack_double(self):
# XXX
i = self.pos
self.pos = j = i+8
data = self.buf[i:j]
if len(data) < 8:
raise EOFError
return struct.unpack('d', data)[0]
def unpack_fstring(self, n):
if n < 0:
raise ValueError, 'fstring size must be nonnegative'
i = self.pos
j = i + (n+3)//4*4
if j > len(self.buf):
raise EOFError
self.pos = j
return self.buf[i:i+n]
unpack_fopaque = unpack_fstring
def unpack_string(self):
n = self.unpack_uint()
return self.unpack_fstring(n)
unpack_opaque = unpack_string
def unpack_list(self, unpack_item):
list = []
while 1:
x = self.unpack_uint()
if x == 0: break
if x <> 1:
raise RuntimeError, '0 or 1 expected, got %r' % (x, )
item = unpack_item()
list.append(item)
return list
def unpack_farray(self, n, unpack_item):
list = []
for i in range(n):
list.append(unpack_item())
return list
def unpack_array(self, unpack_item):
n = self.unpack_uint()
return self.unpack_farray(n, unpack_item)
| apache-2.0 |
bikong2/scikit-learn | sklearn/mixture/tests/test_dpgmm.py | 261 | 4490 | import unittest
import sys
import numpy as np
from sklearn.mixture import DPGMM, VBGMM
from sklearn.mixture.dpgmm import log_normalize
from sklearn.datasets import make_blobs
from sklearn.utils.testing import assert_array_less, assert_equal
from sklearn.mixture.tests.test_gmm import GMMTester
from sklearn.externals.six.moves import cStringIO as StringIO
np.seterr(all='warn')
def test_class_weights():
    # check that the class weights are updated
    # simple 3 cluster dataset
    X, y = make_blobs(random_state=1)
    for Model in [DPGMM, VBGMM]:
        # 10 components for a 3-cluster problem: the variational fit
        # should concentrate weight on the components it actually uses.
        dpgmm = Model(n_components=10, random_state=1, alpha=20, n_iter=50)
        dpgmm.fit(X)
        # get indices of components that are used:
        indices = np.unique(dpgmm.predict(X))
        active = np.zeros(10, dtype=np.bool)
        active[indices] = True
        # used components are important
        assert_array_less(.1, dpgmm.weights_[active])
        # others are not
        assert_array_less(dpgmm.weights_[~active], .05)
def test_verbose_boolean():
    # checks that the output for the verbose output is the same
    # for the flag values '1' and 'True'
    # simple 3 cluster dataset
    X, y = make_blobs(random_state=1)
    for Model in [DPGMM, VBGMM]:
        dpgmm_bool = Model(n_components=10, random_state=1, alpha=20,
                           n_iter=50, verbose=True)
        dpgmm_int = Model(n_components=10, random_state=1, alpha=20,
                          n_iter=50, verbose=1)

        old_stdout = sys.stdout
        try:
            # Bug fix: the original reused one StringIO for both fits and
            # called seek(0) before each readline, so the second readline
            # re-read the *first* model's output and the assertion
            # compared the bool run's line with itself.  Use a fresh
            # buffer per fit so the two outputs are really compared.
            # generate output with the boolean flag
            sys.stdout = StringIO()
            dpgmm_bool.fit(X)
            bool_output = sys.stdout.getvalue().splitlines()[0]
            # generate output with the int flag
            sys.stdout = StringIO()
            dpgmm_int.fit(X)
            int_output = sys.stdout.getvalue().splitlines()[0]
            assert_equal(bool_output, int_output)
        finally:
            # Always restore stdout, even if fit() raises.
            sys.stdout = old_stdout
def test_verbose_first_level():
    """Smoke test: fit() with verbose=1 must run without error."""
    # simple 3 cluster dataset
    X, y = make_blobs(random_state=1)
    for Model in [DPGMM, VBGMM]:
        dpgmm = Model(n_components=10, random_state=1, alpha=20, n_iter=50,
                      verbose=1)

        # Swallow the verbose output; restore stdout even if fit() raises.
        old_stdout = sys.stdout
        sys.stdout = StringIO()
        try:
            dpgmm.fit(X)
        finally:
            sys.stdout = old_stdout
def test_verbose_second_level():
    """Smoke test: fit() with verbose=2 must run without error."""
    # simple 3 cluster dataset
    X, y = make_blobs(random_state=1)
    for Model in [DPGMM, VBGMM]:
        dpgmm = Model(n_components=10, random_state=1, alpha=20, n_iter=50,
                      verbose=2)

        # Swallow the verbose output; restore stdout even if fit() raises.
        old_stdout = sys.stdout
        sys.stdout = StringIO()
        try:
            dpgmm.fit(X)
        finally:
            sys.stdout = old_stdout
def test_log_normalize():
    """Normalizing log(2*v) must recover v, which already sums to 1."""
    weights = np.array([0.1, 0.8, 0.01, 0.09])
    scaled_logs = np.log(weights * 2)
    assert np.allclose(weights, log_normalize(scaled_logs), rtol=0.01)
def do_model(self, **kwds):
    # Factory assigned as the ``model`` attribute of VBGMMTester below, so
    # it is invoked like a bound method -- hence the ``self`` parameter.
    return VBGMM(verbose=False, **kwds)
class DPGMMTester(GMMTester):
    """Mixin adapting the shared GMMTester checks to DPGMM."""
    model = DPGMM
    do_test_eval = False

    def score(self, g, train_obs):
        # DPGMM has no plain score(); use the variational lower bound of
        # the responsibilities as the comparable fit-quality measure.
        _, z = g.score_samples(train_obs)
        return g.lower_bound(train_obs, z)
# One concrete TestCase per covariance type so failures identify the
# parametrization; the shared checks live in DPGMMTester / GMMTester.
class TestDPGMMWithSphericalCovars(unittest.TestCase, DPGMMTester):
    covariance_type = 'spherical'
    setUp = GMMTester._setUp


class TestDPGMMWithDiagCovars(unittest.TestCase, DPGMMTester):
    covariance_type = 'diag'
    setUp = GMMTester._setUp


class TestDPGMMWithTiedCovars(unittest.TestCase, DPGMMTester):
    covariance_type = 'tied'
    setUp = GMMTester._setUp


class TestDPGMMWithFullCovars(unittest.TestCase, DPGMMTester):
    covariance_type = 'full'
    setUp = GMMTester._setUp
class VBGMMTester(GMMTester):
    """Mixin adapting the shared GMMTester checks to VBGMM."""
    # ``do_model`` acts as a bound factory producing VBGMM instances.
    model = do_model
    do_test_eval = False

    def score(self, g, train_obs):
        # VBGMM has no plain score(); use the variational lower bound of
        # the responsibilities as the comparable fit-quality measure.
        _, z = g.score_samples(train_obs)
        return g.lower_bound(train_obs, z)
# One concrete TestCase per covariance type so failures identify the
# parametrization; the shared checks live in VBGMMTester / GMMTester.
class TestVBGMMWithSphericalCovars(unittest.TestCase, VBGMMTester):
    covariance_type = 'spherical'
    setUp = GMMTester._setUp


class TestVBGMMWithDiagCovars(unittest.TestCase, VBGMMTester):
    covariance_type = 'diag'
    setUp = GMMTester._setUp


class TestVBGMMWithTiedCovars(unittest.TestCase, VBGMMTester):
    covariance_type = 'tied'
    setUp = GMMTester._setUp


class TestVBGMMWithFullCovars(unittest.TestCase, VBGMMTester):
    covariance_type = 'full'
    setUp = GMMTester._setUp
| bsd-3-clause |
wouwei/PiLapse | picam/picamEnv/Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py | 353 | 3380 | from __future__ import absolute_import
import socket
try:
from select import poll, POLLIN
except ImportError: # `poll` doesn't exist on OSX and other platforms
poll = False
try:
from select import select
except ImportError: # `select` doesn't exist on AppEngine.
select = False
def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if sock is False:  # Platform-specific: AppEngine exposes no .sock.
        return False
    if sock is None:  # Connection already closed (such as by httplib).
        return True

    if not poll:
        if not select:  # Platform-specific: AppEngine
            return False
        # Fallback: a readable idle keep-alive socket means either
        # buffered data (bad) or EOF (dropped).
        try:
            return select([sock], [], [], 0.0)[0]
        except socket.error:
            return True

    # Preferred path on platforms that support poll().
    watcher = poll()
    watcher.register(sock, POLLIN)
    for descriptor, event in watcher.poll(0.0):
        if descriptor == sock.fileno():
            # Either data is buffered (bad), or the connection is dropped.
            return True
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
source_address=None, socket_options=None):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
An host of '' or port 0 tells the OS to use the default.
"""
host, port = address
if host.startswith('['):
host = host.strip('[]')
err = None
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket.socket(af, socktype, proto)
# If provided, set socket level options before connecting.
# This is the only addition urllib3 makes to this function.
_set_socket_options(sock, socket_options)
if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except socket.error as e:
err = e
if sock is not None:
sock.close()
sock = None
if err is not None:
raise err
raise socket.error("getaddrinfo returns an empty list")
def _set_socket_options(sock, options):
if options is None:
return
for opt in options:
sock.setsockopt(*opt)
| apache-2.0 |
danceos/dosek | generator/statistics.py | 1 | 3355 | import pprint
class Statistics:
    """Collects a tree of statistics nodes keyed by Python object id().

    Nodes are plain dicts; ``idx`` maps id(obj) -> node for O(1) lookup.
    The tree is persisted as a pretty-printed Python literal and read
    back with eval() -- see the security note on load().
    """

    def __init__(self, root):
        self.tree = self.__new_node(root)
        self.idx = {id(root): self.tree}

    def __new_node(self, item):
        # Minimal node skeleton; children and data are attached lazily.
        return {"_id": id(item), "_type": item.__class__.__name__,
                "_name": repr(item)}

    def __stringify(self, obj):
        # Convert arbitrary values into something pprint/eval round-trips:
        # containers are walked recursively, primitives pass through, and
        # anything else is replaced by its quoted repr().
        if type(obj) == dict:
            ret = {}
            for k,v in obj.items():
                ret[str(k)] = self.__stringify(v)
        elif type(obj) == list:
            # NOTE(review): relies on Python 2 map() returning a list;
            # under Python 3 this would leave a lazy map object in the tree.
            ret = map(self.__stringify, obj)
        elif type(obj) == tuple:
            ret = tuple(map(self.__stringify, obj))
        elif type(obj) in (int, float, str, bool):
            ret = obj
        else:
            ret = '"' + repr(obj) + '"'
        return ret

    def add_child(self, parent, category, child):
        # The parent must already be indexed; a child may only be added once.
        parent_id = id(parent)
        child_id = id(child)
        assert parent_id in self.idx
        assert not child_id in self.idx
        parent = self.idx[parent_id]
        child = self.__new_node(child)
        child["_parent"] = parent_id
        self.idx[child_id] = child
        if not category in parent:
            parent[category] = [child]
        else:
            parent[category].append(child)

    def get_node(self, node):
        # Accept a node dict, a raw id() integer, or the original object.
        if type(node) == dict:
            node_id = node["_id"]
        elif type(node) == int:
            node_id = node
        else:
            node_id = id(node)
        if not node_id in self.idx:
            return None
        return self.idx[node_id]

    def get_parent(self, node):
        node = self.get_node(node)
        if not node:
            return None
        return self.get_node(node["_parent"])

    def add_data(self, parent, category, data, scalar=False):
        # scalar=True stores the value directly (and at most once);
        # otherwise values accumulate in a list under the category key.
        parent_id = id(parent)
        parent = self.idx[parent_id]
        if scalar:
            assert category not in parent
            parent[category] = data
        else:
            if not category in parent:
                if data in ([], None):
                    parent[category] = []
                else:
                    parent[category] = [self.__stringify(data)]
            elif data != None:
                parent[category].append(self.__stringify(data))

    def dump(self):
        # Serialize the whole tree as a pretty-printed Python literal.
        ret = pprint.pformat(self.tree, width=150)
        return ret

    def rebuild_index(self, root, parent = None):
        # Recompute idx (and _parent links) after loading from disk.
        if not type(root) == dict:
            return
        if parent:
            root["_parent"] = parent["_id"]
        for k, v in root.items():
            if k == "_id":
                self.idx[v] = root
            if type(v) == list:
                for child in v:
                    self.rebuild_index(child, root)

    def save(self, filename):
        with open(filename, "w+") as fd:
            fd.write(self.dump())

    def find_all(self, _type):
        # All nodes whose _type matches, keyed by their id.
        ret = {}
        for k, v in self.idx.items():
            if v["_type"] == _type:
                ret[k] = v
        return ret

    def find_one(self, _type):
        ret = self.find_all(_type)
        assert len(ret) == 1
        # NOTE(review): dict.values()[0] only works on Python 2; Python 3
        # would need list(ret.values())[0].
        return ret.values()[0]

    @staticmethod
    def load(filename):
        # SECURITY: eval() executes arbitrary code contained in the file;
        # only load statistics files from trusted sources.
        ins = Statistics(None)
        with open(filename) as fd:
            data = fd.read()
            ins.tree = eval(data)
        ins.idx = {}
        ins.rebuild_index(ins.tree)
        return ins
| lgpl-3.0 |
egaxegax/django-dbcartajs | django/contrib/gis/gdal/layer.py | 219 | 8758 | # Needed ctypes routines
from ctypes import c_double, byref
# Other GDAL imports.
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope
from django.contrib.gis.gdal.error import OGRException, OGRIndexError, SRSException
from django.contrib.gis.gdal.feature import Feature
from django.contrib.gis.gdal.field import OGRFieldTypes
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.geometries import OGRGeometry
from django.contrib.gis.gdal.srs import SpatialReference
# GDAL ctypes function prototypes.
from django.contrib.gis.gdal.prototypes import ds as capi, geom as geom_api, srs as srs_api
from django.utils.encoding import force_bytes, force_text
from django.utils import six
from django.utils.six.moves import xrange
# For more information, see the OGR C API source code:
# http://www.gdal.org/ogr/ogr__api_8h.html
#
# The OGR_L_* routines are relevant here.
class Layer(GDALBase):
    "A class that wraps an OGR Layer, needs to be instantiated from a DataSource object."

    #### Python 'magic' routines ####
    def __init__(self, layer_ptr, ds):
        """
        Initializes on an OGR C pointer to the Layer and the `DataSource` object
        that owns this layer. The `DataSource` object is required so that a
        reference to it is kept with this Layer. This prevents garbage
        collection of the `DataSource` while this Layer is still active.
        """
        if not layer_ptr:
            raise OGRException('Cannot create Layer, invalid pointer given')
        self.ptr = layer_ptr
        self._ds = ds
        # Cache the OGR feature definition handle; it is reused by most
        # field/geometry introspection calls below.
        self._ldefn = capi.get_layer_defn(self._ptr)
        # Does the Layer support random reading?
        self._random_read = self.test_capability(b'RandomRead')

    def __getitem__(self, index):
        "Gets the Feature at the specified index."
        if isinstance(index, six.integer_types):
            # An integer index was given -- we cannot do a check based on the
            # number of features because the beginning and ending feature IDs
            # are not guaranteed to be 0 and len(layer)-1, respectively.
            if index < 0: raise OGRIndexError('Negative indices are not allowed on OGR Layers.')
            return self._make_feature(index)
        elif isinstance(index, slice):
            # A slice was given
            start, stop, stride = index.indices(self.num_feat)
            return [self._make_feature(fid) for fid in xrange(start, stop, stride)]
        else:
            raise TypeError('Integers and slices may only be used when indexing OGR Layers.')

    def __iter__(self):
        "Iterates over each Feature in the Layer."
        # ResetReading() must be called before iteration is to begin.
        capi.reset_reading(self._ptr)
        for i in xrange(self.num_feat):
            yield Feature(capi.get_next_feature(self._ptr), self)

    def __len__(self):
        "The length is the number of features."
        return self.num_feat

    def __str__(self):
        "The string name of the layer."
        return self.name

    def _make_feature(self, feat_id):
        """
        Helper routine for __getitem__ that constructs a Feature from the given
        Feature ID.  If the OGR Layer does not support random-access reading,
        then each feature of the layer will be incremented through until the
        a Feature is found matching the given feature ID.
        """
        if self._random_read:
            # If the Layer supports random reading, return.
            try:
                return Feature(capi.get_feature(self.ptr, feat_id), self)
            except OGRException:
                pass
        else:
            # Random access isn't supported, have to increment through
            # each feature until the given feature ID is encountered.
            for feat in self:
                if feat.fid == feat_id: return feat
        # Should have returned a Feature, raise an OGRIndexError.
        raise OGRIndexError('Invalid feature id: %s.' % feat_id)

    #### Layer properties ####
    @property
    def extent(self):
        "Returns the extent (an Envelope) of this layer."
        env = OGREnvelope()
        # Final argument 1 forces OGR to compute the extent if unknown.
        capi.get_extent(self.ptr, byref(env), 1)
        return Envelope(env)

    @property
    def name(self):
        "Returns the name of this layer in the Data Source."
        name = capi.get_fd_name(self._ldefn)
        return force_text(name, self._ds.encoding, strings_only=True)

    @property
    def num_feat(self, force=1):
        # NOTE(review): `force` can never be passed through property access;
        # it only serves as the constant forwarded to OGR below.
        "Returns the number of features in the Layer."
        return capi.get_feature_count(self.ptr, force)

    @property
    def num_fields(self):
        "Returns the number of fields in the Layer."
        return capi.get_field_count(self._ldefn)

    @property
    def geom_type(self):
        "Returns the geometry type (OGRGeomType) of the Layer."
        return OGRGeomType(capi.get_fd_geom_type(self._ldefn))

    @property
    def srs(self):
        "Returns the Spatial Reference used in this Layer."
        try:
            ptr = capi.get_layer_srs(self.ptr)
            # Clone so the returned SRS owns its own handle independently
            # of the layer's lifetime.
            return SpatialReference(srs_api.clone_srs(ptr))
        except SRSException:
            return None

    @property
    def fields(self):
        """
        Returns a list of string names corresponding to each of the Fields
        available in this Layer.
        """
        return [force_text(capi.get_field_name(capi.get_field_defn(self._ldefn, i)),
                           self._ds.encoding, strings_only=True)
                for i in xrange(self.num_fields)]

    @property
    def field_types(self):
        """
        Returns a list of the types of fields in this Layer.  For example,
        the list [OFTInteger, OFTReal, OFTString] would be returned for
        an OGR layer that had an integer, a floating-point, and string
        fields.
        """
        return [OGRFieldTypes[capi.get_field_type(capi.get_field_defn(self._ldefn, i))]
                for i in xrange(self.num_fields)]

    @property
    def field_widths(self):
        "Returns a list of the maximum field widths for the features."
        return [capi.get_field_width(capi.get_field_defn(self._ldefn, i))
                for i in xrange(self.num_fields)]

    @property
    def field_precisions(self):
        "Returns the field precisions for the features."
        return [capi.get_field_precision(capi.get_field_defn(self._ldefn, i))
                for i in xrange(self.num_fields)]

    def _get_spatial_filter(self):
        try:
            return OGRGeometry(geom_api.clone_geom(capi.get_spatial_filter(self.ptr)))
        except OGRException:
            return None

    def _set_spatial_filter(self, filter):
        if isinstance(filter, OGRGeometry):
            capi.set_spatial_filter(self.ptr, filter.ptr)
        elif isinstance(filter, (tuple, list)):
            if not len(filter) == 4:
                raise ValueError('Spatial filter list/tuple must have 4 elements.')
            # Map c_double onto params -- if a bad type is passed in it
            # will be caught here.
            xmin, ymin, xmax, ymax = map(c_double, filter)
            capi.set_spatial_filter_rect(self.ptr, xmin, ymin, xmax, ymax)
        elif filter is None:
            capi.set_spatial_filter(self.ptr, None)
        else:
            raise TypeError('Spatial filter must be either an OGRGeometry instance, a 4-tuple, or None.')

    spatial_filter = property(_get_spatial_filter, _set_spatial_filter)

    #### Layer Methods ####
    def get_fields(self, field_name):
        """
        Returns a list containing the given field name for every Feature
        in the Layer.
        """
        if not field_name in self.fields:
            raise OGRException('invalid field name: %s' % field_name)
        return [feat.get(field_name) for feat in self]

    def get_geoms(self, geos=False):
        """
        Returns a list containing the OGRGeometry for every Feature in
        the Layer.
        """
        if geos:
            from django.contrib.gis.geos import GEOSGeometry
            return [GEOSGeometry(feat.geom.wkb) for feat in self]
        else:
            return [feat.geom for feat in self]

    def test_capability(self, capability):
        """
        Returns a bool indicating whether the this Layer supports the given
        capability (a string).  Valid capability strings include:
          'RandomRead', 'SequentialWrite', 'RandomWrite', 'FastSpatialFilter',
          'FastFeatureCount', 'FastGetExtent', 'CreateField', 'Transactions',
          'DeleteFeature', and 'FastSetNextByIndex'.
        """
        return bool(capi.test_capability(self.ptr, force_bytes(capability)))
| gpl-2.0 |
mxia/engine | build/android/pylib/remote/device/appurify_sanitized.py | 51 | 1146 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import logging
import os
import sys
from pylib import constants
sys.path.append(os.path.join(
constants.DIR_SOURCE_ROOT, 'third_party', 'requests', 'src'))
sys.path.append(os.path.join(
constants.DIR_SOURCE_ROOT, 'third_party', 'appurify-python', 'src'))
# Importing appurify installs an extra handler on the root logger as a
# side effect; snapshot the handler list so the additions can be removed.
handlers_before = list(logging.getLogger().handlers)

import appurify.api
import appurify.utils

handlers_after = list(logging.getLogger().handlers)

# Strip every handler that the appurify import added.
new_handler = list(set(handlers_after) - set(handlers_before))
while new_handler:
  logging.info("Removing logging handler.")
  logging.getLogger().removeHandler(new_handler.pop())

# Re-export the sanitized modules under short aliases.
api = appurify.api
utils = appurify.utils
# This is not thread safe. If multiple threads are ever supported with appurify
# this may cause logging messages to go missing.
@contextlib.contextmanager
def SanitizeLogging(verbose_count, level):
    """Temporarily disable logging at or below *level* unless verbose.

    Yields True when sanitization was applied (verbose_count < 2),
    False otherwise; logging is restored when the block exits normally.
    """
    sanitize = verbose_count < 2
    if sanitize:
        logging.disable(level)
    yield sanitize
    if sanitize:
        logging.disable(logging.NOTSET)
| bsd-3-clause |
hyperized/ansible | lib/ansible/modules/network/fortios/fortios_switch_controller_storm_control.py | 13 | 10044 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_switch_controller_storm_control
short_description: Configure FortiSwitch storm control in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify switch_controller feature and storm_control category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
switch_controller_storm_control:
description:
- Configure FortiSwitch storm control.
default: null
type: dict
suboptions:
broadcast:
description:
- Enable/disable storm control to drop broadcast traffic.
type: str
choices:
- enable
- disable
rate:
description:
- Rate in packets per second at which storm traffic is controlled (1 - 10000000). Storm control drops excess traffic data rates beyond
this threshold.
type: int
unknown_multicast:
description:
- Enable/disable storm control to drop unknown multicast traffic.
type: str
choices:
- enable
- disable
unknown_unicast:
description:
- Enable/disable storm control to drop unknown unicast traffic.
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure FortiSwitch storm control.
fortios_switch_controller_storm_control:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
switch_controller_storm_control:
broadcast: "enable"
rate: "4"
unknown_multicast: "enable"
unknown_unicast: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Authenticate the *fos* session against the FortiGate in *data*.

    Enables API debugging, selects HTTP vs HTTPS from the 'https' flag
    (defaulting to HTTPS), then logs in with the supplied credentials.
    """
    fos.debug('on')
    use_https = data.get('https', True)
    fos.https('on' if use_https else 'off')
    fos.login(data['host'], data['username'], data['password'],
              verify=data['ssl_verify'])
def filter_switch_controller_storm_control_data(json):
    """Return only the known storm-control options that carry a value.

    Unknown keys and keys whose value is None are dropped so they are
    not sent to the FortiOS API.
    """
    option_list = ['broadcast', 'rate', 'unknown_multicast',
                   'unknown_unicast']
    return {option: json[option] for option in option_list
            if option in json and json[option] is not None}
def underscore_to_hyphen(data):
    """Recursively rename dict keys from snake_case to hyphen-case.

    FortiOS REST payloads use hyphenated keys while the Ansible argument
    spec uses underscores; this converts an arbitrarily nested structure
    of dicts and lists.  Non-container values are returned unchanged.
    """
    if isinstance(data, list):
        # Rebuild the list so converted elements actually replace the
        # originals; rebinding the loop variable (as the previous code
        # did) left dicts nested inside lists unconverted.
        return [underscore_to_hyphen(elem) for elem in data]
    if isinstance(data, dict):
        return {k.replace('_', '-'): underscore_to_hyphen(v)
                for k, v in data.items()}
    return data
def switch_controller_storm_control(data, fos):
    """Push the storm-control configuration in *data* to the device.

    Filters the module arguments down to valid options, converts the
    keys to FortiOS hyphen-case, and issues the 'set' call.
    """
    payload = underscore_to_hyphen(
        filter_switch_controller_storm_control_data(
            data['switch_controller_storm_control']))
    return fos.set('switch-controller',
                   'storm-control',
                   data=payload,
                   vdom=data['vdom'])
def is_successful_status(status):
    """True when the call succeeded, or was a DELETE on a missing (404) object."""
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_switch_controller(data, fos):
    """Run the requested storm-control update and summarise the outcome.

    Returns (is_error, has_changed, response) for the Ansible layer.
    """
    if data['switch_controller_storm_control']:
        resp = switch_controller_storm_control(data, fos)

    failed = not is_successful_status(resp)
    changed = resp['status'] == "success"
    return failed, changed, resp
def main():
    """Ansible entry point: parse the argument spec and apply the config."""
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "switch_controller_storm_control": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "broadcast": {"required": False, "type": "str",
                              "choices": ["enable", "disable"]},
                "rate": {"required": False, "type": "int"},
                "unknown_multicast": {"required": False, "type": "str",
                                      "choices": ["enable", "disable"]},
                "unknown_unicast": {"required": False, "type": "str",
                                    "choices": ["enable", "disable"]}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        # Preferred path: reuse the persistent HTTPAPI connection plugin.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_switch_controller(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: open a direct fortiosapi session with the credentials.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_switch_controller(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
| gpl-3.0 |
jemekite/youtube-dl | youtube_dl/extractor/pornhub.py | 51 | 5647 | from __future__ import unicode_literals
import os
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_unquote,
compat_urllib_parse_unquote_plus,
compat_urllib_parse_urlparse,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
str_to_int,
)
from ..aes import (
aes_decrypt_text
)
class PornHubIE(InfoExtractor):
    # Matches both the canonical view_video URL and the embed form.
    _VALID_URL = r'https?://(?:www\.)?pornhub\.com/(?:view_video\.php\?viewkey=|embed/)(?P<id>[0-9a-z]+)'
    _TESTS = [{
        'url': 'http://www.pornhub.com/view_video.php?viewkey=648719015',
        'md5': '882f488fa1f0026f023f33576004a2ed',
        'info_dict': {
            'id': '648719015',
            'ext': 'mp4',
            "uploader": "Babes",
            "title": "Seductive Indian beauty strips down and fingers her pink pussy",
            "age_limit": 18
        }
    }, {
        'url': 'http://www.pornhub.com/view_video.php?viewkey=ph557bbb6676d2d',
        'only_matching': True,
    }]

    @classmethod
    def _extract_url(cls, webpage):
        """Return the URL of an embedded player iframe found in *webpage*, if any."""
        mobj = re.search(
            r'<iframe[^>]+?src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?pornhub\.com/embed/\d+)\1', webpage)
        if mobj:
            return mobj.group('url')

    def _extract_count(self, pattern, webpage, name):
        """Best-effort scrape of a numeric counter; None when absent."""
        return str_to_int(self._search_regex(
            pattern, webpage, '%s count' % name, fatal=False))

    def _real_extract(self, url):
        video_id = self._match_id(url)

        # The age gate is bypassed by pre-setting the verification cookie.
        req = compat_urllib_request.Request(
            'http://www.pornhub.com/view_video.php?viewkey=%s' % video_id)
        req.add_header('Cookie', 'age_verified=1')
        webpage = self._download_webpage(req, video_id)

        # Surface site-side errors (removed/private videos) early.
        error_msg = self._html_search_regex(
            r'(?s)<div class="userMessageSection[^"]*".*?>(.*?)</div>',
            webpage, 'error message', default=None)
        if error_msg:
            error_msg = re.sub(r'\s+', ' ', error_msg)
            raise ExtractorError(
                'PornHub said: %s' % error_msg,
                expected=True, video_id=video_id)

        video_title = self._html_search_regex(r'<h1 [^>]+>([^<]+)', webpage, 'title')
        video_uploader = self._html_search_regex(
            r'(?s)From:&nbsp;.+?<(?:a href="/users/|a href="/channels/|span class="username)[^>]+>(.+?)<',
            webpage, 'uploader', fatal=False)
        thumbnail = self._html_search_regex(r'"image_url":"([^"]+)', webpage, 'thumbnail', fatal=False)
        if thumbnail:
            thumbnail = compat_urllib_parse_unquote(thumbnail)

        view_count = self._extract_count(
            r'<span class="count">([\d,\.]+)</span> views', webpage, 'view')
        like_count = self._extract_count(
            r'<span class="votesUp">([\d,\.]+)</span>', webpage, 'like')
        dislike_count = self._extract_count(
            r'<span class="votesDown">([\d,\.]+)</span>', webpage, 'dislike')
        comment_count = self._extract_count(
            r'All Comments\s*<span>\(([\d,.]+)\)', webpage, 'comment')

        video_urls = list(map(compat_urllib_parse_unquote, re.findall(r"player_quality_[0-9]{3}p\s*=\s*'([^']+)'", webpage)))
        if webpage.find('"encrypted":true') != -1:
            # Encrypted streams are AES-encrypted with the (quoted) title as key.
            password = compat_urllib_parse_unquote_plus(
                self._search_regex(r'"video_title":"([^"]+)', webpage, 'password'))
            video_urls = list(map(lambda s: aes_decrypt_text(s, password, 32).decode('utf-8'), video_urls))

        formats = []
        for video_url in video_urls:
            # Quality/bitrate are encoded in the URL path, e.g. '480P-1000K'.
            path = compat_urllib_parse_urlparse(video_url).path
            extension = os.path.splitext(path)[1][1:]
            format = path.split('/')[5].split('_')[:2]
            format = "-".join(format)

            m = re.match(r'^(?P<height>[0-9]+)[pP]-(?P<tbr>[0-9]+)[kK]$', format)
            if m is None:
                height = None
                tbr = None
            else:
                height = int(m.group('height'))
                tbr = int(m.group('tbr'))
            formats.append({
                'url': video_url,
                'ext': extension,
                'format': format,
                'format_id': format,
                'tbr': tbr,
                'height': height,
            })
        self._sort_formats(formats)

        return {
            'id': video_id,
            'uploader': video_uploader,
            'title': video_title,
            'thumbnail': thumbnail,
            'view_count': view_count,
            'like_count': like_count,
            'dislike_count': dislike_count,
            'comment_count': comment_count,
            'formats': formats,
            'age_limit': 18,
        }
class PornHubPlaylistIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?pornhub\.com/playlist/(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://www.pornhub.com/playlist/6201671',
        'info_dict': {
            'id': '6201671',
            'title': 'P0p4',
        },
        'playlist_mincount': 35,
    }]

    def _real_extract(self, url):
        playlist_id = self._match_id(url)

        webpage = self._download_webpage(url, playlist_id)

        # set() deduplicates repeated links to the same video on the page.
        entries = [
            self.url_result('http://www.pornhub.com/%s' % video_url, 'PornHub')
            for video_url in set(re.findall('href="/?(view_video\.php\?viewkey=\d+[^"]*)"', webpage))
        ]

        # Playlist metadata (title/description) lives in an inline JS object.
        playlist = self._parse_json(
            self._search_regex(
                r'playlistObject\s*=\s*({.+?});', webpage, 'playlist'),
            playlist_id)

        return self.playlist_result(
            entries, playlist_id, playlist.get('title'), playlist.get('description'))
| unlicense |
tiagormk/gem5-hmp | src/arch/x86/isa/insts/simd64/floating_point/arithmetic/reciprocal_estimation.py | 91 | 2172 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Placeholder microcode block: the 3DNow! reciprocal-estimation
# instructions (PFRCP, PFRCPIT1, PFRCPIT2) are declared but not yet
# implemented.
microcode = '''
# PFRCP
# PFRCPIT1
# PFRCPIT2
'''
| bsd-3-clause |
chauhanhardik/populo | common/djangoapps/course_modes/migrations/0006_expiration_date_to_datetime.py | 173 | 2009 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
    """South data migration: copy CourseMode.expiration_date (a date)
    into the new expiration_datetime field (a datetime at midnight)."""

    def forwards(self, orm):
        # Local import shadows the module-level `import datetime` above,
        # giving direct access to the datetime class.
        from datetime import datetime
        for course_mode in orm.CourseMode.objects.all():
            if course_mode.expiration_date is None:
                course_mode.expiration_datetime = None
                course_mode.save()
            else:
                # Midnight (min.time()) of the stored expiration date.
                course_mode.expiration_datetime = datetime.combine(course_mode.expiration_date, datetime.min.time())
                course_mode.save()

    def backwards(self, orm):
        # Reverse migration simply clears the new datetime column.
        for course_mode in orm.CourseMode.objects.all():
            course_mode.expiration_datetime = None
            course_mode.save()

    # Frozen ORM model definitions used by South at migration time.
    models = {
        'course_modes.coursemode': {
            'Meta': {'unique_together': "(('course_id', 'mode_slug', 'currency'),)", 'object_name': 'CourseMode'},
            'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
            'expiration_date': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'expiration_datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'min_price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'mode_display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'mode_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'suggested_prices': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
        }
    }

    complete_apps = ['course_modes']
    symmetrical = True
symmetrical = True
| agpl-3.0 |
CMSS-BCRDB/RDS | trove/version.py | 7 | 1392 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
    from trove.vcsversion import version_info
except ImportError:
    # Fallback used when the generated vcsversion module is absent
    # (e.g. running from a plain source checkout).
    version_info = {'branch_nick': u'LOCALBRANCH',
                    'revision_id': 'LOCALREVISION',
                    'revno': 0}

TROVE_VERSION = ['2012', '1']
YEAR, COUNT = TROVE_VERSION

FINAL = False   # This becomes true at Release Candidate time
def canonical_version_string():
    """Return the bare '<year>.<count>' version string."""
    return "%s.%s" % (YEAR, COUNT)
def version_string():
    """Canonical version, suffixed with '-dev' for non-final builds."""
    base = canonical_version_string()
    return base if FINAL else '%s-dev' % (base,)
def vcs_version_string():
    """Return '<branch>:<revision>' from the recorded VCS metadata."""
    return "{0}:{1}".format(version_info['branch_nick'],
                            version_info['revision_id'])
def version_string_with_vcs():
    """Return the full '<canonical>-<vcs>' version string."""
    return "-".join([canonical_version_string(), vcs_version_string()])
| apache-2.0 |
inviwo/inviwo | tools/converters/update_itf.py | 2 | 1918 | #requires lxml, and beautifulsoup4
#To install lxml
#Install lxml on Windows: https://pypi.python.org/pypi/lxml/3.3.3
#To install beautifulsoup4
#In Windows, run cmd as admin"
#Run "python get-pip.py" script to install pip
#Navigate to "PythonPath/Scripts"
#Run "pip install beautifulsoup4"
import sys
import os
import re
from bs4 import BeautifulSoup
# custom indentation for bs4.BeatuifulSoup.prettify
# https://stackoverflow.com/questions/15509397/custom-indent-width-for-beautifulsoup-prettify
orig_prettify = BeautifulSoup.prettify
r = re.compile(r'^(\s*)', re.MULTILINE)
def prettify(self, encoding=None, formatter="minimal", indent_width=4):
    """bs4 prettify with every indent level widened to *indent_width* spaces."""
    pretty = orig_prettify(self, encoding, formatter)
    return r.sub(r'\1' * indent_width, pretty)
BeautifulSoup.prettify = prettify
def perform(f):
    """Upgrade version-1 InviwoTreeData transfer functions in file *f* to
    the version-2 InviwoWorkspace layout, rewriting the file in place."""
    print("Open file " + f)
    with open(f, 'r') as file:
        # read() preserves the file verbatim; the previous
        # "\n".join(file.readlines()) doubled every newline because each
        # line returned by readlines() already ends in '\n'.
        filestr = file.read()
    soup = BeautifulSoup(filestr, 'xml')
    treedata = soup.find_all("InviwoTreeData")
    for tf in treedata:
        # Elements that only reference another TF need no conversion.
        if "reference" in tf.attrs:
            continue
        print("BEFORE " + 60*"#")
        print(tf.prettify())
        print("AFTER " + 60*"#")
        newtf = soup.new_tag("InviwoWorkspace", version="2")
        newtf.append(tf.maskMin)
        newtf.append(tf.maskMax)
        dps = soup.new_tag("Points")
        print(tf.point)
        for i in tf.find_all("point"):
            newp = soup.new_tag("Point")
            # NOTE(review): only the "x" coordinate is carried over as the
            # Point position — confirm the v2 schema is 1-D here.
            newp.append(soup.new_tag("pos", content=i.pos["x"]))
            newp.append(i.rgba)
            dps.append(newp)
        newtf.append(dps)
        tf.decompose()
        soup.append(newtf)
        print(newtf.prettify())
        print("DONE " + 60*"#")
    print("Write file ")
    with open(f, 'w') as fout:
        fout.write(soup.prettify())
if __name__ == '__main__':
    # Convert every workspace file passed on the command line.
    for f in sys.argv[1:]:
        perform(f)
| bsd-2-clause |
openstack/oslo.utils | oslo_utils/tests/test_eventletutils.py | 1 | 8513 | # Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
from unittest import mock
import warnings
import eventlet
from eventlet import greenthread
from oslotest import base as test_base
from oslo_utils import eventletutils
class EventletUtilsTest(test_base.BaseTestCase):
    """Unit tests for ``oslo_utils.eventletutils``.

    ``setUp`` forces ``EVENTLET_AVAILABLE`` to True so the code under test
    always takes its eventlet-aware paths regardless of the environment;
    ``tearDown`` restores the original value.
    """
    def setUp(self):
        super(EventletUtilsTest, self).setUp()
        # Save the real availability flag, then pretend eventlet is importable.
        self._old_avail = eventletutils.EVENTLET_AVAILABLE
        eventletutils.EVENTLET_AVAILABLE = True
    def tearDown(self):
        super(EventletUtilsTest, self).tearDown()
        # Restore the flag captured in setUp().
        eventletutils.EVENTLET_AVAILABLE = self._old_avail
    @mock.patch("oslo_utils.eventletutils._patcher")
    def test_warning_not_patched(self, mock_patcher):
        """An unpatched module yields one RuntimeWarning naming it."""
        mock_patcher.already_patched = True
        mock_patcher.is_monkey_patched.return_value = False
        with warnings.catch_warnings(record=True) as capture:
            warnings.simplefilter("always")
            eventletutils.warn_eventlet_not_patched(['os'])
        self.assertEqual(1, len(capture))
        w = capture[0]
        self.assertEqual(RuntimeWarning, w.category)
        self.assertIn('os', str(w.message))
    @mock.patch("oslo_utils.eventletutils._patcher")
    def test_warning_not_patched_none_provided(self, mock_patcher):
        """With no module list, the warning names every patchable module."""
        mock_patcher.already_patched = True
        mock_patcher.is_monkey_patched.return_value = False
        with warnings.catch_warnings(record=True) as capture:
            warnings.simplefilter("always")
            eventletutils.warn_eventlet_not_patched()
        self.assertEqual(1, len(capture))
        w = capture[0]
        self.assertEqual(RuntimeWarning, w.category)
        for m in eventletutils._ALL_PATCH:
            self.assertIn(m, str(w.message))
    @mock.patch("oslo_utils.eventletutils._patcher")
    def test_warning_not_patched_all(self, mock_patcher):
        """['all'] expands to every patchable module in the warning."""
        mock_patcher.already_patched = True
        mock_patcher.is_monkey_patched.return_value = False
        with warnings.catch_warnings(record=True) as capture:
            warnings.simplefilter("always")
            eventletutils.warn_eventlet_not_patched(['all'])
        self.assertEqual(1, len(capture))
        w = capture[0]
        self.assertEqual(RuntimeWarning, w.category)
        for m in eventletutils._ALL_PATCH:
            self.assertIn(m, str(w.message))
    @mock.patch("oslo_utils.eventletutils._patcher")
    def test_no_warning(self, mock_patcher):
        """No warning is emitted for a module that is already patched."""
        mock_patcher.already_patched = True
        mock_patcher.is_monkey_patched.return_value = True
        with warnings.catch_warnings(record=True) as capture:
            warnings.simplefilter("always")
            eventletutils.warn_eventlet_not_patched(['os'])
        self.assertEqual(0, len(capture))
    @mock.patch("oslo_utils.eventletutils._patcher")
    def test_eventlet_is_patched(self, mock_patcher):
        """is_monkey_patched() mirrors the underlying patcher's answer."""
        mock_patcher.is_monkey_patched.return_value = True
        self.assertTrue(eventletutils.is_monkey_patched('os'))
        mock_patcher.is_monkey_patched.return_value = False
        self.assertFalse(eventletutils.is_monkey_patched('os'))
    @mock.patch("oslo_utils.eventletutils._patcher", None)
    def test_eventlet_no_patcher(self):
        """With no patcher module at all, nothing counts as patched."""
        self.assertFalse(eventletutils.is_monkey_patched('os'))
    @mock.patch("oslo_utils.eventletutils._patcher")
    def test_partially_patched_warning(self, mock_patcher):
        """Warnings track the patched state module-by-module."""
        # is_patched plays the role of the "currently patched" set; the
        # mocked patcher consults it via the side_effect lambda below.
        is_patched = set()
        mock_patcher.already_patched = True
        mock_patcher.is_monkey_patched.side_effect = lambda m: m in is_patched
        with warnings.catch_warnings(record=True) as capture:
            warnings.simplefilter("always")
            eventletutils.warn_eventlet_not_patched(['os'])
        self.assertEqual(1, len(capture))
        is_patched.add('os')
        with warnings.catch_warnings(record=True) as capture:
            warnings.simplefilter("always")
            eventletutils.warn_eventlet_not_patched(['os'])
        self.assertEqual(0, len(capture))
        is_patched.add('thread')
        with warnings.catch_warnings(record=True) as capture:
            warnings.simplefilter("always")
            eventletutils.warn_eventlet_not_patched(['os', 'thread'])
        self.assertEqual(0, len(capture))
        with warnings.catch_warnings(record=True) as capture:
            warnings.simplefilter("always")
            eventletutils.warn_eventlet_not_patched(['all'])
        self.assertEqual(1, len(capture))
        w = capture[0]
        self.assertEqual(RuntimeWarning, w.category)
        # Already-patched modules must not be named in the 'all' warning.
        for m in ['os', 'thread']:
            self.assertNotIn(m, str(w.message))
    def test_invalid_patch_check(self):
        """Unknown module names are rejected with ValueError."""
        self.assertRaises(ValueError,
                          eventletutils.warn_eventlet_not_patched,
                          ['blah.blah'])
    @mock.patch('oslo_utils.eventletutils._eventlet')
    def test_event_api_compat(self, mock_eventlet):
        """EventletEvent exposes threading.Event's full public API."""
        with mock.patch('oslo_utils.eventletutils.is_monkey_patched',
                        return_value=True):
            e_event = eventletutils.Event()
        self.assertIsInstance(e_event, eventletutils.EventletEvent)
        t_event = eventletutils.Event()
        t_event_cls = threading.Event
        self.assertIsInstance(t_event, t_event_cls)
        public_methods = [m for m in dir(t_event) if not m.startswith("_") and
                          callable(getattr(t_event, m))]
        for method in public_methods:
            self.assertTrue(hasattr(e_event, method))
        # Ensure set() allows multiple invocations, same as in
        # threading implementation.  (isSet is the camelCase alias of
        # is_set, kept for API compatibility.)
        e_event.set()
        self.assertTrue(e_event.isSet())
        e_event.set()
        self.assertTrue(e_event.isSet())
    def test_event_no_timeout(self):
        """wait() without an explicit timeout, under a 0.5s test guard."""
        event = eventletutils.EventletEvent()
        def thread_a():
            self.assertTrue(event.wait())
        a = greenthread.spawn(thread_a)
        # NOTE(review): Timeout(0.5, False) exits this block silently on
        # expiry instead of raising -- confirm the fail() placement is the
        # intended success/failure logic here.
        with eventlet.timeout.Timeout(0.5, False):
            a.wait()
            self.fail('wait() timed out')
    def test_event_race(self):
        """clear() immediately followed by set() still wakes a waiter."""
        event = eventletutils.EventletEvent()
        def thread_a():
            self.assertTrue(event.wait(2))
        a = greenthread.spawn(thread_a)
        def thread_b():
            eventlet.sleep(0.1)
            event.clear()
            event.set()
            a.wait()
        b = greenthread.spawn(thread_b)
        # The whole dance must finish well before thread_a's 2s timeout.
        with eventlet.timeout.Timeout(0.5):
            b.wait()
    def test_event_clear_timeout(self):
        """Repeated clear() while waiting: the wait(0.5) still times out."""
        event = eventletutils.EventletEvent()
        def thread_a():
            self.assertFalse(event.wait(0.5))
        a = greenthread.spawn(thread_a)
        def thread_b():
            eventlet.sleep(0.1)
            event.clear()
            eventlet.sleep(0.1)
            event.clear()
            a.wait()
        b = greenthread.spawn(thread_b)
        with eventlet.timeout.Timeout(0.7):
            b.wait()
    def test_event_set_clear_timeout(self):
        """First waiter sees the set() and clears; second waiter times out."""
        event = eventletutils.EventletEvent()
        wakes = []
        def thread_func():
            result = event.wait(0.2)
            wakes.append(result)
            if len(wakes) == 1:
                # First thread woken by set(); reset the event so the
                # second thread's wait expires instead.
                self.assertTrue(result)
                event.clear()
            else:
                self.assertFalse(result)
        a = greenthread.spawn(thread_func)
        b = greenthread.spawn(thread_func)
        eventlet.sleep(0)  # start threads
        event.set()
        with eventlet.timeout.Timeout(0.3):
            a.wait()
            b.wait()
        self.assertFalse(event.is_set())
        self.assertEqual([True, False], wakes)
    @mock.patch('oslo_utils.eventletutils._eventlet.event.Event')
    def test_event_clear_already_sent(self, mock_event):
        """clear() after set() sends on the old underlying event only once."""
        old_event = mock.Mock()
        new_event = mock.Mock()
        # First EventletEvent construction uses old_event; the clear()
        # replaces it with new_event.
        mock_event.side_effect = [old_event, new_event]
        event = eventletutils.EventletEvent()
        event.set()
        event.clear()
        self.assertEqual(1, old_event.send.call_count)
| apache-2.0 |
manashmndl/scikit-learn | sklearn/feature_selection/variance_threshold.py | 238 | 2594 | # Author: Lars Buitinck <L.J.Buitinck@uva.nl>
# License: 3-clause BSD
import numpy as np
from ..base import BaseEstimator
from .base import SelectorMixin
from ..utils import check_array
from ..utils.sparsefuncs import mean_variance_axis
from ..utils.validation import check_is_fitted
class VarianceThreshold(BaseEstimator, SelectorMixin):
    """Unsupervised feature selector that drops low-variance features.

    Only the training data ``X`` is examined (any ``y`` is ignored), so the
    selector is equally usable for unsupervised learning.

    Read more in the :ref:`User Guide <variance_threshold>`.

    Parameters
    ----------
    threshold : float, optional
        Features whose training-set variance does not exceed this value are
        removed.  With the default of 0.0 only constant features (the same
        value in every sample) are discarded.

    Attributes
    ----------
    variances_ : array, shape (n_features,)
        Per-feature variances computed during ``fit``.

    Examples
    --------
    The following dataset has integer features, two of which are the same
    in every sample. These are removed with the default setting for threshold::

        >>> X = [[0, 2, 0, 3], [0, 1, 4, 3], [0, 1, 1, 3]]
        >>> selector = VarianceThreshold()
        >>> selector.fit_transform(X)
        array([[2, 0],
               [1, 4],
               [1, 1]])
    """

    def __init__(self, threshold=0.):
        self.threshold = threshold

    def fit(self, X, y=None):
        """Learn the empirical per-feature variances of ``X``.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape (n_samples, n_features)
            Sample vectors from which to compute variances.

        y : any
            Ignored. This parameter exists only for compatibility with
            sklearn.pipeline.Pipeline.

        Returns
        -------
        self

        Raises
        ------
        ValueError
            If no feature's variance exceeds ``threshold``.
        """
        X = check_array(X, ('csr', 'csc'), dtype=np.float64)

        # Sparse inputs expose ``toarray``; those get the sparse-aware
        # helper, everything else plain numpy.
        if hasattr(X, "toarray"):
            _, self.variances_ = mean_variance_axis(X, axis=0)
        else:
            self.variances_ = np.var(X, axis=0)

        if np.all(self.variances_ <= self.threshold):
            message = "No feature in X meets the variance threshold {0:.5f}"
            if X.shape[0] == 1:
                message += " (X contains only one sample)"
            raise ValueError(message.format(self.threshold))

        return self

    def _get_support_mask(self):
        # Boolean mask selecting features strictly above the threshold.
        check_is_fitted(self, 'variances_')
        return self.variances_ > self.threshold
| bsd-3-clause |
codekaki/odoo | addons/stock/partner.py | 57 | 1890 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class res_partner(osv.osv):
    """Extend ``res.partner`` with per-partner stock locations used as
    overrides for the default source/destination of goods movements."""
    _inherit = 'res.partner'
    _columns = {
        # Destination override for goods sent to this partner; being a
        # fields.property, the stored value is company-dependent.
        'property_stock_customer': fields.property(
            'stock.location',
            type='many2one',
            relation='stock.location',
            string="Customer Location",
            view_load=True,
            help="This stock location will be used, instead of the default one, as the destination location for goods you send to this partner"),
        # Source override for goods received from this partner
        # (company-dependent as well).
        'property_stock_supplier': fields.property(
            'stock.location',
            type='many2one',
            relation='stock.location',
            string="Supplier Location",
            view_load=True,
            help="This stock location will be used, instead of the default one, as the source location for goods you receive from the current partner"),
    }
# Legacy OpenERP idiom: instantiating the osv class registers the model
# with the ORM (a no-op under the newer API, kept for compatibility).
res_partner()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
boedy1996/SPARC | geonode/catalogue/backends/pycsw_local.py | 3 | 6015 | #########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import os
from lxml import etree
from django.conf import settings
from ConfigParser import SafeConfigParser
from owslib.iso import MD_Metadata
from pycsw import server
from geonode.catalogue.backends.generic import CatalogueBackend as GenericCatalogueBackend
from geonode.catalogue.backends.generic import METADATA_FORMATS
from shapely.geometry.base import ReadingError
# pycsw settings that the user shouldn't have to worry about
# These fixed settings are merged over (and take precedence of) the
# deployment-provided ``settings.PYCSW['CONFIGURATION']`` inside
# ``CatalogueBackend._csw_local_dispatch``.
CONFIGURATION = {
    'server': {
        'home': '.',
        'url': settings.CATALOGUE['default']['URL'],
        'encoding': 'UTF-8',
        'language': settings.LANGUAGE_CODE,
        'maxrecords': '10',
        # 'loglevel': 'DEBUG',
        # 'logfile': '/tmp/pycsw.log',
        # 'federatedcatalogues': 'http://geo.data.gov/geoportal/csw/discovery',
        # 'pretty_print': 'true',
        # 'domainquerytype': 'range',
        # 'domaincounts': 'true',
        'profiles': 'apiso,ebrim',
    },
    'repository': {
        'source': 'geonode',
        # Presumably maps GeoNode model fields to pycsw queryables --
        # see pycsw_local_mappings.py alongside this module.
        'mappings': os.path.join(os.path.dirname(__file__), 'pycsw_local_mappings.py')
    }
}
class CatalogueBackend(GenericCatalogueBackend):
    """Catalogue backend that drives pycsw in-process ("HTTP-less" CSW).

    Requests are dispatched straight into a ``pycsw.server.Csw`` instance
    configured from ``settings.PYCSW`` merged with the module-level
    CONFIGURATION, so no HTTP round-trip to a catalogue service occurs.
    """

    def __init__(self, *args, **kwargs):
        super(CatalogueBackend, self).__init__(*args, **kwargs)
        self.catalogue.formats = ['Atom', 'DIF', 'Dublin Core', 'ebRIM', 'FGDC', 'ISO']
        self.catalogue.local = True

    def remove_record(self, uuid):
        # Record lifecycle is handled by GeoNode itself, not via CSW-T.
        pass

    def create_record(self, item):
        # See remove_record().
        pass

    def get_record(self, uuid):
        """Return the parsed MD_Metadata record for ``uuid``, or None."""
        results = self._csw_local_dispatch(identifier=uuid)
        if len(results) < 1:
            return None
        result = etree.fromstring(results).find('{http://www.isotc211.org/2005/gmd}MD_Metadata')
        if result is None:
            return None
        record = MD_Metadata(result)
        # Flatten the per-thesaurus keyword groups into a single list.
        record.keywords = []
        if hasattr(record, 'identification') and hasattr(record.identification, 'keywords'):
            for kw in record.identification.keywords:
                record.keywords.extend(kw['keywords'])
        record.links = {}
        record.links['metadata'] = self.catalogue.urls_for_uuid(uuid)
        record.links['download'] = self.catalogue.extract_links(record)
        return record

    def search_records(self, keywords, start, limit, bbox):
        """Run a keyword search; return a dict with rows/total/next_page."""
        with self.catalogue:
            # BUG FIX: ``keywords`` used to be passed twice positionally,
            # shifting every later argument into the wrong parameter (bbox
            # even landed in ``identifier``, silently turning the request
            # into a GetRecordById).
            lresults = self._csw_local_dispatch(keywords, start + 1, limit, bbox)
            # serialize XML
            e = etree.fromstring(lresults)
            self.catalogue.records = \
                [MD_Metadata(x) for x in e.findall('//{http://www.isotc211.org/2005/gmd}MD_Metadata')]
            # build results into JSON for API
            # BUG FIX: ``records`` is a list -- it has no ``iteritems()``.
            results = [self.catalogue.metadatarecord2dict(doc)
                       for doc in self.catalogue.records]
            result = {'rows': results,
                      'total': e.find('{http://www.opengis.net/cat/csw/2.0.2}SearchResults').attrib.get(
                          'numberOfRecordsMatched'),
                      'next_page': e.find('{http://www.opengis.net/cat/csw/2.0.2}SearchResults').attrib.get(
                          'nextRecord')
                      }
            return result

    def _csw_local_dispatch(self, keywords=None, start=0, limit=10, bbox=None, identifier=None):
        """
        HTTP-less CSW: dispatch a GetRecords request (or GetRecordById when
        ``identifier`` is given) directly into pycsw and return the raw XML
        response string.
        """
        # serialize pycsw settings into SafeConfigParser
        # object for interaction with pycsw
        mdict = dict(settings.PYCSW['CONFIGURATION'], **CONFIGURATION)
        config = SafeConfigParser()
        for section, options in mdict.iteritems():
            config.add_section(section)
            for option, value in options.iteritems():
                config.set(section, option, value)
        # fake HTTP environment variable
        os.environ['QUERY_STRING'] = ''
        # init pycsw
        csw = server.Csw(config)
        # fake HTTP method
        csw.requesttype = 'POST'
        # fake HTTP request parameters
        if identifier is None:  # it's a GetRecords request
            formats = []
            for f in self.catalogue.formats:
                formats.append(METADATA_FORMATS[f][0])
            csw.kvp = {
                'elementsetname': 'full',
                'typenames': formats,
                'resulttype': 'results',
                'constraintlanguage': 'CQL_TEXT',
                # BUG FIX: the dict literal used to repeat the 'constraint'
                # key with a later None value, which silently overwrote this
                # CQL filter so keyword filtering never applied.
                'constraint': 'csw:AnyText like "%%%s%%"' % keywords,
                'outputschema': 'http://www.isotc211.org/2005/gmd',
                'startposition': start,
                'maxrecords': limit
            }
            response = csw.getrecords()
        else:  # it's a GetRecordById request
            csw.kvp = {
                'id': [identifier],
                'outputschema': 'http://www.isotc211.org/2005/gmd',
            }
            # FIXME(Ariel): Remove this try/except block when pycsw deals with
            # empty geometry fields better.
            # https://gist.github.com/ingenieroariel/717bb720a201030e9b3a
            try:
                response = csw.getrecordbyid()
            except ReadingError:
                return []
        return etree.tostring(response)
| gpl-3.0 |
msabramo/ansible | lib/ansible/modules/remote_management/hpilo/hponcfg.py | 66 | 2841 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Declarative metadata consumed by Ansible's documentation/QA tooling.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: hponcfg
author: Dag Wieers (@dagwieers)
version_added: "2.3"
short_description: Configure HP iLO interface using hponcfg
description:
- This modules configures the HP iLO interface using hponcfg.
options:
path:
description:
- The XML file as accepted by hponcfg
required: true
aliases: ['src']
minfw:
description:
- The minimum firmware level needed
requirements:
- hponcfg tool
notes:
- You need a working hponcfg on the target system.
'''
EXAMPLES = r'''
- name: Example hponcfg configuration XML
copy:
content: |
<ribcl VERSION="2.0">
<login USER_LOGIN="user" PASSWORD="password">
<rib_info MODE="WRITE">
<mod_global_settings>
<session_timeout value="0"/>
<ssh_status value="Y"/>
<ssh_port value="22"/>
<serial_cli_status value="3"/>
<serial_cli_speed value="5"/>
</mod_global_settings>
</rib_info>
</login>
</ribcl>
dest: /tmp/enable-ssh.xml
- name: Configure HP iLO using enable-ssh.xml
hponcfg:
src: /tmp/enable-ssh.xml
'''
from ansible.module_utils.basic import AnsibleModule
def main():
    """Run the hponcfg tool against the supplied XML file and report back.

    Builds the hponcfg command line from the module parameters, executes
    it, and fails the task when hponcfg exits non-zero.
    """
    module = AnsibleModule(
        argument_spec=dict(
            src=dict(required=True, type='path', aliases=['path']),
            minfw=dict(type='str'),
        )
    )

    # Consider every action a change (not idempotent yet!)
    changed = True

    src = module.params['src']
    minfw = module.params['minfw']

    options = ' -f %s' % src

    # Add -v for debugging
    # options += ' -v'

    if minfw:
        # BUG FIX: this line previously read ``option += ...`` (a NameError
        # raised whenever minfw was supplied); the variable is ``options``.
        options += ' -m %s' % minfw

    rc, stdout, stderr = module.run_command('hponcfg %s' % options)

    if rc != 0:
        module.fail_json(rc=rc, msg="Failed to run hponcfg", stdout=stdout, stderr=stderr)

    module.exit_json(changed=changed, stdout=stdout, stderr=stderr)


if __name__ == '__main__':
    main()
| gpl-3.0 |
adrienbrault/home-assistant | tests/components/aprs/test_device_tracker.py | 10 | 10449 | """Test APRS device tracker."""
from unittest.mock import Mock, patch
import aprslib
import homeassistant.components.aprs.device_tracker as device_tracker
from homeassistant.const import EVENT_HOMEASSISTANT_START
from tests.common import get_test_home_assistant
# Fixed fixtures shared by every test below.
DEFAULT_PORT = 14580  # standard APRS-IS filtered-feed port
TEST_CALLSIGN = "testcall"
# (0, 0) -- "Null Island": an easily recognisable lat/lon pair.
TEST_COORDS_NULL_ISLAND = (0, 0)
TEST_FILTER = "testfilter"
TEST_HOST = "testhost"
TEST_PASSWORD = "testpass"
def test_make_filter():
    """Callsigns are uppercased and joined as budlist ("b/") filter terms."""
    assert (
        device_tracker.make_filter(["CALLSIGN1", "callsign2"])
        == "b/CALLSIGN1 b/CALLSIGN2"
    )
def test_gps_accuracy_0():
    """Ambiguity level 0 maps to 0 meters."""
    assert device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 0) == 0


def test_gps_accuracy_1():
    """Ambiguity level 1 maps to 186 meters."""
    assert device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 1) == 186


def test_gps_accuracy_2():
    """Ambiguity level 2 maps to 1855 meters."""
    assert device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 2) == 1855


def test_gps_accuracy_3():
    """Ambiguity level 3 maps to 18553 meters."""
    assert device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 3) == 18553


def test_gps_accuracy_4():
    """Ambiguity level 4 maps to 111319 meters."""
    assert device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 4) == 111319
def test_gps_accuracy_invalid_int():
    """An out-of-range integer level raises ValueError."""
    try:
        device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 5)
    except ValueError:
        pass
    else:
        assert False, "No exception."


def test_gps_accuracy_invalid_string():
    """A non-numeric level raises ValueError."""
    try:
        device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, "not an int")
    except ValueError:
        pass
    else:
        assert False, "No exception."


def test_gps_accuracy_invalid_float():
    """A non-integer float level raises ValueError."""
    try:
        device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 1.2)
    except ValueError:
        pass
    else:
        assert False, "No exception."
def test_aprs_listener():
    """A successful run records the config and connects to APRS-IS."""
    with patch("aprslib.IS") as mock_ais:
        see = Mock()
        listener = device_tracker.AprsListenerThread(
            TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, TEST_FILTER, see
        )
        listener.run()

        assert listener.callsign == TEST_CALLSIGN
        assert listener.host == TEST_HOST
        assert listener.server_filter == TEST_FILTER
        assert listener.see == see
        assert listener.start_event.is_set()
        assert listener.start_success
        assert listener.start_message == "Connected to testhost with callsign testcall."
        mock_ais.assert_called_with(
            TEST_CALLSIGN, passwd=TEST_PASSWORD, host=TEST_HOST, port=DEFAULT_PORT
        )


def test_aprs_listener_start_fail():
    """A connection error is reported via start_success/start_message."""
    with patch(
        "aprslib.IS.connect", side_effect=aprslib.ConnectionError("Unable to connect.")
    ):
        see = Mock()
        listener = device_tracker.AprsListenerThread(
            TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, TEST_FILTER, see
        )
        listener.run()

        assert listener.callsign == TEST_CALLSIGN
        assert listener.host == TEST_HOST
        assert listener.server_filter == TEST_FILTER
        assert listener.see == see
        assert listener.start_event.is_set()
        assert not listener.start_success
        assert listener.start_message == "Unable to connect."


def test_aprs_listener_stop():
    """stop() closes the AIS connection."""
    with patch("aprslib.IS"):
        see = Mock()
        listener = device_tracker.AprsListenerThread(
            TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, TEST_FILTER, see
        )
        listener.ais.close = Mock()
        listener.run()
        listener.stop()

        assert listener.callsign == TEST_CALLSIGN
        assert listener.host == TEST_HOST
        assert listener.server_filter == TEST_FILTER
        assert listener.see == see
        assert listener.start_event.is_set()
        assert listener.start_message == "Connected to testhost with callsign testcall."
        assert listener.start_success
        listener.ais.close.assert_called_with()
def test_aprs_listener_rx_msg():
    """An uncompressed position report triggers a device-tracker update."""
    with patch("aprslib.IS"):
        see = Mock()
        message = {
            device_tracker.ATTR_FORMAT: "uncompressed",
            device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
            device_tracker.ATTR_LATITUDE: 0.0,
            device_tracker.ATTR_LONGITUDE: 0.0,
            device_tracker.ATTR_ALTITUDE: 0,
        }
        listener = device_tracker.AprsListenerThread(
            TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, TEST_FILTER, see
        )
        listener.run()
        listener.rx_msg(message)

        assert listener.callsign == TEST_CALLSIGN
        assert listener.host == TEST_HOST
        assert listener.server_filter == TEST_FILTER
        assert listener.see == see
        assert listener.start_event.is_set()
        assert listener.start_success
        assert listener.start_message == "Connected to testhost with callsign testcall."
        see.assert_called_with(
            dev_id=device_tracker.slugify("ZZ0FOOBAR-1"),
            gps=(0.0, 0.0),
            attributes={"altitude": 0},
        )


def test_aprs_listener_rx_msg_ambiguity():
    """A valid posambiguity is converted into a GPS accuracy attribute."""
    with patch("aprslib.IS"):
        see = Mock()
        message = {
            device_tracker.ATTR_FORMAT: "uncompressed",
            device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
            device_tracker.ATTR_LATITUDE: 0.0,
            device_tracker.ATTR_LONGITUDE: 0.0,
            device_tracker.ATTR_POS_AMBIGUITY: 1,
        }
        listener = device_tracker.AprsListenerThread(
            TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, TEST_FILTER, see
        )
        listener.run()
        listener.rx_msg(message)

        assert listener.callsign == TEST_CALLSIGN
        assert listener.host == TEST_HOST
        assert listener.server_filter == TEST_FILTER
        assert listener.see == see
        assert listener.start_event.is_set()
        assert listener.start_success
        assert listener.start_message == "Connected to testhost with callsign testcall."
        see.assert_called_with(
            dev_id=device_tracker.slugify("ZZ0FOOBAR-1"),
            gps=(0.0, 0.0),
            attributes={device_tracker.ATTR_GPS_ACCURACY: 186},
        )


def test_aprs_listener_rx_msg_ambiguity_invalid():
    """An out-of-range posambiguity is ignored (no accuracy attribute)."""
    with patch("aprslib.IS"):
        see = Mock()
        message = {
            device_tracker.ATTR_FORMAT: "uncompressed",
            device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
            device_tracker.ATTR_LATITUDE: 0.0,
            device_tracker.ATTR_LONGITUDE: 0.0,
            device_tracker.ATTR_POS_AMBIGUITY: 5,
        }
        listener = device_tracker.AprsListenerThread(
            TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, TEST_FILTER, see
        )
        listener.run()
        listener.rx_msg(message)

        assert listener.callsign == TEST_CALLSIGN
        assert listener.host == TEST_HOST
        assert listener.server_filter == TEST_FILTER
        assert listener.see == see
        assert listener.start_event.is_set()
        assert listener.start_success
        assert listener.start_message == "Connected to testhost with callsign testcall."
        see.assert_called_with(
            dev_id=device_tracker.slugify("ZZ0FOOBAR-1"), gps=(0.0, 0.0), attributes={}
        )


def test_aprs_listener_rx_msg_no_position():
    """A non-position report produces no device-tracker update."""
    with patch("aprslib.IS"):
        see = Mock()
        message = {device_tracker.ATTR_FORMAT: "invalid"}
        listener = device_tracker.AprsListenerThread(
            TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, TEST_FILTER, see
        )
        listener.run()
        listener.rx_msg(message)

        assert listener.callsign == TEST_CALLSIGN
        assert listener.host == TEST_HOST
        assert listener.server_filter == TEST_FILTER
        assert listener.see == see
        assert listener.start_event.is_set()
        assert listener.start_success
        assert listener.start_message == "Connected to testhost with callsign testcall."
        see.assert_not_called()
def test_setup_scanner():
    """setup_scanner builds the server filter and spawns a listener."""
    with patch(
        "homeassistant.components.aprs.device_tracker.AprsListenerThread"
    ) as listener:
        hass = get_test_home_assistant()
        hass.start()

        config = {
            "username": TEST_CALLSIGN,
            "password": TEST_PASSWORD,
            "host": TEST_HOST,
            "callsigns": ["XX0FOO*", "YY0BAR-1"],
        }
        see = Mock()
        result = device_tracker.setup_scanner(hass, config, see)
        hass.bus.fire(EVENT_HOMEASSISTANT_START)
        hass.stop()

        assert result
        listener.assert_called_with(
            TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, "b/XX0FOO* b/YY0BAR-1", see
        )


def test_setup_scanner_timeout():
    """setup_scanner returns False when the connection times out."""
    hass = get_test_home_assistant()
    hass.start()

    config = {
        "username": TEST_CALLSIGN,
        "password": TEST_PASSWORD,
        "host": "localhost",
        "timeout": 0.01,
        "callsigns": ["XX0FOO*", "YY0BAR-1"],
    }
    see = Mock()
    try:
        assert not device_tracker.setup_scanner(hass, config, see)
    finally:
        hass.stop()
| mit |
lthurlow/Network-Grapher | proj/external/matplotlib-1.2.1/build/lib.linux-i686-2.7/matplotlib/backends/tkagg.py | 6 | 1063 | from __future__ import print_function
from matplotlib.backends import _tkagg
import Tkinter as Tk
def blit(photoimage, aggimage, bbox=None, colormode=1):
    """Copy *aggimage* pixels into the Tk *photoimage*.

    When *bbox* is given, only that sub-region is transferred; *colormode*
    is a mode flag passed through to the "PyAggImagePhoto" Tcl command
    implemented by the _tkagg C extension.  That command is registered with
    the interpreter lazily, on the first TclError.
    """
    tk = photoimage.tk
    if bbox is not None:
        # The C level receives the bbox by address (id of its array form).
        bbox_array = bbox.__array__()
    else:
        bbox_array = None
    try:
        tk.call("PyAggImagePhoto", photoimage, id(aggimage), colormode, id(bbox_array))
    except Tk.TclError:
        # "PyAggImagePhoto" is not yet known to this interpreter: register
        # the _tkagg extension against it, then retry the call once.
        try:
            try:
                # Preferred path: hand over the real Tcl interpreter address.
                _tkagg.tkinit(tk.interpaddr(), 1)
            except AttributeError:
                # Older Tkinter without interpaddr(): fall back to id(tk).
                _tkagg.tkinit(id(tk), 0)
            tk.call("PyAggImagePhoto", photoimage, id(aggimage), colormode, id(bbox_array))
        except (ImportError, AttributeError, Tk.TclError):
            raise
def test(aggimage):
    """Interactive smoke test: show *aggimage* in a Tk window.

    NOTE: enters an endless update loop and never returns; intended for
    manual invocation only, not for automated test runs.
    """
    import time  # NOTE(review): unused here -- candidate for removal
    r = Tk.Tk()
    c = Tk.Canvas(r, width=aggimage.width, height=aggimage.height)
    c.pack()
    p = Tk.PhotoImage(width=aggimage.width, height=aggimage.height)
    blit(p, aggimage)
    c.create_image(aggimage.width,aggimage.height,image=p)
    # Blit a second time after the image is attached to the canvas.
    blit(p, aggimage)
    while 1: r.update_idletasks()
| mit |
dschien/tasks-backup | apiclient/ext/appengine.py | 52 | 4139 | # Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Google App Engine
Utilities for making it easier to use the
Google API Client for Python on Google App Engine.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import pickle
from google.appengine.ext import db
from apiclient.oauth import OAuthCredentials
from apiclient.oauth import FlowThreeLegged
class FlowThreeLeggedProperty(db.Property):
  """Utility property for easy storage and retrieval of an
  apiclient.oauth.FlowThreeLegged in the App Engine datastore."""

  # Tell what the user type is.
  data_type = FlowThreeLegged

  # For writing to datastore.
  def get_value_for_datastore(self, model_instance):
    """Pickle the flow into a db.Blob for storage."""
    flow = super(FlowThreeLeggedProperty,
                 self).get_value_for_datastore(model_instance)
    return db.Blob(pickle.dumps(flow))

  # For reading from datastore.
  def make_value_from_datastore(self, value):
    """Unpickle the stored blob back into a FlowThreeLegged (or None)."""
    if value is None:
      return None
    return pickle.loads(value)

  def validate(self, value):
    """Reject values that are not FlowThreeLegged instances."""
    if value is not None and not isinstance(value, FlowThreeLegged):
      # BUG FIX: was the bare name ``BadValueError`` (never imported, so
      # invalid values raised NameError); the class lives on the db module.
      raise db.BadValueError('Property %s must be convertible '
                             'to a FlowThreeLegged instance (%s)' %
                             (self.name, value))
    return super(FlowThreeLeggedProperty, self).validate(value)

  def empty(self, value):
    return not value
class OAuthCredentialsProperty(db.Property):
  """Utility property for easy storage and retrieval of
  apiclient.oauth.OAuthCredentials in the App Engine datastore.
  """

  # Tell what the user type is.
  data_type = OAuthCredentials

  # For writing to datastore.
  def get_value_for_datastore(self, model_instance):
    """Pickle the credentials into a db.Blob for storage."""
    cred = super(OAuthCredentialsProperty,
                 self).get_value_for_datastore(model_instance)
    return db.Blob(pickle.dumps(cred))

  # For reading from datastore.
  def make_value_from_datastore(self, value):
    """Unpickle the stored blob back into OAuthCredentials (or None)."""
    if value is None:
      return None
    return pickle.loads(value)

  def validate(self, value):
    """Reject values that are not OAuthCredentials instances."""
    if value is not None and not isinstance(value, OAuthCredentials):
      # BUG FIX: was the bare name ``BadValueError`` (never imported, so
      # invalid values raised NameError); the class lives on the db module.
      raise db.BadValueError('Property %s must be convertible '
                             'to an OAuthCredentials instance (%s)' %
                             (self.name, value))
    return super(OAuthCredentialsProperty, self).validate(value)

  def empty(self, value):
    return not value
class StorageByKeyName(object):
  """Store and retrieve a single credential in the App Engine datastore.

  Assumes the credential lives in a CredentialsProperty attribute of a
  datastore model class whose entities are addressed by key_name.
  """

  def __init__(self, model, key_name, property_name):
    """Constructor for Storage.

    Args:
      model: db.Model, model class
      key_name: string, key name for the entity that has the credentials
      property_name: string, name of the property that is a CredentialsProperty
    """
    self.model = model
    self.key_name = key_name
    self.property_name = property_name

  def get(self):
    """Fetch the stored credential, creating an empty entity if needed.

    Returns:
      Credentials
    """
    entity = self.model.get_or_insert(self.key_name)
    cred = getattr(entity, self.property_name)
    if cred and hasattr(cred, 'set_store'):
      # Wire the credential back to this storage so refreshed tokens
      # are persisted automatically.
      cred.set_store(self.put)
    return cred

  def put(self, credentials):
    """Persist ``credentials`` on the backing entity.

    Args:
      credentials: Credentials, the credentials to store.
    """
    entity = self.model.get_or_insert(self.key_name)
    setattr(entity, self.property_name, credentials)
    entity.put()
| apache-2.0 |
ArcherSys/ArcherSys | Lib/site-packages/twisted/protocols/loopback.py | 4 | 11925 | # -*- test-case-name: twisted.test.test_loopback -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Testing support for protocols -- loopback between client and server.
"""
from __future__ import division, absolute_import
# system imports
import tempfile
from zope.interface import implementer
# Twisted Imports
from twisted.protocols import policies
from twisted.internet import interfaces, protocol, main, defer
from twisted.internet.task import deferLater
from twisted.python import failure
from twisted.internet.interfaces import IAddress
class _LoopbackQueue(object):
"""
Trivial wrapper around a list to give it an interface like a queue, which
the addition of also sending notifications by way of a Deferred whenever
the list has something added to it.
"""
_notificationDeferred = None
disconnect = False
def __init__(self):
self._queue = []
def put(self, v):
self._queue.append(v)
if self._notificationDeferred is not None:
d, self._notificationDeferred = self._notificationDeferred, None
d.callback(None)
def __nonzero__(self):
return bool(self._queue)
__bool__ = __nonzero__
def get(self):
return self._queue.pop(0)
@implementer(IAddress)
class _LoopbackAddress(object):
    # Placeholder IAddress: a loopback connection has no meaningful
    # host/port, so the address object carries no data.
    pass
@implementer(interfaces.ITransport, interfaces.IConsumer)
class _LoopbackTransport(object):
    """
    One direction of a loopback connection: bytes written to this transport
    are appended to a L{_LoopbackQueue} for delivery to the peer protocol.
    Also acts as a consumer for at most one (push or pull) producer.
    """
    disconnecting = False
    producer = None

    # ITransport
    def __init__(self, q):
        self.q = q

    def write(self, data):
        """
        Queue C{data} for the peer; only byte strings are accepted.
        """
        if isinstance(data, bytes):
            self.q.put(data)
        else:
            raise TypeError("Can only write bytes to ITransport")

    def writeSequence(self, iovec):
        """
        Queue the concatenation of every chunk in C{iovec}.
        """
        self.q.put(b''.join(iovec))

    def loseConnection(self):
        """
        Flag the queue as disconnecting and enqueue the C{None} sentinel.
        """
        self.q.disconnect = True
        self.q.put(None)

    def abortConnection(self):
        """
        Abort the connection. Same as L{loseConnection}.
        """
        self.loseConnection()

    def getPeer(self):
        return _LoopbackAddress()

    def getHost(self):
        return _LoopbackAddress()

    # IConsumer
    def registerProducer(self, producer, streaming):
        assert self.producer is None
        self.producer = producer
        self.streamingProducer = streaming
        self._pollProducer()

    def unregisterProducer(self):
        assert self.producer is not None
        self.producer = None

    def _pollProducer(self):
        # Pull-mode (non-streaming) producers must be asked for more data.
        current = self.producer
        if current is not None and not self.streamingProducer:
            current.resumeProducing()
def identityPumpPolicy(queue, target):
    """
    A pump policy which hands each queued chunk to the target exactly as it
    was written, one C{dataReceived} call per chunk.

    This isn't a particularly realistic policy.

    @see: L{loopbackAsync}
    """
    while queue:
        chunk = queue.get()
        if chunk is not None:
            target.dataReceived(chunk)
        else:
            # None marks disconnection; stop delivering.
            break
def collapsingPumpPolicy(queue, target):
    """
    A pump policy which concatenates all outstanding chunks and delivers the
    result to the target in a single C{dataReceived} call.

    @see: L{loopbackAsync}
    """
    pieces = []
    while queue:
        piece = queue.get()
        if piece is None:
            # Disconnection marker: deliver what we have so far and stop.
            break
        pieces.append(piece)
    if pieces:
        target.dataReceived(b''.join(pieces))
def loopbackAsync(server, client, pumpPolicy=identityPumpPolicy):
    """
    Establish a connection between C{server} and C{client} then transfer data
    between them until the connection is closed. This is often useful for
    testing a protocol.

    @param server: The protocol instance representing the server-side of this
        connection.

    @param client: The protocol instance representing the client-side of this
        connection.

    @param pumpPolicy: When either C{server} or C{client} writes to its
        transport, the string passed in is added to a queue of data for the
        other protocol.  Eventually, C{pumpPolicy} will be called with one such
        queue and the corresponding protocol object.  The pump policy callable
        is responsible for emptying the queue and passing the strings it
        contains to the given protocol's C{dataReceived} method.  The signature
        of C{pumpPolicy} is C{(queue, protocol)}.  C{queue} is an object with a
        C{get} method which will return the next string written to the
        transport, or C{None} if the transport has been disconnected, and which
        evaluates to C{True} if and only if there are more items to be
        retrieved via C{get}.

    @return: A L{Deferred} which fires when the connection has been closed and
        both sides have received notification of this.
    """
    # Each side writes into its own queue, which the pump feeds to the other
    # side's dataReceived.  The server is connected first.
    serverToClient = _LoopbackQueue()
    clientToServer = _LoopbackQueue()

    server.makeConnection(_LoopbackTransport(serverToClient))
    client.makeConnection(_LoopbackTransport(clientToServer))

    return _loopbackAsyncBody(
        server, serverToClient, client, clientToServer, pumpPolicy)
def _loopbackAsyncBody(server, serverToClient, client, clientToServer,
                       pumpPolicy):
    """
    Transfer bytes from the output queue of each protocol to the input of the other.

    @param server: The protocol instance representing the server-side of this
        connection.

    @param serverToClient: The L{_LoopbackQueue} holding the server's output.

    @param client: The protocol instance representing the client-side of this
        connection.

    @param clientToServer: The L{_LoopbackQueue} holding the client's output.

    @param pumpPolicy: See L{loopbackAsync}.

    @return: A L{Deferred} which fires when the connection has been closed and
        both sides have received notification of this.
    """
    def pump(source, q, target):
        # Deliver q's contents to target via the policy; returns whether
        # anything was delivered at all.
        sent = False
        if q:
            pumpPolicy(q, target)
            sent = True
        if sent and not q:
            # A write buffer has now been emptied.  Give any producer on that
            # side an opportunity to produce more data.
            source.transport._pollProducer()
        return sent

    while 1:
        disconnect = clientSent = serverSent = False
        # Deliver the data which has been written.
        serverSent = pump(server, serverToClient, client)
        clientSent = pump(client, clientToServer, server)

        if not clientSent and not serverSent:
            # Neither side wrote any data.  Wait for some new data to be added
            # before trying to do anything further.
            # A single Deferred is shared by both queues: whichever side
            # writes first resumes the pump.
            d = defer.Deferred()
            clientToServer._notificationDeferred = d
            serverToClient._notificationDeferred = d
            d.addCallback(
                _loopbackAsyncContinue,
                server, serverToClient, client, clientToServer, pumpPolicy)
            return d
        if serverToClient.disconnect:
            # The server wants to drop the connection.  Flush any remaining
            # data it has.
            disconnect = True
            pump(server, serverToClient, client)
        elif clientToServer.disconnect:
            # The client wants to drop the connection.  Flush any remaining
            # data it has.
            disconnect = True
            pump(client, clientToServer, server)
        if disconnect:
            # Someone wanted to disconnect, so okay, the connection is gone.
            server.connectionLost(failure.Failure(main.CONNECTION_DONE))
            client.connectionLost(failure.Failure(main.CONNECTION_DONE))
            return defer.succeed(None)
def _loopbackAsyncContinue(ignored, server, serverToClient, client,
                           clientToServer, pumpPolicy):
    """
    Callback for the notification Deferred set up by L{_loopbackAsyncBody}:
    resumes pumping on the next reactor iteration after new data arrived.
    """
    # Clear the Deferred from each message queue, since it has already fired
    # and cannot be used again.
    clientToServer._notificationDeferred = None
    serverToClient._notificationDeferred = None

    # Schedule some more byte-pushing to happen.  This isn't done
    # synchronously because no actual transport can re-enter dataReceived as
    # a result of calling write, and doing this synchronously could result
    # in that.
    from twisted.internet import reactor
    return deferLater(
        reactor, 0,
        _loopbackAsyncBody,
        server, serverToClient, client, clientToServer, pumpPolicy)
@implementer(interfaces.ITransport, interfaces.IConsumer)
class LoopbackRelay:
    """Synchronous loopback transport: writes accumulate in ``buffer`` until
    ``clearBuffer`` is explicitly called, which delivers them to ``target``.

    NOTE(review): ``buffer`` is initialised to a native str and concatenated
    with written data; writing bytes under Python 3 would raise — presumably
    this relay predates bytes-only transports.  Confirm before relying on it
    with byte data.
    """

    buffer = ''
    shouldLose = 0    # 0: open, 1: close after next flush, -1: already closed
    disconnecting = 0
    producer = None

    def __init__(self, target, logFile=None):
        self.target = target
        self.logFile = logFile

    def write(self, data):
        self.buffer = self.buffer + data
        if self.logFile:
            self.logFile.write("loopback writing %s\n" % repr(data))

    def writeSequence(self, iovec):
        self.write("".join(iovec))

    def clearBuffer(self):
        # Deliver buffered data to the target; no-op once closed.
        if self.shouldLose == -1:
            return

        if self.producer:
            self.producer.resumeProducing()
        if self.buffer:
            if self.logFile:
                self.logFile.write("loopback receiving %s\n" % repr(self.buffer))
            # Swap out the buffer before delivery so re-entrant writes from
            # dataReceived land in a fresh buffer.
            buffer = self.buffer
            self.buffer = ''
            self.target.dataReceived(buffer)
        if self.shouldLose == 1:
            self.shouldLose = -1
            self.target.connectionLost(failure.Failure(main.CONNECTION_DONE))

    def loseConnection(self):
        # Defer the actual close to the next clearBuffer so pending data
        # is flushed first.
        if self.shouldLose != -1:
            self.shouldLose = 1

    def getHost(self):
        return 'loopback'

    def getPeer(self):
        return 'loopback'

    def registerProducer(self, producer, streaming):
        self.producer = producer

    def unregisterProducer(self):
        self.producer = None

    def logPrefix(self):
        return 'Loopback(%r)' % (self.target.__class__.__name__,)
class LoopbackClientFactory(protocol.ClientFactory):
    """Client factory which always hands out one pre-built protocol instance
    and fires ``deferred`` once the connection has been lost."""

    def __init__(self, protocol):
        self.protocol = protocol
        self.deferred = defer.Deferred()
        self.disconnected = 0

    def buildProtocol(self, addr):
        # Ignore the address; the caller supplied the exact instance to use.
        return self.protocol

    def clientConnectionLost(self, connector, reason):
        self.disconnected = 1
        self.deferred.callback(None)
class _FireOnClose(policies.ProtocolWrapper):
    """Protocol wrapper whose ``deferred`` fires when the wrapped
    connection is lost."""

    def __init__(self, protocol, factory):
        policies.ProtocolWrapper.__init__(self, protocol, factory)
        self.deferred = defer.Deferred()

    def connectionLost(self, reason):
        # Let the wrapped protocol observe the loss first, then notify.
        policies.ProtocolWrapper.connectionLost(self, reason)
        self.deferred.callback(None)
def loopbackTCP(server, client, port=0, noisy=True):
    """Run session between server and client protocol instances over TCP."""
    from twisted.internet import reactor
    # Wrap the server protocol so we can tell when its connection closes.
    f = policies.WrappingFactory(protocol.Factory())
    serverWrapper = _FireOnClose(f, server)
    f.noisy = noisy
    f.buildProtocol = lambda addr: serverWrapper
    # port=0 lets the OS pick a free ephemeral port; we read it back below.
    serverPort = reactor.listenTCP(port, f, interface='127.0.0.1')
    clientF = LoopbackClientFactory(client)
    clientF.noisy = noisy
    reactor.connectTCP('127.0.0.1', serverPort.getHost().port, clientF)
    # Wait for: client disconnect, then server-side close, then stop
    # listening — in that order.
    d = clientF.deferred
    d.addCallback(lambda x: serverWrapper.deferred)
    d.addCallback(lambda x: serverPort.stopListening())
    return d
def loopbackUNIX(server, client, noisy=True):
    """Run session between server and client protocol instances over UNIX socket.

    The socket is created inside a fresh private temporary directory.
    """
    import os
    # tempfile.mktemp is deprecated and racy: another process could claim the
    # generated name before we bind to it.  mkdtemp atomically creates a
    # directory only we own, so a socket path inside it cannot collide.
    path = os.path.join(tempfile.mkdtemp(), 'loopback.sock')
    from twisted.internet import reactor
    # Wrap the server protocol so we can tell when its connection closes.
    f = policies.WrappingFactory(protocol.Factory())
    serverWrapper = _FireOnClose(f, server)
    f.noisy = noisy
    f.buildProtocol = lambda addr: serverWrapper
    serverPort = reactor.listenUNIX(path, f)
    clientF = LoopbackClientFactory(client)
    clientF.noisy = noisy
    reactor.connectUNIX(path, clientF)
    # Wait for: client disconnect, then server-side close, then stop
    # listening — in that order.
    d = clientF.deferred
    d.addCallback(lambda x: serverWrapper.deferred)
    d.addCallback(lambda x: serverPort.stopListening())
    return d
| mit |
peterfpeterson/mantid | Testing/SystemTests/tests/framework/MultiThreadedLoadNeXusTest.py | 3 | 3078 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2020 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
import concurrent.futures
import sys
from mantid.simpleapi import Load
from systemtesting import MantidSystemTest, linux_distro_description
def load_nexus_in_multiple_threads(filename, nthreads):
    """Attempt to load the given filename from multiple threads at once.

    :param filename: NeXus file to load
    :param nthreads: number of concurrent Load calls to attempt
    :raises RuntimeError: if any thread failed to load the file
    """
    results = [None] * nthreads
    with concurrent.futures.ThreadPoolExecutor(max_workers=nthreads) as executor:
        # Map each future back to its submission index: as_completed yields
        # futures in *completion* order, so enumerate() over it would
        # attribute a failure to whichever thread happened to finish at that
        # position rather than the thread that actually raised.
        jobs = {executor.submit(load_nexus, filename, index): index
                for index in range(nthreads)}
        for future in concurrent.futures.as_completed(jobs):
            index = jobs[future]
            try:
                future.result()
            except Exception as exc:
                results[index] = str(exc)

    raise_error_if_failed(results)
def load_nexus(filename: str, index: int) -> None:
    """Callable for Thread object. Performs the Load call
    :param filename: NeXus filename to load
    :param index: Index of thread assigned to perform load
    """
    # Each thread writes to its own uniquely named workspace so the
    # concurrent loads do not clobber one another's output.
    Load(filename, OutputWorkspace=f'w{index}')
def raise_error_if_failed(results) -> None:
    """Raise a RuntimeError summarising all failures, if any occurred.

    :param results: per-thread list where entry ``i`` is None on success or
        the error message produced by thread ``i``
    :raises RuntimeError: listing every failed thread and its message
    """
    if all(msg is None for msg in results):
        return
    messages = ["It was not possible to load a NeXus file with multiple threads. Errors:"]
    # Enumerate the full results list so the reported number is the real
    # thread index; enumerating only the filtered failures (as before)
    # reported the failure's position among failures instead.
    for index, msg in enumerate(results):
        if msg is not None:
            messages.append(f'Thread {index} raised an error: {msg}')
    raise RuntimeError('\n'.join(messages))
class MultiThreadedLoadNeXusTest(MantidSystemTest):
    """Verify that a NeXus file can be loaded
    from multiple threads.

    HDF5 can be built without the threadsafe option
    which causes problems for the current how the
    framework accesses NeXus files.
    """
    # Two threads are enough to provoke concurrent access to the HDF5 layer.
    NTHREADS = 2

    def skipTests(self):
        """HDF5 is currently not built in threadsafe mode on RHEL or macOS"""
        # Ideally this would be a capability check but that's very difficult as
        # the RHEL library doesn't have the H5is_library_threadsafe function
        if sys.platform == 'linux':
            # Substring match against the distro description; covers the
            # whole RHEL-like family.
            distro = linux_distro_description().lower()
            is_redhat_like = [name in distro for name in ('red hat', 'centos', 'fedora')]
            return any(is_redhat_like)
        elif sys.platform == 'darwin':
            return True
        else:
            return False

    def runTest(self):
        """Spin up multiple threads and simply
        check that we don't crash"""
        # "Raw" data NeXus
        load_nexus_in_multiple_threads(filename='INTER00013463.nxs', nthreads=self.NTHREADS)
        # Mantid processed file
        load_nexus_in_multiple_threads(filename='MARIReductionAutoEi.nxs', nthreads=self.NTHREADS)
slank/ansible | lib/ansible/modules/cloud/ovirt/ovirt_hosts_facts.py | 12 | 3407 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ovirt_hosts_facts
short_description: Retrieve facts about one or more oVirt hosts
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
- "Retrieve facts about one or more oVirt hosts."
notes:
- "This module creates a new top-level C(ovirt_hosts) fact, which
contains a list of hosts."
options:
pattern:
description:
- "Search term which is accepted by oVirt search backend."
- "For example to search host X from datacenter Y use following pattern:
name=X and datacenter=Y"
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about all hosts which names start with C(host) and
# belong to data center C(west):
- ovirt_hosts_facts:
pattern: name=host* and datacenter=west
- debug:
var: ovirt_hosts
'''
RETURN = '''
ovirt_hosts:
description: "List of dictionaries describing the hosts. Host attributes are mapped to dictionary keys,
all hosts attributes can be found at following url: https://ovirt.example.com/ovirt-engine/api/model#types/host."
returned: On success.
type: list
'''
def main():
    """Gather facts about oVirt hosts matching ``pattern`` and exit the module.

    On success the module returns an ``ovirt_hosts`` fact containing one
    dictionary per matching host; on any error it fails with the exception
    text and traceback.
    """
    argument_spec = ovirt_facts_full_argument_spec(
        pattern=dict(default='', required=False),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    # Initialise before the try block: if create_connection itself raises,
    # the finally clause would otherwise hit an unbound local name.
    connection = None
    try:
        connection = create_connection(module.params.pop('auth'))
        hosts_service = connection.system_service().hosts_service()
        hosts = hosts_service.list(search=module.params['pattern'])
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_hosts=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in hosts
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Close the SDK connection but keep the auth session alive
        # (logout=False) so other tasks in the play can reuse it.
        if connection is not None:
            connection.close(logout=False)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
danielru/pySDC | pySDC/implementations/datatype_classes/mesh.py | 1 | 8383 | import numpy as np
import copy as cp
from pySDC.core.Errors import DataError
class mesh(object):
    """
    Mesh data type with arbitrary dimensions

    This data type can be used whenever structured data with a single unknown per point in space is required

    Attributes:
        values (np.ndarray): contains the ndarray of the values
    """

    def __init__(self, init=None, val=None):
        """
        Initialization routine

        Args:
            init: can either be a tuple (one int per dimension) or a number (if only one dimension is requested)
                or another mesh object
            val: initial value (default: None)
        Raises:
            DataError: if init is none of the types above
        """
        # if init is another mesh, do a deepcopy (init by copy)
        if isinstance(init, mesh):
            self.values = cp.deepcopy(init.values)
        # if init is a number or a tuple of numbers, create mesh object with val as initial value
        elif isinstance(init, (tuple, int)):
            self.values = np.empty(init, dtype=np.float64)
            self.values[:] = val
        # something is wrong, if none of the ones above hit
        else:
            raise DataError('something went wrong during %s initialization' % type(self))

    def __add__(self, other):
        """
        Overloading the addition operator for mesh types

        Args:
            other (mesh.mesh): mesh object to be added
        Raises:
            DataError: if other is not a mesh object
        Returns:
            mesh.mesh: sum of caller and other values (self+other)
        """
        if isinstance(other, mesh):
            # always create new mesh, since otherwise c = a + b changes a as well!
            me = mesh(np.shape(self.values))
            me.values = self.values + other.values
            return me
        else:
            raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))

    def __sub__(self, other):
        """
        Overloading the subtraction operator for mesh types

        Args:
            other (mesh.mesh): mesh object to be subtracted
        Raises:
            DataError: if other is not a mesh object
        Returns:
            mesh.mesh: differences between caller and other values (self-other)
        """
        if isinstance(other, mesh):
            # always create new mesh, since otherwise c = a - b changes a as well!
            me = mesh(np.shape(self.values))
            me.values = self.values - other.values
            return me
        else:
            raise DataError("Type error: cannot subtract %s from %s" % (type(other), type(self)))

    def __rmul__(self, other):
        """
        Overloading the right multiply by factor operator for mesh types

        Args:
            other (int, float or complex): scaling factor
        Raises:
            DataError: if other is not a scalar
        Returns:
            mesh.mesh: copy of original values scaled by factor
        """
        # accept plain ints as well (e.g. 2 * mesh); previously only float and
        # complex were allowed, which made integer scaling fail needlessly
        if isinstance(other, (int, float, complex)):
            # always create new mesh, since otherwise c = f * a changes a as well!
            me = mesh(np.shape(self.values))
            me.values = self.values * other
            return me
        else:
            raise DataError("Type error: cannot multiply %s to %s" % (type(other), type(self)))

    def __abs__(self):
        """
        Overloading the abs operator for mesh types

        Returns:
            float: absolute maximum of all mesh values
        """
        # take absolute values of the mesh values
        absval = abs(self.values)
        # return maximum
        return np.amax(absval)

    def apply_mat(self, A):
        """
        Matrix multiplication operator

        Args:
            A: a matrix
        Returns:
            mesh.mesh: component multiplied by the matrix A
        """
        if not A.shape[1] == self.values.shape[0]:
            raise DataError("ERROR: cannot apply operator %s to %s" % (A, self))

        # the result of A.dot(x) has length A.shape[0]
        me = mesh(A.shape[0])
        me.values = A.dot(self.values)
        return me
class rhs_imex_mesh(object):
    """
    RHS data type for meshes with implicit and explicit components

    This data type can be used to have RHS with 2 components (here implicit and explicit)

    Attributes:
        impl (mesh.mesh): implicit part
        expl (mesh.mesh): explicit part
    """

    def __init__(self, init):
        """
        Initialization routine

        Args:
            init: can either be a tuple (one int per dimension) or a number (if only one dimension is requested)
                or another rhs_imex_mesh object
        Raises:
            DataError: if init is none of the types above
        """
        # if init is another rhs_imex_mesh, do a deepcopy (init by copy)
        if isinstance(init, type(self)):
            self.impl = mesh(init.impl)
            self.expl = mesh(init.expl)
        # if init is a number or a tuple of numbers, create mesh object with None as initial value
        elif isinstance(init, (tuple, int)):
            self.impl = mesh(init)
            self.expl = mesh(init)
        # something is wrong, if none of the ones above hit
        else:
            raise DataError('something went wrong during %s initialization' % type(self))

    def __sub__(self, other):
        """
        Overloading the subtraction operator for rhs types

        Args:
            other (mesh.rhs_imex_mesh): rhs object to be subtracted
        Raises:
            DataError: if other is not a rhs object
        Returns:
            mesh.rhs_imex_mesh: differences between caller and other values (self-other)
        """
        if isinstance(other, rhs_imex_mesh):
            # always create new rhs_imex_mesh, since otherwise c = a - b changes a as well!
            me = rhs_imex_mesh(np.shape(self.impl.values))
            me.impl.values = self.impl.values - other.impl.values
            me.expl.values = self.expl.values - other.expl.values
            return me
        else:
            raise DataError("Type error: cannot subtract %s from %s" % (type(other), type(self)))

    def __add__(self, other):
        """
        Overloading the addition operator for rhs types

        Args:
            other (mesh.rhs_imex_mesh): rhs object to be added
        Raises:
            DataError: if other is not a rhs object
        Returns:
            mesh.rhs_imex_mesh: sum of caller and other values (self+other)
        """
        if isinstance(other, rhs_imex_mesh):
            # always create new rhs_imex_mesh, since otherwise c = a + b changes a as well!
            me = rhs_imex_mesh(np.shape(self.impl.values))
            me.impl.values = self.impl.values + other.impl.values
            me.expl.values = self.expl.values + other.expl.values
            return me
        else:
            raise DataError("Type error: cannot add %s to %s" % (type(other), type(self)))

    def __rmul__(self, other):
        """
        Overloading the right multiply by factor operator for mesh types

        Args:
            other (int, float or complex): scaling factor
        Raises:
            DataError: is other is not a scalar
        Returns:
            mesh.rhs_imex_mesh: copy of original values scaled by factor
        """
        # accept int and complex factors as well, for consistency with
        # mesh.__rmul__ (previously only plain floats were allowed here)
        if isinstance(other, (int, float, complex)):
            # always create new rhs_imex_mesh
            me = rhs_imex_mesh(np.shape(self.impl.values))
            me.impl.values = other * self.impl.values
            me.expl.values = other * self.expl.values
            return me
        else:
            raise DataError("Type error: cannot multiply %s to %s" % (type(other), type(self)))

    def apply_mat(self, A):
        """
        Matrix multiplication operator

        Args:
            A: a matrix
        Returns:
            mesh.rhs_imex_mesh: each component multiplied by the matrix A
        """
        if not A.shape[1] == self.impl.values.shape[0]:
            raise DataError("ERROR: cannot apply operator %s to %s" % (A, self.impl))
        if not A.shape[1] == self.expl.values.shape[0]:
            raise DataError("ERROR: cannot apply operator %s to %s" % (A, self.expl))

        # the result of A.dot(x) has length A.shape[0]; using A.shape[1] here
        # (as before) allocated the wrong-sized mesh and disagreed with
        # mesh.apply_mat
        me = rhs_imex_mesh(A.shape[0])
        me.impl.values = A.dot(self.impl.values)
        me.expl.values = A.dot(self.expl.values)
        return me
| bsd-2-clause |
cloudenvy/cloudenvy | setup.py | 1 | 1039 | try:
from setuptools import setup
except:
from distutils.core import setup
import cloudenvy.metadata
def parse_requirements(requirements_filename='requirements.txt'):
    """Return the lines of a pip requirements file, with trailing newlines
    stripped."""
    with open(requirements_filename) as requirements_file:
        return [line.rstrip('\n') for line in requirements_file]
# Single source of truth for the package metadata handed to setup() below.
config = dict(
    name='cloudenvy',
    # Version lives in cloudenvy.metadata so the package can report it at
    # runtime as well.
    version=cloudenvy.metadata.VERSION,
    url='https://github.com/cloudenvy/cloudenvy',
    description='Fast provisioning on openstack clouds.',
    author='Brian Waldon',
    author_email='bcwaldon@gmail.com',
    install_requires=parse_requirements(),
    packages=['cloudenvy', 'cloudenvy.commands', 'cloudenvy.clouds'],
    entry_points={
        # Installs the `envy` CLI.
        'console_scripts': [
            'envy = cloudenvy.main:main',
        ],
        # Pluggable cloud backends discovered via this entry-point group.
        'cloudenvy_cloud_apis': [
            'ec2 = cloudenvy.clouds.ec2:CloudAPI',
            'openstack = cloudenvy.clouds.openstack:CloudAPI',
        ],
    },
)

setup(**config)
| apache-2.0 |
beezee/GAE-Django-site | django/contrib/gis/db/backends/spatialite/models.py | 403 | 1847 | """
The GeometryColumns and SpatialRefSys models for the SpatiaLite backend.
"""
from django.db import models
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
class GeometryColumns(models.Model):
    """
    The 'geometry_columns' table from SpatiaLite.
    """
    f_table_name = models.CharField(max_length=256)
    f_geometry_column = models.CharField(max_length=256)
    type = models.CharField(max_length=30)
    coord_dimension = models.IntegerField()
    srid = models.IntegerField(primary_key=True)
    spatial_index_enabled = models.IntegerField()

    class Meta:
        # This maps onto a table SpatiaLite itself maintains, so Django must
        # not create or migrate it.
        db_table = 'geometry_columns'
        managed = False

    @classmethod
    def table_name_col(cls):
        """
        Returns the name of the metadata column used to store the
        the feature table name.
        """
        return 'f_table_name'

    @classmethod
    def geom_col_name(cls):
        """
        Returns the name of the metadata column used to store the
        the feature geometry column.
        """
        return 'f_geometry_column'

    def __unicode__(self):
        return "%s.%s - %dD %s field (SRID: %d)" % \
               (self.f_table_name, self.f_geometry_column,
                self.coord_dimension, self.type, self.srid)
class SpatialRefSys(models.Model, SpatialRefSysMixin):
    """
    The 'spatial_ref_sys' table from SpatiaLite.
    """
    srid = models.IntegerField(primary_key=True)
    auth_name = models.CharField(max_length=256)
    auth_srid = models.IntegerField()
    ref_sys_name = models.CharField(max_length=256)
    proj4text = models.CharField(max_length=2048)

    @property
    def wkt(self):
        # SpatiaLite stores only the PROJ.4 string; derive WKT on demand via
        # GDAL rather than persisting it.
        from django.contrib.gis.gdal import SpatialReference
        return SpatialReference(self.proj4text).wkt

    class Meta:
        # System table owned by SpatiaLite; Django must not manage it.
        db_table = 'spatial_ref_sys'
        managed = False
| bsd-3-clause |
direvus/ansible | lib/ansible/parsing/utils/jsonify.py | 124 | 1233 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
def jsonify(result, format=False):
    ''' format JSON output (compact by default, pretty-printed if format=True) '''
    # the previous docstring read "(uncompressed or uncompressed)"
    if result is None:
        return "{}"

    indent = 4 if format else None

    try:
        return json.dumps(result, sort_keys=True, indent=indent, ensure_ascii=False)
    except UnicodeDecodeError:
        # Fall back to ASCII-escaped output for data that cannot be decoded
        # as text (Python 2 byte strings).
        return json.dumps(result, sort_keys=True, indent=indent)
| gpl-3.0 |
jussimalinen/robotframework-selenium2library | src/Selenium2Library/locators/elementfinder.py | 32 | 6478 | from Selenium2Library import utils
class ElementFinder(object):
    """Resolves Selenium2Library locator strings (e.g. ``id=foo``,
    ``xpath=//a``) into lists of WebElements via a prefix->strategy table."""

    def __init__(self):
        # Maps locator prefix to the finder method; the None key handles
        # locators written without an explicit prefix.
        self._strategies = {
            'identifier': self._find_by_identifier,
            'id': self._find_by_id,
            'name': self._find_by_name,
            'xpath': self._find_by_xpath,
            'dom': self._find_by_dom,
            'link': self._find_by_link_text,
            'css': self._find_by_css_selector,
            'tag': self._find_by_tag_name,
            None: self._find_by_default
        }

    def find(self, browser, locator, tag=None):
        """Return elements matching ``locator``, optionally restricted to
        elements that look like ``tag`` (a logical tag such as 'link' or
        'checkbox').  Raises ValueError for an unknown locator prefix."""
        assert browser is not None
        assert locator is not None and len(locator) > 0

        (prefix, criteria) = self._parse_locator(locator)
        strategy = self._strategies.get(prefix)
        if strategy is None:
            raise ValueError("Element locator with prefix '" + prefix + "' is not supported")
        (tag, constraints) = self._get_tag_and_constraints(tag)
        return strategy(browser, criteria, tag, constraints)

    # Strategy routines, private

    def _find_by_identifier(self, browser, criteria, tag, constraints):
        # 'identifier' matches either the id or the name attribute.
        elements = browser.find_elements_by_id(criteria)
        elements.extend(browser.find_elements_by_name(criteria))
        return self._filter_elements(elements, tag, constraints)

    def _find_by_id(self, browser, criteria, tag, constraints):
        return self._filter_elements(
            browser.find_elements_by_id(criteria),
            tag, constraints)

    def _find_by_name(self, browser, criteria, tag, constraints):
        return self._filter_elements(
            browser.find_elements_by_name(criteria),
            tag, constraints)

    def _find_by_xpath(self, browser, criteria, tag, constraints):
        return self._filter_elements(
            browser.find_elements_by_xpath(criteria),
            tag, constraints)

    def _find_by_dom(self, browser, criteria, tag, constraints):
        # Evaluate the locator as a JavaScript expression in the page; the
        # result may be a single element, a list, or null.
        result = browser.execute_script("return %s;" % criteria)
        if result is None:
            return []
        if not isinstance(result, list):
            result = [result]
        return self._filter_elements(result, tag, constraints)

    def _find_by_link_text(self, browser, criteria, tag, constraints):
        return self._filter_elements(
            browser.find_elements_by_link_text(criteria),
            tag, constraints)

    def _find_by_css_selector(self, browser, criteria, tag, constraints):
        return self._filter_elements(
            browser.find_elements_by_css_selector(criteria),
            tag, constraints)

    def _find_by_tag_name(self, browser, criteria, tag, constraints):
        return self._filter_elements(
            browser.find_elements_by_tag_name(criteria),
            tag, constraints)

    def _find_by_default(self, browser, criteria, tag, constraints):
        # Unprefixed locators: treat '//...' as XPath, otherwise match
        # against the key attributes for the requested tag.
        if criteria.startswith('//'):
            return self._find_by_xpath(browser, criteria, tag, constraints)
        return self._find_by_key_attrs(browser, criteria, tag, constraints)

    def _find_by_key_attrs(self, browser, criteria, tag, constraints):
        key_attrs = self._key_attrs.get(None)
        if tag is not None:
            key_attrs = self._key_attrs.get(tag, key_attrs)

        # Build one XPath that ORs all key attributes (and, for href/src,
        # the criteria resolved against the current base URL), ANDed with
        # any tag-derived attribute constraints.
        xpath_criteria = utils.escape_xpath_value(criteria)
        xpath_tag = tag if tag is not None else '*'
        xpath_constraints = ["@%s='%s'" % (name, constraints[name]) for name in constraints]
        xpath_searchers = ["%s=%s" % (attr, xpath_criteria) for attr in key_attrs]
        xpath_searchers.extend(
            self._get_attrs_with_url(key_attrs, criteria, browser))
        xpath = "//%s[%s(%s)]" % (
            xpath_tag,
            ' and '.join(xpath_constraints) + ' and ' if len(xpath_constraints) > 0 else '',
            ' or '.join(xpath_searchers))

        return browser.find_elements_by_xpath(xpath)

    # Private

    # Per-tag list of XPath expressions an unprefixed locator may match; the
    # None entry is the fallback for unknown/unspecified tags.
    _key_attrs = {
        None: ['@id', '@name'],
        'a': ['@id', '@name', '@href', 'normalize-space(descendant-or-self::text())'],
        'img': ['@id', '@name', '@src', '@alt'],
        'input': ['@id', '@name', '@value', '@src'],
        'button': ['@id', '@name', '@value', 'normalize-space(descendant-or-self::text())']
    }

    def _get_tag_and_constraints(self, tag):
        # Translate logical tag names ('link', 'checkbox', ...) into a real
        # HTML tag plus required attribute values.
        if tag is None: return None, {}

        tag = tag.lower()
        constraints = {}
        if tag == 'link':
            tag = 'a'
        elif tag == 'image':
            tag = 'img'
        elif tag == 'list':
            tag = 'select'
        elif tag == 'radio button':
            tag = 'input'
            constraints['type'] = 'radio'
        elif tag == 'checkbox':
            tag = 'input'
            constraints['type'] = 'checkbox'
        elif tag == 'text field':
            tag = 'input'
            constraints['type'] = 'text'
        elif tag == 'file upload':
            tag = 'input'
            constraints['type'] = 'file'
        return tag, constraints

    def _element_matches(self, element, tag, constraints):
        # True when the element has the given tag and every constrained
        # attribute has the required value.
        if not element.tag_name.lower() == tag:
            return False
        for name in constraints:
            if not element.get_attribute(name) == constraints[name]:
                return False
        return True

    def _filter_elements(self, elements, tag, constraints):
        if tag is None: return elements
        # NOTE(review): on Python 3 filter() returns an iterator, not a list;
        # presumably this module targets Python 2 — confirm before porting.
        return filter(
            lambda element: self._element_matches(element, tag, constraints),
            elements)

    def _get_attrs_with_url(self, key_attrs, criteria, browser):
        # For href/src key attributes, also try the criteria resolved as a
        # URL relative to the current page (computed lazily, at most once).
        attrs = []
        url = None
        xpath_url = None
        for attr in ['@src', '@href']:
            if attr in key_attrs:
                if url is None or xpath_url is None:
                    url = self._get_base_url(browser) + "/" + criteria
                    xpath_url = utils.escape_xpath_value(url)
                attrs.append("%s=%s" % (attr, xpath_url))
        return attrs

    def _get_base_url(self, browser):
        # Current URL with its last path segment dropped.
        url = browser.get_current_url()
        if '/' in url:
            url = '/'.join(url.split('/')[:-1])
        return url

    def _parse_locator(self, locator):
        # Split 'prefix=criteria'; XPath locators ('//...') and locators
        # without '=' keep a None prefix and are handled by _find_by_default.
        prefix = None
        criteria = locator
        if not locator.startswith('//'):
            locator_parts = locator.partition('=')
            if len(locator_parts[1]) > 0:
                prefix = locator_parts[0].strip().lower()
                criteria = locator_parts[2].strip()
        return (prefix, criteria)
| apache-2.0 |
"""A setuptools based setup module.

See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from codecs import open
from os import path
from setuptools import setup, find_packages

here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='opendavinci',
    # Versions should comply with PEP440.
    version='1.0.0',
    description='opendavinci-python bindings',
    long_description=long_description,
    # The project's main homepage.
    url='https://github.com/CaeruleusAqua/OpenDaVINCI-python-interface',
    # Author details
    author='Julian Scholle',
    author_email='julian.scholle@googlemail.com',
    # Choose your license
    license='GPL-v2',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    # BUG FIX: the previous entries 'Topic :: Software Development ::
    # Automotive' and 'License :: OSI Approved :: GPL-v2' are not valid
    # trove classifiers; PyPI rejects uploads carrying unknown classifiers.
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Topic :: Software Development',
        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
        # Supported Python versions.
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    # What does your project relate to?
    keywords='opendavinci automotive development',
    packages=find_packages(),
    # Run-time dependencies installed by pip alongside this package.
    install_requires=['sysv_ipc', 'posix_ipc', 'opencv-python', 'numpy',
                      'protobuf', 'import_file'],
)
| gpl-2.0 |
ShoRit/shipping-costs-sample | v2/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py | 536 | 9371 | # -*- coding: utf-8 -*-
"""
lockfile.py - Platform-independent advisory file locks.
Requires Python 2.5 unless you apply 2.4.diff
Locking is done on a per-thread basis instead of a per-process basis.
Usage:
>>> lock = LockFile('somefile')
>>> try:
... lock.acquire()
... except AlreadyLocked:
... print 'somefile', 'is locked already.'
... except LockFailed:
... print 'somefile', 'can\\'t be locked.'
... else:
... print 'got lock'
got lock
>>> print lock.is_locked()
True
>>> lock.release()
>>> lock = LockFile('somefile')
>>> print lock.is_locked()
False
>>> with lock:
... print lock.is_locked()
True
>>> print lock.is_locked()
False
>>> lock = LockFile('somefile')
>>> # It is okay to lock twice from the same thread...
>>> with lock:
... lock.acquire()
...
>>> # Though no counter is kept, so you can't unlock multiple times...
>>> print lock.is_locked()
False
Exceptions:
Error - base class for other exceptions
LockError - base class for all locking exceptions
AlreadyLocked - Another thread or process already holds the lock
LockFailed - Lock failed for some other reason
UnlockError - base class for all unlocking exceptions
AlreadyUnlocked - File was not locked.
NotMyLock - File was locked but not by the current thread/process
"""
from __future__ import absolute_import
import functools
import os
import socket
import threading
import warnings
# Work with PEP8 and non-PEP8 versions of threading module.
# Older Pythons only expose the camelCase names (currentThread / getName);
# alias them to the PEP8 names used throughout this module so both work.
if not hasattr(threading, "current_thread"):
    threading.current_thread = threading.currentThread
if not hasattr(threading.Thread, "get_name"):
    threading.Thread.get_name = threading.Thread.getName

# Names re-exported as the package's public API.
__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked',
           'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock',
           'LinkFileLock', 'MkdirFileLock', 'SQLiteFileLock',
           'LockBase', 'locked']
class Error(Exception):
    """Root of the lockfile exception hierarchy.

    Both locking (:class:`LockError`) and unlocking (:class:`UnlockError`)
    failures derive from this class, so callers can catch everything the
    package raises with one handler.
    """
class LockError(Error):
    """Common base for every failure raised while *acquiring* a lock."""
class LockTimeout(LockError):
    """Lock could not be created within the user-supplied timeout period."""
class AlreadyLocked(LockError):
    """The file is already locked by some other thread or process."""
class LockFailed(LockError):
    """Lock file creation failed for a reason other than contention
    (e.g. permissions)."""
class UnlockError(Error):
    """Common base for every failure raised while *releasing* a lock."""
class NotLocked(UnlockError):
    """An attempt was made to release a file that is not locked at all."""
class NotMyLock(UnlockError):
    """The file is locked, but by a different thread or process than the
    one attempting the release."""
class _SharedBase(object):
def __init__(self, path):
self.path = path
def acquire(self, timeout=None):
"""
Acquire the lock.
* If timeout is omitted (or None), wait forever trying to lock the
file.
* If timeout > 0, try to acquire the lock for that many seconds. If
the lock period expires and the file is still locked, raise
LockTimeout.
* If timeout <= 0, raise AlreadyLocked immediately if the file is
already locked.
"""
raise NotImplemented("implement in subclass")
def release(self):
"""
Release the lock.
If the file is not locked, raise NotLocked.
"""
raise NotImplemented("implement in subclass")
def __enter__(self):
"""
Context manager support.
"""
self.acquire()
return self
def __exit__(self, *_exc):
"""
Context manager support.
"""
self.release()
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.path)
class LockBase(_SharedBase):
    """Base class for platform-specific lock classes.

    Computes the ``.lock`` file path and a per-owner unique name embedding
    hostname, thread id (when ``threaded``), pid and a hash of the path.
    """

    def __init__(self, path, threaded=True, timeout=None):
        """
        >>> lock = LockBase('somefile')
        >>> lock = LockBase('somefile', threaded=False)
        """
        super(LockBase, self).__init__(path)
        self.lock_file = os.path.abspath(path) + ".lock"
        self.hostname = socket.gethostname()
        self.pid = os.getpid()
        if threaded:
            t = threading.current_thread()
            # Thread objects in Python 2.4 and earlier do not have ident
            # attrs.  Worm around that.
            ident = getattr(t, "ident", hash(t))
            self.tname = "-%x" % (ident & 0xffffffff)
        else:
            self.tname = ""
        dirname = os.path.dirname(self.lock_file)

        # unique name is mostly about the current process, but must
        # also contain the path -- otherwise, two adjacent locked
        # files conflict (one file gets locked, creating lock-file and
        # unique file, the other one gets locked, creating lock-file
        # and overwriting the already existing lock-file, then one
        # gets unlocked, deleting both lock-file and unique file,
        # finally the last lock errors out upon releasing.
        self.unique_name = os.path.join(dirname,
                                        "%s%s.%s%s" % (self.hostname,
                                                       self.tname,
                                                       self.pid,
                                                       hash(self.path)))
        self.timeout = timeout

    def is_locked(self):
        """
        Tell whether or not the file is locked.
        """
        # BUG FIX (here and below): ``raise NotImplemented(...)`` called the
        # non-callable NotImplemented singleton, raising TypeError instead of
        # the intended NotImplementedError.
        raise NotImplementedError("implement in subclass")

    def i_am_locking(self):
        """
        Return True if this object is locking the file.
        """
        raise NotImplementedError("implement in subclass")

    def break_lock(self):
        """
        Remove a lock.  Useful if a locking thread failed to unlock.
        """
        raise NotImplementedError("implement in subclass")

    def __repr__(self):
        return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name,
                                   self.path)
def _fl_helper(cls, mod, *args, **kwds):
warnings.warn("Import from %s module instead of lockfile package" % mod,
DeprecationWarning, stacklevel=2)
# This is a bit funky, but it's only for awhile. The way the unit tests
# are constructed this function winds up as an unbound method, so it
# actually takes three args, not two. We want to toss out self.
if not isinstance(args[0], str):
# We are testing, avoid the first arg
args = args[1:]
if len(args) == 1 and not kwds:
kwds["threaded"] = True
return cls(*args, **kwds)
def LinkFileLock(*args, **kwds):
    """Deprecated factory kept for backwards compatibility.

    New code should import ``LinkLockFile`` from ``lockfile.linklockfile``.
    """
    from . import linklockfile
    target = linklockfile.LinkLockFile
    return _fl_helper(target, "lockfile.linklockfile", *args, **kwds)
def MkdirFileLock(*args, **kwds):
    """Deprecated factory kept for backwards compatibility.

    New code should import ``MkdirLockFile`` from ``lockfile.mkdirlockfile``.
    """
    from . import mkdirlockfile
    target = mkdirlockfile.MkdirLockFile
    return _fl_helper(target, "lockfile.mkdirlockfile", *args, **kwds)
def SQLiteFileLock(*args, **kwds):
    """Deprecated factory kept for backwards compatibility.

    New code should import ``SQLiteLockFile`` from ``lockfile.sqlitelockfile``.
    """
    from . import sqlitelockfile
    target = sqlitelockfile.SQLiteLockFile
    return _fl_helper(target, "lockfile.sqlitelockfile", *args, **kwds)
def locked(path, timeout=None):
    """Decorator which serializes calls to the wrapped function via a
    :class:`FileLock`.

    Arguments:
     - path: path for lockfile.
     - timeout (optional): Timeout for acquiring lock.

    Usage::

        @locked('/var/run/myname', timeout=0)
        def myname(...):
            ...
    """
    def _decorate(func):
        @functools.wraps(func)
        def _locked_call(*args, **kwargs):
            lock = FileLock(path, timeout=timeout)
            lock.acquire()
            try:
                return func(*args, **kwargs)
            finally:
                # Release even when the wrapped function raises.
                lock.release()
        return _locked_call
    return _decorate
# Select the default LockFile implementation for this platform: hard-link
# based locking where os.link exists (POSIX), falling back to an
# mkdir-based lock elsewhere (e.g. Windows).
if hasattr(os, "link"):
    from . import linklockfile as _llf
    LockFile = _llf.LinkLockFile
else:
    from . import mkdirlockfile as _mlf
    LockFile = _mlf.MkdirLockFile

# Backwards-compatible alias for the selected implementation.
FileLock = LockFile
| apache-2.0 |
mozilla/firefox-flicks | vendor-local/lib/python/celery/app/amqp.py | 1 | 14390 | # -*- coding: utf-8 -*-
"""
celery.app.amqp
~~~~~~~~~~~~~~~
Sending and receiving messages using Kombu.
"""
from __future__ import absolute_import
from datetime import timedelta
from weakref import WeakValueDictionary
from kombu import Connection, Consumer, Exchange, Producer, Queue
from kombu.common import entry_to_queue
from kombu.pools import ProducerPool
from kombu.utils import cached_property, uuid
from kombu.utils.encoding import safe_repr
from celery import signals
from celery.utils.text import indent as textindent
from . import app_or_default
from . import routes as _routes
#: Human readable queue declaration.
#: %-interpolated by :meth:`Queues.format` for log dumps.
QUEUE_FORMAT = """
. %(name)s exchange:%(exchange)s(%(exchange_type)s) binding:%(routing_key)s
"""
class Queues(dict):
    """Queue name⇒ declaration mapping.

    :param queues: Initial list/tuple or dict of queues.
    :keyword create_missing: By default any unknown queues will be
                             added automatically, but if disabled
                             the occurrence of unknown queues
                             in `wanted` will raise :exc:`KeyError`.
    :keyword ha_policy: Default HA policy for queues with none set.
    """
    #: If set, this is a subset of queues to consume from.
    #: The rest of the queues are then used for routing only.
    _consume_from = None

    def __init__(self, queues=None, default_exchange=None,
                 create_missing=True, ha_policy=None):
        dict.__init__(self)
        # Weak values so an alias entry disappears with its queue.
        self.aliases = WeakValueDictionary()
        self.default_exchange = default_exchange
        self.create_missing = create_missing
        self.ha_policy = ha_policy
        if isinstance(queues, (tuple, list)):
            queues = dict((q.name, q) for q in queues)
        # NOTE: iteritems() makes this Python 2 only, consistent with the
        # rest of the module.  Dict entries use the old compat format.
        for name, q in (queues or {}).iteritems():
            self.add(q) if isinstance(q, Queue) else self.add_compat(name, **q)

    def __getitem__(self, name):
        # Aliases take precedence over real queue names.
        try:
            return self.aliases[name]
        except KeyError:
            return dict.__getitem__(self, name)

    def __setitem__(self, name, queue):
        # Fill in the app's default exchange when the queue declares none.
        if self.default_exchange and (not queue.exchange or
                                      not queue.exchange.name):
            queue.exchange = self.default_exchange
        dict.__setitem__(self, name, queue)
        if queue.alias:
            self.aliases[queue.alias] = queue

    def __missing__(self, name):
        # Unknown queue names are auto-declared unless disabled.
        if self.create_missing:
            return self.add(self.new_missing(name))
        raise KeyError(name)

    def add(self, queue, **kwargs):
        """Add new queue.

        :param queue: Name of the queue.
        :keyword exchange: Name of the exchange.
        :keyword routing_key: Binding key.
        :keyword exchange_type: Type of exchange.
        :keyword \*\*options: Additional declaration options.
        """
        if not isinstance(queue, Queue):
            return self.add_compat(queue, **kwargs)
        if self.ha_policy:
            if queue.queue_arguments is None:
                queue.queue_arguments = {}
            self._set_ha_policy(queue.queue_arguments)
        self[queue.name] = queue
        return queue

    def add_compat(self, name, **options):
        """Add a queue from the old dict-based declaration format."""
        # docs used to use binding_key as routing key
        options.setdefault('routing_key', options.get('binding_key'))
        if options['routing_key'] is None:
            options['routing_key'] = name
        if self.ha_policy is not None:
            self._set_ha_policy(options.setdefault('queue_arguments', {}))
        q = self[name] = entry_to_queue(name, **options)
        return q

    def _set_ha_policy(self, args):
        # Translate the configured HA policy into RabbitMQ queue arguments;
        # a list/tuple selects specific nodes, a string is used verbatim.
        policy = self.ha_policy
        if isinstance(policy, (list, tuple)):
            return args.update({'x-ha-policy': 'nodes',
                                'x-ha-policy-params': list(policy)})
        args['x-ha-policy'] = policy

    def format(self, indent=0, indent_first=True):
        """Format routing table into string for log dumps."""
        active = self.consume_from
        if not active:
            return ''
        info = [
            QUEUE_FORMAT.strip() % {
                'name': (name + ':').ljust(12),
                'exchange': q.exchange.name,
                'exchange_type': q.exchange.type,
                'routing_key': q.routing_key}
            for name, q in sorted(active.iteritems())]
        if indent_first:
            return textindent('\n'.join(info), indent)
        return info[0] + '\n' + textindent('\n'.join(info[1:]), indent)

    def select_add(self, queue, **kwargs):
        """Add new task queue that will be consumed from even when
        a subset has been selected using the :option:`-Q` option."""
        q = self.add(queue, **kwargs)
        if self._consume_from is not None:
            self._consume_from[q.name] = q
        return q

    def select_subset(self, wanted):
        """Sets :attr:`consume_from` by selecting a subset of the
        currently defined queues.

        :param wanted: List of wanted queue names.
        """
        if wanted:
            self._consume_from = dict((name, self[name]) for name in wanted)

    def select_remove(self, queue):
        """Exclude ``queue`` from the set of queues consumed from."""
        if self._consume_from is None:
            self.select_subset(k for k in self if k != queue)
        else:
            self._consume_from.pop(queue, None)

    def new_missing(self, name):
        # Declaration used for auto-created queues: direct exchange and
        # routing key both named after the queue.
        return Queue(name, Exchange(name), name)

    @property
    def consume_from(self):
        # Fall back to *all* queues when no subset has been selected.
        if self._consume_from is not None:
            return self._consume_from
        return self
class TaskProducer(Producer):
    """Kombu producer specialized for publishing task messages."""
    # Celery app instance; bound when the class is created via
    # app.subclass_with_self (see AMQP.TaskProducer below).
    app = None
    # Queues are declared explicitly per publish via ``declare``.
    auto_declare = False
    # Default publish-retry switch / kombu retry-policy dict.
    retry = False
    retry_policy = None

    def __init__(self, channel=None, exchange=None, *args, **kwargs):
        self.retry = kwargs.pop('retry', self.retry)
        self.retry_policy = kwargs.pop('retry_policy',
                                       self.retry_policy or {})
        exchange = exchange or self.exchange
        self.queues = self.app.amqp.queues  # shortcut
        self.default_queue = self.app.amqp.default_queue
        super(TaskProducer, self).__init__(channel, exchange, *args, **kwargs)

    def publish_task(self, task_name, task_args=None, task_kwargs=None,
                     countdown=None, eta=None, task_id=None, group_id=None,
                     taskset_id=None,  # compat alias to group_id
                     expires=None, exchange=None, exchange_type=None,
                     event_dispatcher=None, retry=None, retry_policy=None,
                     queue=None, now=None, retries=0, chord=None,
                     callbacks=None, errbacks=None, routing_key=None,
                     serializer=None, delivery_mode=None, compression=None,
                     declare=None, **kwargs):
        """Send task message.

        Resolves the destination queue/exchange/routing key, converts
        ``countdown``/numeric ``expires`` into ISO-formatted timestamps,
        builds the task message body, publishes it, and finally emits the
        ``task_sent`` signal and (optionally) a ``task-sent`` event.

        :returns: the task id (generated if not supplied).
        """
        qname = queue
        # Fall back to the default queue when neither queue nor exchange
        # was given; a queue may be passed by name or as a Queue object.
        if queue is None and exchange is None:
            queue = self.default_queue
        if queue is not None:
            if isinstance(queue, basestring):
                qname, queue = queue, self.queues[queue]
            else:
                qname = queue.name
            exchange = exchange or queue.exchange.name
            routing_key = routing_key or queue.routing_key
        declare = declare or ([queue] if queue else [])

        # merge default and custom policy
        retry = self.retry if retry is None else retry
        _rp = (dict(self.retry_policy, **retry_policy) if retry_policy
               else self.retry_policy)
        task_id = task_id or uuid()
        task_args = task_args or []
        task_kwargs = task_kwargs or {}
        if not isinstance(task_args, (list, tuple)):
            raise ValueError('task args must be a list or tuple')
        if not isinstance(task_kwargs, dict):
            raise ValueError('task kwargs must be a dictionary')
        if countdown:  # Convert countdown to ETA.
            now = now or self.app.now()
            eta = now + timedelta(seconds=countdown)
        if isinstance(expires, (int, float)):
            now = now or self.app.now()
            expires = now + timedelta(seconds=expires)
        # Serialize datetimes; leave None as-is.
        eta = eta and eta.isoformat()
        expires = expires and expires.isoformat()

        body = {
            'task': task_name,
            'id': task_id,
            'args': task_args,
            'kwargs': task_kwargs,
            'retries': retries or 0,
            'eta': eta,
            'expires': expires,
            'utc': self.utc,
            'callbacks': callbacks,
            'errbacks': errbacks,
            'taskset': group_id or taskset_id,
            'chord': chord,
        }

        self.publish(
            body,
            exchange=exchange, routing_key=routing_key,
            serializer=serializer or self.serializer,
            compression=compression or self.compression,
            retry=retry, retry_policy=_rp,
            delivery_mode=delivery_mode, declare=declare,
            **kwargs
        )

        signals.task_sent.send(sender=task_name, **body)
        if event_dispatcher:
            exname = exchange or self.exchange
            if isinstance(exname, Exchange):
                exname = exname.name
            event_dispatcher.send(
                'task-sent', uuid=task_id,
                name=task_name,
                args=safe_repr(task_args),
                kwargs=safe_repr(task_kwargs),
                retries=retries,
                eta=eta,
                expires=expires,
                queue=qname,
                exchange=exname,
                routing_key=routing_key,
            )
        return task_id
    delay_task = publish_task   # XXX Compat
class TaskPublisher(TaskProducer):
    """Deprecated version of :class:`TaskProducer`."""

    def __init__(self, channel=None, exchange=None, *args, **kwargs):
        self.app = app_or_default(kwargs.pop('app', self.app))
        self.retry = kwargs.pop('retry', self.retry)
        self.retry_policy = kwargs.pop('retry_policy',
                                       self.retry_policy or {})
        exchange = exchange or self.exchange
        # Legacy API accepted a bare exchange name plus exchange_type.
        if not isinstance(exchange, Exchange):
            exchange = Exchange(exchange,
                                kwargs.pop('exchange_type', 'direct'))
        self.queues = self.app.amqp.queues  # shortcut
        super(TaskPublisher, self).__init__(channel, exchange, *args, **kwargs)
class TaskConsumer(Consumer):
    """Kombu consumer defaulting to the app's ``consume_from`` queues."""
    # Celery app instance; bound via app.subclass_with_self.
    app = None

    def __init__(self, channel, queues=None, app=None, **kw):
        self.app = app or self.app
        super(TaskConsumer, self).__init__(
            channel,
            queues or self.app.amqp.queues.consume_from.values(), **kw
        )
class AMQP(object):
    """Messaging facade for a Celery app: queues, routing and producers."""
    Connection = Connection
    Consumer = Consumer

    #: compat alias to Connection
    BrokerConnection = Connection

    #: Cached and prepared routing table.
    _rtable = None

    #: Underlying producer pool instance automatically
    #: set by the :attr:`producer_pool`.
    _producer_pool = None

    def __init__(self, app):
        self.app = app

    def flush_routes(self):
        # Re-read CELERY_ROUTES into the prepared routing table.
        self._rtable = _routes.prepare(self.app.conf.CELERY_ROUTES)

    def Queues(self, queues, create_missing=None, ha_policy=None):
        """Create new :class:`Queues` instance, using queue defaults
        from the current configuration."""
        conf = self.app.conf
        if create_missing is None:
            create_missing = conf.CELERY_CREATE_MISSING_QUEUES
        if ha_policy is None:
            ha_policy = conf.CELERY_QUEUE_HA_POLICY
        if not queues and conf.CELERY_DEFAULT_QUEUE:
            queues = (Queue(conf.CELERY_DEFAULT_QUEUE,
                            exchange=self.default_exchange,
                            routing_key=conf.CELERY_DEFAULT_ROUTING_KEY), )
        return Queues(queues, self.default_exchange, create_missing, ha_policy)

    def Router(self, queues=None, create_missing=None):
        """Returns the current task router."""
        return _routes.Router(self.routes, queues or self.queues,
                              self.app.either('CELERY_CREATE_MISSING_QUEUES',
                                              create_missing), app=self.app)

    @cached_property
    def TaskConsumer(self):
        """Return consumer configured to consume from the queues
        we are configured for (``app.amqp.queues.consume_from``)."""
        return self.app.subclass_with_self(TaskConsumer,
                                           reverse='amqp.TaskConsumer')
    get_task_consumer = TaskConsumer  # XXX compat

    @cached_property
    def TaskProducer(self):
        """Returns publisher used to send tasks.

        You should use `app.send_task` instead.
        """
        conf = self.app.conf
        return self.app.subclass_with_self(
            TaskProducer,
            reverse='amqp.TaskProducer',
            exchange=self.default_exchange,
            routing_key=conf.CELERY_DEFAULT_ROUTING_KEY,
            serializer=conf.CELERY_TASK_SERIALIZER,
            compression=conf.CELERY_MESSAGE_COMPRESSION,
            retry=conf.CELERY_TASK_PUBLISH_RETRY,
            retry_policy=conf.CELERY_TASK_PUBLISH_RETRY_POLICY,
            utc=conf.CELERY_ENABLE_UTC,
        )
    TaskPublisher = TaskProducer  # compat

    @cached_property
    def default_queue(self):
        return self.queues[self.app.conf.CELERY_DEFAULT_QUEUE]

    @cached_property
    def queues(self):
        """Queue name⇒ declaration mapping."""
        return self.Queues(self.app.conf.CELERY_QUEUES)

    @queues.setter  # noqa
    def queues(self, queues):
        # kombu's cached_property setter protocol stores this return value
        # as the new cached value.
        return self.Queues(queues)

    @property
    def routes(self):
        # Lazily build the routing table on first access.
        if self._rtable is None:
            self.flush_routes()
        return self._rtable

    @cached_property
    def router(self):
        return self.Router()

    @property
    def producer_pool(self):
        # Lazily create a producer pool sharing the app's connection pool.
        if self._producer_pool is None:
            self._producer_pool = ProducerPool(
                self.app.pool,
                limit=self.app.pool.limit,
                Producer=self.TaskProducer,
            )
        return self._producer_pool
    publisher_pool = producer_pool  # compat alias

    @cached_property
    def default_exchange(self):
        return Exchange(self.app.conf.CELERY_DEFAULT_EXCHANGE,
                        self.app.conf.CELERY_DEFAULT_EXCHANGE_TYPE)
| bsd-3-clause |
ar4s/django | tests/template_tests/test_unicode.py | 6 | 1357 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
from django.template import Template, TemplateEncodingError, Context
from django.utils.safestring import SafeData
from django.utils import six
class UnicodeTests(TestCase):
    def test_template(self):
        """Templates and contexts accept unicode or UTF-8 bytestrings and
        always render to safe unicode output."""
        # Templates can be created from unicode strings.
        t1 = Template('ŠĐĆŽćžšđ {{ var }}')
        # Templates can also be created from bytestrings. These are assumed to
        # be encoded using UTF-8.
        s = b'\xc5\xa0\xc4\x90\xc4\x86\xc5\xbd\xc4\x87\xc5\xbe\xc5\xa1\xc4\x91 {{ var }}'
        t2 = Template(s)
        # Data that is not valid UTF-8 must be rejected outright.
        s = b'\x80\xc5\xc0'
        self.assertRaises(TemplateEncodingError, Template, s)

        # Contexts can be constructed from unicode or UTF-8 bytestrings.
        c1 = Context({b"var": b"foo"})
        c2 = Context({"var": b"foo"})
        c3 = Context({b"var": "Đđ"})
        c4 = Context({"var": b"\xc4\x90\xc4\x91"})

        # Since both templates and all four contexts represent the same
        # thing, t1 and t2 must render identically for each context, and the
        # result is a unicode, auto-escaping-safe string.
        # BUG FIX: the original only asserted against c3, leaving c1, c2 and
        # c4 constructed but never exercised.
        for context in (c1, c2, c3, c4):
            rendered = t1.render(context)
            self.assertEqual(rendered, t2.render(context))
            self.assertIsInstance(rendered, six.text_type)
            self.assertIsInstance(rendered, SafeData)
| bsd-3-clause |
open-synergy/server-tools | password_security/tests/test_password_security_home.py | 11 | 11224 | # -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
import mock
from contextlib import contextmanager
from openerp.tests.common import TransactionCase
from openerp.http import Response
from ..controllers import main
IMPORT = 'openerp.addons.password_security.controllers.main'
class EndTestException(Exception):
    """Raised by mocks as a side effect to abort the code under test early,
    isolating the call being asserted from everything that follows."""
class MockResponse(object):
    # Uses __new__ (not __init__) so that calling MockResponse() actually
    # yields a mock.Mock speced against openerp's Response class instead of
    # a MockResponse instance.
    def __new__(cls):
        return mock.Mock(spec=Response)
class MockPassError(main.PassError):
    # PassError requires a message argument; fix it here so tests can use
    # this class as a no-argument side_effect.
    def __init__(self):
        super(MockPassError, self).__init__('Message')
class TestPasswordSecurityHome(TransactionCase):
    """Unit tests for PasswordSecurityHome.

    The controller's collaborators (AuthSignupHome super methods, the
    ``request`` proxy, ``ensure_db`` and ``http``) are all mocked via
    :meth:`mock_assets`; EndTestException is used to cut execution short
    right after the call being asserted.
    """

    def setUp(self):
        super(TestPasswordSecurityHome, self).setUp()
        self.PasswordSecurityHome = main.PasswordSecurityHome
        self.password_security_home = self.PasswordSecurityHome()
        self.passwd = 'I am a password!'
        # Minimal signup qcontext containing only the password under test.
        self.qcontext = {
            'password': self.passwd,
        }

    @contextmanager
    def mock_assets(self):
        """ It mocks and returns assets used by this controller """
        methods = ['do_signup', 'web_login', 'web_auth_signup',
                   'web_auth_reset_password',
                   ]
        # Patch all AuthSignupHome super methods at once; each returns a
        # fresh MockResponse so tests can compare identity of the result.
        with mock.patch.multiple(
            main.AuthSignupHome, **{m: mock.DEFAULT for m in methods}
        ) as _super:
            mocks = {}
            for method in methods:
                mocks[method] = _super[method]
                mocks[method].return_value = MockResponse()
            with mock.patch('%s.request' % IMPORT) as request:
                with mock.patch('%s.ensure_db' % IMPORT) as ensure:
                    with mock.patch('%s.http' % IMPORT) as http:
                        http.redirect_with_hash.return_value = \
                            MockResponse()
                        mocks.update({
                            'request': request,
                            'ensure_db': ensure,
                            'http': http,
                        })
                        yield mocks

    def test_do_signup_check(self):
        """ It should check password on user """
        with self.mock_assets() as assets:
            check_password = assets['request'].env.user.check_password
            check_password.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.password_security_home.do_signup(self.qcontext)
            check_password.assert_called_once_with(
                self.passwd,
            )

    def test_do_signup_return(self):
        """ It should return result of super """
        with self.mock_assets() as assets:
            res = self.password_security_home.do_signup(self.qcontext)
            self.assertEqual(assets['do_signup'](), res)

    def test_web_login_ensure_db(self):
        """ It should verify available db """
        with self.mock_assets() as assets:
            assets['ensure_db'].side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.password_security_home.web_login()

    def test_web_login_super(self):
        """ It should call superclass w/ proper args """
        expect_list = [1, 2, 3]
        expect_dict = {'test1': 'good1', 'test2': 'good2'}
        with self.mock_assets() as assets:
            assets['web_login'].side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.password_security_home.web_login(
                    *expect_list, **expect_dict
                )
            assets['web_login'].assert_called_once_with(
                *expect_list, **expect_dict
            )

    def test_web_login_no_post(self):
        """ It should return immediate result of super when not POST """
        with self.mock_assets() as assets:
            assets['request'].httprequest.method = 'GET'
            # Authentication must never be reached on GET.
            assets['request'].session.authenticate.side_effect = \
                EndTestException
            res = self.password_security_home.web_login()
            self.assertEqual(
                assets['web_login'](), res,
            )

    def test_web_login_authenticate(self):
        """ It should attempt authentication to obtain uid """
        with self.mock_assets() as assets:
            assets['request'].httprequest.method = 'POST'
            authenticate = assets['request'].session.authenticate
            request = assets['request']
            authenticate.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.password_security_home.web_login()
            authenticate.assert_called_once_with(
                request.session.db,
                request.params['login'],
                request.params['password'],
            )

    def test_web_login_authenticate_fail(self):
        """ It should return super result if failed auth """
        with self.mock_assets() as assets:
            authenticate = assets['request'].session.authenticate
            request = assets['request']
            request.httprequest.method = 'POST'
            request.env['res.users'].sudo.side_effect = EndTestException
            authenticate.return_value = False
            res = self.password_security_home.web_login()
            self.assertEqual(
                assets['web_login'](), res,
            )

    def test_web_login_get_user(self):
        """ It should get the proper user as sudo """
        with self.mock_assets() as assets:
            request = assets['request']
            request.httprequest.method = 'POST'
            sudo = request.env['res.users'].sudo()
            sudo.browse.side_effect = EndTestException
            with self.assertRaises(EndTestException):
                self.password_security_home.web_login()
            sudo.browse.assert_called_once_with(
                request.uid
            )

    def test_web_login_valid_pass(self):
        """ It should return parent result if pass isn't expired """
        with self.mock_assets() as assets:
            request = assets['request']
            request.httprequest.method = 'POST'
            user = request.env['res.users'].sudo().browse()
            user.action_expire_password.side_effect = EndTestException
            user._password_has_expired.return_value = False
            res = self.password_security_home.web_login()
            self.assertEqual(
                assets['web_login'](), res,
            )

    def test_web_login_expire_pass(self):
        """ It should expire password if necessary """
        with self.mock_assets() as assets:
            request = assets['request']
            request.httprequest.method = 'POST'
            user = request.env['res.users'].sudo().browse()
            user.action_expire_password.side_effect = EndTestException
            user._password_has_expired.return_value = True
            with self.assertRaises(EndTestException):
                self.password_security_home.web_login()

    def test_web_login_redirect(self):
        """ It should redirect w/ hash to reset after expiration """
        with self.mock_assets() as assets:
            request = assets['request']
            request.httprequest.method = 'POST'
            user = request.env['res.users'].sudo().browse()
            user._password_has_expired.return_value = True
            res = self.password_security_home.web_login()
            self.assertEqual(
                assets['http'].redirect_with_hash(), res,
            )

    def test_web_auth_signup_valid(self):
        """ It should return super if no errors """
        with self.mock_assets() as assets:
            res = self.password_security_home.web_auth_signup()
            self.assertEqual(
                assets['web_auth_signup'](), res,
            )

    def test_web_auth_signup_invalid_qcontext(self):
        """ It should catch PassError and get signup qcontext """
        with self.mock_assets() as assets:
            with mock.patch.object(
                main.AuthSignupHome, 'get_auth_signup_qcontext',
            ) as qcontext:
                assets['web_auth_signup'].side_effect = MockPassError
                qcontext.side_effect = EndTestException
                with self.assertRaises(EndTestException):
                    self.password_security_home.web_auth_signup()

    def test_web_auth_signup_invalid_render(self):
        """ It should render & return signup form on invalid """
        with self.mock_assets() as assets:
            with mock.patch.object(
                main.AuthSignupHome, 'get_auth_signup_qcontext', spec=dict
            ) as qcontext:
                assets['web_auth_signup'].side_effect = MockPassError
                res = self.password_security_home.web_auth_signup()
                assets['request'].render.assert_called_once_with(
                    'auth_signup.signup', qcontext(),
                )
                self.assertEqual(
                    assets['request'].render(), res,
                )

    def test_web_auth_reset_password_fail_login(self):
        """ It should raise from failed _validate_pass_reset by login """
        with self.mock_assets() as assets:
            with mock.patch.object(
                main.AuthSignupHome, 'get_auth_signup_qcontext', spec=dict
            ) as qcontext:
                qcontext['login'] = 'login'
                search = assets['request'].env.sudo().search
                assets['request'].httprequest.method = 'POST'
                user = mock.MagicMock()
                user._validate_pass_reset.side_effect = MockPassError
                search.return_value = user
                with self.assertRaises(MockPassError):
                    self.password_security_home.web_auth_reset_password()

    def test_web_auth_reset_password_fail_email(self):
        """ It should raise from failed _validate_pass_reset by email """
        with self.mock_assets() as assets:
            with mock.patch.object(
                main.AuthSignupHome, 'get_auth_signup_qcontext', spec=dict
            ) as qcontext:
                qcontext['login'] = 'login'
                search = assets['request'].env.sudo().search
                assets['request'].httprequest.method = 'POST'
                user = mock.MagicMock()
                user._validate_pass_reset.side_effect = MockPassError
                # First search (by login) finds nothing; second (by email)
                # yields the user whose validation then fails.
                search.side_effect = [[], user]
                with self.assertRaises(MockPassError):
                    self.password_security_home.web_auth_reset_password()

    def test_web_auth_reset_password_success(self):
        """ It should return parent response on no validate errors """
        with self.mock_assets() as assets:
            with mock.patch.object(
                main.AuthSignupHome, 'get_auth_signup_qcontext', spec=dict
            ) as qcontext:
                qcontext['login'] = 'login'
                assets['request'].httprequest.method = 'POST'
                res = self.password_security_home.web_auth_reset_password()
                self.assertEqual(
                    assets['web_auth_reset_password'](), res,
                )
| agpl-3.0 |
luotao1/Paddle | python/paddle/fluid/tests/unittests/dygraph_to_static/test_for_enumerate.py | 2 | 12869 | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import unittest
import paddle
import paddle.fluid as fluid
from paddle.fluid.dygraph.dygraph_to_static import ProgramTranslator
program_translator = ProgramTranslator()
# NOTE: every `for_*` function in this file is a *syntactic* fixture: the
# exact loop form is what the dygraph-to-static transformer is being tested
# on, so the code shape must not be refactored.
# 0. for in range var.numpy()[0] -- loop bound is read out of a tensor.
@paddle.jit.to_static
def for_in_range(x):
    # z accumulates sum(0..n-1), n taken from the first element of `x`.
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x = fluid.dygraph.to_variable(x)
    for i in range(x.numpy()[0]):
        z = z + i
    return z
# 1. for iter list -- iteration over a plain Python list.
@paddle.jit.to_static
def for_iter_list(x_array):
    z = fluid.layers.fill_constant([1], 'int32', 0)
    for x in x_array:
        z = z + x
    return z
# 2. for enumerate list -- enumerate over a plain Python list.
@paddle.jit.to_static
def for_enumerate_list(x_array):
    z = fluid.layers.fill_constant([1], 'int32', 0)
    for i, x in enumerate(x_array):
        z = z + x + i
    return z
# 3. for iter var.numpy() -- iteration over a numpy view of a Variable.
@paddle.jit.to_static
def for_iter_var_numpy(x_array):
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for x in x_array.numpy():
        z = z + x
    return z
# 4. for enumerate var.numpy() -- returns (sum of indices, sum of values).
@paddle.jit.to_static
def for_enumerate_var_numpy(x_array):
    y = fluid.layers.fill_constant([1], 'int32', 0)
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for i, x in enumerate(x_array.numpy()):
        y = y + i
        z = z + x
    return y, z
# 5. for enumerate var.numpy() with start -- enumerate's `start=1` argument.
@paddle.jit.to_static
def for_enumerate_var_numpy_with_start(x_array):
    y = fluid.layers.fill_constant([1], 'int32', 0)
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for i, x in enumerate(x_array.numpy(), 1):
        y = y + i
        z = z + x
    return y, z
# 6. for in range with break -- early exit once the index passes 2.
@paddle.jit.to_static
def for_in_range_with_break(x):
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x = fluid.dygraph.to_variable(x)
    for i in range(x.numpy()[0]):
        z = z + i
        if i > 2:
            break
    return z
# 7. for enumerate var.numpy() with break
@paddle.jit.to_static
def for_enumerate_var_numpy_with_break(x_array):
    y = fluid.layers.fill_constant([1], 'int32', 0)
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for i, x in enumerate(x_array.numpy()):
        y = y + i
        z = z + x
        if i > 2:
            break
    return y, z
# 8. for enumerate var.numpy() with continue -- z only sums the first items.
@paddle.jit.to_static
def for_enumerate_var_numpy_with_continue(x_array):
    y = fluid.layers.fill_constant([1], 'int32', 0)
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for i, x in enumerate(x_array.numpy()):
        y = y + i
        if i > 2:
            continue
        z = z + x
    return y, z
# 9. for enumerate var.numpy() with start & break
@paddle.jit.to_static
def for_enumerate_var_numpy_with_start_break(x_array):
    y = fluid.layers.fill_constant([1], 'int32', 0)
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for i, x in enumerate(x_array.numpy(), 1):
        y = y + i
        z = z + x
        if i > 2:
            break
    return y, z
# 10. for enumerate var.numpy() with start & continue
@paddle.jit.to_static
def for_enumerate_var_numpy_with_start_continue(x_array):
    y = fluid.layers.fill_constant([1], 'int32', 0)
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for i, x in enumerate(x_array.numpy(), 1):
        y = y + i
        if i > 2:
            continue
        z = z + x
    return y, z
# 11. for iter var -- iterate directly over a Variable (no .numpy()).
@paddle.jit.to_static
def for_iter_var(x_array):
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for x in x_array:
        z = z + x
    return z
# 12. for enumerate var -- enumerate directly over a Variable.
@paddle.jit.to_static
def for_enumerate_var(x_array):
    y = fluid.layers.fill_constant([1], 'int32', 0)
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for i, x in enumerate(x_array):
        y = y + i
        z = z + x
    return y, z
# 13. for iter list[var] -- iterate a Python list whose elements are Variables.
@paddle.jit.to_static
def for_iter_var_list(x):
    # 1. prepare data, ref test_list.py
    x = fluid.dygraph.to_variable(x)
    iter_num = fluid.layers.fill_constant(shape=[1], value=5, dtype="int32")
    a = []
    for i in range(iter_num):
        a.append(x + i)
    # 2. iter list[var]
    y = fluid.layers.fill_constant([1], 'int32', 0)
    for x in a:
        y = y + x
    return y
# 14. for enumerate list[var] -- enumerate a list of Variables.
@paddle.jit.to_static
def for_enumerate_var_list(x):
    # 1. prepare data, ref test_list.py
    x = fluid.dygraph.to_variable(x)
    iter_num = fluid.layers.fill_constant(shape=[1], value=5, dtype="int32")
    a = []
    for i in range(iter_num):
        a.append(x + i)
    # 2. iter list[var]
    y = fluid.layers.fill_constant([1], 'int32', 0)
    z = fluid.layers.fill_constant([1], 'int32', 0)
    for i, x in enumerate(a):
        y = y + i
        z = z + x
    return y, z
# 15. for enumerate list[var] with a nested for range
@paddle.jit.to_static
def for_enumerate_var_with_nested_range(x_array):
    x = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for i, num in enumerate(x_array):
        # `idx` is deliberately unused: the nested `range` loop itself is
        # the construct under test.
        for idx in range(num):
            x = x + num
    return x
# 16. for iter var[idx] -- iterate over a sliced Variable.
@paddle.jit.to_static
def for_iter_var_idx(x_array):
    z = fluid.layers.fill_constant([1], 'int32', 0)
    x_array = fluid.dygraph.to_variable(x_array)
    for x in x_array[0:]:
        z = z + x
    return z
# 17. for a,b,c in z: (a, b, c) is a tuple
@paddle.jit.to_static
def for_tuple_as_iter_var(x_array):
    # NOTE(review): `x` looks unused here (only `z` is iterated) --
    # presumably kept so every fixture starts with a to_tensor call.
    x = paddle.to_tensor(x_array)
    z = paddle.to_tensor(np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]))
    a_result = paddle.zeros([3])
    b_result = paddle.zeros([3])
    c_result = paddle.zeros([3])
    for a, b, c in z:
        a_result += a
        b_result += b
        c_result += c
    return a_result, b_result, c_result
# 18. for t in enumerate(collection): t is tuple of (idx, element)
@paddle.jit.to_static
def for_tuple_as_enumerate_iter(x_array):
    x = paddle.to_tensor(x_array)
    x_list = [x, x, x]
    a_result = paddle.zeros([5])
    # `t` is the whole (index, element) pair; element accessed via t[1].
    for t in enumerate(x_list):
        a_result += t[1]
    return a_result
# 19. for i, (a, b, c, d, e) in enumerate(collection): (a, b, c, d, e) is a tuple
@paddle.jit.to_static
def for_tuple_as_enumerate_value(x_array):
    x = paddle.to_tensor(x_array)
    x_list = [x, x, x]
    a_result = paddle.zeros([1])
    b_result = paddle.zeros([1])
    c_result = paddle.zeros([1])
    d_result = paddle.zeros([1])
    e_result = paddle.zeros([1])
    # Tests tuple unpacking inside the enumerate target.
    for i, (a, b, c, d, e) in enumerate(x_list):
        a_result += a
        b_result += b
        c_result += c
        d_result += d
        e_result += e
    return a_result
# 20. test for function in a class -- a for/range loop inside Layer.forward.
class ForwardContainsForLayer(paddle.nn.Layer):
    def __init__(self):
        super(ForwardContainsForLayer, self).__init__()
        # Loop bounds used by forward(); 5 - 3 = 2 iterations.
        self.high = 5
        self.low = 3
    @paddle.jit.to_static
    def forward(self, x):
        # just for test case, x is useless in this method
        y = paddle.zeros([10, 2, 3])
        z = []
        for i in range(self.high - self.low):
            z.append(y[i].clone())
        return z
class TestTransformBase(unittest.TestCase):
    """Shared fixture: runs self.dygraph_func in dygraph or static mode."""
    def setUp(self):
        self.place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
        ) else fluid.CPUPlace()
        self.set_input()
        self.set_test_func()
    def set_input(self):
        # Default input; subclasses override with numpy arrays as needed.
        self.input = [1, 2, 3]
    def set_test_func(self):
        # Subclasses must bind self.dygraph_func to one of the fixtures above.
        raise NotImplementedError(
            "For Enumerate test should implement set_test_func")
    def _run(self, to_static):
        # Toggle the global translator, then execute under a dygraph guard.
        program_translator.enable(to_static)
        with fluid.dygraph.guard():
            return self.dygraph_func(self.input)
    def get_dygraph_output(self):
        return self._run(to_static=False)
    def get_static_output(self):
        return self._run(to_static=True)
class TestTransform(TestTransformBase):
    """Base for tests asserting dygraph and static results are equal."""

    @staticmethod
    def _as_tuple(outs):
        """Normalize a single output into a one-element tuple."""
        return outs if isinstance(outs, (tuple, list)) else (outs, )

    def transformed_result_compare(self):
        """Run both modes (dygraph first) and compare element-wise."""
        dygraph_outs = self._as_tuple(self.get_dygraph_output())
        static_outs = self._as_tuple(self.get_static_output())
        for dy_var, st_var in zip(dygraph_outs, static_outs):
            self.assertTrue(np.allclose(dy_var.numpy(), st_var.numpy()))
class TestTransformError(TestTransformBase):
    """Base for tests asserting that running the fixture raises."""

    def transformed_error(self, etype):
        """Assert the dygraph run (then the static run) raises `etype`.

        If the dygraph run already raises, the static run is never reached
        inside the assertRaises block -- that matches the original intent.
        (The unused `dy_out`/`st_out` bindings were removed.)
        """
        with self.assertRaises(etype):
            self.get_dygraph_output()
            self.get_static_output()
# Concrete test cases: each one only selects which fixture function to run
# (and, where needed, a different input array).
class TestForInRange(TestTransform):
    def set_input(self):
        self.input = np.array([5])
    def set_test_func(self):
        self.dygraph_func = for_in_range
    def test_transformed_result_compare(self):
        self.transformed_result_compare()
class TestForIterList(TestTransform):
    def set_test_func(self):
        self.dygraph_func = for_iter_list
    def test_transformed_result_compare(self):
        self.transformed_result_compare()
class TestForEnumerateSimple(TestForIterList):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_list
class TestForInRangeWithBreak(TestForInRange):
    def set_test_func(self):
        self.dygraph_func = for_in_range_with_break
class TestForIterVarNumpy(TestTransform):
    def set_input(self):
        self.input = np.array([1, 2, 3, 4, 5])
    def set_test_func(self):
        self.dygraph_func = for_iter_var_numpy
    def test_transformed_result_compare(self):
        self.transformed_result_compare()
class TestForEnumerateVarNumpy(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var_numpy
class TestForEnumerateVarNumpyWithStart(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var_numpy_with_start
class TestForEnumerateVarNumpyWithBreak(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var_numpy_with_break
# BUG FIX: this class was previously also named
# TestForEnumerateVarNumpyWithBreak, which shadowed the class above so the
# `break` variant never actually ran under unittest discovery.
class TestForEnumerateVarNumpyWithContinue(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var_numpy_with_continue
class TestForEnumerateVarNumpyWithStartAndBreak(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var_numpy_with_start_break
# BUG FIX: was duplicated as TestForEnumerateVarNumpyWithStartAndBreak,
# shadowing the class above in the same way.
class TestForEnumerateVarNumpyWithStartAndContinue(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var_numpy_with_start_continue
# Remaining cases reuse TestForIterVarNumpy's input ([1..5]) or
# TestForInRange's input ([5]) and only swap the fixture function.
class TestForIterVar(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_iter_var
class TestForIterVarIdx(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_iter_var_idx
class TestForEnumerateVar(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var
class TestForEnumerateVarWithNestedRange(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var_with_nested_range
class TestForIterVarList(TestForInRange):
    def set_test_func(self):
        self.dygraph_func = for_iter_var_list
class TestForEnumerateVarList(TestForInRange):
    def set_test_func(self):
        self.dygraph_func = for_enumerate_var_list
class TestForTupleAsIterVar(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_tuple_as_iter_var
class TestForTupleAsEnumerateIter(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_tuple_as_enumerate_iter
class TestForTupleAsEnumerateValue(TestForIterVarNumpy):
    def set_test_func(self):
        self.dygraph_func = for_tuple_as_enumerate_value
class TestForwardContainsForLayer(TestForIterVarNumpy):
    def set_test_func(self):
        # Layer instance is callable, so it plugs in like a function.
        self.dygraph_func = ForwardContainsForLayer()
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
vijayendrabvs/hap | neutron/plugins/mlnx/agent/utils.py | 8 | 5692 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron.openstack.common import importutils
from neutron.openstack.common import jsonutils
from neutron.openstack.common import log as logging
from neutron.plugins.mlnx.common.comm_utils import RetryDecorator
from neutron.plugins.mlnx.common import exceptions
zmq = importutils.try_import('eventlet.green.zmq')
LOG = logging.getLogger(__name__)
class EswitchUtils(object):
    """Synchronous JSON-over-ZeroMQ (REQ) client for the eSwitchD daemon.

    A single REQ socket is created lazily and torn down/recreated whenever a
    request times out, so the REQ/REP state machine never gets stuck.
    """

    def __init__(self, daemon_endpoint, timeout):
        """:param daemon_endpoint: zmq endpoint of eSwitchD (e.g. tcp://...)
        :param timeout: poll timeout in milliseconds for each request
        :raises SystemExit: if eventlet's green zmq could not be imported
        """
        if not zmq:
            msg = _("Failed to import eventlet.green.zmq. "
                    "Won't connect to eSwitchD - exiting...")
            LOG.error(msg)
            raise SystemExit(msg)
        self.__conn = None
        self.daemon = daemon_endpoint
        self.timeout = timeout

    @property
    def _conn(self):
        """Lazily create (and cache) the REQ socket and its poller."""
        if self.__conn is None:
            context = zmq.Context()
            socket = context.socket(zmq.REQ)
            # LINGER=0: drop unsent messages on close instead of blocking.
            socket.setsockopt(zmq.LINGER, 0)
            socket.connect(self.daemon)
            self.__conn = socket
            self.poller = zmq.Poller()
            self.poller.register(self._conn, zmq.POLLIN)
        return self.__conn

    @RetryDecorator(exceptions.RequestTimeout)
    def send_msg(self, msg):
        """Send one JSON request and return the parsed response.

        On timeout the socket is discarded (a REQ socket cannot be reused
        after a lost reply) and RequestTimeout is raised, which the
        RetryDecorator turns into a retry with a fresh socket.
        """
        self._conn.send(msg)
        socks = dict(self.poller.poll(self.timeout))
        if socks.get(self._conn) == zmq.POLLIN:
            recv_msg = self._conn.recv()
            response = self.parse_response_msg(recv_msg)
            return response
        else:
            self._conn.setsockopt(zmq.LINGER, 0)
            self._conn.close()
            self.poller.unregister(self._conn)
            self.__conn = None
            raise exceptions.RequestTimeout()

    def parse_response_msg(self, recv_msg):
        """Decode a daemon reply; return its payload or raise on failure."""
        msg = jsonutils.loads(recv_msg)
        if msg['status'] == 'OK':
            if 'response' in msg:
                return msg.get('response')
            return
        elif msg['status'] == 'FAIL':
            msg_dict = dict(action=msg['action'], reason=msg['reason'])
            error_msg = _("Action %(action)s failed: %(reason)s") % msg_dict
        else:
            error_msg = _("Unknown operation status %s") % msg['status']
        LOG.error(error_msg)
        raise exceptions.OperationFailed(err_msg=error_msg)

    def get_attached_vnics(self):
        """Return the vNICs currently attached on all fabrics."""
        LOG.debug(_("get_attached_vnics"))
        msg = jsonutils.dumps({'action': 'get_vnics', 'fabric': '*'})
        vnics = self.send_msg(msg)
        return vnics

    def set_port_vlan_id(self, physical_network,
                         segmentation_id, port_mac):
        """Assign a VLAN to the port identified by MAC on the given fabric."""
        LOG.debug(_("Set Vlan %(segmentation_id)s on Port %(port_mac)s "
                    "on Fabric %(physical_network)s"),
                  {'port_mac': port_mac,
                   'segmentation_id': segmentation_id,
                   'physical_network': physical_network})
        msg = jsonutils.dumps({'action': 'set_vlan',
                               'fabric': physical_network,
                               'port_mac': port_mac,
                               'vlan': segmentation_id})
        self.send_msg(msg)

    def define_fabric_mappings(self, interface_mapping):
        """Register each fabric -> physical interface mapping with eSwitchD."""
        for fabric, phy_interface in interface_mapping.iteritems():
            LOG.debug(_("Define Fabric %(fabric)s on interface %(ifc)s"),
                      {'fabric': fabric,
                       'ifc': phy_interface})
            msg = jsonutils.dumps({'action': 'define_fabric_mapping',
                                   'fabric': fabric,
                                   'interface': phy_interface})
            self.send_msg(msg)

    def port_up(self, fabric, port_mac):
        """Bring the port identified by MAC up on the given fabric."""
        LOG.debug(_("Port Up for %(port_mac)s on fabric %(fabric)s"),
                  {'port_mac': port_mac, 'fabric': fabric})
        msg = jsonutils.dumps({'action': 'port_up',
                               'fabric': fabric,
                               'ref_by': 'mac_address',
                               # BUG FIX: was the literal string 'port_mac',
                               # so the daemon never got the actual MAC
                               # (compare port_down/port_release below).
                               'mac': port_mac})
        self.send_msg(msg)

    def port_down(self, fabric, port_mac):
        """Bring the port identified by MAC down on the given fabric."""
        LOG.debug(_("Port Down for %(port_mac)s on fabric %(fabric)s"),
                  {'port_mac': port_mac, 'fabric': fabric})
        msg = jsonutils.dumps({'action': 'port_down',
                               'fabric': fabric,
                               'ref_by': 'mac_address',
                               'mac': port_mac})
        self.send_msg(msg)

    def port_release(self, fabric, port_mac):
        """Release the port identified by MAC on the given fabric."""
        LOG.debug(_("Port Release for %(port_mac)s on fabric %(fabric)s"),
                  {'port_mac': port_mac, 'fabric': fabric})
        msg = jsonutils.dumps({'action': 'port_release',
                               'fabric': fabric,
                               'ref_by': 'mac_address',
                               'mac': port_mac})
        self.send_msg(msg)

    def get_eswitch_ports(self, fabric):
        # TODO(irena) - to implement for next phase
        return {}

    def get_eswitch_id(self, fabric):
        # TODO(irena) - to implement for next phase
        return ""
| apache-2.0 |
daviddupont69/CouchPotatoServer | libs/chardet/langhebrewmodel.py | 235 | 11340 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import constants
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Windows-1255 language model
# Character Mapping Table:
win1255_CharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
HebrewLangModel = ( \
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)
# Aggregate model consumed by the Hebrew charset prober: bundles the
# char-to-order map and the character-pair precedence matrix above under
# the "windows-1255" charset name.  (constants.False is the old chardet
# compatibility constant for ancient Pythons -- do not replace with the
# builtin without checking the constants module.)
Win1255HebrewModel = { \
  'charToOrderMap': win1255_CharToOrderMap,
  'precedenceMatrix': HebrewLangModel,
  'mTypicalPositiveRatio': 0.984004,
  'keepEnglishLetter': constants.False,
  'charsetName': "windows-1255"
}
| gpl-3.0 |
dulems/hue | desktop/core/ext-py/Babel-0.9.6/babel/messages/plurals.py | 67 | 7207 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
"""Plural form definitions."""
from babel.core import default_locale, Locale
from babel.util import itemgetter
LC_CTYPE = default_locale('LC_CTYPE')
PLURALS = {
# Afar
# 'aa': (),
# Abkhazian
# 'ab': (),
# Avestan
# 'ae': (),
# Afrikaans - From Pootle's PO's
'af': (2, '(n != 1)'),
# Akan
# 'ak': (),
# Amharic
# 'am': (),
# Aragonese
# 'an': (),
# Arabic - From Pootle's PO's
'ar': (6, '(n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n>=3 && n<=10 ? 3 : n>=11 && n<=99 ? 4 : 5)'),
# Assamese
# 'as': (),
# Avaric
# 'av': (),
# Aymara
# 'ay': (),
# Azerbaijani
# 'az': (),
# Bashkir
# 'ba': (),
# Belarusian
# 'be': (),
# Bulgarian - From Pootle's PO's
'bg': (2, '(n != 1)'),
# Bihari
# 'bh': (),
# Bislama
# 'bi': (),
# Bambara
# 'bm': (),
# Bengali - From Pootle's PO's
'bn': (2, '(n != 1)'),
# Tibetan - as discussed in private with Andrew West
'bo': (1, '0'),
# Breton
# 'br': (),
# Bosnian
# 'bs': (),
# Catalan - From Pootle's PO's
'ca': (2, '(n != 1)'),
# Chechen
# 'ce': (),
# Chamorro
# 'ch': (),
# Corsican
# 'co': (),
# Cree
# 'cr': (),
# Czech
'cs': (3, '(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)'),
# Church Slavic
# 'cu': (),
# Chuvash
'cv': (1, '0'),
# Welsh
'cy': (5, '(n==1 ? 1 : n==2 ? 2 : n==3 ? 3 : n==6 ? 4 : 0)'),
# Danish
'da': (2, '(n != 1)'),
# German
'de': (2, '(n != 1)'),
# Divehi
# 'dv': (),
# Dzongkha
'dz': (1, '0'),
# Greek
'el': (2, '(n != 1)'),
# English
'en': (2, '(n != 1)'),
# Esperanto
'eo': (2, '(n != 1)'),
# Spanish
'es': (2, '(n != 1)'),
# Estonian
'et': (2, '(n != 1)'),
# Basque - From Pootle's PO's
'eu': (2, '(n != 1)'),
# Persian - From Pootle's PO's
'fa': (1, '0'),
# Finnish
'fi': (2, '(n != 1)'),
# French
'fr': (2, '(n > 1)'),
# Friulian - From Pootle's PO's
'fur': (2, '(n > 1)'),
# Irish
'ga': (3, '(n==1 ? 0 : n==2 ? 1 : 2)'),
# Galician - From Pootle's PO's
'gl': (2, '(n != 1)'),
# Hausa - From Pootle's PO's
'ha': (2, '(n != 1)'),
# Hebrew
'he': (2, '(n != 1)'),
# Hindi - From Pootle's PO's
'hi': (2, '(n != 1)'),
# Croatian
'hr': (3, '(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)'),
# Hungarian
'hu': (1, '0'),
# Armenian - From Pootle's PO's
'hy': (1, '0'),
# Icelandic - From Pootle's PO's
'is': (2, '(n != 1)'),
# Italian
'it': (2, '(n != 1)'),
# Japanese
'ja': (1, '0'),
# Georgian - From Pootle's PO's
'ka': (1, '0'),
# Kongo - From Pootle's PO's
'kg': (2, '(n != 1)'),
# Khmer - From Pootle's PO's
'km': (1, '0'),
# Korean
'ko': (1, '0'),
# Kurdish - From Pootle's PO's
'ku': (2, '(n != 1)'),
# Lao - Another member of the Tai language family, like Thai.
'lo': (1, '0'),
# Lithuanian
'lt': (3, '(n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2)'),
# Latvian
'lv': (3, '(n%10==1 && n%100!=11 ? 0 : n != 0 ? 1 : 2)'),
# Maltese - From Pootle's PO's
'mt': (4, '(n==1 ? 0 : n==0 || ( n%100>1 && n%100<11) ? 1 : (n%100>10 && n%100<20 ) ? 2 : 3)'),
# Norwegian Bokmål
'nb': (2, '(n != 1)'),
# Dutch
'nl': (2, '(n != 1)'),
# Norwegian Nynorsk
'nn': (2, '(n != 1)'),
# Norwegian
'no': (2, '(n != 1)'),
# Punjabi - From Pootle's PO's
'pa': (2, '(n != 1)'),
# Polish
'pl': (3, '(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)'),
# Portuguese
'pt': (2, '(n != 1)'),
# Brazilian
'pt_BR': (2, '(n > 1)'),
# Romanian - From Pootle's PO's
'ro': (3, '(n==1 ? 0 : (n==0 || (n%100 > 0 && n%100 < 20)) ? 1 : 2)'),
# Russian
'ru': (3, '(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)'),
# Slovak
'sk': (3, '(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)'),
# Slovenian
'sl': (4, '(n%100==1 ? 0 : n%100==2 ? 1 : n%100==3 || n%100==4 ? 2 : 3)'),
# Serbian - From Pootle's PO's
'sr': (3, '(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)'),
# Southern Sotho - From Pootle's PO's
'st': (2, '(n != 1)'),
# Swedish
'sv': (2, '(n != 1)'),
# Thai
'th': (1, '0'),
# Turkish
'tr': (1, '0'),
# Ukrainian
'uk': (3, '(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)'),
# Venda - From Pootle's PO's
've': (2, '(n != 1)'),
# Vietnamese - From Pootle's PO's
'vi': (1, '0'),
# Xhosa - From Pootle's PO's
'xh': (2, '(n != 1)'),
# Chinese - From Pootle's PO's
'zh_CN': (1, '0'),
'zh_HK': (1, '0'),
'zh_TW': (1, '0'),
}
DEFAULT_PLURAL = (2, '(n != 1)')
class _PluralTuple(tuple):
"""A tuple with plural information."""
__slots__ = ()
num_plurals = property(itemgetter(0), doc="""
The number of plurals used by the locale.""")
plural_expr = property(itemgetter(1), doc="""
The plural expression used by the locale.""")
plural_forms = property(lambda x: 'npurals=%s; plural=%s' % x, doc="""
The plural expression used by the catalog or locale.""")
def __str__(self):
return self.plural_forms
def get_plural(locale=LC_CTYPE):
    """A tuple with the information catalogs need to perform proper
    pluralization.  The first item of the tuple is the number of plural
    forms, the second the plural expression.

    >>> get_plural(locale='en')
    (2, '(n != 1)')
    >>> get_plural(locale='ga')
    (3, '(n==1 ? 0 : n==2 ? 1 : 2)')

    The object returned is a special tuple with additional members:

    >>> tup = get_plural("ja")
    >>> tup.num_plurals
    1
    >>> tup.plural_expr
    '0'
    >>> tup.plural_forms
    'npurals=1; plural=0'

    Converting the tuple into a string prints the plural forms for a
    gettext catalog:

    >>> str(tup)
    'npurals=1; plural=0'
    """
    locale = Locale.parse(locale)
    try:
        # Exact locale identifier first (e.g. 'pt_BR').
        tup = PLURALS[str(locale)]
    except KeyError:
        try:
            # Fall back to the bare language code (e.g. 'pt').
            tup = PLURALS[locale.language]
        except KeyError:
            # Unknown locale: assume the common Germanic two-form rule.
            tup = DEFAULT_PLURAL
    # NOTE(review): _PluralTuple.plural_forms spells 'npurals' (sic); the
    # doctests above intentionally match that spelling.
    return _PluralTuple(tup)
| apache-2.0 |
internaphosting/fake-switches | fake_switches/brocade/command_processor/enabled.py | 4 | 18515 | # Copyright 2015-2016 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from fake_switches import group_sequences
from fake_switches.brocade.command_processor import explain_missing_port
from fake_switches.command_processing.base_command_processor import BaseCommandProcessor
from fake_switches.command_processing.switch_tftp_parser import SwitchTftpParser
from fake_switches.switch_configuration import split_port_name, VlanPort
class EnabledCommandProcessor(BaseCommandProcessor):
    """Command processor emulating the privileged-exec ("enabled") mode of a
    Brocade switch CLI.

    Each ``do_<command>`` method handles one top-level CLI command.  Output
    is emitted through ``self.write_line`` and must match the real device's
    wording exactly, since test suites assert on it verbatim.
    """

    def __init__(self, config):
        super(EnabledCommandProcessor, self).__init__()
        # Processor the session switches to when "configure" is entered.
        self.config_processor = config

    def get_prompt(self):
        return "SSH@%s#" % self.switch_configuration.name

    def do_configure(self, *_):
        self.move_to(self.config_processor)

    def do_show(self, *args):
        """Dispatch the 'show ...' sub-commands.

        Brocade CLIs accept unambiguous prefixes, hence the
        ``"keyword".startswith(arg)`` tests throughout.
        """
        if "running-config".startswith(args[0]):
            if "vlan".startswith(args[1]):
                self.show_run_vlan()
            if "interface".startswith(args[1]):
                self.show_run_int(args)
        elif "interfaces".startswith(args[0]):
            self.show_int(args)
        elif "vlan".startswith(args[0]):
            if args[1].isdigit():
                self._show_vlan(int(args[1]))
            elif "brief".startswith(args[1]):
                self.show_vlan_brief()
            elif "ethernet".startswith(args[1]):
                self.show_vlan_int(args)
            else:
                self.write_line("Invalid input -> %s" % args[1])
                self.write_line("Type ? for a list")
        elif "ip".startswith(args[0]) and "route".startswith(args[1]) and "static".startswith(args[2]):
            routes = self.switch_configuration.static_routes
            # Header is only printed when at least one static route exists.
            if routes:
                self.write_line(" Destination Gateway Port Cost Type Uptime src-vrf")
            for n, route in enumerate(routes):
                self.write_line("{index:<8}{destination:<18} {next_hop:}".format(index=n+1, destination=str(route.dest), next_hop=str(route.next_hop)))
            self.write_line("")
        elif "version".startswith(args[0]):
            self.show_version()

    def do_ncopy(self, protocol, url, filename, target):
        """Emulate config download over tftp; any parse error is reported
        as the device's generic session-timeout message."""
        try:
            SwitchTftpParser(self.switch_configuration).parse(url, filename, self.config_processor)
            self.write_line("done")
        except Exception as e:
            self.logger.warning("tftp parsing went wrong : %s" % str(e))
            self.write_line("%s: Download to %s failed - Session timed out" % (protocol.upper(), target))

    def do_skip_page_display(self, *args):
        # Paging is never simulated, so this is a no-op.
        pass

    def do_write(self, *args):
        self.switch_configuration.commit()

    def do_exit(self):
        self.is_done = True

    def show_run_vlan(self):
        """Print the vlan section of the running configuration."""
        self.write_line("spanning-tree")
        self.write_line("!")
        self.write_line("!")
        for vlan in sorted(self.switch_configuration.vlans, key=lambda v: v.number):
            if vlan_name(vlan):
                self.write_line("vlan %d name %s" % (vlan.number, vlan_name(vlan)))
            else:
                self.write_line("vlan %d" % vlan.number)
            untagged_ports = []
            for port in self.switch_configuration.ports:
                if not isinstance(port, VlanPort):
                    # Ports with no vlan assignment implicitly belong to vlan 1.
                    if vlan.number == 1 and port.access_vlan is None and port.trunk_native_vlan is None:
                        untagged_ports.append(port)
                    elif port.access_vlan == vlan.number or port.trunk_native_vlan == vlan.number:
                        untagged_ports.append(port)
            if len(untagged_ports) > 0:
                # Vlan 1 lists ports that were *removed* from it ("no untagged").
                if vlan.number == 1:
                    self.write_line(" no untagged %s" % to_port_ranges(untagged_ports))
                else:
                    self.write_line(" untagged %s" % to_port_ranges(untagged_ports))
            tagged_ports = [p for p in self.switch_configuration.ports if
                            p.trunk_vlans and vlan.number in p.trunk_vlans]
            if tagged_ports:
                self.write_line(" tagged %s" % to_port_ranges(tagged_ports))
            vif = self.get_interface_vlan_for(vlan)
            if vif is not None:
                self.write_line(" router-interface %s" % vif.name)
            self.write_line("!")
        self.write_line("!")
        self.write_line("")

    def show_run_int(self, args):
        """Print "show running-config interface [...]" output.

        With no interface argument, every port is listed (physical ports
        before virtual "ve" interfaces).
        """
        port_list = []
        if len(args) < 3:
            # Sort key prefixes "a"/"b" so physical ports come before VlanPorts.
            port_list = sorted(self.switch_configuration.ports, key=lambda e: ("a" if not isinstance(e, VlanPort) else "b") + e.name)
        else:
            if "ve".startswith(args[2]):
                port = self.switch_configuration.get_port_by_partial_name(" ".join(args[2:]))
                if not port:
                    self.write_line("Error - %s was not configured" % " ".join(args[2:]))
                else:
                    port_list = [port]
            else:
                port_type, port_number = split_port_name("".join(args[2:]))
                port = self.switch_configuration.get_port_by_partial_name(port_number)
                if not port:
                    self.write_line("")
                else:
                    port_list = [port]
        if len(port_list) > 0:
            for port in port_list:
                attributes = get_port_attributes(port)
                # Physical ports with nothing configured are omitted entirely;
                # VlanPorts are always shown.
                if len(attributes) > 0 or isinstance(port, VlanPort):
                    self.write_line("interface %s" % port.name)
                    for a in attributes:
                        self.write_line(" " + a)
                    self.write_line("!")
            self.write_line("")

    def show_int(self, args):
        """Print "show interfaces [...]" detail for one or all ports."""
        ports = []
        port_name = " ".join(args[1:])
        if len(args) > 1:
            port = self.switch_configuration.get_port_by_partial_name(port_name)
            if port:
                ports.append(port)
        else:
            ports = self.switch_configuration.ports
        if not ports:
            [self.write_line(l) for l in explain_missing_port(port_name)]
        for port in ports:
            if isinstance(port, VlanPort):
                _, port_id = split_port_name(port.name)
                self.write_line("Ve%s is down, line protocol is down" % port_id)
                self.write_line(" Hardware is Virtual Ethernet, address is 0000.0000.0000 (bia 0000.0000.0000)")
                if port.description:
                    self.write_line(" Port name is %s" % port.description)
                else:
                    self.write_line(" No port name")
                self.write_line(" Vlan id: %s" % port.vlan_id)
                self.write_line(" Internet address is %s, IP MTU 1500 bytes, encapsulation ethernet" % (
                    port.ips[0] if port.ips else "0.0.0.0/0"))
            else:
                _, port_id = split_port_name(port.name)
                # shutdown is a tri-state: False -> "down", anything else
                # (True or unset/None) -> "disabled".
                self.write_line("GigabitEthernet%s is %s, line protocol is down" % (
                    port_id, "down" if port.shutdown is False else "disabled"))
                self.write_line(" Hardware is GigabitEthernet, address is 0000.0000.0000 (bia 0000.0000.0000)")
                self.write_line(" " + ", ".join([vlan_membership(port), port_mode(port), port_state(port)]))
                if port.description:
                    self.write_line(" Port name is %s" % port.description)
                else:
                    self.write_line(" No port name")

    def show_vlan_brief(self):
        """Print the "show vlan brief" summary table."""
        self.write_line("")
        self.write_line("VLAN Name Encap ESI Ve Pri Ports")
        self.write_line("---- ---- ----- --- ----- --- -----")
        for vlan in sorted(self.switch_configuration.vlans, key=lambda v: v.number):
            ports = [port for port in self.switch_configuration.ports
                     if port.access_vlan == vlan.number or (port.access_vlan is None and vlan.number == 1)]
            # Vlan names are truncated to 10 characters in this view.
            self.write_line("%-4s %-10s - -%s" % (
                vlan.number,
                vlan_name(vlan)[:10] if vlan_name(vlan) else "[None]",
                (" Untagged Ports : %s" % to_port_ranges(ports)) if ports else ""
            ))

    def show_vlan_int(self, args):
        """Print vlan membership for one ethernet port, tagged and untagged
        entries interleaved in ascending vlan order."""
        port = self.switch_configuration.get_port_by_partial_name(" ".join(args[1:]))
        if port:
            # Native vlan 1 is implicit and not reported as untagged.
            untagged_vlan = port.access_vlan or (port.trunk_native_vlan if port.trunk_native_vlan != 1 else None)
            if untagged_vlan is None and port.trunk_vlans is None:
                self.write_line("VLAN: 1 Untagged")
            else:
                for vlan in sorted(port.trunk_vlans or []):
                    # Emit the untagged vlan at its sorted position.
                    if untagged_vlan is not None and untagged_vlan < vlan:
                        self.write_line("VLAN: %s Untagged" % untagged_vlan)
                        untagged_vlan = None
                    self.write_line("VLAN: %s Tagged" % vlan)
                if untagged_vlan is not None:
                    self.write_line("VLAN: %s Untagged" % untagged_vlan)
        else:
            self.write_line("Invalid input -> %s" % args[2])
            self.write_line("Type ? for a list")

    def _show_vlan(self, vlan_id):
        """Print the detailed "show vlan <id>" report."""
        vlan = self.switch_configuration.get_vlan(vlan_id)
        if vlan is None:
            self.write_line("Error: vlan {} is not configured".format(vlan_id))
        else:
            vif = self.get_interface_vlan_for(vlan)
            ports = self.get_interface_ports_for(vlan)
            self.write_line("")
            self.write_line("PORT-VLAN {}, Name {}, Priority Level -, Priority Force 0, Creation Type STATIC".format(
                vlan_id, vlan.name if vlan.name is not None else "[None]"))
            self.write_line("Topo HW idx : 81 Topo SW idx: 257 Topo next vlan: 0")
            self.write_line("L2 protocols : STP")
            if len(ports["tagged"]) > 0:
                self.write_line("Statically tagged Ports : {}".format(to_port_ranges(ports["tagged"])))
            if len(ports["untagged"]) > 0:
                self.write_line("Untagged Ports : {}".format(to_port_ranges(ports["untagged"])))
            self.write_line("Associated Virtual Interface Id: {}".format(
                "NONE" if vif is None else vif.name.split(" ")[-1]))
            self.write_line("----------------------------------------------------------")
            if len(ports["untagged"]) == 0 and len(ports["tagged"]) == 0:
                self.write_line("No ports associated with VLAN")
            else:
                self.write_line("Port Type Tag-Mode Protocol State")
                for port in ports["untagged"]:
                    self.write_line("{} PHYSICAL UNTAGGED STP DISABLED".format(split_port_name(port.name)[1]))
                for port in ports["tagged"]:
                    self.write_line("{} PHYSICAL TAGGED STP DISABLED".format(split_port_name(port.name)[1]))
            self.write_line("Arp Inspection: 0")
            self.write_line("DHCP Snooping: 0")
            self.write_line("IPv4 Multicast Snooping: Disabled")
            self.write_line("IPv6 Multicast Snooping: Disabled")
            self.write_line("")
            if vif is None:
                self.write_line("No Virtual Interfaces configured for this vlan")
            else:
                self.write_line("Ve{} is down, line protocol is down".format(vif.name.split(" ")[-1]))
                self.write_line(" Type is Vlan (Vlan Id: {})".format(vlan_id))
                self.write_line(" Hardware is Virtual Ethernet, address is 748e.f8a7.1b01 (bia 748e.f8a7.1b01)")
                self.write_line(" No port name")
                self.write_line(" Vlan id: {}".format(vlan_id))
                self.write_line(" Internet address is 0.0.0.0/0, IP MTU 1500 bytes, encapsulation ethernet")
                self.write_line(" Configured BW 0 kbps")

    def get_interface_vlan_for(self, vlan):
        """Return the VlanPort ("ve" interface) bound to *vlan*, or None."""
        return next((p for p in self.switch_configuration.ports
                     if isinstance(p, VlanPort) and p.vlan_id == vlan.number),
                    None)

    def show_version(self):
        """Print a canned "show version" banner for a NetIron CER."""
        self.write_line("System: NetIron CER (Serial #: 1P2539K036, Part #: 40-1000617-02)")
        self.write_line("License: RT_SCALE, ADV_SVCS_PREM (LID: XXXXXXXXXX)")
        self.write_line("Boot : Version 5.8.0T185 Copyright (c) 1996-2014 Brocade Communications Systems, Inc.")
        self.write_line("Compiled on May 18 2015 at 13:03:00 labeled as ceb05800")
        self.write_line(" (463847 bytes) from boot flash")
        self.write_line("Monitor : Version 5.8.0T185 Copyright (c) 1996-2014 Brocade Communications Systems, Inc.")
        self.write_line("Compiled on May 18 2015 at 13:03:00 labeled as ceb05800")
        self.write_line(" (463847 bytes) from code flash")
        self.write_line("IronWare : Version 5.8.0bT183 Copyright (c) 1996-2014 Brocade Communications Systems, Inc.")
        self.write_line("Compiled on May 21 2015 at 09:20:22 labeled as ce05800b")
        self.write_line(" (17563175 bytes) from Primary")
        self.write_line("CPLD Version: 0x00000010")
        self.write_line("Micro-Controller Version: 0x0000000d")
        self.write_line("Extended route scalability")
        self.write_line("PBIF Version: 0x0162")
        self.write_line("800 MHz Power PC processor 8544 (version 8021/0023) 400 MHz bus")
        self.write_line("512 KB Boot Flash (MX29LV040C), 64 MB Code Flash (MT28F256J3)")
        self.write_line("2048 MB DRAM")
        self.write_line("System uptime is 109 days 4 hours 39 minutes 4 seconds")

    def get_interface_ports_for(self, vlan):
        """Classify physical ports of *vlan* into {"tagged": [...], "untagged": [...]}."""
        vlan_ports = {"tagged": [], "untagged": []}
        for port in self.switch_configuration.ports:
            if not isinstance(port, VlanPort):
                if port.access_vlan == vlan.number or port.trunk_native_vlan == vlan.number:
                    vlan_ports["untagged"].append(port)
                elif port.trunk_vlans and vlan.number in port.trunk_vlans:
                    vlan_ports["tagged"].append(port)
        return vlan_ports
def port_index(port):
    """Return the trailing port number of *port*'s name as an int.

    Used by to_port_ranges() to decide whether two ports are consecutive.
    The whole trailing run of digits is captured: the previous pattern
    ``.*(\\d)$`` captured only the LAST digit, so "ethernet 1/12" indexed
    as 2 and ".../9" -> ".../10" was never seen as a consecutive pair.
    The raw-string literal also silences the invalid-escape warning.
    """
    return int(re.search(r"(\d+)$", port.name).group(1))
def to_port_ranges(ports):
    """Format *ports* as Brocade-style "ethe X" / "ethe X to Y" tokens."""
    def _number(port):
        # The displayed part of the name, e.g. "1/3" from "ethernet 1/3".
        return split_port_name(port.name)[1]

    sequences = group_sequences(
        ports, are_in_sequence=lambda a, b: port_index(a) + 1 == port_index(b))
    tokens = []
    for seq in sequences:
        if len(seq) == 1:
            tokens.append("ethe %s" % _number(seq[0]))
        else:
            tokens.append("ethe %s to %s" % (_number(seq[0]), _number(seq[-1])))
    return " ".join(tokens)
def vlan_name(vlan):
    """Display name of *vlan*: its own name, "DEFAULT-VLAN" for vlan 1, else None."""
    if vlan.name:
        return vlan.name
    if vlan.number == 1:
        return "DEFAULT-VLAN"
    return None
def get_port_attributes(port):
    """Build the list of config lines shown under "interface <name>".

    The order of appends mirrors the order the real device prints the
    attributes in, so new attributes must be inserted at the right place.
    """
    attributes = []
    if port.description:
        attributes.append("port-name %s" % port.description)
    # shutdown False means the operator explicitly enabled the port.
    if port.shutdown is False:
        attributes.append("enable")
    if port.vrf is not None:
        attributes.append("vrf forwarding {}".format(port.vrf.name))
    # The IP/VRRP attributes below only exist on virtual ("ve") interfaces.
    if isinstance(port, VlanPort):
        for ip in sorted(port.ips, key=lambda e: e.ip):
            attributes.append("ip address %s" % ip)
        for ip in sorted(port.secondary_ips, key=lambda e: e.ip):
            attributes.append("ip address %s secondary" % ip)
        if port.access_group_in:
            attributes.append("ip access-group %s in" % port.access_group_in)
        if port.access_group_out:
            attributes.append("ip access-group %s out" % port.access_group_out)
        if port.vrrp_common_authentication:
            # The device masks the shared secret in running-config output.
            attributes.append("ip vrrp-extended auth-type simple-text-auth ********")
        for vrrp in port.vrrps:
            attributes.append("ip vrrp-extended vrid %s" % vrrp.group_id)
            # vrrp.track maps tracked port -> track-priority; only the first
            # entry is rendered here.
            if vrrp.priority and len(vrrp.track) > 0:
                attributes.append(" backup priority %s track-priority %s" % (vrrp.priority, list(vrrp.track.values())[0]))
            if vrrp.ip_addresses:
                for ip_address in vrrp.ip_addresses:
                    attributes.append(" ip-address %s" % ip_address)
            if vrrp.advertising:
                attributes.append(" advertise backup")
            if vrrp.timers_hold:
                attributes.append(" dead-interval %s" % vrrp.timers_hold)
            if vrrp.timers_hello:
                attributes.append(" hello-interval %s" % vrrp.timers_hello)
            if len(vrrp.track) > 0 and list(vrrp.track.keys())[0] is not None:
                attributes.append(" track-port %s" % list(vrrp.track.keys())[0])
            # An inactive vrid block is closed with "exit" instead.
            if vrrp.activated:
                attributes.append(" activate")
            else:
                attributes.append(" exit")
        for ip_address in port.ip_helpers:
            attributes.append("ip helper-address %s" % ip_address)
        if port.ip_redirect is False:
            attributes.append("no ip redirect")
    return attributes
def vlan_membership(port):
    """One-line VLAN membership summary as printed by "show interfaces"."""
    if port.access_vlan:
        return "Member of VLAN %s (untagged)" % port.access_vlan
    if port.trunk_vlans:
        if port.trunk_native_vlan:
            return "Member of VLAN %s (untagged), %d L2 VLANS (tagged)" % (
                port.trunk_native_vlan, len(port.trunk_vlans))
        return "Member of %d L2 VLAN(S) (tagged)" % len(port.trunk_vlans)
    # No explicit assignment: the port implicitly sits in vlan 1.
    return "Member of VLAN 1 (untagged)"
def port_mode(port):
    """Describe the tagging mode of *port* using "show interfaces" wording."""
    if port.trunk_vlans:
        if port.trunk_native_vlan == 1:
            return "port is in dual mode (default vlan)"
        if port.trunk_native_vlan:
            return "port is in dual mode"
    if port.access_vlan or port.trunk_vlans is None:
        return "port is in untagged mode"
    return "port is in tagged mode"
def port_state(_):
    """Every fake port reports the same state; the argument is ignored."""
    return "port state is Disabled"
| apache-2.0 |
pczerkas/tempest | tempest/services/messaging/json/messaging_client.py | 17 | 6586 | # Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from oslo_serialization import jsonutils as json
from six.moves.urllib import parse as urllib
from tempest.api_schema.response.messaging.v1 import queues as queues_schema
from tempest.common import service_client
class MessagingClient(service_client.ServiceClient):
    """Tempest REST client for the OpenStack Messaging (Zaqar) v1 API.

    Every method returns the ``(resp, body)`` pair from the underlying rest
    client; JSON bodies are decoded and validated against the response
    schemas in ``queues_schema`` where one is defined.
    """

    def __init__(self, auth_provider, service, region,
                 endpoint_type=None, build_interval=None, build_timeout=None,
                 disable_ssl_certificate_validation=None, ca_certs=None,
                 trace_requests=None):
        dscv = disable_ssl_certificate_validation
        super(MessagingClient, self).__init__(
            auth_provider, service, region,
            endpoint_type=endpoint_type,
            build_interval=build_interval,
            build_timeout=build_timeout,
            disable_ssl_certificate_validation=dscv,
            ca_certs=ca_certs,
            trace_requests=trace_requests)
        # All URIs are rooted at the version prefix, e.g. "v1/queues".
        self.version = '1'
        self.uri_prefix = 'v{0}'.format(self.version)
        # Zaqar requires a Client-ID header on message-level operations.
        client_id = uuid.uuid4().hex
        self.headers = {'Client-ID': client_id}

    def list_queues(self):
        uri = '{0}/queues'.format(self.uri_prefix)
        resp, body = self.get(uri)
        # 204 means "no queues exist" and carries no JSON body.
        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.list_queues, resp, body)
        return resp, body

    def create_queue(self, queue_name):
        uri = '{0}/queues/{1}'.format(self.uri_prefix, queue_name)
        resp, body = self.put(uri, body=None)
        self.expected_success(201, resp.status)
        return resp, body

    def show_queue(self, queue_name):
        uri = '{0}/queues/{1}'.format(self.uri_prefix, queue_name)
        resp, body = self.get(uri)
        self.expected_success(204, resp.status)
        return resp, body

    def head_queue(self, queue_name):
        # Existence check only; HEAD returns no body.
        uri = '{0}/queues/{1}'.format(self.uri_prefix, queue_name)
        resp, body = self.head(uri)
        self.expected_success(204, resp.status)
        return resp, body

    def delete_queue(self, queue_name):
        uri = '{0}/queues/{1}'.format(self.uri_prefix, queue_name)
        resp, body = self.delete(uri)
        self.expected_success(204, resp.status)
        return resp, body

    def show_queue_stats(self, queue_name):
        uri = '{0}/queues/{1}/stats'.format(self.uri_prefix, queue_name)
        resp, body = self.get(uri)
        body = json.loads(body)
        self.validate_response(queues_schema.queue_stats, resp, body)
        return resp, body

    def show_queue_metadata(self, queue_name):
        uri = '{0}/queues/{1}/metadata'.format(self.uri_prefix, queue_name)
        resp, body = self.get(uri)
        self.expected_success(200, resp.status)
        body = json.loads(body)
        return resp, body

    def set_queue_metadata(self, queue_name, rbody):
        uri = '{0}/queues/{1}/metadata'.format(self.uri_prefix, queue_name)
        resp, body = self.put(uri, body=json.dumps(rbody))
        self.expected_success(204, resp.status)
        return resp, body

    def post_messages(self, queue_name, rbody):
        uri = '{0}/queues/{1}/messages'.format(self.uri_prefix, queue_name)
        resp, body = self.post(uri, body=json.dumps(rbody),
                               extra_headers=True,
                               headers=self.headers)
        body = json.loads(body)
        self.validate_response(queues_schema.post_messages, resp, body)
        return resp, body

    def list_messages(self, queue_name):
        # echo=True makes the queue return this client's own messages too.
        uri = '{0}/queues/{1}/messages?echo=True'.format(self.uri_prefix,
                                                         queue_name)
        resp, body = self.get(uri, extra_headers=True, headers=self.headers)
        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.list_messages, resp, body)
        return resp, body

    def show_single_message(self, message_uri):
        resp, body = self.get(message_uri, extra_headers=True,
                              headers=self.headers)
        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.get_single_message, resp,
                                   body)
        return resp, body

    def show_multiple_messages(self, message_uri):
        resp, body = self.get(message_uri, extra_headers=True,
                              headers=self.headers)
        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.get_multiple_messages,
                                   resp,
                                   body)
        return resp, body

    def delete_messages(self, message_uri):
        resp, body = self.delete(message_uri)
        self.expected_success(204, resp.status)
        return resp, body

    def post_claims(self, queue_name, rbody, url_params=False):
        uri = '{0}/queues/{1}/claims'.format(self.uri_prefix, queue_name)
        # Optional query string, e.g. {'limit': 2}.
        if url_params:
            uri += '?%s' % urllib.urlencode(url_params)
        resp, body = self.post(uri, body=json.dumps(rbody),
                               extra_headers=True,
                               headers=self.headers)
        body = json.loads(body)
        self.validate_response(queues_schema.claim_messages, resp, body)
        return resp, body

    def query_claim(self, claim_uri):
        resp, body = self.get(claim_uri)
        if resp['status'] != '204':
            body = json.loads(body)
            self.validate_response(queues_schema.query_claim, resp, body)
        return resp, body

    def update_claim(self, claim_uri, rbody):
        resp, body = self.patch(claim_uri, body=json.dumps(rbody))
        self.expected_success(204, resp.status)
        return resp, body

    def release_claim(self, claim_uri):
        resp, body = self.delete(claim_uri)
        self.expected_success(204, resp.status)
        return resp, body
| apache-2.0 |
hmen89/odoo | addons/account_asset/__openerp__.py | 122 | 2168 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Assets Management',
'version': '1.0',
'depends': ['account'],
'author': 'OpenERP S.A.',
'description': """
Financial and accounting asset management.
==========================================
This Module manages the assets owned by a company or an individual. It will keep
track of depreciation's occurred on those assets. And it allows to create Move's
of the depreciation lines.
""",
'website': 'http://www.openerp.com',
'category': 'Accounting & Finance',
'sequence': 32,
'demo': [ 'account_asset_demo.xml'],
'test': [
'test/account_asset_demo.yml',
'test/account_asset.yml',
'test/account_asset_wizard.yml',
],
'data': [
'security/account_asset_security.xml',
'security/ir.model.access.csv',
'wizard/account_asset_change_duration_view.xml',
'wizard/wizard_asset_compute_view.xml',
'account_asset_view.xml',
'account_asset_invoice_view.xml',
'report/account_asset_report_view.xml',
],
'auto_install': False,
'installable': True,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
MicroMagnum/MicroMagnum | src/magnum/micromagnetics/world/body.py | 1 | 1780 | # Copyright 2012, 2013 by the Micromagnum authors.
#
# This file is part of MicroMagnum.
#
# MicroMagnum is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MicroMagnum is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MicroMagnum. If not, see <http://www.gnu.org/licenses/>.
from .material import Material
from .shape import Shape, Everywhere
class Body(object):
    """
    A Body object consists of a name (the id), a Shape and a Material. It
    specifies the material parameters of a region inside the simulation
    volume.
    """

    def __init__(self, id, material, shape=None):
        """
        Create a body object with an ID, a material, and a shape. If no
        shape is given, the Everywhere() shape, which encompasses the whole
        simulation volume, is used as a default.
        """
        self.__id = id
        self.__material = material
        self.__shape = shape or Everywhere()

        assert isinstance(self.id, str)
        assert isinstance(self.material, Material)
        assert isinstance(self.shape, Shape)

    @property
    def material(self):
        """The Material assigned to this body."""
        return self.__material

    @property
    def shape(self):
        """The Shape delimiting this body inside the simulation volume."""
        return self.__shape

    @property
    def id(self):
        """The identifier string of this body."""
        return self.__id

    def __repr__(self):
        return "Body(%s, %s, %s)" % (repr(self.id), repr(self.material), repr(self.shape))
| gpl-3.0 |
blancha/abcngspipelines | sra/getsra.py | 1 | 2427 | #!/usr/bin/env python3
# Author Alexis Blanchet-Cohen
# Date: 09/06/2014
import argparse
import os
import os.path
import pandas
import subprocess
import util
# Read the command line arguments.
parser = argparse.ArgumentParser(description="Generates scripts to download SRA files.")
parser.add_argument("-s", "--scriptsDirectory", help="Scripts directory. DEFAULT=.", default=".")
parser.add_argument("-i", "--samplesFile", help="Input file with names of SRA runs. DEFAULT=.", default="./SraRunTable.txt")
parser.add_argument("-o", "--outputDirectory", help="Output directory with SRA files. DEFAULT=.", default=".")
parser.add_argument("-q", "--submitJobsToQueue", help="Submit jobs to queue immediately.", choices=["yes", "no", "y", "n"], default="no")
args = parser.parse_args()

# If not in the main scripts directory, cd to the main scripts directory, if it exists.
#util.cdMainScriptsDirectory()

# Process the command line arguments.
scriptsDirectory = os.path.abspath(args.scriptsDirectory)
samplesFile = os.path.abspath(args.samplesFile)
outputDirectory = os.path.abspath(args.outputDirectory)

# Check if the samplesFile exists, and is a file.
if not(os.path.exists(samplesFile) and os.path.isfile(samplesFile)):
    exit(samplesFile + " does not exist or is not a file.")

# Read configuration files
config = util.readConfigurationFiles()
header = config.getboolean("server", "PBS_header")

# Read input file.
# NOTE(review): 'samplesFile' is rebound here from a path string to a
# pandas DataFrame; a separate name would be clearer.
samplesFile = pandas.read_csv(samplesFile, sep="\t")

# Create scripts directory, if it does not exist yet, and cd to it.
if not os.path.exists(scriptsDirectory):
    os.mkdir(scriptsDirectory)
os.chdir(scriptsDirectory)

# Create output directory, if it does not exist yet.
if not os.path.exists(outputDirectory):
    os.makedirs(outputDirectory)

# Cycle through all the samples and write the star scripts.
for index, row in samplesFile.iterrows():
    run = row["Run_s"]
    # Create script file.
    # NOTE(review): the file handle is never closed explicitly; content is
    # only flushed when the interpreter exits.
    scriptName = "getsra_" + run + ".sh"
    script = open(scriptName, 'w')
    if header:
        util.writeHeader(script, config, "getsra")
    # SRA layout groups runs by the first six characters of the accession.
    script.write("wget" + " \\\n")
    script.write("ftp://ftp.ncbi.nih.gov/sra/sra-instant/reads/ByRun/sra/SRR/" + os.path.join(run[0:6], run, run + ".sra") + " \\\n")
    script.write("&> " + scriptName + ".log")

# NOTE(review): '|' is a bitwise OR of two bools here; it works, but 'or'
# would be the idiomatic operator.
if (args.submitJobsToQueue.lower() == "yes") | (args.submitJobsToQueue.lower() == "y"):
    subprocess.call("submitJobs.py", shell=True)
| gpl-3.0 |
fyfcauc/android_external_chromium-org | net/tools/testserver/asn1.py | 180 | 3751 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file implements very minimal ASN.1, DER serialization.
import types
def ToDER(obj):
    '''ToDER converts the given object into DER encoding'''
    # NOTE(review): this module is Python 2 only -- types.StringType,
    # types.IntType and types.LongType do not exist on Python 3.
    if type(obj) == types.NoneType:
        # None turns into NULL
        return TagAndLength(5, 0)
    if type(obj) == types.StringType:
        # Strings are PRINTABLESTRING
        return TagAndLength(19, len(obj)) + obj
    if type(obj) == types.BooleanType:
        # BOOLEAN (tag 1): DER mandates 0xff for true, 0x00 for false.
        val = "\x00"
        if obj:
            val = "\xff"
        return TagAndLength(1, 1) + val
    if type(obj) == types.IntType or type(obj) == types.LongType:
        # INTEGER (tag 2): big-endian base-256 content, with a 0x00 pad byte
        # when the value is zero or its top bit is set (keeps it positive).
        big_endian = []
        val = obj
        # NOTE(review): negative values are unsupported -- right-shifting a
        # negative int never reaches zero, so this loop would not terminate.
        while val != 0:
            big_endian.append(val & 0xff)
            val >>= 8
        if len(big_endian) == 0 or big_endian[-1] >= 128:
            big_endian.append(0)
        big_endian.reverse()
        return TagAndLength(2, len(big_endian)) + ToBytes(big_endian)
    # Anything else is expected to know how to serialize itself.
    return obj.ToDER()
def ToBytes(array_of_bytes):
    '''ToBytes converts the array of byte values into a binary string'''
    return ''.join(map(chr, array_of_bytes))
def TagAndLength(tag, length):
    """Encode a DER tag byte plus definite-form length octets (X.690 8.1.3).

    Lengths below 128 use the short form (one octet); larger lengths use
    the long form: 0x80 | N, followed by N big-endian base-256 octets.

    The previous implementation rejected length == 65535 (its guard was
    ``length < 65535``) and asserted on anything larger; this version
    encodes any non-negative length, producing identical bytes for every
    length the old code accepted.
    """
    der = [tag]
    if length < 128:
        # Short form: the octet is the length itself.
        der.append(length)
    else:
        # Long form: minimal number of base-256 octets, most significant first.
        octets = []
        remaining = length
        while remaining:
            octets.append(remaining & 0xff)
            remaining >>= 8
        octets.reverse()
        der.append(0x80 | len(octets))
        der.extend(octets)
    return ''.join([chr(b) for b in der])
class Raw(object):
    '''Raw contains raw DER encoded bytes that are used verbatim'''

    def __init__(self, der):
        # Pre-serialized DER; ToDER() hands it back untouched.
        self.der = der

    def ToDER(self):
        return self.der
class Explicit(object):
    '''Explicit prepends an explicit tag'''

    def __init__(self, tag, child):
        self.tag = tag
        self.child = child

    def ToDER(self):
        inner = ToDER(self.child)
        # Set the context-specific (0x80) and constructed (0x20) bits.
        wrapped_tag = self.tag | 0x80 | 0x20
        return TagAndLength(wrapped_tag, len(inner)) + inner
class ENUMERATED(object):
    """DER ENUMERATED (tag 10) holding a single content octet."""

    def __init__(self, value):
        # Only values representable in one byte (0..255) can be encoded.
        self.value = value

    def ToDER(self):
        return TagAndLength(10, 1) + chr(self.value)
class SEQUENCE(object):
    """DER SEQUENCE (tag 0x30) of encodable children, in order."""

    def __init__(self, children):
        self.children = children

    def ToDER(self):
        encoded = ''.join(ToDER(child) for child in self.children)
        return TagAndLength(0x30, len(encoded)) + encoded
class SET(object):
    """DER SET (tag 0x31); children are emitted in the order given."""

    def __init__(self, children):
        self.children = children

    def ToDER(self):
        encoded = ''.join(ToDER(child) for child in self.children)
        return TagAndLength(0x31, len(encoded)) + encoded
class OCTETSTRING(object):
    """DER OCTET STRING (tag 4) wrapping a raw byte string."""

    def __init__(self, val):
        self.val = val

    def ToDER(self):
        return TagAndLength(4, len(self.val)) + self.val
class OID(object):
    # DER OBJECT IDENTIFIER (tag 6).
    def __init__(self, parts):
        # parts: the integer arcs of the OID, e.g. [1, 2, 840, 113549].
        self.parts = parts

    def ToDER(self):
        # The first two arcs are range-restricted and packed into one byte
        # as 40*arc1 + arc2.
        if len(self.parts) < 2 or self.parts[0] > 6 or self.parts[1] >= 40:
            assert False
        der = [self.parts[0]*40 + self.parts[1]]
        for x in self.parts[2:]:
            if x == 0:
                der.append(0)
            else:
                # Base-128 encoding, most significant group first; every
                # octet except the last has its continuation (0x80) bit set.
                octets = []
                while x != 0:
                    v = x & 0x7f
                    if len(octets) > 0:
                        v |= 0x80
                    octets.append(v)
                    x >>= 7
                octets.reverse()
                der = der + octets
        return TagAndLength(6, len(der)) + ToBytes(der)
class UTCTime(object):
    """DER UTCTime (tag 23) wrapping an already-formatted time string."""

    def __init__(self, time_str):
        self.time_str = time_str

    def ToDER(self):
        return TagAndLength(23, len(self.time_str)) + self.time_str
class GeneralizedTime(object):
    """DER GeneralizedTime (tag 24) wrapping an already-formatted time string."""

    def __init__(self, time_str):
        self.time_str = time_str

    def ToDER(self):
        return TagAndLength(24, len(self.time_str)) + self.time_str
class BitString(object):
    """DER BIT STRING (tag 3); the leading 0x00 says no bits of the last
    octet are unused."""

    def __init__(self, bits):
        self.bits = bits

    def ToDER(self):
        return TagAndLength(3, 1 + len(self.bits)) + "\x00" + self.bits
| bsd-3-clause |
BrotherPhil/django | django/contrib/postgres/fields/jsonb.py | 341 | 2994 | import json
from psycopg2.extras import Json
from django.contrib.postgres import forms, lookups
from django.core import exceptions
from django.db.models import Field, Transform
from django.utils.translation import ugettext_lazy as _
__all__ = ['JSONField']
class JSONField(Field):
    """Model field storing arbitrary JSON in a PostgreSQL ``jsonb`` column."""

    empty_strings_allowed = False
    description = _('A JSON object')
    default_error_messages = {
        'invalid': _("Value must be valid JSON."),
    }

    def db_type(self, connection):
        return 'jsonb'

    def get_transform(self, name):
        # Any name that is not a registered transform is treated as a JSON
        # key lookup (field__somekey).
        transform = super(JSONField, self).get_transform(name)
        if transform:
            return transform
        return KeyTransformFactory(name)

    def get_prep_value(self, value):
        # psycopg2's Json adapter serializes the value for the db driver.
        if value is not None:
            return Json(value)
        return value

    def get_prep_lookup(self, lookup_type, value):
        if lookup_type in ('has_key', 'has_keys', 'has_any_keys'):
            # Key-existence lookups take plain strings/lists, not JSON.
            return value
        if isinstance(value, (dict, list)):
            return Json(value)
        return super(JSONField, self).get_prep_lookup(lookup_type, value)

    def validate(self, value, model_instance):
        super(JSONField, self).validate(value, model_instance)
        try:
            # Round-trip check: anything json can't serialize is invalid.
            json.dumps(value)
        except TypeError:
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def value_to_string(self, obj):
        value = self.value_from_object(obj)
        return value

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.JSONField}
        defaults.update(kwargs)
        return super(JSONField, self).formfield(**defaults)
# Enable the PostgreSQL-specific jsonb lookups (``contains``, ``contained_by``,
# ``has_key``, ``has_keys``, ``has_any_keys``) on every JSONField instance.
JSONField.register_lookup(lookups.DataContains)
JSONField.register_lookup(lookups.ContainedBy)
JSONField.register_lookup(lookups.HasKey)
JSONField.register_lookup(lookups.HasKeys)
JSONField.register_lookup(lookups.HasAnyKeys)
class KeyTransform(Transform):
    """Transform implementing ``field__key`` access on jsonb columns.

    Chained transforms (``field__a__b``) collapse into a single ``#>``
    path operation; a lone key uses the ``->`` operator.
    """

    def __init__(self, key_name, *args, **kwargs):
        super(KeyTransform, self).__init__(*args, **kwargs)
        self.key_name = key_name

    def as_sql(self, compiler, connection):
        # Walk down nested KeyTransforms, collecting the full key path
        # (outermost key last).
        path = [self.key_name]
        node = self.lhs
        while isinstance(node, KeyTransform):
            path.insert(0, node.key_name)
            node = node.lhs
        lhs, params = compiler.compile(node)
        if len(path) > 1:
            # Multi-key path: use the #> operator with the path as a parameter.
            return "{} #> %s".format(lhs), [path] + params
        # Single key: numeric keys are array indexes (unquoted); everything
        # else is a quoted object key.
        try:
            int(self.key_name)
        except ValueError:
            key_sql = "'%s'" % self.key_name
        else:
            key_sql = "%s" % self.key_name
        return "%s -> %s" % (lhs, key_sql), params
class KeyTransformFactory(object):
    """Callable that builds a KeyTransform bound to a fixed key name."""

    def __init__(self, key_name):
        self.key_name = key_name

    def __call__(self, *args, **kwargs):
        transform = KeyTransform(self.key_name, *args, **kwargs)
        return transform
| bsd-3-clause |
mosbasik/buzhug | javasrc/lib/Jython/Lib/test/test_coercion.py | 19 | 11217 | import copy
import sys
import warnings
import unittest
from test.test_support import run_unittest, TestFailed
# Fake number that routes all arithmetic through __coerce__ (classic class).
class CoerceNumber:
    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return '<CoerceNumber %s>' % repr(self.arg)

    def __coerce__(self, other):
        # Unwrap a peer CoerceNumber; any other operand is passed through
        # unchanged alongside our own wrapped value.
        peer = other.arg if isinstance(other, CoerceNumber) else other
        return (self.arg, peer)
# New-style-class counterpart of CoerceNumber.
class CoerceTo(object):
    def __init__(self, arg):
        self.arg = arg

    def __coerce__(self, other):
        # Pair our payload with the other operand, unwrapping peer instances.
        if isinstance(other, CoerceTo):
            other = other.arg
        return self.arg, other
# Fake number whose arithmetic is implemented by explicit operator methods
# (no __coerce__), delegating every operation to the wrapped value.
class MethodNumber:
    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return '<MethodNumber %s>' % repr(self.arg)

    # Forward operators put the wrapped value on the left; the reflected
    # (r*) variants put it on the right.

    def __add__(self, rhs):
        return self.arg + rhs

    def __radd__(self, lhs):
        return lhs + self.arg

    def __sub__(self, rhs):
        return self.arg - rhs

    def __rsub__(self, lhs):
        return lhs - self.arg

    def __mul__(self, rhs):
        return self.arg * rhs

    def __rmul__(self, lhs):
        return lhs * self.arg

    def __div__(self, rhs):
        return self.arg / rhs

    def __rdiv__(self, lhs):
        return lhs / self.arg

    def __truediv__(self, rhs):
        return self.arg / rhs

    def __rtruediv__(self, lhs):
        return lhs / self.arg

    def __floordiv__(self, rhs):
        return self.arg // rhs

    def __rfloordiv__(self, lhs):
        return lhs // self.arg

    def __pow__(self, rhs):
        return self.arg ** rhs

    def __rpow__(self, lhs):
        return lhs ** self.arg

    def __mod__(self, rhs):
        return self.arg % rhs

    def __rmod__(self, lhs):
        return lhs % self.arg

    def __cmp__(self, other):
        # Python 2 three-way comparison, delegating to the builtin cmp().
        return cmp(self.arg, other)
# Operand values exercised pairwise by the coercion tests below.  Note the
# Python 2 long literal (2L) and the two fake-number wrappers defined above.
candidates = [2, 2L, 4.0, 2+0j, [1], (2,), None,
              MethodNumber(2), CoerceNumber(2)]

# Infix operators checked in both plain ("a op b") and augmented ("a op= b") form.
infix_binops = [ '+', '-', '*', '**', '%', '//', '/' ]

# Shorthand used throughout the result tables: "this combination raises TypeError".
TE = TypeError

# b = both normal and augmented give same result list
# s = single result lists for normal and augmented
# e = equals other results
# result lists: ['+', '-', '*', '**', '%', '//', ('classic /', 'new /')]
#                                                ^^^^^^^^^^^^^^^^^^^^^^
#                                       2-tuple if results differ
#                                       else only one value
# Expected result of ``candidates[i] <op> candidates[j]`` for every operator in
# infix_binops, keyed by the (i, j) candidate indexes.  See the legend above
# for the 'b'/'s'/'e' tags; process_infix_results() expands this table in place.
infix_results = {
    # 2
    (0,0): ('b', [4, 0, 4, 4, 0, 1, (1, 1.0)]),
    (0,1): ('e', (0,0)),
    (0,2): ('b', [6.0, -2.0, 8.0, 16.0, 2.0, 0.0, 0.5]),
    (0,3): ('b', [4+0j, 0+0j, 4+0j, 4+0j, 0+0j, 1+0j, 1+0j]),
    (0,4): ('b', [TE, TE, [1, 1], TE, TE, TE, TE]),
    (0,5): ('b', [TE, TE, (2, 2), TE, TE, TE, TE]),
    (0,6): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (0,7): ('e', (0,0)),
    (0,8): ('e', (0,0)),

    # 2L
    (1,0): ('e', (0,0)),
    (1,1): ('e', (0,1)),
    (1,2): ('e', (0,2)),
    (1,3): ('e', (0,3)),
    (1,4): ('e', (0,4)),
    (1,5): ('e', (0,5)),
    (1,6): ('e', (0,6)),
    (1,7): ('e', (0,7)),
    (1,8): ('e', (0,8)),

    # 4.0
    (2,0): ('b', [6.0, 2.0, 8.0, 16.0, 0.0, 2.0, 2.0]),
    (2,1): ('e', (2,0)),
    (2,2): ('b', [8.0, 0.0, 16.0, 256.0, 0.0, 1.0, 1.0]),
    (2,3): ('b', [6+0j, 2+0j, 8+0j, 16+0j, 0+0j, 2+0j, 2+0j]),
    (2,4): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (2,5): ('e', (2,4)),
    (2,6): ('e', (2,4)),
    (2,7): ('e', (2,0)),
    (2,8): ('e', (2,0)),

    # (2+0j)
    (3,0): ('b', [4+0j, 0+0j, 4+0j, 4+0j, 0+0j, 1+0j, 1+0j]),
    (3,1): ('e', (3,0)),
    (3,2): ('b', [6+0j, -2+0j, 8+0j, 16+0j, 2+0j, 0+0j, 0.5+0j]),
    (3,3): ('b', [4+0j, 0+0j, 4+0j, 4+0j, 0+0j, 1+0j, 1+0j]),
    (3,4): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (3,5): ('e', (3,4)),
    (3,6): ('e', (3,4)),
    (3,7): ('e', (3,0)),
    (3,8): ('e', (3,0)),

    # [1]
    (4,0): ('b', [TE, TE, [1, 1], TE, TE, TE, TE]),
    (4,1): ('e', (4,0)),
    (4,2): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (4,3): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (4,4): ('b', [[1, 1], TE, TE, TE, TE, TE, TE]),
    (4,5): ('s', [TE, TE, TE, TE, TE, TE, TE], [[1, 2], TE, TE, TE, TE, TE, TE]),
    (4,6): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (4,7): ('e', (4,0)),
    (4,8): ('e', (4,0)),

    # (2,)
    (5,0): ('b', [TE, TE, (2, 2), TE, TE, TE, TE]),
    (5,1): ('e', (5,0)),
    (5,2): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (5,3): ('e', (5,2)),
    (5,4): ('e', (5,2)),
    (5,5): ('b', [(2, 2), TE, TE, TE, TE, TE, TE]),
    (5,6): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (5,7): ('e', (5,0)),
    (5,8): ('e', (5,0)),

    # None
    (6,0): ('b', [TE, TE, TE, TE, TE, TE, TE]),
    (6,1): ('e', (6,0)),
    (6,2): ('e', (6,0)),
    (6,3): ('e', (6,0)),
    (6,4): ('e', (6,0)),
    (6,5): ('e', (6,0)),
    (6,6): ('e', (6,0)),
    (6,7): ('e', (6,0)),
    (6,8): ('e', (6,0)),

    # MethodNumber(2)
    (7,0): ('e', (0,0)),
    (7,1): ('e', (0,1)),
    (7,2): ('e', (0,2)),
    (7,3): ('e', (0,3)),
    (7,4): ('e', (0,4)),
    (7,5): ('e', (0,5)),
    (7,6): ('e', (0,6)),
    (7,7): ('e', (0,7)),
    (7,8): ('e', (0,8)),

    # CoerceNumber(2)
    (8,0): ('e', (0,0)),
    (8,1): ('e', (0,1)),
    (8,2): ('e', (0,2)),
    (8,3): ('e', (0,3)),
    (8,4): ('e', (0,4)),
    (8,5): ('e', (0,5)),
    (8,6): ('e', (0,6)),
    (8,7): ('e', (0,7)),
    (8,8): ('e', (0,8)),
}
def process_infix_results():
    """Expand the infix_results table in place.

    'e' entries are replaced by the entry they alias (sorted key order
    guarantees the alias target was processed first); 'b'/'s' entries
    become a (normal, augmented) pair of result lists.  The division
    column (index 6) may hold a (classic, new) tuple and is collapsed to
    whichever division semantics the running interpreter uses.
    """
    for key in sorted(infix_results):
        val = infix_results[key]
        if val[0] == 'e':
            # Alias: reuse the already-expanded entry.
            infix_results[key] = infix_results[val[1]]
        else:
            if val[0] == 's':
                res = (val[1], val[2])
            elif val[0] == 'b':
                # NOTE(review): both slots reference the SAME list object,
                # so patching res[0][6] below also updates res[1][6].
                res = (val[1], val[1])
            # NOTE(review): range(1) only touches res[0].  For 'b' the shared
            # list covers res[1] anyway, and the lone 's' entry has TE (not a
            # tuple) in column 6 — confirm this asymmetry is intentional.
            for i in range(1):
                if isinstance(res[i][6], tuple):
                    if 1/2 == 0:
                        # testing with classic (floor) division
                        res[i][6] = res[i][6][0]
                    else:
                        # testing with -Qnew
                        res[i][6] = res[i][6][1]
            infix_results[key] = res

process_infix_results()
# now infix_results has two lists of results for every pairing.
# Prefix-style operations under test (currently only divmod).
prefix_binops = [ 'divmod' ]

# prefix_results[i][j] is the expected divmod(candidates[i], candidates[j]);
# TE marks combinations that raise TypeError.
prefix_results = [
    [(1,0), (1L,0L), (0.0,2.0), ((1+0j),0j), TE, TE, TE, TE, (1,0)],
    [(1L,0L), (1L,0L), (0.0,2.0), ((1+0j),0j), TE, TE, TE, TE, (1L,0L)],
    [(2.0,0.0), (2.0,0.0), (1.0,0.0), ((2+0j),0j), TE, TE, TE, TE, (2.0,0.0)],
    [((1+0j),0j), ((1+0j),0j), (0j,(2+0j)), ((1+0j),0j), TE, TE, TE, TE, ((1+0j),0j)],
    [TE, TE, TE, TE, TE, TE, TE, TE, TE],
    [TE, TE, TE, TE, TE, TE, TE, TE, TE],
    [TE, TE, TE, TE, TE, TE, TE, TE, TE],
    [TE, TE, TE, TE, TE, TE, TE, TE, TE],
    [(1,0), (1L,0L), (0.0,2.0), ((1+0j),0j), TE, TE, TE, TE, (1,0)]
]
def format_float(value):
    """Render *value* to one decimal place, flushing near-zero to '0.0'."""
    return '0.0' if abs(value) < 0.01 else '%.1f' % value
def format_result(value):
    """Stringify *value* for comparison, normalising floats and complexes.

    Real and imaginary parts go through format_float() to avoid testing
    platform floating-point quirks.
    """
    if isinstance(value, complex):
        return '(%s + %sj)' % (format_float(value.real),
                               format_float(value.imag))
    if isinstance(value, float):
        return format_float(value)
    return str(value)
class CoercionTest(unittest.TestCase):
    """Exercise Python 2 coercion rules over every candidate operand pair."""

    def test_infix_binops(self):
        # For each operand pair, check every infix operator in both the
        # plain and the augmented-assignment form against the expanded
        # infix_results tables.
        for ia, a in enumerate(candidates):
            for ib, b in enumerate(candidates):
                results = infix_results[(ia, ib)]
                for op, res, ires in zip(infix_binops, results[0], results[1]):
                    if res is TE:
                        self.assertRaises(TypeError, eval,
                                          'a %s b' % op, {'a': a, 'b': b})
                    else:
                        self.assertEquals(format_result(res),
                                          format_result(eval('a %s b' % op)),
                                          '%s %s %s == %s failed' % (a, op, b, res))
                    try:
                        z = copy.copy(a)
                    except copy.Error:
                        z = a  # assume it has no inplace ops
                    if ires is TE:
                        try:
                            exec 'z %s= b' % op
                        except TypeError:
                            pass
                        else:
                            self.fail("TypeError not raised")
                    else:
                        exec('z %s= b' % op)
                        self.assertEquals(ires, z)

    def test_prefix_binops(self):
        # Same idea for prefix-style operations (currently only divmod).
        for ia, a in enumerate(candidates):
            for ib, b in enumerate(candidates):
                for op in prefix_binops:
                    res = prefix_results[ia][ib]
                    if res is TE:
                        self.assertRaises(TypeError, eval,
                                          '%s(a, b)' % op, {'a': a, 'b': b})
                    else:
                        self.assertEquals(format_result(res),
                                          format_result(eval('%s(a, b)' % op)),
                                          '%s(%s, %s) == %s failed' % (op, a, b, res))

    def test_cmptypes(self):
        # Built-in tp_compare slots expect their arguments to have the
        # same type, but a user-defined __coerce__ doesn't have to obey.
        # SF #980352
        evil_coercer = CoerceTo(42)
        # Make sure these don't crash any more
        self.assertNotEquals(cmp(u'fish', evil_coercer), 0)
        self.assertNotEquals(cmp(slice(1), evil_coercer), 0)
        # ...but that this still works
        class WackyComparer(object):
            def __cmp__(slf, other):
                self.assert_(other == 42, 'expected evil_coercer, got %r' % other)
                return 0
        self.assertEquals(cmp(WackyComparer(), evil_coercer), 0)
        # ...and classic classes too, since that code path is a little different
        class ClassicWackyComparer:
            def __cmp__(slf, other):
                self.assert_(other == 42, 'expected evil_coercer, got %r' % other)
                return 0
        self.assertEquals(cmp(ClassicWackyComparer(), evil_coercer), 0)

    def test_infinite_rec_classic_classes(self):
        # if __coerce__() returns its arguments reversed it causes an infinite
        # recursion for classic classes.
        class Tester:
            def __coerce__(self, other):
                return other, self

        exc = TestFailed("__coerce__() returning its arguments reverse "
                         "should raise RuntimeError")
        try:
            Tester() + 1
        except (RuntimeError, TypeError):
            return
        except:
            raise exc
        else:
            raise exc
def test_main():
    """Run the coercion suite, silencing the deprecated complex-divmod warning."""
    deprecated_msg = r'complex divmod\(\), // and % are deprecated'
    warnings.filterwarnings("ignore", deprecated_msg, DeprecationWarning,
                            r'test.test_coercion$')
    run_unittest(CoercionTest)

if __name__ == "__main__":
    test_main()
| bsd-3-clause |
hidenori-t/chainer | tests/cupy_tests/creation_tests/test_from_data.py | 11 | 2084 | import unittest
import numpy
import cupy
from cupy import testing
@testing.gpu
class TestFromData(unittest.TestCase):
    """Tests for cupy's array-construction routines (array/asarray/copy)."""

    _multiprocess_can_split_ = True

    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_array(self, xp, dtype):
        """Build an array from a nested Python list."""
        return xp.array([[1, 2, 3], [2, 3, 4]], dtype=dtype)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_array_copy(self, xp, dtype):
        """array() on an existing array yields an equal array."""
        src = testing.shaped_arange((2, 3, 4), xp, dtype)
        return xp.array(src)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_array_copy_is_copied(self, xp, dtype):
        """array() copies: mutating the source must not affect the result."""
        src = testing.shaped_arange((2, 3, 4), xp, dtype)
        copied = xp.array(src)
        src.fill(0)
        return copied

    @testing.for_all_dtypes(name='dtype1')
    @testing.for_all_dtypes(name='dtype2')
    @testing.numpy_cupy_array_equal()
    def test_array_copy_with_dtype(self, xp, dtype1, dtype2):
        """array() can convert between any pair of dtypes."""
        src = testing.shaped_arange((2, 3, 4), xp, dtype1)
        return xp.array(src, dtype=dtype2)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_asarray(self, xp, dtype):
        """asarray() yields an array equal to its input."""
        src = testing.shaped_arange((2, 3, 4), xp, dtype)
        return xp.asarray(src)

    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_asarray_is_not_copied(self, xp, dtype):
        """asarray() aliases: mutating the source is visible in the result."""
        src = testing.shaped_arange((2, 3, 4), xp, dtype)
        alias = xp.asarray(src)
        src.fill(0)
        return alias

    def test_ascontiguousarray_on_noncontiguous_array(self):
        """A transposed (non-contiguous) array is copied into C order."""
        base = testing.shaped_arange((2, 3, 4))
        transposed = base.transpose(2, 0, 1)
        contiguous = cupy.ascontiguousarray(transposed)
        self.assertTrue(contiguous.flags.c_contiguous)
        testing.assert_array_equal(transposed, contiguous)

    def test_ascontiguousarray_on_contiguous_array(self):
        """An already C-contiguous array is returned unchanged (same object)."""
        base = testing.shaped_arange((2, 3, 4))
        result = cupy.ascontiguousarray(base)
        self.assertIs(base, result)

    @testing.numpy_cupy_array_equal()
    def test_copy(self, xp):
        """copy() detaches the result from later writes to the source."""
        src = xp.zeros((2, 3, 4), dtype=numpy.float32)
        dup = src.copy()
        src[1] = 1
        return dup
| mit |
azatoth/scons | test/QT/moc-from-cpp.py | 5 | 2889 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"

"""
Create a moc file from a cpp file.
"""

import TestSCons

test = TestSCons.TestSCons()

# Install a stub Qt so the test runs without a real Qt toolchain.
test.Qt_dummy_installation()

##############################################################################

# Target library name (platform-specific prefix/suffix) and the moc output
# the Qt builder is expected to generate.
lib_aaa = TestSCons.lib_ + 'aaa' + TestSCons._lib
moc = 'aaa.moc'

test.Qt_create_SConstruct('SConstruct')

test.write('SConscript', """
Import("env dup")
if dup == 0: env.Append(CPPPATH=['.'])
env.StaticLibrary(target = '%s', source = ['aaa.cpp','useit.cpp'])
""" % lib_aaa)

test.write('aaa.h', r"""
void aaa(void);
""")

# Q_OBJECT in the .cpp (not the header) plus the '#include "aaa.moc"' line is
# what triggers SCons' moc-from-cpp handling.
test.write('aaa.cpp', r"""
#include "my_qobject.h"
void aaa(void) Q_OBJECT
#include "%s"
""" % moc)

test.write('useit.cpp', r"""
#include "aaa.h"
void useit() {
aaa();
}
""")

# First build; archive tools can be noisy on stderr, so match loosely.
test.run(arguments=lib_aaa,
         stderr=TestSCons.noisy_ar,
         match=TestSCons.match_re_dotall)

test.up_to_date(options = '-n', arguments = lib_aaa)

# Touch aaa.cpp; the generated moc file must be scheduled for rebuild.
test.write('aaa.cpp', r"""
#include "my_qobject.h"
/* a change */
void aaa(void) Q_OBJECT
#include "%s"
""" % moc)

test.not_up_to_date(options = '-n', arguments = moc)

test.run(options = '-c', arguments = lib_aaa)

# Repeat the build in a variant (build) directory, with and without
# duplication of source files into the build dir.
test.run(arguments = "variant_dir=1 " + test.workpath('build', lib_aaa),
         stderr=TestSCons.noisy_ar,
         match=TestSCons.match_re_dotall)

test.run(arguments = "variant_dir=1 chdir=1 " + test.workpath('build', lib_aaa))

test.must_exist(test.workpath('build', moc))

test.run(arguments = "variant_dir=1 dup=0 " +
         test.workpath('build_dup0', lib_aaa),
         stderr=TestSCons.noisy_ar,
         match=TestSCons.match_re_dotall)

test.must_exist(test.workpath('build_dup0', moc))

test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.