| repo_name (string, 5-100 chars) | ref (string, 12-67 chars) | path (string, 4-244 chars) | copies (string, 1-8 chars) | content (string, 0-1.05M chars, ⌀ = null) |
|---|---|---|---|---|
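A minimal sketch of reading rows with this schema through the Hugging Face `datasets` library; the dataset id below is hypothetical, not taken from this preview:

from datasets import load_dataset

# stream the split so the content cells (up to 1.05M characters) are fetched lazily
ds = load_dataset("some-org/github-code-dump", split="train", streaming=True)  # hypothetical id
for row in ds:
    print(row["repo_name"], row["ref"], row["path"], row["copies"])
    break  # peek at the first row only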
| jaimeMF/youtube-dl | refs/heads/master | youtube_dl/extractor/zingmp3.py | 36 |
# coding=utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import ExtractorError
class ZingMp3BaseInfoExtractor(InfoExtractor):
def _extract_item(self, item, fatal=True):
error_message = item.find('./errormessage').text
if error_message:
if not fatal:
return
raise ExtractorError(
'%s returned error: %s' % (self.IE_NAME, error_message),
expected=True)
title = item.find('./title').text.strip()
source = item.find('./source').text
extension = item.attrib['type']
thumbnail = item.find('./backimage').text
return {
'title': title,
'url': source,
'ext': extension,
'thumbnail': thumbnail,
}
def _extract_player_xml(self, player_xml_url, id, playlist_title=None):
player_xml = self._download_xml(player_xml_url, id, 'Downloading Player XML')
items = player_xml.findall('./item')
if len(items) == 1:
# one single song
data = self._extract_item(items[0])
data['id'] = id
return data
else:
# playlist of songs
entries = []
for i, item in enumerate(items, 1):
entry = self._extract_item(item, fatal=False)
if not entry:
continue
entry['id'] = '%s-%d' % (id, i)
entries.append(entry)
return {
'_type': 'playlist',
'id': id,
'title': playlist_title,
'entries': entries,
}
class ZingMp3SongIE(ZingMp3BaseInfoExtractor):
_VALID_URL = r'https?://mp3\.zing\.vn/bai-hat/(?P<slug>[^/]+)/(?P<song_id>\w+)\.html'
_TESTS = [{
'url': 'http://mp3.zing.vn/bai-hat/Xa-Mai-Xa-Bao-Thy/ZWZB9WAB.html',
'md5': 'ead7ae13693b3205cbc89536a077daed',
'info_dict': {
'id': 'ZWZB9WAB',
'title': 'Xa Mãi Xa',
'ext': 'mp3',
            'thumbnail': r're:^https?://.*\.jpg$',
},
}]
IE_NAME = 'zingmp3:song'
IE_DESC = 'mp3.zing.vn songs'
def _real_extract(self, url):
matched = re.match(self._VALID_URL, url)
slug = matched.group('slug')
song_id = matched.group('song_id')
webpage = self._download_webpage(
'http://mp3.zing.vn/bai-hat/%s/%s.html' % (slug, song_id), song_id)
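        # the watch page embeds the playlist XML location as an xmlURL
        # query-string parameter in the player URL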
player_xml_url = self._search_regex(
r'&xmlURL=(?P<xml_url>[^&]+)&', webpage, 'player xml url')
return self._extract_player_xml(player_xml_url, song_id)
class ZingMp3AlbumIE(ZingMp3BaseInfoExtractor):
_VALID_URL = r'https?://mp3\.zing\.vn/(?:album|playlist)/(?P<slug>[^/]+)/(?P<album_id>\w+)\.html'
_TESTS = [{
'url': 'http://mp3.zing.vn/album/Lau-Dai-Tinh-Ai-Bang-Kieu-Minh-Tuyet/ZWZBWDAF.html',
'info_dict': {
'_type': 'playlist',
'id': 'ZWZBWDAF',
'title': 'Lâu Đài Tình Ái - Bằng Kiều ft. Minh Tuyết | Album 320 lossless',
},
'playlist_count': 10,
}, {
'url': 'http://mp3.zing.vn/playlist/Duong-Hong-Loan-apollobee/IWCAACCB.html',
'only_matching': True,
}]
IE_NAME = 'zingmp3:album'
IE_DESC = 'mp3.zing.vn albums'
def _real_extract(self, url):
matched = re.match(self._VALID_URL, url)
slug = matched.group('slug')
album_id = matched.group('album_id')
webpage = self._download_webpage(
'http://mp3.zing.vn/album/%s/%s.html' % (slug, album_id), album_id)
player_xml_url = self._search_regex(
r'&xmlURL=(?P<xml_url>[^&]+)&', webpage, 'player xml url')
return self._extract_player_xml(
player_xml_url, album_id,
playlist_title=self._og_search_title(webpage))
|
| robovm/robovm-studio | refs/heads/master | python/testData/codeInsight/smartEnter/parenthesized_after.py | 83 |
class A:
def foo(self):
self.a = (1, 1, 2, 3)<caret>
|
| nebolsin/tyk | refs/heads/master | coprocess/bindings/python/coprocess_return_overrides_pb2.py | 2 |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: coprocess_return_overrides.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='coprocess_return_overrides.proto',
package='coprocess',
syntax='proto3',
serialized_pb=_b('\n coprocess_return_overrides.proto\x12\tcoprocess\"\xaa\x01\n\x0fReturnOverrides\x12\x15\n\rresponse_code\x18\x01 \x01(\x05\x12\x16\n\x0eresponse_error\x18\x02 \x01(\t\x12\x38\n\x07headers\x18\x03 \x03(\x0b\x32\'.coprocess.ReturnOverrides.HeadersEntry\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_RETURNOVERRIDES_HEADERSENTRY = _descriptor.Descriptor(
name='HeadersEntry',
full_name='coprocess.ReturnOverrides.HeadersEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='coprocess.ReturnOverrides.HeadersEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='coprocess.ReturnOverrides.HeadersEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=172,
serialized_end=218,
)
_RETURNOVERRIDES = _descriptor.Descriptor(
name='ReturnOverrides',
full_name='coprocess.ReturnOverrides',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='response_code', full_name='coprocess.ReturnOverrides.response_code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='response_error', full_name='coprocess.ReturnOverrides.response_error', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='headers', full_name='coprocess.ReturnOverrides.headers', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_RETURNOVERRIDES_HEADERSENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=48,
serialized_end=218,
)
_RETURNOVERRIDES_HEADERSENTRY.containing_type = _RETURNOVERRIDES
_RETURNOVERRIDES.fields_by_name['headers'].message_type = _RETURNOVERRIDES_HEADERSENTRY
DESCRIPTOR.message_types_by_name['ReturnOverrides'] = _RETURNOVERRIDES
ReturnOverrides = _reflection.GeneratedProtocolMessageType('ReturnOverrides', (_message.Message,), dict(
HeadersEntry = _reflection.GeneratedProtocolMessageType('HeadersEntry', (_message.Message,), dict(
DESCRIPTOR = _RETURNOVERRIDES_HEADERSENTRY,
__module__ = 'coprocess_return_overrides_pb2'
# @@protoc_insertion_point(class_scope:coprocess.ReturnOverrides.HeadersEntry)
))
,
DESCRIPTOR = _RETURNOVERRIDES,
__module__ = 'coprocess_return_overrides_pb2'
# @@protoc_insertion_point(class_scope:coprocess.ReturnOverrides)
))
_sym_db.RegisterMessage(ReturnOverrides)
_sym_db.RegisterMessage(ReturnOverrides.HeadersEntry)
_RETURNOVERRIDES_HEADERSENTRY.has_options = True
_RETURNOVERRIDES_HEADERSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope)
|
| abstract-open-solutions/OCB | refs/heads/8.0 | addons/mrp_repair/mrp_repair.py | 27 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from datetime import datetime
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
class mrp_repair(osv.osv):
_name = 'mrp.repair'
_inherit = 'mail.thread'
_description = 'Repair Order'
def _amount_untaxed(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates untaxed amount.
@param self: The object pointer
@param cr: The current row, from the database cursor,
@param uid: The current user ID for security checks
@param ids: List of selected IDs
@param field_name: Name of field.
@param arg: Argument
@param context: A standard dictionary for contextual values
@return: Dictionary of values.
"""
res = {}
cur_obj = self.pool.get('res.currency')
for repair in self.browse(cr, uid, ids, context=context):
res[repair.id] = 0.0
for line in repair.operations:
res[repair.id] += line.price_subtotal
for line in repair.fees_lines:
res[repair.id] += line.price_subtotal
cur = repair.pricelist_id.currency_id
res[repair.id] = cur_obj.round(cr, uid, cur, res[repair.id])
return res
def _amount_tax(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates taxed amount.
@param field_name: Name of field.
@param arg: Argument
@return: Dictionary of values.
"""
res = {}
#return {}.fromkeys(ids, 0)
cur_obj = self.pool.get('res.currency')
tax_obj = self.pool.get('account.tax')
for repair in self.browse(cr, uid, ids, context=context):
val = 0.0
cur = repair.pricelist_id.currency_id
for line in repair.operations:
                # to handle prices with tax included, use compute_all instead of compute
if line.to_invoice:
tax_calculate = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, repair.partner_id)
for c in tax_calculate['taxes']:
val += c['amount']
for line in repair.fees_lines:
if line.to_invoice:
tax_calculate = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, repair.partner_id)
for c in tax_calculate['taxes']:
val += c['amount']
res[repair.id] = cur_obj.round(cr, uid, cur, val)
return res
def _amount_total(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates total amount.
@param field_name: Name of field.
@param arg: Argument
@return: Dictionary of values.
"""
res = {}
untax = self._amount_untaxed(cr, uid, ids, field_name, arg, context=context)
tax = self._amount_tax(cr, uid, ids, field_name, arg, context=context)
cur_obj = self.pool.get('res.currency')
for id in ids:
repair = self.browse(cr, uid, id, context=context)
cur = repair.pricelist_id.currency_id
res[id] = cur_obj.round(cr, uid, cur, untax.get(id, 0.0) + tax.get(id, 0.0))
return res
def _get_default_address(self, cr, uid, ids, field_name, arg, context=None):
res = {}
partner_obj = self.pool.get('res.partner')
for data in self.browse(cr, uid, ids, context=context):
adr_id = False
if data.partner_id:
adr_id = partner_obj.address_get(cr, uid, [data.partner_id.id], ['default'])['default']
res[data.id] = adr_id
return res
def _get_lines(self, cr, uid, ids, context=None):
return self.pool['mrp.repair'].search(cr, uid, [('operations', 'in', ids)], context=context)
def _get_fee_lines(self, cr, uid, ids, context=None):
return self.pool['mrp.repair'].search(cr, uid, [('fees_lines', 'in', ids)], context=context)
_columns = {
'name': fields.char('Repair Reference', required=True, states={'confirmed': [('readonly', True)]}, copy=False),
'product_id': fields.many2one('product.product', string='Product to Repair', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_qty': fields.float('Product Quantity', digits_compute=dp.get_precision('Product Unit of Measure'),
required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'partner_id': fields.many2one('res.partner', 'Partner', select=True, help='Choose partner for whom the order will be invoiced and delivered.', states={'confirmed': [('readonly', True)]}),
'address_id': fields.many2one('res.partner', 'Delivery Address', domain="[('parent_id','=',partner_id)]", states={'confirmed': [('readonly', True)]}),
'default_address_id': fields.function(_get_default_address, type="many2one", relation="res.partner"),
'state': fields.selection([
('draft', 'Quotation'),
('cancel', 'Cancelled'),
('confirmed', 'Confirmed'),
('under_repair', 'Under Repair'),
('ready', 'Ready to Repair'),
('2binvoiced', 'To be Invoiced'),
('invoice_except', 'Invoice Exception'),
('done', 'Repaired')
], 'Status', readonly=True, track_visibility='onchange', copy=False,
        help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed repair order. \
            \n* The \'Confirmed\' status is used when a user confirms the repair order. \
            \n* The \'Ready to Repair\' status is used to start the repair; a user can start repairing only after the repair order is confirmed. \
            \n* The \'To be Invoiced\' status is used to generate the invoice before or after the repair is done. \
            \n* The \'Done\' status is set when the repair is completed.\
            \n* The \'Cancelled\' status is used when the user cancels the repair order.'),
'location_id': fields.many2one('stock.location', 'Current Location', select=True, required=True, readonly=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}),
'location_dest_id': fields.many2one('stock.location', 'Delivery Location', readonly=True, required=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}),
        'lot_id': fields.many2one('stock.production.lot', 'Repaired Lot', domain="[('product_id','=', product_id)]", help="The products repaired all belong to this lot", oldname="prodlot_id"),
'guarantee_limit': fields.date('Warranty Expiration', states={'confirmed': [('readonly', True)]}),
'operations': fields.one2many('mrp.repair.line', 'repair_id', 'Operation Lines', readonly=True, states={'draft': [('readonly', False)]}, copy=True),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', help='Pricelist of the selected partner.'),
'partner_invoice_id': fields.many2one('res.partner', 'Invoicing Address'),
'invoice_method': fields.selection([
("none", "No Invoice"),
("b4repair", "Before Repair"),
("after_repair", "After Repair")
], "Invoice Method",
select=True, required=True, states={'draft': [('readonly', False)]}, readonly=True, help='Selecting \'Before Repair\' or \'After Repair\' will allow you to generate invoice before or after the repair is done respectively. \'No invoice\' means you don\'t want to generate invoice for this repair order.'),
'invoice_id': fields.many2one('account.invoice', 'Invoice', readonly=True, track_visibility="onchange", copy=False),
'move_id': fields.many2one('stock.move', 'Move', readonly=True, help="Move created by the repair order", track_visibility="onchange", copy=False),
'fees_lines': fields.one2many('mrp.repair.fee', 'repair_id', 'Fees', readonly=True, states={'draft': [('readonly', False)]}, copy=True),
'internal_notes': fields.text('Internal Notes'),
'quotation_notes': fields.text('Quotation Notes'),
'company_id': fields.many2one('res.company', 'Company'),
'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
'repaired': fields.boolean('Repaired', readonly=True, copy=False),
'amount_untaxed': fields.function(_amount_untaxed, string='Untaxed Amount',
store={
'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
}),
'amount_tax': fields.function(_amount_tax, string='Taxes',
store={
'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
}),
'amount_total': fields.function(_amount_total, string='Total',
store={
'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
}),
}
def _default_stock_location(self, cr, uid, context=None):
try:
warehouse = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'warehouse0')
return warehouse.lot_stock_id.id
except:
return False
_defaults = {
'state': lambda *a: 'draft',
'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'mrp.repair'),
'invoice_method': lambda *a: 'none',
'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'mrp.repair', context=context),
'pricelist_id': lambda self, cr, uid, context: self.pool.get('product.pricelist').search(cr, uid, [('type', '=', 'sale')])[0],
'product_qty': 1.0,
'location_id': _default_stock_location,
}
_sql_constraints = [
('name', 'unique (name)', 'The name of the Repair Order must be unique!'),
]
def onchange_product_id(self, cr, uid, ids, product_id=None):
""" On change of product sets some values.
@param product_id: Changed product
@return: Dictionary of values.
"""
product = False
if product_id:
product = self.pool.get("product.product").browse(cr, uid, product_id)
return {'value': {
'guarantee_limit': False,
'lot_id': False,
'product_uom': product and product.uom_id.id or False,
}
}
def onchange_product_uom(self, cr, uid, ids, product_id, product_uom, context=None):
res = {'value': {}}
if not product_uom or not product_id:
return res
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
uom = self.pool.get('product.uom').browse(cr, uid, product_uom, context=context)
if uom.category_id.id != product.uom_id.category_id.id:
res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
res['value'].update({'product_uom': product.uom_id.id})
return res
def onchange_location_id(self, cr, uid, ids, location_id=None):
""" On change of location
"""
return {'value': {'location_dest_id': location_id}}
def button_dummy(self, cr, uid, ids, context=None):
return True
def onchange_partner_id(self, cr, uid, ids, part, address_id):
""" On change of partner sets the values of partner address,
partner invoice address and pricelist.
@param part: Changed id of partner.
@param address_id: Address id from current record.
@return: Dictionary of values.
"""
part_obj = self.pool.get('res.partner')
pricelist_obj = self.pool.get('product.pricelist')
if not part:
return {'value': {
'address_id': False,
'partner_invoice_id': False,
'pricelist_id': pricelist_obj.search(cr, uid, [('type', '=', 'sale')])[0]
}
}
addr = part_obj.address_get(cr, uid, [part], ['delivery', 'invoice', 'default'])
partner = part_obj.browse(cr, uid, part)
pricelist = partner.property_product_pricelist and partner.property_product_pricelist.id or False
return {'value': {
'address_id': addr['delivery'] or addr['default'],
'partner_invoice_id': addr['invoice'],
'pricelist_id': pricelist
}
}
def action_cancel_draft(self, cr, uid, ids, *args):
""" Cancels repair order when it is in 'Draft' state.
@param *arg: Arguments
@return: True
"""
if not len(ids):
return False
mrp_line_obj = self.pool.get('mrp.repair.line')
for repair in self.browse(cr, uid, ids):
mrp_line_obj.write(cr, uid, [l.id for l in repair.operations], {'state': 'draft'})
self.write(cr, uid, ids, {'state': 'draft'})
return self.create_workflow(cr, uid, ids)
def action_confirm(self, cr, uid, ids, *args):
""" Repair order state is set to 'To be invoiced' when invoice method
is 'Before repair' else state becomes 'Confirmed'.
@param *arg: Arguments
@return: True
"""
mrp_line_obj = self.pool.get('mrp.repair.line')
for o in self.browse(cr, uid, ids):
if (o.invoice_method == 'b4repair'):
self.write(cr, uid, [o.id], {'state': '2binvoiced'})
else:
self.write(cr, uid, [o.id], {'state': 'confirmed'})
for line in o.operations:
if line.product_id.track_production:
raise osv.except_osv(_('Warning!'), _("Serial number is required for operation line with product '%s'") % (line.product_id.name))
mrp_line_obj.write(cr, uid, [l.id for l in o.operations], {'state': 'confirmed'})
return True
def action_cancel(self, cr, uid, ids, context=None):
""" Cancels repair order.
@return: True
"""
mrp_line_obj = self.pool.get('mrp.repair.line')
for repair in self.browse(cr, uid, ids, context=context):
if not repair.invoiced:
mrp_line_obj.write(cr, uid, [l.id for l in repair.operations], {'state': 'cancel'}, context=context)
else:
raise osv.except_osv(_('Warning!'), _('Repair order is already invoiced.'))
return self.write(cr, uid, ids, {'state': 'cancel'})
def wkf_invoice_create(self, cr, uid, ids, *args):
self.action_invoice_create(cr, uid, ids)
return True
def action_invoice_create(self, cr, uid, ids, group=False, context=None):
""" Creates invoice(s) for repair order.
@param group: It is set to true when group invoice is to be generated.
@return: Invoice Ids.
"""
res = {}
invoices_group = {}
inv_line_obj = self.pool.get('account.invoice.line')
inv_obj = self.pool.get('account.invoice')
repair_line_obj = self.pool.get('mrp.repair.line')
repair_fee_obj = self.pool.get('mrp.repair.fee')
for repair in self.browse(cr, uid, ids, context=context):
res[repair.id] = False
if repair.state in ('draft', 'cancel') or repair.invoice_id:
continue
if not (repair.partner_id.id and repair.partner_invoice_id.id):
raise osv.except_osv(_('No partner!'), _('You have to select a Partner Invoice Address in the repair form!'))
comment = repair.quotation_notes
if (repair.invoice_method != 'none'):
if group and repair.partner_invoice_id.id in invoices_group:
inv_id = invoices_group[repair.partner_invoice_id.id]
invoice = inv_obj.browse(cr, uid, inv_id)
invoice_vals = {
'name': invoice.name + ', ' + repair.name,
'origin': invoice.origin + ', ' + repair.name,
'comment': (comment and (invoice.comment and invoice.comment + "\n" + comment or comment)) or (invoice.comment and invoice.comment or ''),
}
inv_obj.write(cr, uid, [inv_id], invoice_vals, context=context)
else:
if not repair.partner_id.property_account_receivable:
raise osv.except_osv(_('Error!'), _('No account defined for partner "%s".') % repair.partner_id.name)
account_id = repair.partner_id.property_account_receivable.id
inv = {
'name': repair.name,
'origin': repair.name,
'type': 'out_invoice',
'account_id': account_id,
'partner_id': repair.partner_invoice_id.id or repair.partner_id.id,
'currency_id': repair.pricelist_id.currency_id.id,
'comment': repair.quotation_notes,
'fiscal_position': repair.partner_id.property_account_position.id
}
inv_id = inv_obj.create(cr, uid, inv)
invoices_group[repair.partner_invoice_id.id] = inv_id
self.write(cr, uid, repair.id, {'invoiced': True, 'invoice_id': inv_id})
for operation in repair.operations:
if operation.to_invoice:
if group:
name = repair.name + '-' + operation.name
else:
name = operation.name
if operation.product_id.property_account_income:
account_id = operation.product_id.property_account_income.id
elif operation.product_id.categ_id.property_account_income_categ:
account_id = operation.product_id.categ_id.property_account_income_categ.id
else:
raise osv.except_osv(_('Error!'), _('No account defined for product "%s".') % operation.product_id.name)
invoice_line_id = inv_line_obj.create(cr, uid, {
'invoice_id': inv_id,
'name': name,
'origin': repair.name,
'account_id': account_id,
'quantity': operation.product_uom_qty,
'invoice_line_tax_id': [(6, 0, [x.id for x in operation.tax_id])],
'uos_id': operation.product_uom.id,
'price_unit': operation.price_unit,
'price_subtotal': operation.product_uom_qty * operation.price_unit,
'product_id': operation.product_id and operation.product_id.id or False
})
repair_line_obj.write(cr, uid, [operation.id], {'invoiced': True, 'invoice_line_id': invoice_line_id})
for fee in repair.fees_lines:
if fee.to_invoice:
if group:
name = repair.name + '-' + fee.name
else:
name = fee.name
if not fee.product_id:
raise osv.except_osv(_('Warning!'), _('No product defined on Fees!'))
if fee.product_id.property_account_income:
account_id = fee.product_id.property_account_income.id
elif fee.product_id.categ_id.property_account_income_categ:
account_id = fee.product_id.categ_id.property_account_income_categ.id
else:
raise osv.except_osv(_('Error!'), _('No account defined for product "%s".') % fee.product_id.name)
invoice_fee_id = inv_line_obj.create(cr, uid, {
'invoice_id': inv_id,
'name': name,
'origin': repair.name,
'account_id': account_id,
'quantity': fee.product_uom_qty,
'invoice_line_tax_id': [(6, 0, [x.id for x in fee.tax_id])],
'uos_id': fee.product_uom.id,
'product_id': fee.product_id and fee.product_id.id or False,
'price_unit': fee.price_unit,
'price_subtotal': fee.product_uom_qty * fee.price_unit
})
repair_fee_obj.write(cr, uid, [fee.id], {'invoiced': True, 'invoice_line_id': invoice_fee_id})
inv_obj.button_reset_taxes(cr, uid, inv_id, context=context)
res[repair.id] = inv_id
return res
def action_repair_ready(self, cr, uid, ids, context=None):
""" Writes repair order state to 'Ready'
@return: True
"""
for repair in self.browse(cr, uid, ids, context=context):
self.pool.get('mrp.repair.line').write(cr, uid, [l.id for
l in repair.operations], {'state': 'confirmed'}, context=context)
self.write(cr, uid, [repair.id], {'state': 'ready'})
return True
def action_repair_start(self, cr, uid, ids, context=None):
""" Writes repair order state to 'Under Repair'
@return: True
"""
repair_line = self.pool.get('mrp.repair.line')
for repair in self.browse(cr, uid, ids, context=context):
repair_line.write(cr, uid, [l.id for
l in repair.operations], {'state': 'confirmed'}, context=context)
repair.write({'state': 'under_repair'})
return True
def action_repair_end(self, cr, uid, ids, context=None):
""" Writes repair order state to 'To be invoiced' if invoice method is
After repair else state is set to 'Ready'.
@return: True
"""
for order in self.browse(cr, uid, ids, context=context):
val = {}
val['repaired'] = True
if (not order.invoiced and order.invoice_method == 'after_repair'):
val['state'] = '2binvoiced'
elif (not order.invoiced and order.invoice_method == 'b4repair'):
val['state'] = 'ready'
else:
pass
self.write(cr, uid, [order.id], val)
return True
def wkf_repair_done(self, cr, uid, ids, *args):
self.action_repair_done(cr, uid, ids)
return True
def action_repair_done(self, cr, uid, ids, context=None):
""" Creates stock move for operation and stock move for final product of repair order.
@return: Move ids of final products
"""
res = {}
move_obj = self.pool.get('stock.move')
repair_line_obj = self.pool.get('mrp.repair.line')
for repair in self.browse(cr, uid, ids, context=context):
move_ids = []
for move in repair.operations:
move_id = move_obj.create(cr, uid, {
'name': move.name,
'product_id': move.product_id.id,
'restrict_lot_id': move.lot_id.id,
'product_uom_qty': move.product_uom_qty,
'product_uom': move.product_uom.id,
'partner_id': repair.address_id and repair.address_id.id or False,
'location_id': move.location_id.id,
'location_dest_id': move.location_dest_id.id,
})
move_ids.append(move_id)
repair_line_obj.write(cr, uid, [move.id], {'move_id': move_id, 'state': 'done'}, context=context)
move_id = move_obj.create(cr, uid, {
'name': repair.name,
'product_id': repair.product_id.id,
'product_uom': repair.product_uom.id or repair.product_id.uom_id.id,
'product_uom_qty': repair.product_qty,
'partner_id': repair.address_id and repair.address_id.id or False,
'location_id': repair.location_id.id,
'location_dest_id': repair.location_dest_id.id,
'restrict_lot_id': repair.lot_id.id,
})
move_ids.append(move_id)
move_obj.action_done(cr, uid, move_ids, context=context)
self.write(cr, uid, [repair.id], {'state': 'done', 'move_id': move_id}, context=context)
res[repair.id] = move_id
return res
class ProductChangeMixin(object):
def product_id_change(self, cr, uid, ids, pricelist, product, uom=False,
product_uom_qty=0, partner_id=False, guarantee_limit=False, context=None):
""" On change of product it sets product quantity, tax account, name,
uom of product, unit price and price subtotal.
@param pricelist: Pricelist of current record.
@param product: Changed id of product.
@param uom: UoM of current record.
@param product_uom_qty: Quantity of current record.
@param partner_id: Partner of current record.
@param guarantee_limit: Guarantee limit of current record.
@return: Dictionary of values and warning message.
"""
result = {}
warning = {}
ctx = context and context.copy() or {}
ctx['uom'] = uom
if not product_uom_qty:
product_uom_qty = 1
result['product_uom_qty'] = product_uom_qty
if product:
product_obj = self.pool.get('product.product').browse(cr, uid, product, context=ctx)
if partner_id:
partner = self.pool.get('res.partner').browse(cr, uid, partner_id)
result['tax_id'] = self.pool.get('account.fiscal.position').map_tax(cr, uid, partner.property_account_position, product_obj.taxes_id, context=ctx)
result['name'] = product_obj.display_name
result['product_uom'] = product_obj.uom_id and product_obj.uom_id.id or False
if not pricelist:
warning = {
'title': _('No Pricelist!'),
'message':
_('You have to select a pricelist in the Repair form !\n'
'Please set one before choosing a product.')
}
else:
price = self.pool.get('product.pricelist').price_get(cr, uid, [pricelist],
product, product_uom_qty, partner_id, context=ctx)[pricelist]
if price is False:
warning = {
'title': _('No valid pricelist line found !'),
'message':
_("Couldn't find a pricelist line matching this product and quantity.\n"
"You have to change either the product, the quantity or the pricelist.")
}
else:
result.update({'price_unit': price, 'price_subtotal': price * product_uom_qty})
return {'value': result, 'warning': warning}
class mrp_repair_line(osv.osv, ProductChangeMixin):
_name = 'mrp.repair.line'
_description = 'Repair Line'
def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates amount.
@param field_name: Name of field.
@param arg: Argument
@return: Dictionary of values.
"""
res = {}
tax_obj = self.pool.get('account.tax')
cur_obj = self.pool.get('res.currency')
for line in self.browse(cr, uid, ids, context=context):
if line.to_invoice:
taxes = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, line.repair_id.partner_id)
cur = line.repair_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
else:
res[line.id] = 0
return res
_columns = {
'name': fields.char('Description', required=True),
'repair_id': fields.many2one('mrp.repair', 'Repair Order Reference', ondelete='cascade', select=True),
'type': fields.selection([('add', 'Add'), ('remove', 'Remove')], 'Type', required=True),
'to_invoice': fields.boolean('To Invoice'),
'product_id': fields.many2one('product.product', 'Product', required=True),
'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
'price_unit': fields.float('Unit Price', required=True, digits_compute=dp.get_precision('Product Price')),
'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')),
'tax_id': fields.many2many('account.tax', 'repair_operation_line_tax', 'repair_operation_line_id', 'tax_id', 'Taxes'),
'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False),
'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True),
'location_dest_id': fields.many2one('stock.location', 'Dest. Location', required=True, select=True),
'move_id': fields.many2one('stock.move', 'Inventory Move', readonly=True, copy=False),
'lot_id': fields.many2one('stock.production.lot', 'Lot'),
'state': fields.selection([
('draft', 'Draft'),
('confirmed', 'Confirmed'),
('done', 'Done'),
('cancel', 'Cancelled')], 'Status', required=True, readonly=True, copy=False,
            help=' * The \'Draft\' status is set automatically when the repair order is in draft status. \
                \n* The \'Confirmed\' status is set automatically when the repair order is confirmed. \
                \n* The \'Done\' status is set automatically when the repair order is completed.\
                \n* The \'Cancelled\' status is set automatically when the user cancels the repair order.'),
}
_defaults = {
'state': lambda *a: 'draft',
'product_uom_qty': lambda *a: 1,
}
def onchange_operation_type(self, cr, uid, ids, type, guarantee_limit, company_id=False, context=None):
""" On change of operation type it sets source location, destination location
and to invoice field.
        @param type: Changed operation type.
@param guarantee_limit: Guarantee limit of current record.
@return: Dictionary of values.
"""
if not type:
return {'value': {
'location_id': False,
'location_dest_id': False
}}
location_obj = self.pool.get('stock.location')
warehouse_obj = self.pool.get('stock.warehouse')
location_id = location_obj.search(cr, uid, [('usage', '=', 'production')], context=context)
location_id = location_id and location_id[0] or False
if type == 'add':
# TOCHECK: Find stock location for user's company warehouse or
# repair order's company's warehouse (company_id field is added in fix of lp:831583)
args = company_id and [('company_id', '=', company_id)] or []
warehouse_ids = warehouse_obj.search(cr, uid, args, context=context)
stock_id = False
if warehouse_ids:
stock_id = warehouse_obj.browse(cr, uid, warehouse_ids[0], context=context).lot_stock_id.id
to_invoice = (guarantee_limit and datetime.strptime(guarantee_limit, '%Y-%m-%d') < datetime.now())
return {'value': {
'to_invoice': to_invoice,
'location_id': stock_id,
'location_dest_id': location_id
}}
scrap_location_ids = location_obj.search(cr, uid, [('scrap_location', '=', True)], context=context)
return {'value': {
'to_invoice': False,
'location_id': location_id,
'location_dest_id': scrap_location_ids and scrap_location_ids[0] or False,
}}
class mrp_repair_fee(osv.osv, ProductChangeMixin):
_name = 'mrp.repair.fee'
_description = 'Repair Fees Line'
def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
""" Calculates amount.
@param field_name: Name of field.
@param arg: Argument
@return: Dictionary of values.
"""
res = {}
tax_obj = self.pool.get('account.tax')
cur_obj = self.pool.get('res.currency')
for line in self.browse(cr, uid, ids, context=context):
if line.to_invoice:
taxes = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, line.product_uom_qty, line.product_id, line.repair_id.partner_id)
cur = line.repair_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
else:
res[line.id] = 0
return res
_columns = {
'repair_id': fields.many2one('mrp.repair', 'Repair Order Reference', required=True, ondelete='cascade', select=True),
'name': fields.char('Description', select=True, required=True),
'product_id': fields.many2one('product.product', 'Product'),
'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'price_unit': fields.float('Unit Price', required=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute=dp.get_precision('Account')),
'tax_id': fields.many2many('account.tax', 'repair_fee_line_tax', 'repair_fee_line_id', 'tax_id', 'Taxes'),
'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False),
'to_invoice': fields.boolean('To Invoice'),
'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
}
_defaults = {
'to_invoice': lambda *a: True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
| WA4OSH/Learn_Python | refs/heads/master | oldLady.py | 1 |
#-------------------------------------------------------------------------------
# Name: oldLady.py
# Purpose: Demo of program control, loops, branches, etc.
#
# Author: Konrad Roeder, adapted from the nursery rhyme
# There was an Old Lady song from the
# Secret History of Nursery Rhymes Book
# www.rhymes.uk/there_was_an_old_lady.htm
#
# Created: 04/16/2014
# Copyright: (cc) Konrad Roeder 2014
# Licence: CC by 4.0
#-------------------------------------------------------------------------------
#There are seven animals in this song, one for each verse
animalName = ['fly','spider','bird','cat','dog','cow','horse']
#Each verse in the song starts with this section, printing this line
def printSectionA(verse):
print("There was an old lady who swallowed a",animalName[verse-1])
#In section B, the line is different for each verse
def printSectionB(verse):
#if (verse == 1): Do nothing
if (verse == 2):
print("That wriggled and wiggled and tickled inside her")
elif (verse == 3):
print("How absurd to swallow a bird")
elif (verse == 4):
print("Fancy that to swallow a cat")
elif (verse == 5):
print("What a hog to swallow a dog")
elif (verse == 6):
print("I don't know how she swallowed a cow")
elif (verse == 7):
print("She's dead, of course!")
def printSectionC(verse):
#This section only has lines in the middle five verses
if (verse < 7):
    #On the first verse the range is empty, so the loop body never runs
    #In verses 2-6, it prints one line less than the verse number
for line in range(verse-1, 0, -1):
print("She swallowed the",animalName[line],
"to catch the", animalName[line-1])
def printSectionD(verse):
    #This section exists only in the first six verses
if (verse < 7):
print("I don't know why she swallowed a fly - Perhaps she will die!")
print("")
def song():
#Print the title
print("There was an Old Lady song")
print("")
#Print each of the seven verses
for verse in range(1,8):
#Each verse has four sections
printSectionA(verse)
printSectionB(verse)
printSectionC(verse)
printSectionD(verse)
#Print the song's coda (ending)
print("")
print("There was an Old Lady song")
song()
|
| cloudzfy/euler | refs/heads/master | src/27.py | 1 |
# Euler discovered the remarkable quadratic formula:
# n^2 + n + 41
# It turns out that the formula will produce 40 primes for the consecutive integer
# values 0 <= n <= 39. However, when n = 40, 40^2 + 40 + 41 = 40(40 + 1) + 41 is
# divisible by 41, and certainly when n = 41, 41^2 + 41 + 41 is clearly divisible
# by 41.
# The incredible formula n^2 - 79n + 1601 was discovered, which produces 80 primes
# for the consecutive values 0 <= n <= 79. The product of the coefficients, -79
# and 1601, is -126479.
# Considering quadratics of the form: n^2 + an + b, where |a| < 1000 and |b| <= 1000
# where |n| is the modulus/absolute value of n
# e.g. |11| = 11 and |-4| = 4
# Find the product of the coefficients, a and b, for the quadratic expression that
# produces the maximum number of primes for consecutive values of n, starting with
# n = 0.
def generate_primes(limit):
is_prime = [True for i in range(limit)]
for i in range(2, limit):
if is_prime[i]:
j = 2
while i * j < limit:
is_prime[i * j] = False
j += 1
    return [n for n in range(2, limit) if is_prime[n]]
primes = set(generate_primes(1000000))
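# b must itself be prime: at n = 0 the quadratic evaluates to b, so only
# primes below 1000 are candidates for b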
b_candidates = generate_primes(1000)
def count_produced_prime(a, b):
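    # a run of primes can never extend past n = b - 1: at n = b the value
    # b*b + a*b + b is divisible by b, so iterating n in range(0, b) suffices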
ret = []
for n in range(0, b):
c = n * n + a * n + b
if c in primes:
ret.append(c)
else:
return len(ret)
return len(ret)
ans = 0
count = 0
for b in b_candidates:
for a in range(-999, 1000):
tmp = count_produced_prime(a, b)
if tmp > count:
ans, count = a * b, tmp
print(ans)
|
| GhostThrone/django | refs/heads/master | tests/known_related_objects/__init__.py | 12133432 | |
| rahuldhote/edge | refs/heads/master | src/project_name/__init__.py | 12133432 | |
| 26lights/PlayR | refs/heads/master | src/sphinx/ext/__init__.py | 12133432 | |
| neeraj9/hyde | refs/heads/master | hydeengine/templatetags/__init__.py | 12133432 | |
| welex91/ansible-modules-core | refs/heads/devel | packaging/os/package.py | 117 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Ansible, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: package
version_added: 2.0
author:
- Ansible Inc
maintainers:
- Ansible Core Team
short_description: Generic OS package manager
description:
    - Installs, upgrades and removes packages using the underlying OS package manager.
options:
name:
description:
- "Package name, or package specifier with version, like C(name-1.0)."
- "Be aware that packages are not always named the same and this module will not 'translate' them per distro."
required: true
state:
description:
- Whether to install (C(present), C(latest)), or remove (C(absent)) a package.
required: true
use:
description:
- The required package manager module to use (yum, apt, etc). The default 'auto' will use existing facts or try to autodetect it.
- You should only use this field if the automatic selection is not working for some reason.
required: false
default: auto
requirements:
- Whatever is required for the package plugins specific for each system.
notes:
- This module actually calls the pertinent package modules for each system (apt, yum, etc).
'''
EXAMPLES = '''
- name: install the latest version of ntpdate
package: name=ntpdate state=latest
# This uses a variable as this changes per distribution.
- name: remove the apache package
  package: name={{apache}} state=absent
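# A hedged sketch, not from the original examples: the 'use' option documented
# above can pin the backend module explicitly instead of relying on autodetection.
- name: install ntpdate through the apt backend specifically
  package: name=ntpdate state=present use=apt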
'''
|
| wyg3958/django-cms | refs/heads/develop | cms/models/placeholdermodel.py | 29 |
# -*- coding: utf-8 -*-
from cms.utils.compat import DJANGO_1_7
from django.contrib import admin
from django.contrib.auth import get_permission_codename
from django.db import models
from django.template.defaultfilters import title
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from cms.exceptions import LanguageError
from cms.utils.helpers import reversion_register
from cms.utils.i18n import get_language_object
from cms.utils.placeholder import PlaceholderNoAction, get_placeholder_conf
from cms.utils.urlutils import admin_reverse
@python_2_unicode_compatible
class Placeholder(models.Model):
"""
Attributes:
is_static Set to "True" for static placeholders by the template tag
is_editable If False the content of the placeholder is not editable in the frontend
"""
slot = models.CharField(_("slot"), max_length=255, db_index=True, editable=False)
default_width = models.PositiveSmallIntegerField(_("width"), null=True, editable=False)
cache_placeholder = True
is_static = False
is_editable = True
class Meta:
app_label = 'cms'
permissions = (
(u"use_structure", u"Can use Structure mode"),
)
def __str__(self):
return self.slot
def clear(self, language=None):
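        # delete plugins deepest-first and suppress treebeard reordering
        # (_no_reorder / no_mp=True) so the plugin tree is not rebalanced
        # on every single delete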
if language:
qs = self.cmsplugin_set.filter(language=language)
else:
qs = self.cmsplugin_set.all()
qs = qs.order_by('-depth').select_related()
for plugin in qs:
inst, cls = plugin.get_plugin_instance()
if inst and getattr(inst, 'cmsplugin_ptr', False):
inst.cmsplugin_ptr._no_reorder = True
inst._no_reorder = True
inst.delete(no_mp=True)
else:
plugin._no_reorder = True
plugin.delete(no_mp=True)
def get_label(self):
name = get_placeholder_conf("name", self.slot, default=title(self.slot))
name = _(name)
return name
def get_add_url(self):
return self._get_url('add_plugin')
def get_edit_url(self, plugin_pk):
return self._get_url('edit_plugin', plugin_pk)
def get_move_url(self):
return self._get_url('move_plugin')
def get_delete_url(self, plugin_pk):
return self._get_url('delete_plugin', plugin_pk)
def get_changelist_url(self):
return self._get_url('changelist')
def get_clear_url(self):
return self._get_url('clear_placeholder', self.pk)
def get_copy_url(self):
return self._get_url('copy_plugins')
def get_extra_menu_items(self):
from cms.plugin_pool import plugin_pool
return plugin_pool.get_extra_placeholder_menu_items(self)
def _get_url(self, key, pk=None):
model = self._get_attached_model()
args = []
if pk:
args.append(pk)
if not model:
return admin_reverse('cms_page_%s' % key, args=args)
else:
app_label = model._meta.app_label
model_name = model.__name__.lower()
return admin_reverse('%s_%s_%s' % (app_label, model_name, key), args=args)
def _get_permission(self, request, key):
"""
Generic method to check the permissions for a request for a given key,
the key can be: 'add', 'change' or 'delete'. For each attached object
permission has to be granted either on attached model or on attached object.
* 'add' and 'change' permissions on placeholder need either on add or change
permission on attached object to be granted.
* 'delete' need either on add, change or delete
"""
if getattr(request, 'user', None) and request.user.is_superuser:
return True
perm_keys = {
'add': ('add', 'change',),
'change': ('add', 'change',),
'delete': ('add', 'change', 'delete'),
}
if key not in perm_keys:
raise Exception("%s is not a valid perm key. "
"'Only 'add', 'change' and 'delete' are allowed" % key)
objects = [self.page] if self.page else self._get_attached_objects()
obj_perm = None
for obj in objects:
obj_perm = False
            for perm_key in perm_keys[key]:
                if self._get_object_permission(obj, request, perm_key):
obj_perm = True
break
if not obj_perm:
return False
return obj_perm
def _get_object_permission(self, obj, request, key):
if not getattr(request, 'user', None):
return False
opts = obj._meta
perm_code = '%s.%s' % (opts.app_label, get_permission_codename(key, opts))
return request.user.has_perm(perm_code) or request.user.has_perm(perm_code, obj)
def has_change_permission(self, request):
return self._get_permission(request, 'change')
def has_add_permission(self, request):
return self._get_permission(request, 'add')
def has_delete_permission(self, request):
return self._get_permission(request, 'delete')
def render(self, context, width, lang=None, editable=True, use_cache=True):
'''
Set editable = False to disable front-end rendering for this render.
'''
from cms.plugin_rendering import render_placeholder
        if 'request' not in context:
return '<!-- missing request -->'
width = width or self.default_width
if width:
context['width'] = width
return render_placeholder(self, context, lang=lang, editable=editable,
use_cache=use_cache)
def _get_attached_fields(self):
"""
        Returns a list of all non-CMSPlugin reverse foreign key related fields.
"""
from cms.models import CMSPlugin
if not hasattr(self, '_attached_fields_cache'):
self._attached_fields_cache = []
for rel in self._meta.get_all_related_objects():
if issubclass(rel.model, CMSPlugin):
continue
from cms.admin.placeholderadmin import PlaceholderAdminMixin
if DJANGO_1_7:
parent = rel.model
else:
parent = rel.related_model
if parent in admin.site._registry and isinstance(admin.site._registry[parent], PlaceholderAdminMixin):
field = getattr(self, rel.get_accessor_name())
try:
if field.count():
self._attached_fields_cache.append(rel.field)
except:
pass
return self._attached_fields_cache
def _get_attached_field(self):
from cms.models import CMSPlugin, StaticPlaceholder, Page
if not hasattr(self, '_attached_field_cache'):
self._attached_field_cache = None
relations = self._meta.get_all_related_objects()
for rel in relations:
if DJANGO_1_7:
parent = rel.model
else:
parent = rel.related_model
if parent == Page or parent == StaticPlaceholder:
relations.insert(0, relations.pop(relations.index(rel)))
for rel in relations:
if issubclass(rel.model, CMSPlugin):
continue
from cms.admin.placeholderadmin import PlaceholderAdminMixin
if DJANGO_1_7:
parent = rel.model
else:
parent = rel.related_model
if parent in admin.site._registry and isinstance(admin.site._registry[parent], PlaceholderAdminMixin):
field = getattr(self, rel.get_accessor_name())
try:
if field.count():
self._attached_field_cache = rel.field
break
except:
pass
return self._attached_field_cache
def _get_attached_field_name(self):
field = self._get_attached_field()
if field:
return field.name
return None
def _get_attached_model(self):
if hasattr(self, '_attached_model_cache'):
return self._attached_model_cache
if self.page or self.page_set.all().count():
from cms.models import Page
self._attached_model_cache = Page
return Page
field = self._get_attached_field()
if field:
self._attached_model_cache = field.model
return field.model
self._attached_model_cache = None
return None
def _get_attached_models(self):
"""
        Returns a list of models attached to this placeholder.
"""
if hasattr(self, '_attached_models_cache'):
return self._attached_models_cache
self._attached_models_cache = [field.model for field in self._get_attached_fields()]
return self._attached_models_cache
def _get_attached_objects(self):
"""
Returns a list of objects attached to this placeholder.
"""
return [obj for field in self._get_attached_fields()
for obj in getattr(self, field.related.get_accessor_name()).all()]
def page_getter(self):
if not hasattr(self, '_page'):
from cms.models.pagemodel import Page
try:
self._page = Page.objects.get(placeholders=self)
except (Page.DoesNotExist, Page.MultipleObjectsReturned,):
self._page = None
return self._page
def page_setter(self, value):
self._page = value
page = property(page_getter, page_setter)
def get_plugins_list(self, language=None):
return list(self.get_plugins(language))
def get_plugins(self, language=None):
if language:
return self.cmsplugin_set.filter(language=language).order_by('path')
else:
return self.cmsplugin_set.all().order_by('path')
def get_filled_languages(self):
"""
Returns language objects for every language for which the placeholder
has plugins.
        This is not cached as it's meant to be used in the frontend editor.
"""
languages = []
for lang_code in set(self.get_plugins().values_list('language', flat=True)):
try:
languages.append(get_language_object(lang_code))
except LanguageError:
pass
return languages
def get_cached_plugins(self):
return getattr(self, '_plugins_cache', [])
@property
def actions(self):
if not hasattr(self, '_actions_cache'):
field = self._get_attached_field()
self._actions_cache = getattr(field, 'actions', PlaceholderNoAction())
return self._actions_cache
reversion_register(Placeholder)  # follow=["cmsplugin_set"] not following plugins since they are a special case
|
| popazerty/e2-gui | refs/heads/master | lib/python/Components/Converter/RemainingToText.py | 6 |
from Components.Converter.Converter import Converter
from Poll import Poll
from Components.Element import cached
from Components.config import config
class RemainingToText(Poll, Converter, object):
DEFAULT = 0
    WITH_SECONDS = 1
NO_SECONDS = 2
IN_SECONDS = 3
PERCENTAGE = 4
VFD = 5
VFD_WITH_SECONDS = 6
VFD_NO_SECONDS = 7
VFD_IN_SECONDS = 8
VFD_PERCENTAGE = 9
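    # type codes 0-4 format for the OSD; the VFD_* variants (5-9) mirror the
    # same formats for the front-panel display (getText switches on type < 5)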
def __init__(self, type):
Poll.__init__(self)
Converter.__init__(self, type)
if type == "WithSeconds":
self.type = self.WITH_SECONDS
self.poll_interval = 1000
self.poll_enabled = True
elif type == "NoSeconds":
self.type = self.NO_SECONDS
self.poll_interval = 60*1000
self.poll_enabled = True
elif type == "InSeconds":
self.type = self.IN_SECONDS
self.poll_interval = 1000
self.poll_enabled = True
elif type == "Percentage":
self.type = self.PERCENTAGE
self.poll_interval = 60*1000
self.poll_enabled = True
elif type == "VFD":
self.type = self.VFD
elif type == "VFDWithSeconds":
self.type = self.VFD_WITH_SECONDS
self.poll_interval = 1000
self.poll_enabled = True
elif type == "VFDNoSeconds":
self.type = self.VFD_NO_SECONDS
self.poll_interval = 60*1000
self.poll_enabled = True
elif type == "VFDInSeconds":
self.type = self.VFD_IN_SECONDS
self.poll_interval = 1000
self.poll_enabled = True
elif type == "VFDPercentage":
self.type = self.VFD_PERCENTAGE
self.poll_interval = 60*1000
self.poll_enabled = True
else:
self.type = self.DEFAULT
if config.usage.swap_time_display_on_osd.value == "1" or config.usage.swap_time_display_on_osd.value == "3" or config.usage.swap_time_display_on_osd.value == "5" or config.usage.swap_time_display_on_vfd.value == "1" or config.usage.swap_time_display_on_vfd.value == "3" or config.usage.swap_time_display_on_vfd.value == "5":
self.poll_interval = 60*1000
self.poll_enabled = True
if config.usage.swap_time_display_on_osd.value == "2" or config.usage.swap_time_display_on_osd.value == "4" or config.usage.swap_time_display_on_vfd.value == "2" or config.usage.swap_time_display_on_vfd.value == "4":
self.poll_interval = 1000
self.poll_enabled = True
@cached
def getText(self):
time = self.source.time
if time is None:
return ""
duration = 0
elapsed = 0
remaining = 0
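        # source.time is a 2- or 3-tuple; which of duration/elapsed/remaining
        # it carries depends on the swap_time_remaining_* setting, hence the
        # unpacking variants below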
if str(time[1]) != 'None':
if self.type < 5:
if config.usage.swap_time_remaining_on_osd.value == "0":
(duration, remaining) = self.source.time
elif config.usage.swap_time_remaining_on_osd.value == "1":
(duration, elapsed) = self.source.time
elif config.usage.swap_time_remaining_on_osd.value == "2":
(duration, elapsed, remaining) = self.source.time
elif config.usage.swap_time_remaining_on_osd.value == "3":
(duration, remaining, elapsed) = self.source.time
else:
if config.usage.swap_time_remaining_on_vfd.value == "0":
(duration, remaining) = self.source.time
elif config.usage.swap_time_remaining_on_vfd.value == "1":
(duration, elapsed) = self.source.time
elif config.usage.swap_time_remaining_on_vfd.value == "2":
(duration, elapsed, remaining) = self.source.time
elif config.usage.swap_time_remaining_on_vfd.value == "3":
(duration, remaining, elapsed) = self.source.time
else:
(duration, remaining) = self.source.time
l = duration # Length
p = elapsed # Position
r = remaining # Remaining
sign_l = ""
if self.type < 5:
if config.usage.elapsed_time_positive_osd.value:
sign_p = "+"
sign_r = "-"
else:
sign_p = "-"
sign_r = "+"
if config.usage.swap_time_display_on_osd.value == "1":
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d " % (p/60) + sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d " % (r/60) + sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
else:
return sign_r + ngettext(_("%d Min"), _("%d Mins"), (r/60)) % (r/60)
else:
return ngettext(_("%d Min"), _("%d Mins"), (l/60)) % (l/60)
elif config.usage.swap_time_display_on_osd.value == "2":
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/60, p%60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/60, p%60) + sign_r + "%d:%02d" % (r/60, r%60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/60, r%60) + sign_p + "%d:%02d" % (p/60, p%60)
else:
return sign_r + "%d:%02d" % (r/60, r%60)
else:
return "%d:%02d" % (l/60, l%60)
elif config.usage.swap_time_display_on_osd.value == "3":
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/3600, p%3600/60) + sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/3600, r%3600/60) + sign_p + "%d:%02d" % (p/3600, p%3600/60)
else:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
else:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif config.usage.swap_time_display_on_osd.value == "4":
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d:%02d " % (p/3600, p%3600/60, p%60) + sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d:%02d " % (r/3600, r%3600/60, r%60) + sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
else:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
else:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
elif config.usage.swap_time_display_on_osd.value == "5":
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
try:
return sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
try:
return sign_p + "%d%% " % ((float(p + 0.0) / float(l + 0.0)) * 100) + sign_r + "%d%%" % ((float(r + 0.0) / float(l + 0.0)) * 100 + 1)
except:
return ""
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
try:
return sign_r + "%d%% " % ((float(r + 0.0) / float(l + 0.0)) * 100 +1 ) + sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
try:
return sign_r + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
else:
if self.type == self.DEFAULT:
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d " % (p/60) + sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d " % (r/60) + sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
else:
return sign_r + ngettext(_("%d Min"), _("%d Mins"), (r/60)) % (r/60)
else:
return ngettext(_("%d Min"), _("%d Mins"), (l/60)) % (l/60)
elif self.type == self.WITH_SECONDS:
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d:%02d " % (p/3600, p%3600/60, p%60) + sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d:%02d " % (r/3600, r%3600/60, r%60) + sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
else:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
else:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
elif self.type == self.NO_SECONDS:
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/3600, p%3600/60) + sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/3600, r%3600/60) + sign_p + "%d:%02d" % (p/3600, p%3600/60)
else:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
else:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif self.type == self.IN_SECONDS:
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d " % p
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d " % p + sign_r + "%d " % r
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d " % r + sign_p + "%d " % p
else:
return sign_r + "%d " % r
else:
return "%d " % l + _("Mins")
elif self.type == self.PERCENTAGE:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
try:
return sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
try:
return sign_p + "%d%% " % ((float(p + 0.0) / float(l + 0.0)) * 100) + sign_r + "%d%%" % ((float(r + 0.0) / float(l + 0.0)) * 100 + 1)
except:
return ""
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
try:
return sign_r + "%d%% " % ((float(r + 0.0) / float(l + 0.0)) * 100 +1 ) + sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
try:
return sign_r + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
return sign_l + "%d" % l
else:
if config.usage.elapsed_time_positive_vfd.value:
sign_p = "+"
sign_r = "-"
else:
sign_p = "-"
sign_r = "+"
if config.usage.swap_time_display_on_vfd.value == "1":
if remaining is not None:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d " % (p/60) + sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d " % (r/60) + sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
else:
return sign_r + ngettext(_("%d Min"), _("%d Mins"), (r/60)) % (r/60)
else:
return ngettext(_("%d Min"), _("%d Mins"), (l/60)) % (l/60)
elif config.usage.swap_time_display_on_vfd.value == "2":
if remaining is not None:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/60, p%60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/60, p%60) + sign_r + "%d:%02d" % (r/60, r%60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/60, r%60) + sign_p + "%d:%02d" % (p/60, p%60)
else:
return sign_r + "%d:%02d" % (r/60, r%60)
else:
return "%d:%02d" % (l/60, l%60)
elif config.usage.swap_time_display_on_vfd.value == "3":
if remaining is not None:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/3600, p%3600/60) + sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/3600, r%3600/60) + sign_p + "%d:%02d" % (p/3600, p%3600/60)
else:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
else:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif config.usage.swap_time_display_on_vfd.value == "4":
if remaining is not None:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d:%02d " % (p/3600, p%3600/60, p%60) + sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d:%02d " % (r/3600, r%3600/60, r%60) + sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
else:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
else:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
elif config.usage.swap_time_display_on_vfd.value == "5":
if remaining is not None:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
try:
return sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
try:
return sign_p + "%d%% " % ((float(p + 0.0) / float(l + 0.0)) * 100) + sign_r + "%d%%" % ((float(r + 0.0) / float(l + 0.0)) * 100 + 1)
except:
return ""
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
try:
return sign_r + "%d%% " % ((float(r + 0.0) / float(l + 0.0)) * 100 +1 ) + sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
try:
return sign_r + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
else:
if self.type == self.VFD:
if remaining is not None:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d " % (p/60) + sign_r + ngettext("%d Min", "%d Mins", (r/60)) % (r/60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d " % (r/60) + sign_p + ngettext("%d Min", "%d Mins", (p/60)) % (p/60)
else:
return sign_r + ngettext(_("%d Min"), _("%d Mins"), (r/60)) % (r/60)
else:
return ngettext(_("%d Min"), _("%d Mins"), (l/60)) % (l/60)
elif self.type == self.VFD_WITH_SECONDS:
if remaining is not None:
if config.usage.swap_time_remaining_on_osd.value == "1": # Elapsed
return sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
elif config.usage.swap_time_remaining_on_osd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d:%02d " % (p/3600, p%3600/60, p%60) + sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
elif config.usage.swap_time_remaining_on_osd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d:%02d " % (r/3600, r%3600/60, r%60) + sign_p + "%d:%02d:%02d" % (p/3600, p%3600/60, p%60)
else:
return sign_r + "%d:%02d:%02d" % (r/3600, r%3600/60, r%60)
else:
return sign_l + "%d:%02d:%02d" % (l/3600, l%3600/60, l%60)
elif self.type == self.VFD_NO_SECONDS:
if remaining is not None:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d:%02d" % (p/3600, p%3600/60)
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d:%02d " % (p/3600, p%3600/60) + sign_r + "%d:%02d" % (r/3600, r%3600/60)
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d:%02d " % (r/3600, r%3600/60) + sign_p + "%d:%02d" % (p/3600, p%3600/60)
else:
return sign_r + "%d:%02d" % (r/3600, r%3600/60)
else:
return sign_l + "%d:%02d" % (l/3600, l%3600/60)
elif self.type == self.VFD_IN_SECONDS:
if remaining is not None:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
return sign_p + "%d " % p
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
return sign_p + "%d " % p + sign_r + "%d " % r
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
return sign_r + "%d " % r + sign_p + "%d " % p
else:
return sign_r + "%d " % r
else:
return "%d " % l + _("Mins")
elif self.type == self.VFD_PERCENTAGE:
if config.usage.swap_time_remaining_on_vfd.value == "1": # Elapsed
try:
return sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
elif config.usage.swap_time_remaining_on_vfd.value == "2": # Elapsed & Remaining
try:
return sign_p + "%d%% " % ((float(p + 0.0) / float(l + 0.0)) * 100) + sign_r + "%d%%" % ((float(r + 0.0) / float(l + 0.0)) * 100 + 1)
except:
return ""
elif config.usage.swap_time_remaining_on_vfd.value == "3": # Remaining & Elapsed
try:
return sign_r + "%d%% " % ((float(r + 0.0) / float(l + 0.0)) * 100 +1 ) + sign_p + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
try:
return sign_r + "%d%%" % ((float(p + 0.0) / float(l + 0.0)) * 100)
except:
return ""
else:
return sign_l + "%d" % l
text = property(getText)
|
Fusion-Rom/android_external_skia
|
refs/heads/lp5.1
|
platform_tools/android/gyp_gen/gypd_parser.py
|
144
|
#!/usr/bin/python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Functions for parsing the gypd output from gyp.
"""
import os
def parse_dictionary(var_dict, d, current_target_name, dest_dir):
"""Helper function to get the meaningful entries in a dictionary.
Parse dictionary d, and store unique relevant entries in var_dict.
Recursively parses internal dictionaries and files that are referenced.
When parsing the 'libraries' list from gyp, entries in the form
'-l<name>' get assigned to var_dict.LOCAL_SHARED_LIBRARIES as 'lib<name>',
and entries in the form '[lib]<name>.a' get assigned to
var_dict.LOCAL_STATIC_LIBRARIES as 'lib<name>'.
Args:
var_dict: VarsDict object for storing the results of the parsing.
d: Dictionary object to parse.
current_target_name: The current target being parsed. If this dictionary
is a target, this will be its entry 'target_name'. Otherwise, this will
be the name of the target which contains this dictionary.
dest_dir: Destination for the eventual Android.mk that will be created from
this parse, relative to Skia trunk. Used to determine path for source
files.
"""
for source in d.get('sources', []):
# Compare against a lowercase version, in case files are named .H or .GYPI
lowercase_source = source.lower()
if lowercase_source.endswith('.h'):
# Android.mk does not need the header files.
continue
if lowercase_source.endswith('gypi'):
# The gypi files are included in sources, but the sources they included
# are also included. No need to parse them again.
continue
# The path is relative to the gyp folder, but Android wants the path
# relative to dest_dir.
rel_source = os.path.relpath(source, os.pardir)
rel_source = os.path.relpath(rel_source, dest_dir)
var_dict.LOCAL_SRC_FILES.add(rel_source)
for lib in d.get('libraries', []):
if lib.endswith('.a'):
# Remove the '.a'
lib = lib[:-2]
# Add 'lib', if necessary
if not lib.startswith('lib'):
lib = 'lib' + lib
var_dict.LOCAL_STATIC_LIBRARIES.add(lib)
else:
# lib will be in the form of '-l<name>'. Change it to 'lib<name>'
lib = lib.replace('-l', 'lib', 1)
var_dict.LOCAL_SHARED_LIBRARIES.add(lib)
for dependency in d.get('dependencies', []):
# Each dependency is listed as
# <path_to_file>:<target>#target
li = dependency.split(':')
assert(len(li) <= 2 and len(li) >= 1)
sub_targets = []
if len(li) == 2 and li[1] != '*':
sub_targets.append(li[1].split('#')[0])
sub_path = li[0]
assert(sub_path.endswith('.gyp'))
# Although the original reference is to a .gyp, parse the corresponding
# gypd file, which was constructed by gyp.
sub_path = sub_path + 'd'
parse_gypd(var_dict, sub_path, dest_dir, sub_targets)
if 'default_configuration' in d:
config_name = d['default_configuration']
# default_configuration is meaningless without configurations
assert('configurations' in d)
config = d['configurations'][config_name]
parse_dictionary(var_dict, config, current_target_name, dest_dir)
for flag in d.get('cflags', []):
var_dict.LOCAL_CFLAGS.add(flag)
for flag in d.get('cflags_cc', []):
var_dict.LOCAL_CPPFLAGS.add(flag)
for include in d.get('include_dirs', []):
if include.startswith('external'):
# This path is relative to the Android root. Leave it alone.
rel_include = include
else:
# As with source, the input path will be relative to gyp/, but Android
# wants relative to dest_dir.
rel_include = os.path.relpath(include, os.pardir)
rel_include = os.path.relpath(rel_include, dest_dir)
# No need to include the base directory.
    if rel_include == os.curdir:
continue
rel_include = os.path.join('$(LOCAL_PATH)', rel_include)
# Remove a trailing slash, if present.
if rel_include.endswith('/'):
rel_include = rel_include[:-1]
var_dict.LOCAL_C_INCLUDES.add(rel_include)
# For the top level, libskia, include directories should be exported.
# FIXME (scroggo): Do not hard code this.
if current_target_name == 'libskia':
var_dict.LOCAL_EXPORT_C_INCLUDE_DIRS.add(rel_include)
for define in d.get('defines', []):
var_dict.DEFINES.add(define)
def parse_gypd(var_dict, path, dest_dir, desired_targets=None):
"""Parse a gypd file.
Open a file that consists of python dictionaries representing build targets.
Parse those dictionaries using parse_dictionary. Recursively parses
referenced files.
Args:
var_dict: VarsDict object for storing the result of the parse.
path: Path to gypd file.
dest_dir: Destination for the eventual Android.mk that will be created from
this parse, relative to Skia trunk. Used to determine path for source
files and include directories.
desired_targets: List of targets to be parsed from this file. If empty,
parse all targets.
"""
d = {}
with open(path, 'r') as f:
# Read the entire file as a dictionary
d = eval(f.read())
# The gypd file is structured such that the top level dictionary has an entry
# named 'targets'
for target in d['targets']:
target_name = target['target_name']
if target_name in var_dict.KNOWN_TARGETS:
# Avoid circular dependencies
continue
if desired_targets and target_name not in desired_targets:
# Our caller does not depend on this one
continue
# Add it to our known targets so we don't parse it again
var_dict.KNOWN_TARGETS.add(target_name)
parse_dictionary(var_dict, target, target_name, dest_dir)
|
kumar303/olympia
|
refs/heads/master
|
src/olympia/stats/utils.py
|
6
|
from olympia.stats.models import ThemeUpdateCount, UpdateCount
def migrate_theme_update_count(lwt, static_theme, **kw):
"""Create UpdateCount instances from ThemeUpdateCount instances.
By default all instances for the specified lwt (lightweight theme) are
copied. Any additional **kw are passed to the filter to - for example to
limit to a certain day or day range."""
theme_update_counts = ThemeUpdateCount.objects.filter(
addon_id=lwt.id, **kw).iterator()
update_counts = [
UpdateCount(addon_id=static_theme.id, date=tuc.date, count=tuc.count)
for tuc in theme_update_counts
]
UpdateCount.objects.bulk_create(update_counts, 100)
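# Hypothetical usage sketch: copy only the counts from a given day onwards
# (the keyword filter follows the usual Django ORM lookup convention):
#   from datetime import date
#   migrate_theme_update_count(lwt, static_theme, date__gte=date(2019, 1, 1))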
|
CoDEmanX/ArangoDB
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_mimetypes.py
|
77
|
import mimetypes
import StringIO
import unittest
from test import test_support
# Tell it we don't know about external files:
mimetypes.knownfiles = []
mimetypes.inited = False
mimetypes._default_mime_types()
class MimeTypesTestCase(unittest.TestCase):
def setUp(self):
self.db = mimetypes.MimeTypes()
def test_default_data(self):
eq = self.assertEqual
eq(self.db.guess_type("foo.html"), ("text/html", None))
eq(self.db.guess_type("foo.tgz"), ("application/x-tar", "gzip"))
eq(self.db.guess_type("foo.tar.gz"), ("application/x-tar", "gzip"))
eq(self.db.guess_type("foo.tar.Z"), ("application/x-tar", "compress"))
def test_data_urls(self):
eq = self.assertEqual
guess_type = self.db.guess_type
eq(guess_type("data:,thisIsTextPlain"), ("text/plain", None))
eq(guess_type("data:;base64,thisIsTextPlain"), ("text/plain", None))
eq(guess_type("data:text/x-foo,thisIsTextXFoo"), ("text/x-foo", None))
def test_file_parsing(self):
eq = self.assertEqual
sio = StringIO.StringIO("x-application/x-unittest pyunit\n")
self.db.readfp(sio)
eq(self.db.guess_type("foo.pyunit"),
("x-application/x-unittest", None))
eq(self.db.guess_extension("x-application/x-unittest"), ".pyunit")
def test_non_standard_types(self):
eq = self.assertEqual
# First try strict
eq(self.db.guess_type('foo.xul', strict=True), (None, None))
eq(self.db.guess_extension('image/jpg', strict=True), None)
# And then non-strict
eq(self.db.guess_type('foo.xul', strict=False), ('text/xul', None))
eq(self.db.guess_extension('image/jpg', strict=False), '.jpg')
def test_guess_all_types(self):
eq = self.assertEqual
unless = self.failUnless
# First try strict. Use a set here for testing the results because if
# test_urllib2 is run before test_mimetypes, global state is modified
# such that the 'all' set will have more items in it.
all = set(self.db.guess_all_extensions('text/plain', strict=True))
unless(all >= set(['.bat', '.c', '.h', '.ksh', '.pl', '.txt']))
# And now non-strict
all = self.db.guess_all_extensions('image/jpg', strict=False)
all.sort()
eq(all, ['.jpg'])
# And now for no hits
all = self.db.guess_all_extensions('image/jpg', strict=True)
eq(all, [])
def test_main():
test_support.run_unittest(MimeTypesTestCase)
if __name__ == "__main__":
test_main()
|
joequery/django
|
refs/heads/master
|
django/conf/locale/fr/formats.py
|
504
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'j N Y'
SHORT_DATETIME_FORMAT = 'j N Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
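# For example, with DATE_FORMAT = 'j F Y', date(2006, 10, 25) renders as
# "25 octobre 2006" (assuming the active locale is French).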
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
'%d.%m.%Y', '%d.%m.%y', # Swiss [fr_CH), '25.10.2006', '25.10.06'
# '%d %B %Y', '%d %b %Y', # '25 octobre 2006', '25 oct. 2006'
]
DATETIME_INPUT_FORMATS = [
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d.%m.%Y %H:%M:%S', # Swiss [fr_CH), '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # Swiss (fr_CH), '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # Swiss (fr_CH), '25.10.2006 14:30'
'%d.%m.%Y', # Swiss (fr_CH), '25.10.2006'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
|
hiepcm/update-script
|
refs/heads/master
|
tests/ltsi-3.10/controller/common/tcp_check.py
|
6
|
#!/usr/bin/python
import errno
import getopt
import os
import subprocess
import sys
import time
import signal
def try_kill (proc):
try:
proc.kill()
except OSError, e:
if e.errno != errno.ESRCH:
print >>sys.stderr, 'error: kill failed:', e
return False
return True
verbose = False
def err (str):
    print >>sys.stderr, "error: %s" % str
    sys.exit(1)
class Test:
def __init__(self, board_hostname, board_username, ip_addr, target, log_file):
self.board_hostname = board_hostname
self.board_username = board_username
self.ip_addr = ip_addr
self.target = target
self.log_file = log_file
# COMMON FUNCTION:
def board_cmd_args(self, cmd):
return ['ssh',self.board_username + '@' + self.board_hostname ] + cmd
def prepare_cmd(self, cmd):
return [ ' '.join(cmd) ]
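    # Illustrative example (hypothetical host): with board_username='root' and
    # board_hostname='armadillo800', board_cmd_args(['iperf', '-s']) yields
    # ['ssh', 'root@armadillo800', 'iperf', '-s'].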
def command(self):
if self.target == "BOARD":
cmd = ['iperf','-c', self.ip_addr, '>', '/dev/null' ]
elif self.target == "PC":
cmd = ['iperf','-c', self.ip_addr, '>', '/dev/null' ]
cmd = self.board_cmd_args(cmd)
return self.prepare_cmd(cmd)
# TCP CHECK:
#Case of transfer messages from board to PC:
def transfer_mesg_to_pc(self, cmd):
#Receive on pc:
receive = subprocess.Popen([ 'iperf', '-s' ], stdout=subprocess.PIPE)
time.sleep(2)
if not receive:
print "Not ready to receive message"
return False
#Send from board:
send = subprocess.call( cmd, shell=True)
if send:
print "Send messages failed"
return False
time.sleep(2)
if not try_kill(receive):
print "Kill Failed"
return False
output = receive.communicate()[0]
if output:
f = open(self.log_file, 'w')
f.write(str(output))
f.close()
return output
#------------------------------------------
#Case of transfer messages from PC to board:
def transfer_mesg_to_board(self, cmd):
#Receive on board:
rev_cmd = ['iperf', '-s']
rev_cmd = self.board_cmd_args(self.prepare_cmd(rev_cmd))
receive = subprocess.Popen(rev_cmd, stdout=subprocess.PIPE)
time.sleep(2)
#exp: 'ssh', 'root@armadillo800','iperf -s'#
if not receive:
print "Not ready to receive message"
return False
        #Send from the PC (the board side is busy receiving):
send = subprocess.call(cmd, shell=True)
if send:
print "Send messages failed"
return False
time.sleep(2)
if not try_kill(receive):
print "Kill Failed"
return False
output = receive.communicate()[0]
if output:
f = open(self.log_file, 'w')
f.write(str(output))
f.close()
return True
#=======================================================
#Running program:
def run(self):
result = True
if self.target == "BOARD":
out = self.transfer_mesg_to_board(self.command())
elif self.target == "PC":
out = self.transfer_mesg_to_pc(self.command())
        else:
            print "Unknown Target."
            out = False
if not out:
result = False
return result
if len(sys.argv) < 2:
err("Too few arguments\n")
try:
opts, args = getopt.getopt(sys.argv[1:], "hv", [])
except getopt.GetoptError:
err("Unknown arguments\n")
if len(sys.argv) < 5:
err("Too few arguments\n")
for opt, arg in opts:
    if opt == '-h':
        print "usage: %s [-hv] board_hostname board_username ip_addr target log_file" % sys.argv[0]
        sys.exit(0)
if opt == '-v':
verbose = True
test = Test(*args)
retval = test.run()
if retval == False:
exit(1)
|
hynekcer/django
|
refs/heads/master
|
tests/admin_views/__init__.py
|
12133432
| |
myvoice-nigeria/myvoice
|
refs/heads/develop
|
myvoice/clinics/management/__init__.py
|
12133432
| |
bangoocms/bangoo
|
refs/heads/master
|
bangoo/theming/management/__init__.py
|
12133432
| |
LINKIWI/linkr
|
refs/heads/master
|
test/backend/__init__.py
|
12133432
| |
gsehub/edx-platform
|
refs/heads/gsehub-release
|
lms/djangoapps/instructor/tests/test_services.py
|
19
|
"""
Tests for the InstructorService
"""
import json
import mock
from nose.plugins.attrib import attr
from courseware.models import StudentModule
from lms.djangoapps.instructor.access import allow_access
from lms.djangoapps.instructor.services import InstructorService
from lms.djangoapps.instructor.tests.test_tools import msk_from_problem_urlname
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@attr(shard=1)
class InstructorServiceTests(SharedModuleStoreTestCase):
"""
Tests for the InstructorService
"""
@classmethod
def setUpClass(cls):
super(InstructorServiceTests, cls).setUpClass()
cls.course = CourseFactory.create()
cls.problem_location = msk_from_problem_urlname(
cls.course.id,
'robot-some-problem-urlname'
)
cls.other_problem_location = msk_from_problem_urlname(
cls.course.id,
'robot-some-other_problem-urlname'
)
cls.problem_urlname = unicode(cls.problem_location)
cls.other_problem_urlname = unicode(cls.other_problem_location)
def setUp(self):
super(InstructorServiceTests, self).setUp()
self.student = UserFactory()
CourseEnrollment.enroll(self.student, self.course.id)
self.service = InstructorService()
self.module_to_reset = StudentModule.objects.create(
student=self.student,
course_id=self.course.id,
module_state_key=self.problem_location,
state=json.dumps({'attempts': 2}),
)
@mock.patch('lms.djangoapps.grades.signals.handlers.PROBLEM_WEIGHTED_SCORE_CHANGED.send')
def test_reset_student_attempts_delete(self, _mock_signal):
"""
Test delete student state.
"""
# make sure the attempt is there
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.course.id,
module_state_key=self.module_to_reset.module_state_key,
).count(),
1
)
self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
self.problem_urlname,
requesting_user=self.student,
)
# make sure the module has been deleted
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.course.id,
module_state_key=self.module_to_reset.module_state_key,
).count(),
0
)
def test_reset_bad_content_id(self):
"""
Negative test of trying to reset attempts with bad content_id
"""
result = self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
'foo/bar/baz',
requesting_user=self.student,
)
self.assertIsNone(result)
def test_reset_bad_user(self):
"""
Negative test of trying to reset attempts with bad user identifier
"""
result = self.service.delete_student_attempt(
'bad_student',
unicode(self.course.id),
'foo/bar/baz',
requesting_user=self.student,
)
self.assertIsNone(result)
def test_reset_non_existing_attempt(self):
"""
Negative test of trying to reset attempts with bad user identifier
"""
result = self.service.delete_student_attempt(
self.student.username,
unicode(self.course.id),
self.other_problem_urlname,
requesting_user=self.student,
)
self.assertIsNone(result)
def test_is_user_staff(self):
"""
Test to assert that the user is staff or not
"""
result = self.service.is_course_staff(
self.student,
unicode(self.course.id)
)
self.assertFalse(result)
# allow staff access to the student
allow_access(self.course, self.student, 'staff')
result = self.service.is_course_staff(
self.student,
unicode(self.course.id)
)
self.assertTrue(result)
def test_report_suspicious_attempt(self):
"""
Test to verify that the create_zendesk_ticket() is called
"""
requester_name = "edx-proctoring"
email = "edx-proctoring@edx.org"
subject = "Proctored Exam Review: {review_status}".format(review_status="Suspicious")
body = "A proctored exam attempt for {exam_name} in {course_name} by username: {student_username} was " \
"reviewed as {review_status} by the proctored exam review provider."
body = body.format(
exam_name="test_exam", course_name=self.course.display_name, student_username="test_student",
review_status="Suspicious"
)
tags = ["proctoring"]
with mock.patch("lms.djangoapps.instructor.services.create_zendesk_ticket") as mock_create_zendesk_ticket:
self.service.send_support_notification(
course_id=unicode(self.course.id),
exam_name="test_exam",
student_username="test_student",
review_status="Suspicious"
)
mock_create_zendesk_ticket.assert_called_with(requester_name, email, subject, body, tags)
|
alazyer/oscar
|
refs/heads/master
|
frobshop/oscar/apps/catalogue/signals.py
|
35
|
import django.dispatch
product_viewed = django.dispatch.Signal(
providing_args=["product", "user", "request", "response"])
|
douglaskastle/pypcapfile
|
refs/heads/master
|
pcapfile/protocols/transport/tcp.py
|
2
|
"""
TCP transport definition
"""
import ctypes
import struct
class TCP(ctypes.Structure):
"""
Represents a TCP packet
"""
_fields_ = [('src_port', ctypes.c_ushort), # source port
('dst_port', ctypes.c_ushort), # destination port
('seqnum', ctypes.c_uint), # sequence number
('acknum', ctypes.c_uint), # acknowledgment number
('data_offset', ctypes.c_uint), # data offset in bytes
('urg', ctypes.c_bool), # URG
('ack', ctypes.c_bool), # ACK
('psh', ctypes.c_bool), # PSH
('rst', ctypes.c_bool), # RST
('syn', ctypes.c_bool), # SYN
('fin', ctypes.c_bool), # FIN
('win', ctypes.c_ushort), # window size
('sum', ctypes.c_ushort)] # checksum
tcp_min_header_size = 20
def __init__(self, packet, layers=0):
super(TCP, self).__init__()
fields = struct.unpack("!HHIIBBHHH", packet[:self.tcp_min_header_size])
self.src_port = fields[0]
self.dst_port = fields[1]
self.seqnum = fields[2]
self.acknum = fields[3]
self.data_offset = 4 * (fields[4] >> 4)
self.urg = fields[5] & 32
self.ack = fields[5] & 16
self.psh = fields[5] & 8
self.rst = fields[5] & 4
self.syn = fields[5] & 2
self.fin = fields[5] & 1
self.win = fields[6]
self.sum = fields[7]
# urg_offset = 4 * fields[8] # rarely used
if self.data_offset < 20:
self.opt = b''
self.payload = b''
else:
self.opt = packet[20:self.data_offset]
self.payload = packet[self.data_offset:]
def __str__(self):
packet = 'tcp %s packet from port %d to port %d carrying %d bytes'
str_flags = ''
if self.syn:
str_flags += 'S'
if self.ack:
str_flags += 'A'
if self.rst:
str_flags += 'R'
if self.fin:
str_flags += 'F'
if self.urg:
str_flags += 'U'
packet = packet % (str_flags, self.src_port, self.dst_port, len(self.payload))
return packet
def __len__(self):
return max(self.data_offset, self.tcp_min_header_size) + len(self.payload)
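# Minimal usage sketch (hand-built 20-byte SYN/ACK header, no options):
#   hdr = struct.pack("!HHIIBBHHH", 80, 12345, 0, 0, 5 << 4, 0x12, 8192, 0, 0)
#   seg = TCP(hdr)
#   str(seg)  # -> 'tcp SA packet from port 80 to port 12345 carrying 0 bytes'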
|
wiki2014/Learning-Summary
|
refs/heads/master
|
alps/cts/apps/CameraITS/tests/scene1/test_yuv_plus_raw.py
|
1
|
# Copyright 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import its.image
import its.caps
import its.device
import its.objects
import its.target
import os.path
import math
def main():
"""Test capturing a single frame as both RAW and YUV outputs.
"""
NAME = os.path.basename(__file__).split(".")[0]
THRESHOLD_MAX_RMS_DIFF = 0.035
with its.device.ItsSession() as cam:
props = cam.get_camera_properties()
its.caps.skip_unless(its.caps.compute_target_exposure(props) and
its.caps.raw16(props) and
its.caps.per_frame_control(props))
# Use a manual request with a linear tonemap so that the YUV and RAW
# should look the same (once converted by the its.image module).
e, s = its.target.get_target_exposure_combos(cam)["midExposureTime"]
req = its.objects.manual_capture_request(s, e, 0.0, True, props)
max_raw_size = \
its.objects.get_available_output_sizes("raw", props)[0]
w,h = its.objects.get_available_output_sizes(
"yuv", props, (1920, 1080), max_raw_size)[0]
out_surfaces = [{"format":"raw"},
{"format":"yuv", "width":w, "height":h}]
cap_raw, cap_yuv = cam.do_capture(req, out_surfaces)
img = its.image.convert_capture_to_rgb_image(cap_yuv)
its.image.write_image(img, "%s_yuv.jpg" % (NAME), True)
tile = its.image.get_image_patch(img, 0.45, 0.45, 0.1, 0.1)
rgb0 = its.image.compute_image_means(tile)
# Raw shots are 1/2 x 1/2 smaller after conversion to RGB, so scale the
# tile appropriately.
img = its.image.convert_capture_to_rgb_image(cap_raw, props=props)
its.image.write_image(img, "%s_raw.jpg" % (NAME), True)
tile = its.image.get_image_patch(img, 0.475, 0.475, 0.05, 0.05)
rgb1 = its.image.compute_image_means(tile)
rms_diff = math.sqrt(
sum([pow(rgb0[i] - rgb1[i], 2.0) for i in range(3)]) / 3.0)
print "RMS difference:", rms_diff
assert(rms_diff < THRESHOLD_MAX_RMS_DIFF)
if __name__ == '__main__':
main()
|
KrisCheng/ML-Learning
|
refs/heads/master
|
archive/MOOC/Deeplearning_AI/NeuralNetworksandDeepLearning/BuildingyourDeepNeuralNetworkStepbyStep/Deep+Neural+Network+-+Application+v3.py
|
1
|
# coding: utf-8
# # Deep Neural Network for Image Classification: Application
#
# When you finish this, you will have finished the last programming assignment of Week 4, and also the last programming assignment of this course!
#
# You will use the functions you implemented in the previous assignment to build a deep network, and apply it to cat vs non-cat classification. Hopefully, you will see an improvement in accuracy relative to your previous logistic regression implementation.
#
# **After this assignment you will be able to:**
# - Build and apply a deep neural network to supervised learning.
#
# Let's get started!
# ## 1 - Packages
# Let's first import all the packages that you will need during this assignment.
# - [numpy](http://www.numpy.org) is the fundamental package for scientific computing with Python.
# - [matplotlib](http://matplotlib.org) is a library to plot graphs in Python.
# - [h5py](http://www.h5py.org) is a common package to interact with a dataset that is stored on an H5 file.
# - [PIL](http://www.pythonware.com/products/pil/) and [scipy](https://www.scipy.org/) are used here to test your model with your own picture at the end.
# - dnn_app_utils provides the functions implemented in the "Building your Deep Neural Network: Step by Step" assignment to this notebook.
# - np.random.seed(1) is used to keep all the random function calls consistent. It will help us grade your work.
# In[9]:
import time
import numpy as np
import h5py
import matplotlib.pyplot as plt
import scipy
from PIL import Image
from scipy import ndimage
from dnn_app_utils_v2 import *
get_ipython().magic('matplotlib inline')
plt.rcParams['figure.figsize'] = (5.0, 4.0) # set default size of plots
plt.rcParams['image.interpolation'] = 'nearest'
plt.rcParams['image.cmap'] = 'gray'
get_ipython().magic('load_ext autoreload')
get_ipython().magic('autoreload 2')
np.random.seed(1)
# ## 2 - Dataset
#
# You will use the same "Cat vs non-Cat" dataset as in "Logistic Regression as a Neural Network" (Assignment 2). The model you had built had 70% test accuracy on classifying cats vs non-cats images. Hopefully, your new model will perform a better!
#
# **Problem Statement**: You are given a dataset ("data.h5") containing:
# - a training set of m_train images labelled as cat (1) or non-cat (0)
# - a test set of m_test images labelled as cat and non-cat
# - each image is of shape (num_px, num_px, 3) where 3 is for the 3 channels (RGB).
#
# Let's get more familiar with the dataset. Load the data by running the cell below.
# In[10]:
train_x_orig, train_y, test_x_orig, test_y, classes = load_data()
# The following code will show you an image in the dataset. Feel free to change the index and re-run the cell multiple times to see other images.
# In[11]:
# Example of a picture
index = 2
plt.imshow(train_x_orig[index])
print ("y = " + str(train_y[0,index]) + ". It's a " + classes[train_y[0,index]].decode("utf-8") + " picture.")
# In[12]:
# Explore your dataset
m_train = train_x_orig.shape[0]
num_px = train_x_orig.shape[1]
m_test = test_x_orig.shape[0]
print ("Number of training examples: " + str(m_train))
print ("Number of testing examples: " + str(m_test))
print ("Each image is of size: (" + str(num_px) + ", " + str(num_px) + ", 3)")
print ("train_x_orig shape: " + str(train_x_orig.shape))
print ("train_y shape: " + str(train_y.shape))
print ("test_x_orig shape: " + str(test_x_orig.shape))
print ("test_y shape: " + str(test_y.shape))
# As usual, you reshape and standardize the images before feeding them to the network. The code is given in the cell below.
#
# <img src="images/imvectorkiank.png" style="width:450px;height:300px;">
#
# <caption><center> <u>Figure 1</u>: Image to vector conversion. <br> </center></caption>
# In[13]:
# Reshape the training and test examples
train_x_flatten = train_x_orig.reshape(train_x_orig.shape[0], -1).T # The "-1" makes reshape flatten the remaining dimensions
test_x_flatten = test_x_orig.reshape(test_x_orig.shape[0], -1).T
# Standardize data to have feature values between 0 and 1.
train_x = train_x_flatten/255.
test_x = test_x_flatten/255.
print ("train_x's shape: " + str(train_x.shape))
print ("test_x's shape: " + str(test_x.shape))
# $12,288$ equals $64 \times 64 \times 3$ which is the size of one reshaped image vector.
# ## 3 - Architecture of your model
# Now that you are familiar with the dataset, it is time to build a deep neural network to distinguish cat images from non-cat images.
#
# You will build two different models:
# - A 2-layer neural network
# - An L-layer deep neural network
#
# You will then compare the performance of these models, and also try out different values for $L$.
#
# Let's look at the two architectures.
#
# ### 3.1 - 2-layer neural network
#
# <img src="images/2layerNN_kiank.png" style="width:650px;height:400px;">
# <caption><center> <u>Figure 2</u>: 2-layer neural network. <br> The model can be summarized as: ***INPUT -> LINEAR -> RELU -> LINEAR -> SIGMOID -> OUTPUT***. </center></caption>
#
# <u>Detailed Architecture of figure 2</u>:
# - The input is a (64,64,3) image which is flattened to a vector of size $(12288,1)$.
# - The corresponding vector: $[x_0,x_1,...,x_{12287}]^T$ is then multiplied by the weight matrix $W^{[1]}$ of size $(n^{[1]}, 12288)$.
# - You then add a bias term and take its relu to get the following vector: $[a_0^{[1]}, a_1^{[1]},..., a_{n^{[1]}-1}^{[1]}]^T$.
# - You then repeat the same process.
# - You multiply the resulting vector by $W^{[2]}$ and add your intercept (bias).
# - Finally, you take the sigmoid of the result. If it is greater than 0.5, you classify it to be a cat.
#
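# A minimal numpy sketch of this forward pass (illustrative only; in the
# assignment you will use the helper functions listed further below):
# ```python
# import numpy as np
# def forward_2layer(x, W1, b1, W2, b2):
#     a1 = np.maximum(0, W1.dot(x) + b1)  # LINEAR -> RELU
#     z2 = W2.dot(a1) + b2                # LINEAR
#     return 1.0 / (1.0 + np.exp(-z2))    # SIGMOID
# ```
#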
# ### 3.2 - L-layer deep neural network
#
# It is hard to represent an L-layer deep neural network with the above representation. However, here is a simplified network representation:
#
# <img src="images/LlayerNN_kiank.png" style="width:650px;height:400px;">
# <caption><center> <u>Figure 3</u>: L-layer neural network. <br> The model can be summarized as: ***[LINEAR -> RELU] $\times$ (L-1) -> LINEAR -> SIGMOID***</center></caption>
#
# <u>Detailed Architecture of figure 3</u>:
# - The input is a (64,64,3) image which is flattened to a vector of size (12288,1).
# - The corresponding vector: $[x_0,x_1,...,x_{12287}]^T$ is then multiplied by the weight matrix $W^{[1]}$ and then you add the intercept $b^{[1]}$. The result is called the linear unit.
# - Next, you take the relu of the linear unit. This process could be repeated several times for each $(W^{[l]}, b^{[l]})$ depending on the model architecture.
# - Finally, you take the sigmoid of the final linear unit. If it is greater than 0.5, you classify it to be a cat.
#
# ### 3.3 - General methodology
#
# As usual you will follow the Deep Learning methodology to build the model:
# 1. Initialize parameters / Define hyperparameters
# 2. Loop for num_iterations:
# a. Forward propagation
# b. Compute cost function
# c. Backward propagation
# d. Update parameters (using parameters, and grads from backprop)
# 3. Use trained parameters to predict labels
#
# Let's now implement those two models!
# ## 4 - Two-layer neural network
#
# **Question**: Use the helper functions you have implemented in the previous assignment to build a 2-layer neural network with the following structure: *LINEAR -> RELU -> LINEAR -> SIGMOID*. The functions you may need and their inputs are:
# ```python
# def initialize_parameters(n_x, n_h, n_y):
# ...
# return parameters
# def linear_activation_forward(A_prev, W, b, activation):
# ...
# return A, cache
# def compute_cost(AL, Y):
# ...
# return cost
# def linear_activation_backward(dA, cache, activation):
# ...
# return dA_prev, dW, db
# def update_parameters(parameters, grads, learning_rate):
# ...
# return parameters
# ```
# In[14]:
### CONSTANTS DEFINING THE MODEL ####
n_x = 12288 # num_px * num_px * 3
n_h = 7
n_y = 1
layers_dims = (n_x, n_h, n_y)
# In[17]:
# GRADED FUNCTION: two_layer_model
def two_layer_model(X, Y, layers_dims, learning_rate = 0.0075, num_iterations = 3000, print_cost=False):
"""
Implements a two-layer neural network: LINEAR->RELU->LINEAR->SIGMOID.
Arguments:
X -- input data, of shape (n_x, number of examples)
Y -- true "label" vector (containing 0 if cat, 1 if non-cat), of shape (1, number of examples)
layers_dims -- dimensions of the layers (n_x, n_h, n_y)
num_iterations -- number of iterations of the optimization loop
learning_rate -- learning rate of the gradient descent update rule
print_cost -- If set to True, this will print the cost every 100 iterations
Returns:
parameters -- a dictionary containing W1, W2, b1, and b2
"""
np.random.seed(1)
grads = {}
costs = [] # to keep track of the cost
m = X.shape[1] # number of examples
(n_x, n_h, n_y) = layers_dims
# Initialize parameters dictionary, by calling one of the functions you'd previously implemented
### START CODE HERE ### (≈ 1 line of code)
parameters = initialize_parameters(n_x, n_h, n_y)
### END CODE HERE ###
# Get W1, b1, W2 and b2 from the dictionary parameters.
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
# Loop (gradient descent)
for i in range(0, num_iterations):
# Forward propagation: LINEAR -> RELU -> LINEAR -> SIGMOID. Inputs: "X, W1, b1". Output: "A1, cache1, A2, cache2".
### START CODE HERE ### (≈ 2 lines of code)
A1, cache1 = linear_activation_forward(X, W1, b1, activation = "relu")
A2, cache2 = linear_activation_forward(A1, W2, b2, activation = "sigmoid")
### END CODE HERE ###
# Compute cost
### START CODE HERE ### (≈ 1 line of code)
cost = compute_cost(A2, Y)
### END CODE HERE ###
# Initializing backward propagation
dA2 = - (np.divide(Y, A2) - np.divide(1 - Y, 1 - A2))
# Backward propagation. Inputs: "dA2, cache2, cache1". Outputs: "dA1, dW2, db2; also dA0 (not used), dW1, db1".
### START CODE HERE ### (≈ 2 lines of code)
dA1, dW2, db2 = linear_activation_backward(dA2, cache2, activation = "sigmoid")
dA0, dW1, db1 = linear_activation_backward(dA1, cache1, activation = "relu")
### END CODE HERE ###
# Set grads['dWl'] to dW1, grads['db1'] to db1, grads['dW2'] to dW2, grads['db2'] to db2
grads['dW1'] = dW1
grads['db1'] = db1
grads['dW2'] = dW2
grads['db2'] = db2
# Update parameters.
### START CODE HERE ### (approx. 1 line of code)
parameters = update_parameters(parameters, grads, learning_rate)
### END CODE HERE ###
# Retrieve W1, b1, W2, b2 from parameters
W1 = parameters["W1"]
b1 = parameters["b1"]
W2 = parameters["W2"]
b2 = parameters["b2"]
        # Print and record the cost every 100 iterations
        if print_cost and i % 100 == 0:
            print("Cost after iteration {}: {}".format(i, np.squeeze(cost)))
            costs.append(cost)
# plot the cost
plt.plot(np.squeeze(costs))
plt.ylabel('cost')
plt.xlabel('iterations (per tens)')
plt.title("Learning rate =" + str(learning_rate))
plt.show()
return parameters
# Run the cell below to train your parameters. See if your model runs. The cost should be decreasing. It may take up to 5 minutes to run 2500 iterations. Check if the "Cost after iteration 0" matches the expected output below, if not click on the square (⬛) on the upper bar of the notebook to stop the cell and try to find your error.
# In[16]:
parameters = two_layer_model(train_x, train_y, layers_dims = (n_x, n_h, n_y), num_iterations = 2500, print_cost=True)
# **Expected Output**:
# <table>
# <tr>
# <td> **Cost after iteration 0**</td>
# <td> 0.6930497356599888 </td>
# </tr>
# <tr>
# <td> **Cost after iteration 100**</td>
# <td> 0.6464320953428849 </td>
# </tr>
# <tr>
# <td> **...**</td>
# <td> ... </td>
# </tr>
# <tr>
# <td> **Cost after iteration 2400**</td>
# <td> 0.048554785628770206 </td>
# </tr>
# </table>
# Good thing you built a vectorized implementation! Otherwise it might have taken 10 times longer to train this.
#
# Now, you can use the trained parameters to classify images from the dataset. To see your predictions on the training and test sets, run the cell below.
# In[4]:
predictions_train = predict(train_x, train_y, parameters)
# **Expected Output**:
# <table>
# <tr>
# <td> **Accuracy**</td>
# <td> 1.0 </td>
# </tr>
# </table>
# In[5]:
predictions_test = predict(test_x, test_y, parameters)
# **Expected Output**:
#
# <table>
# <tr>
# <td> **Accuracy**</td>
# <td> 0.72 </td>
# </tr>
# </table>
# **Note**: You may notice that running the model on fewer iterations (say 1500) gives better accuracy on the test set. This is called "early stopping" and we will talk about it in the next course. Early stopping is a way to prevent overfitting.
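#
# A rough sketch of the early-stopping idea (hypothetical `forward` helper and
# validation split; the details come in the next course):
# ```python
# import copy
# best_cost, best_parameters = float("inf"), None
# for i in range(num_iterations):
#     # ... one forward/backward/update step as above ...
#     val_cost = compute_cost(forward(val_x, parameters), val_y)
#     if val_cost < best_cost:
#         best_cost, best_parameters = val_cost, copy.deepcopy(parameters)
# ```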
#
# Congratulations! It seems that your 2-layer neural network has better performance (72%) than the logistic regression implementation (70%, assignment week 2). Let's see if you can do even better with an $L$-layer model.
# ## 5 - L-layer Neural Network
#
# **Question**: Use the helper functions you have implemented previously to build an $L$-layer neural network with the following structure: *[LINEAR -> RELU]$\times$(L-1) -> LINEAR -> SIGMOID*. The functions you may need and their inputs are:
# ```python
# def initialize_parameters_deep(layer_dims):
# ...
# return parameters
# def L_model_forward(X, parameters):
# ...
# return AL, caches
# def compute_cost(AL, Y):
# ...
# return cost
# def L_model_backward(AL, Y, caches):
# ...
# return grads
# def update_parameters(parameters, grads, learning_rate):
# ...
# return parameters
# ```
# In[6]:
### CONSTANTS ###
layers_dims = [12288, 20, 7, 5, 1] # 5-layer model
# In[7]:
# GRADED FUNCTION: L_layer_model
def L_layer_model(X, Y, layers_dims, learning_rate = 0.0075, num_iterations = 3000, print_cost=False):#lr was 0.009
"""
Implements a L-layer neural network: [LINEAR->RELU]*(L-1)->LINEAR->SIGMOID.
Arguments:
X -- data, numpy array of shape (number of examples, num_px * num_px * 3)
Y -- true "label" vector (containing 0 if cat, 1 if non-cat), of shape (1, number of examples)
layers_dims -- list containing the input size and each layer size, of length (number of layers + 1).
learning_rate -- learning rate of the gradient descent update rule
num_iterations -- number of iterations of the optimization loop
print_cost -- if True, it prints the cost every 100 steps
Returns:
parameters -- parameters learnt by the model. They can then be used to predict.
"""
np.random.seed(1)
costs = [] # keep track of cost
# Parameters initialization.
### START CODE HERE ###
parameters = initialize_parameters_deep(layers_dims)
### END CODE HERE ###
# Loop (gradient descent)
for i in range(0, num_iterations):
# Forward propagation: [LINEAR -> RELU]*(L-1) -> LINEAR -> SIGMOID.
### START CODE HERE ### (≈ 1 line of code)
AL, caches = L_model_forward(X, parameters)
### END CODE HERE ###
# Compute cost.
### START CODE HERE ### (≈ 1 line of code)
cost = compute_cost(AL, Y)
### END CODE HERE ###
# Backward propagation.
### START CODE HERE ### (≈ 1 line of code)
grads = L_model_backward(AL, Y, caches)
### END CODE HERE ###
# Update parameters.
### START CODE HERE ### (≈ 1 line of code)
parameters = update_parameters(parameters, grads, learning_rate)
### END CODE HERE ###
        # Print and record the cost every 100 iterations
        if print_cost and i % 100 == 0:
            print ("Cost after iteration %i: %f" %(i, cost))
            costs.append(cost)
# plot the cost
plt.plot(np.squeeze(costs))
plt.ylabel('cost')
plt.xlabel('iterations (per tens)')
plt.title("Learning rate =" + str(learning_rate))
plt.show()
return parameters
# You will now train the model as a 5-layer neural network.
#
# Run the cell below to train your model. The cost should decrease on every iteration. It may take up to 5 minutes to run 2500 iterations. Check if the "Cost after iteration 0" matches the expected output below, if not click on the square (⬛) on the upper bar of the notebook to stop the cell and try to find your error.
# In[8]:
parameters = L_layer_model(train_x, train_y, layers_dims, num_iterations = 2500, print_cost = True)
# **Expected Output**:
# <table>
# <tr>
# <td> **Cost after iteration 0**</td>
# <td> 0.771749 </td>
# </tr>
# <tr>
# <td> **Cost after iteration 100**</td>
# <td> 0.672053 </td>
# </tr>
# <tr>
# <td> **...**</td>
# <td> ... </td>
# </tr>
# <tr>
# <td> **Cost after iteration 2400**</td>
# <td> 0.092878 </td>
# </tr>
# </table>
# In[ ]:
pred_train = predict(train_x, train_y, parameters)
# <table>
# <tr>
# <td>
# **Train Accuracy**
# </td>
# <td>
# 0.985645933014
# </td>
# </tr>
# </table>
# In[ ]:
pred_test = predict(test_x, test_y, parameters)
# **Expected Output**:
#
# <table>
# <tr>
# <td> **Test Accuracy**</td>
# <td> 0.8 </td>
# </tr>
# </table>
# Congrats! It seems that your 5-layer neural network has better performance (80%) than your 2-layer neural network (72%) on the same test set.
#
# This is good performance for this task. Nice job!
#
# Though in the next course on "Improving deep neural networks" you will learn how to obtain even higher accuracy by systematically searching for better hyperparameters (learning_rate, layers_dims, num_iterations, and others you'll also learn in the next course).
# ## 6) Results Analysis
#
# First, let's take a look at some images the L-layer model labeled incorrectly. This will show a few mislabeled images.
# In[ ]:
print_mislabeled_images(classes, test_x, test_y, pred_test)
# **A few type of images the model tends to do poorly on include:**
# - Cat body in an unusual position
# - Cat appears against a background of a similar color
# - Unusual cat color and species
# - Camera Angle
# - Brightness of the picture
# - Scale variation (cat is very large or small in image)
# ## 7) Test with your own image (optional/ungraded exercise) ##
#
# Congratulations on finishing this assignment. You can use your own image and see the output of your model. To do that:
# 1. Click on "File" in the upper bar of this notebook, then click "Open" to go on your Coursera Hub.
# 2. Add your image to this Jupyter Notebook's directory, in the "images" folder
# 3. Change your image's name in the following code
# 4. Run the code and check if the algorithm is right (1 = cat, 0 = non-cat)!
# In[39]:
## START CODE HERE ##
my_image = "my_image.jpg" # change this to the name of your image file
my_label_y = [1] # the true class of your image (1 -> cat, 0 -> non-cat)
## END CODE HERE ##
fname = "images/" + my_image
image = np.array(ndimage.imread(fname, flatten=False))
my_image = scipy.misc.imresize(image, size=(num_px,num_px)).reshape((num_px*num_px*3,1))
my_predicted_image = predict(my_image, my_label_y, parameters)
plt.imshow(image)
print ("y = " + str(np.squeeze(my_predicted_image)) + ", your L-layer model predicts a \"" + classes[int(np.squeeze(my_predicted_image)),].decode("utf-8") + "\" picture.")
# **References**:
#
# - for auto-reloading external module: http://stackoverflow.com/questions/1907993/autoreload-of-modules-in-ipython
# In[ ]:
|
dcunited001/gtfs-editor
|
refs/heads/master
|
scripts/load.py
|
7
|
#!/usr/bin/python
# bulk-load feeds to the GTFS Editor
from sys import argv, exit
from getpass import getpass
import urllib2
from urllib import urlencode
from cookielib import CookieJar
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
server = argv[-1]
if len(argv) < 3:
    print 'usage: %s feed.zip [feed2.zip feed3.zip . . . feedn.zip] http://localhost:9000/' % argv[0]
    exit(1)
# log in to the server
print 'Please authenticate'
uname = raw_input('username: ')
pw = getpass('password: ')
# strip trailing slash to normalize url
server = server if not server.endswith('/') else server[:-1]
# cookie handling
# http://www.techchorus.net/using-cookie-jar-urllib2
# and http://stackoverflow.com/questions/1690446
cj = CookieJar()
opener = register_openers()
opener.add_handler(urllib2.HTTPCookieProcessor(cj))
# authenticate
opener.open(server + '/secure/authenticate', urlencode(dict(username=uname, password=pw)))
# load each feed
for feed in argv[1:-1]:
print 'processing feed %s' % feed,
# upload the feed
data, head = multipart_encode(dict(gtfsUpload=open(feed, 'rb')))
req = urllib2.Request(server + '/application/uploadgtfs', data, head)
opener.open(req)
print 'done'
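# Example invocation (hypothetical feed names):
#   ./load.py feed1.zip feed2.zip http://localhost:9000/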
|
NicovincX2/Python-3.5
|
refs/heads/master
|
Physique/Mesure physique/Traitement du signal/Filtre (électronique)/bode.py
|
1
|
# -*- coding: utf-8 -*-
'''
Small module gathering the Bode-diagram plotting routine, so that the
surrounding code can focus on everything around it.
'''
import matplotlib.pyplot as plt # Pour les dessins
def diag_bode(f, GdB, phase, out_file, titre=None):
    '''Draws a Bode diagram given the matching frequency, gain and phase
    arrays. The result is written to the file 'out_file'.'''
    plt.figure()                   # Open the figure
    plt.subplot(211)               # First subplot
    if titre:
        plt.title(titre)           # Add the title if requested
    plt.semilogx(f, GdB)           # Semi-log x plot for the gain
    plt.grid(which='both')         # Add the grid
    plt.ylabel(r'Gain (dB)')       # Vertical label
    plt.subplot(212)               # Second subplot
    plt.semilogx(f, phase)         # Semi-log x plot for the phase
    plt.ylabel(r'Phase (deg)')     # y label
    plt.xlabel(r'Frequence (Hz)')  # and the shared x label
    plt.grid(which='both')         # Add the grid
    plt.savefig(out_file)          # Save the file
    plt.close()                    # and close the figure
# The signal module has a dedicated "bode" function that we are going to use
from scipy import signal
def second_ordre(f0, Q, filename='defaut.png', type='PBs', f=None):
    '''Small helper that makes the "bode" function of the "signal" module
    easier to use for second-order filters. Just provide the natural
    frequency f0 and the quality factor Q to get the plot. Other parameters:
    * filename: the output file name ('defaut.png' by default)
    * type: the filter type, chosen among 'PBs' (low-pass),
    'PBd' (band-pass) and 'PHt' (high-pass). You can also define the
    numerator yourself as a list of several elements, highest degree first.
    NB: the '1.01' in the definitions is only there to effortlessly improve
    the graphical rendering.
    * f: the frequencies to sample (if None, the function picks a suitable
    interval on its own).
    '''
den = [1. / f0**2, 1. /
(Q * f0), 1] # Le dénominateur de la fonction de transfert
if type == 'PBs':
num = [1.01] # Le numérateur pour un passe-bas
elif type == 'PBd':
num = [1.01 / (Q * f0), 0] # pour un passe-bande
elif type == 'PHt':
num = [1.01 / f0**2, 0, 0] # pour un passe-haut
else:
# sinon, c'est l'utilisateur qui le définit.
num = type
# Définition de la fonction de transfert
s1 = signal.lti(num, den)
# Obtention des valeurs adéquates
f, GdB, phase = signal.bode(s1, f)
# Dessin du diagramme proprement dit
diag_bode(f, GdB, phase, filename)
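# A minimal usage sketch (hypothetical values): Bode diagram of a band-pass
# filter centred on 1 kHz with quality factor 10.
if __name__ == '__main__':
    second_ordre(1e3, 10, filename='passe_bande.png', type='PBd')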
|
sc0ttkclark/elasticsearch
|
refs/heads/master
|
dev-tools/upload-s3.py
|
255
|
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import sys
import argparse
try:
import boto.s3
except ImportError:
raise RuntimeError("""
S3 upload requires boto to be installed
Use one of:
'pip install -U boto'
'apt-get install python-boto'
'easy_install boto'
""")
import boto.s3
def list_buckets(conn):
return conn.get_all_buckets()
def upload_s3(conn, path, key, file, bucket):
print 'Uploading %s to Amazon S3 bucket %s/%s' % \
(file, bucket, os.path.join(path, key))
def percent_cb(complete, total):
sys.stdout.write('.')
sys.stdout.flush()
bucket = conn.create_bucket(bucket)
k = bucket.new_key(os.path.join(path, key))
k.set_contents_from_filename(file, cb=percent_cb, num_cb=100)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Uploads files to Amazon S3')
    parser.add_argument('--file', '-f', metavar='path to file',
                        help='the file to upload', required=True)
parser.add_argument('--bucket', '-b', metavar='B42', default='download.elasticsearch.org',
help='The S3 Bucket to upload to')
parser.add_argument('--path', '-p', metavar='elasticsearch/elasticsearch', default='elasticsearch/elasticsearch',
help='The key path to use')
parser.add_argument('--key', '-k', metavar='key', default=None,
help='The key - uses the file name as default key')
args = parser.parse_args()
if args.key:
key = args.key
else:
key = os.path.basename(args.file)
connection = boto.connect_s3()
    upload_s3(connection, args.path, key, args.file, args.bucket)
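# Example invocation (hypothetical artifact name), assuming boto can find AWS
# credentials in the environment or in ~/.boto:
#
#   python upload-s3.py --file elasticsearch-1.4.0.zip
#
# With the defaults above this uploads to
# s3://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.4.0.zip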
|
ericblau/ipf-xsede
|
refs/heads/master
|
ipf/glue2/moab.py
|
1
|
###############################################################################
# Copyright 2011-2014 The University of Texas at Austin #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
###############################################################################
import subprocess
import datetime
import time
import os
import re
import sys
import xml.dom.minidom
from ipf.dt import *
from ipf.error import StepError
from . import computing_activity
##############################################################################################################
class ComputingActivitiesStep(computing_activity.ComputingActivitiesStep):
def __init__(self):
computing_activity.ComputingActivitiesStep.__init__(self)
self.requires.append(computing_activity.ComputingActivities)
self._acceptParameter("showq","the path to the Moab showq program (default 'showq')",False)
self.sched_name = "Moab"
def _run(self):
rm_jobs = {}
for job in self._getInput(computing_activity.ComputingActivities).activities:
rm_jobs[job.LocalIDFromManager] = job
moab_jobs = []
try:
            self._addJobs("-c",moab_jobs) # get recently completed jobs - no big deal if it fails
except StepError:
pass
self._addJobs("",moab_jobs) # get the rest of the jobs
# use jobs from Moab to order and correct jobs received from the resource manager
for pos in range(0,len(moab_jobs)):
moab_job = moab_jobs[pos]
try:
job = rm_jobs[moab_job.LocalIDFromManager]
job.position = pos
if job.State[0] != moab_job.State[0]:
job.State[0] = moab_job.State[0]
job.State.append(moab_job.State[1])
except KeyError:
pass
jobs = list(rm_jobs.values())
jobs = sorted(jobs,key=self._jobPosition)
jobs = sorted(jobs,key=self._jobStateKey)
return jobs
def _jobPosition(self, job):
try:
return job.position
except AttributeError:
self.warning("didn't find queue position for job %s in state %s" % (job.LocalIDFromManager,job.State))
return sys.maxsize
def _addJobs(self, flag, jobs):
try:
showq = self.params["showq"]
except KeyError:
showq = "showq"
cmd = showq + " "+flag+" --xml"
self.debug("running "+cmd)
status, output = subprocess.getstatusoutput(cmd)
if status != 0:
raise StepError("showq failed: "+output+"\n")
doc = xml.dom.minidom.parseString(output)
now = datetime.datetime.now(localtzoffset())
procsPerNode = 1.0
for node in doc.firstChild.childNodes:
if node.nodeName == "cluster":
procsPerNode = float(node.getAttribute("LocalUpProcs")) / float(node.getAttribute("LocalUpNodes"))
procsPerNode = round(procsPerNode,0)
if node.nodeName == "queue":
status = node.getAttribute("option")
for jobElement in node.childNodes:
job = self._getJob(jobElement,procsPerNode,status)
if self._includeQueue(job.Queue,True):
if job.EndTime == None:
jobs.append(job)
else:
# only provide info on the last 15 mins of completed jobs
if time.mktime(now.timetuple()) - time.mktime(job.EndTime.timetuple()) < 15 * 60:
jobs.append(job)
doc.unlink()
def _getJob(self, jobElement, procsPerNode, status):
job = computing_activity.ComputingActivity()
job.LocalIDFromManager = jobElement.getAttribute("JobID")
job.Name = jobElement.getAttribute("JobName") # showing as NONE
job.LocalOwner = jobElement.getAttribute("User")
job.Extension["LocalAccount"] = jobElement.getAttribute("Account")
job.Queue = jobElement.getAttribute("Class")
# using status is more accurate than using job State since Idle jobs can be blocked
if status == "active":
job.State = [computing_activity.ComputingActivity.STATE_RUNNING]
elif status == "completed":
job.State = [computing_activity.ComputingActivity.STATE_FINISHED]
elif status == "eligible":
job.State = [computing_activity.ComputingActivity.STATE_PENDING]
elif status == "blocked":
job.State = [computing_activity.ComputingActivity.STATE_HELD]
else:
            self.warning("found unknown Moab option '%s'" % status)
job.State = [computing_activity.ComputingActivity.STATE_UNKNOWN]
job.State.append("moab:"+status)
epoch = float(jobElement.getAttribute("SubmissionTime"))
job.SubmissionTime = datetime.datetime.fromtimestamp(epoch,localtzoffset())
job.ComputingManagerSubmissionTime = job.SubmissionTime
epoch = jobElement.getAttribute("StartTime")
if (epoch != "") and (epoch != "0"):
job.StartTime = datetime.datetime.fromtimestamp(float(epoch),localtzoffset())
job.RequestedSlots = int(jobElement.getAttribute("ReqProcs"))
epoch = jobElement.getAttribute("CompletionTime")
if (epoch != "") and (epoch != "0"):
job.ComputingManagerEndTime = datetime.datetime.fromtimestamp(float(epoch),localtzoffset())
wallTime = jobElement.getAttribute("ReqAWDuration")
if wallTime != "":
job.RequestedTotalWallTime = int(wallTime) * job.RequestedSlots
usedWallTime = jobElement.getAttribute("AWDuration")
if usedWallTime != "":
job.UsedTotalWallTime = int(usedWallTime) * job.RequestedSlots
exitCode = jobElement.getAttribute("CompletionCode")
if exitCode != "":
job.ComputingManagerExitCode = exitCode
# don't see used CPU time anywhere
#job.UsedTotalCPUTime =
return job
def _getDuration(self, dStr):
"""Format is Days+Hours:Minutes:Seconds (XXX Seconds)"""
start = dStr.find("(")
end = dStr.find(" Seconds")
if start >= 0 and end > start:
            return int(dStr[start + 1:end])
monthDict = {"Jan":1, "Feb":2, "Mar":3, "Apr":4, "May":5, "Jun":6,
"Jul":7, "Aug":8, "Sep":9, "Oct":10, "Nov":11, "Dec":12}
def _getDateTime(self, aStr):
# Example: Thu 04 Dec 2008 10:27:23 AM EST
dayOfWeek = aStr[:3]
day = int(aStr[4:7])
month = aStr[7:10]
year = int(aStr[11:15])
hour = int(aStr[16:18])
minute = int(aStr[19:21])
second = int(aStr[22:24])
ampm = aStr[25:27]
if ampm == "PM" and hour < 12:
hour = hour + 12
if ampm == "AM" and hour == 12:
hour = 0
# assume current time zone
return datetime.datetime(year=year,
month=self.monthDict[month],
day=day,
hour=hour,
minute=minute,
second=second,
tzinfo=localtzoffset())
##############################################################################################################
# don't see a Moab log file with job information, so no update class.
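# For reference, the duration format handled by _getDuration above is
# "Days+Hours:Minutes:Seconds (XXX Seconds)"; for a hypothetical value such as
# "1+02:03:04 (93784 Seconds)" it returns the integer 93784.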
|
SaschaMester/delicium
|
refs/heads/master
|
build/android/pylib/instrumentation/instrumentation_test_instance_test.py
|
49
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for instrumentation.TestRunner."""
# pylint: disable=W0212
import os
import sys
import unittest
from pylib import constants
from pylib.base import base_test_result
from pylib.instrumentation import instrumentation_test_instance
sys.path.append(os.path.join(
constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
import mock # pylint: disable=F0401
class InstrumentationTestInstanceTest(unittest.TestCase):
def setUp(self):
options = mock.Mock()
options.tool = ''
def testGenerateTestResults_noStatus(self):
results = instrumentation_test_instance.GenerateTestResults(
None, None, [], 0, 1000)
self.assertEqual([], results)
def testGenerateTestResults_testPassed(self):
statuses = [
(1, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(0, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
]
results = instrumentation_test_instance.GenerateTestResults(
None, None, statuses, 0, 1000)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
def testGenerateTestResults_testSkipped_true(self):
statuses = [
(1, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(0, {
'test_skipped': 'true',
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(0, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
]
results = instrumentation_test_instance.GenerateTestResults(
None, None, statuses, 0, 1000)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
def testGenerateTestResults_testSkipped_false(self):
statuses = [
(1, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(0, {
'test_skipped': 'false',
}),
(0, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
]
results = instrumentation_test_instance.GenerateTestResults(
None, None, statuses, 0, 1000)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
def testGenerateTestResults_testFailed(self):
statuses = [
(1, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
(-2, {
'class': 'test.package.TestClass',
'test': 'testMethod',
}),
]
results = instrumentation_test_instance.GenerateTestResults(
None, None, statuses, 0, 1000)
self.assertEqual(1, len(results))
self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
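# For reference, the instrumentation status protocol exercised above: a status
# code of 1 marks a test start, 0 a clean finish (PASS), a negative code such
# as -2 a failure (FAIL), and a 'test_skipped': 'true' bundle entry turns the
# result into SKIP.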
if __name__ == '__main__':
unittest.main(verbosity=2)
|
tboyce021/home-assistant
|
refs/heads/dev
|
homeassistant/components/tibber/sensor.py
|
7
|
"""Support for Tibber sensors."""
import asyncio
from datetime import timedelta
import logging
from random import randrange
import aiohttp
from homeassistant.components.sensor import DEVICE_CLASS_POWER
from homeassistant.const import POWER_WATT
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle, dt as dt_util
from .const import DOMAIN as TIBBER_DOMAIN, MANUFACTURER
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:currency-usd"
SCAN_INTERVAL = timedelta(minutes=1)
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
PARALLEL_UPDATES = 0
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up the Tibber sensor."""
tibber_connection = hass.data.get(TIBBER_DOMAIN)
dev = []
for home in tibber_connection.get_homes(only_active=False):
try:
await home.update_info()
except asyncio.TimeoutError as err:
_LOGGER.error("Timeout connecting to Tibber home: %s ", err)
raise PlatformNotReady() from err
except aiohttp.ClientError as err:
_LOGGER.error("Error connecting to Tibber home: %s ", err)
raise PlatformNotReady() from err
if home.has_active_subscription:
dev.append(TibberSensorElPrice(home))
if home.has_real_time_consumption:
dev.append(TibberSensorRT(home))
async_add_entities(dev, True)
class TibberSensor(Entity):
"""Representation of a generic Tibber sensor."""
def __init__(self, tibber_home):
"""Initialize the sensor."""
self._tibber_home = tibber_home
self._last_updated = None
self._state = None
self._is_available = False
self._device_state_attributes = {}
self._name = tibber_home.info["viewer"]["home"]["appNickname"]
if self._name is None:
self._name = tibber_home.info["viewer"]["home"]["address"].get(
"address1", ""
)
self._spread_load_constant = randrange(3600)
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._device_state_attributes
@property
def model(self):
"""Return the model of the sensor."""
return None
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def device_id(self):
"""Return the ID of the physical device this sensor is part of."""
home = self._tibber_home.info["viewer"]["home"]
return home["meteringPointData"]["consumptionEan"]
@property
def device_info(self):
"""Return the device_info of the device."""
device_info = {
"identifiers": {(TIBBER_DOMAIN, self.device_id)},
"name": self.name,
"manufacturer": MANUFACTURER,
}
if self.model is not None:
device_info["model"] = self.model
return device_info
class TibberSensorElPrice(TibberSensor):
"""Representation of a Tibber sensor for el price."""
async def async_update(self):
"""Get the latest data and updates the states."""
now = dt_util.now()
if (
self._tibber_home.current_price_total
and self._last_updated
and self._last_updated.hour == now.hour
and self._tibber_home.last_data_timestamp
):
return
if (
not self._tibber_home.last_data_timestamp
or (self._tibber_home.last_data_timestamp - now).total_seconds()
< 5 * 3600 + self._spread_load_constant
or not self._is_available
):
_LOGGER.debug("Asking for new data")
await self._fetch_data()
res = self._tibber_home.current_price_data()
self._state, price_level, self._last_updated = res
self._device_state_attributes["price_level"] = price_level
attrs = self._tibber_home.current_attributes()
self._device_state_attributes.update(attrs)
self._is_available = self._state is not None
@property
def available(self):
"""Return True if entity is available."""
return self._is_available
@property
def name(self):
"""Return the name of the sensor."""
return f"Electricity price {self._name}"
@property
def model(self):
"""Return the model of the sensor."""
return "Price Sensor"
@property
def icon(self):
"""Return the icon to use in the frontend."""
return ICON
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return self._tibber_home.price_unit
@property
def unique_id(self):
"""Return a unique ID."""
return self.device_id
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def _fetch_data(self):
_LOGGER.debug("Fetching data")
try:
await self._tibber_home.update_info_and_price_info()
except (asyncio.TimeoutError, aiohttp.ClientError):
return
data = self._tibber_home.info["viewer"]["home"]
self._device_state_attributes["app_nickname"] = data["appNickname"]
self._device_state_attributes["grid_company"] = data["meteringPointData"][
"gridCompany"
]
self._device_state_attributes["estimated_annual_consumption"] = data[
"meteringPointData"
]["estimatedAnnualConsumption"]
class TibberSensorRT(TibberSensor):
"""Representation of a Tibber sensor for real time consumption."""
async def async_added_to_hass(self):
"""Start listen for real time data."""
await self._tibber_home.rt_subscribe(self.hass.loop, self._async_callback)
async def _async_callback(self, payload):
"""Handle received data."""
errors = payload.get("errors")
if errors:
_LOGGER.error(errors[0])
return
data = payload.get("data")
if data is None:
return
live_measurement = data.get("liveMeasurement")
if live_measurement is None:
return
self._state = live_measurement.pop("power", None)
for key, value in live_measurement.items():
if value is None:
continue
self._device_state_attributes[key] = value
self.async_write_ha_state()
@property
def available(self):
"""Return True if entity is available."""
return self._tibber_home.rt_subscription_running
@property
def model(self):
"""Return the model of the sensor."""
return "Tibber Pulse"
@property
def name(self):
"""Return the name of the sensor."""
return f"Real time consumption {self._name}"
@property
def should_poll(self):
"""Return the polling state."""
return False
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return POWER_WATT
@property
def unique_id(self):
"""Return a unique ID."""
return f"{self.device_id}_rt_consumption"
@property
def device_class(self):
"""Return the device class of the sensor."""
return DEVICE_CLASS_POWER
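# For reference, a real-time payload as consumed by _async_callback above has
# roughly this shape (hypothetical values; only "power" is certain from the
# code):
#   {"data": {"liveMeasurement": {"power": 1420, "lastMeterConsumption": 3.2}}}
# "power" becomes the sensor state and the remaining keys become state
# attributes.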
|
eLBati/odoo
|
refs/heads/master
|
addons/portal_project/project.py
|
103
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-TODAY OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class portal_project(osv.Model):
""" Update of mail_mail class, to add the signin URL to notifications. """
_inherit = 'project.project'
def _get_visibility_selection(self, cr, uid, context=None):
""" Override to add portal option. """
selection = super(portal_project, self)._get_visibility_selection(cr, uid, context=context)
idx = [item[0] for item in selection].index('public')
selection.insert((idx + 1), ('portal', 'Customer related project: visible through portal'))
return selection
# return [('public', 'All Users'),
# ('portal', 'Portal Users and Employees'),
# ('employees', 'Employees Only'),
# ('followers', 'Followers Only')]
|
zubair-arbi/edx-platform
|
refs/heads/master
|
common/test/acceptance/fixtures/edxnotes.py
|
48
|
"""
Tools for creating edxnotes content fixture data.
"""
import json
import factory
import requests
from . import EDXNOTES_STUB_URL
class Range(factory.Factory):
class Meta(object):
model = dict
start = "/div[1]/p[1]"
end = "/div[1]/p[1]"
startOffset = 0
endOffset = 8
class Note(factory.Factory):
class Meta(object):
model = dict
user = "dummy-user"
usage_id = "dummy-usage-id"
course_id = "dummy-course-id"
text = "dummy note text"
quote = "dummy note quote"
ranges = [Range()]
class EdxNotesFixtureError(Exception):
"""
    Error occurred while installing an edxnotes fixture.
"""
pass
class EdxNotesFixture(object):
notes = []
def create_notes(self, notes_list):
self.notes = notes_list
return self
def install(self):
"""
Push the data to the stub EdxNotes service.
"""
response = requests.post(
'{}/create_notes'.format(EDXNOTES_STUB_URL),
data=json.dumps(self.notes)
)
if not response.ok:
raise EdxNotesFixtureError(
"Could not create notes {0}. Status was {1}".format(
json.dumps(self.notes), response.status_code
)
)
return self
def cleanup(self):
"""
Cleanup the stub EdxNotes service.
"""
self.notes = []
response = requests.put('{}/cleanup'.format(EDXNOTES_STUB_URL))
if not response.ok:
raise EdxNotesFixtureError(
"Could not cleanup EdxNotes service {0}. Status was {1}".format(
json.dumps(self.notes), response.status_code
)
)
return self
|
EricSchles/pattern
|
refs/heads/master
|
pattern/text/xx/inflect.py
|
1
|
#### PATTERN | XX | INFLECT ########################################################################
# -*- coding: utf-8 -*-
# Copyright (c)
# Author:
# License:
# http://www.clips.ua.ac.be/pages/pattern
####################################################################################################
# Template for pattern.xx.inflect with functions for word inflection in language XXXXX.
# inflection is the modification of a word to express different grammatical categories,
# such as tense, mood, voice, aspect, person, number, gender and case.
# Conjugation is the inflection of verbs.
# To construct a lemmatizer for pattern.xx.parser.find_lemmata(),
# we need functions for noun singularization, verb infinitives, predicate adjectives, etc.
import os
import sys
import re
try:
MODULE = os.path.dirname(os.path.abspath(__file__))
except:
MODULE = ""
sys.path.insert(0, os.path.join(MODULE, "..", "..", "..", ".."))
# Import Verbs base class and verb tenses.
from pattern.text import Verbs as _Verbs
from pattern.text import (
INFINITIVE, PRESENT, PAST, FUTURE,
FIRST, SECOND, THIRD,
SINGULAR, PLURAL, SG, PL,
PROGRESSIVE,
PARTICIPLE
)
sys.path.pop(0)
VERB, NOUN, ADJECTIVE, ADVERB = "VB", "NN", "JJ", "RB"
VOWELS = "aeiouy"
re_vowel = re.compile(r"a|e|i|o|u|y", re.I)
is_vowel = lambda ch: ch in VOWELS
#### ARTICLE #######################################################################################
# Inflection gender.
MASCULINE, FEMININE, NEUTER, PLURAL = \
MALE, FEMALE, NEUTRAL, PLURAL = \
M, F, N, PL = "m", "f", "n", "p"
def definite_article(word):
""" Returns the definite article for a given word.
"""
return "the"
def indefinite_article(word):
""" Returns the indefinite article for a given word.
"""
return "a"
DEFINITE, INDEFINITE = \
"definite", "indefinite"
def article(word, function=INDEFINITE):
""" Returns the indefinite or definite article for the given word.
"""
return function == DEFINITE \
and definite_article(word) \
or indefinite_article(word)
_article = article
def referenced(word, article=INDEFINITE):
""" Returns a string with the article + the word.
"""
return "%s %s" % (_article(word, article), word)
#### PLURALIZE ######################################################################################
def pluralize(word, pos=NOUN, custom={}):
""" Returns the plural of a given word.
"""
return word + "s"
#### SINGULARIZE ###################################################################################
def singularize(word, pos=NOUN, custom={}):
""" Returns the singular of a given word.
"""
return word.rstrip("s")
#### VERB CONJUGATION ##############################################################################
# The verb table was trained on CELEX and contains the top 2000 most frequent verbs.
class Verbs(_Verbs):
def __init__(self):
_Verbs.__init__(self, os.path.join(MODULE, "xx-verbs.txt"),
language = "xx",
# The order of tenses in the given file; see pattern.text.__init__.py => Verbs.
format = [0, 1, 2, 3, 7, 8, 17, 18, 19, 23, 25, 24, 16, 9, 10, 11, 15, 33, 26, 27, 28, 32],
default = {}
)
def find_lemma(self, verb):
""" Returns the base form of the given inflected verb, using a rule-based approach.
"""
return verb
def find_lexeme(self, verb):
""" For a regular verb (base form), returns the forms using a rule-based approach.
"""
return []
verbs = Verbs()
conjugate, lemma, lexeme, tenses = \
verbs.conjugate, verbs.lemma, verbs.lexeme, verbs.tenses
#### ATTRIBUTIVE & PREDICATIVE #####################################################################
def attributive(adjective):
""" For a predicative adjective, returns the attributive form.
"""
return adjective
def predicative(adjective):
""" Returns the predicative adjective.
"""
return adjective
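# A minimal usage sketch of the article helpers above (placeholder behavior,
# since this template always returns "a"/"the"):
#   referenced("university")     # => "a university"
#   referenced("sun", DEFINITE)  # => "the sun"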
|
Abando/esupa
|
refs/heads/develop
|
esupa/__init__.py
|
2
|
# -*- coding: utf-8 -*-
#
# Copyright 2015, Ekevoo.com.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
#
__author__ = 'Ekevoo'
__copyright__ = 'Copyright 2015, Ekevoo.com'
__license__ = 'Apache License, Version 2.0'
__version__ = '0.4' # PEP386
# https://docs.djangoproject.com/en/1.8/ref/applications/#application-configuration
default_app_config = __name__ + '.apps.EsupaConfig'
|
cslzchen/osf.io
|
refs/heads/develop
|
api_tests/conftest.py
|
10
|
from __future__ import print_function
import pytest
from website.app import init_app
from tests.json_api_test_app import JSONAPITestApp
@pytest.fixture()
def app():
return JSONAPITestApp()
# NOTE: autouse so that ADDONS_REQUESTED gets set on website.settings
@pytest.fixture(autouse=True, scope='session')
def app_init():
init_app(routes=False, set_backends=False)
|
ebar0n/django
|
refs/heads/master
|
django/db/migrations/operations/fields.py
|
5
|
from django.core.exceptions import FieldDoesNotExist
from django.db.models.fields import NOT_PROVIDED
from django.utils.functional import cached_property
from .base import Operation
from .utils import is_referenced_by_foreign_key
class FieldOperation(Operation):
def __init__(self, model_name, name):
self.model_name = model_name
self.name = name
@cached_property
def model_name_lower(self):
return self.model_name.lower()
@cached_property
def name_lower(self):
return self.name.lower()
def is_same_model_operation(self, operation):
return self.model_name_lower == operation.model_name_lower
def is_same_field_operation(self, operation):
return self.is_same_model_operation(operation) and self.name_lower == operation.name_lower
def references_model(self, name, app_label=None):
return name.lower() == self.model_name_lower
def references_field(self, model_name, name, app_label=None):
return self.references_model(model_name) and name.lower() == self.name_lower
def reduce(self, operation, in_between, app_label=None):
return (
super().reduce(operation, in_between, app_label=app_label) or
not operation.references_field(self.model_name, self.name, app_label)
)
class AddField(FieldOperation):
"""Add a field to a model."""
def __init__(self, model_name, name, field, preserve_default=True):
self.field = field
self.preserve_default = preserve_default
super().__init__(model_name, name)
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'name': self.name,
'field': self.field,
}
if self.preserve_default is not True:
kwargs['preserve_default'] = self.preserve_default
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
# If preserve default is off, don't use the default for future state
if not self.preserve_default:
field = self.field.clone()
field.default = NOT_PROVIDED
else:
field = self.field
state.models[app_label, self.model_name_lower].fields.append((self.name, field))
# Delay rendering of relationships if it's not a relational field
delay = not field.is_relation
state.reload_model(app_label, self.model_name_lower, delay=delay)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
field = to_model._meta.get_field(self.name)
if not self.preserve_default:
field.default = self.field.default
schema_editor.add_field(
from_model,
field,
)
if not self.preserve_default:
field.default = NOT_PROVIDED
def database_backwards(self, app_label, schema_editor, from_state, to_state):
from_model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, from_model):
schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))
def describe(self):
return "Add field %s to %s" % (self.name, self.model_name)
def reduce(self, operation, in_between, app_label=None):
if isinstance(operation, FieldOperation) and self.is_same_field_operation(operation):
if isinstance(operation, AlterField):
return [
AddField(
model_name=self.model_name,
name=operation.name,
field=operation.field,
),
]
elif isinstance(operation, RemoveField):
return []
elif isinstance(operation, RenameField):
return [
AddField(
model_name=self.model_name,
name=operation.new_name,
field=self.field,
),
]
return super().reduce(operation, in_between, app_label=app_label)
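# For illustration: during migration optimization, an AddField followed by an
# AlterField of the same field collapses into a single AddField carrying the
# altered field, and an AddField followed by a RemoveField cancels out
# entirely, as implemented in AddField.reduce above.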
class RemoveField(FieldOperation):
"""Remove a field from a model."""
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'name': self.name,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
new_fields = []
old_field = None
for name, instance in state.models[app_label, self.model_name_lower].fields:
if name != self.name:
new_fields.append((name, instance))
else:
old_field = instance
state.models[app_label, self.model_name_lower].fields = new_fields
# Delay rendering of relationships if it's not a relational field
delay = not old_field.is_relation
state.reload_model(app_label, self.model_name_lower, delay=delay)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
from_model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, from_model):
schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))
def database_backwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
schema_editor.add_field(from_model, to_model._meta.get_field(self.name))
def describe(self):
return "Remove field %s from %s" % (self.name, self.model_name)
class AlterField(FieldOperation):
"""
Alter a field's database column (e.g. null, max_length) to the provided
new field.
"""
def __init__(self, model_name, name, field, preserve_default=True):
self.field = field
self.preserve_default = preserve_default
super().__init__(model_name, name)
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'name': self.name,
'field': self.field,
}
if self.preserve_default is not True:
kwargs['preserve_default'] = self.preserve_default
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
if not self.preserve_default:
field = self.field.clone()
field.default = NOT_PROVIDED
else:
field = self.field
state.models[app_label, self.model_name_lower].fields = [
(n, field if n == self.name else f)
for n, f in
state.models[app_label, self.model_name_lower].fields
]
# TODO: investigate if old relational fields must be reloaded or if it's
# sufficient if the new field is (#27737).
# Delay rendering of relationships if it's not a relational field and
# not referenced by a foreign key.
delay = (
not field.is_relation and
not is_referenced_by_foreign_key(state, self.model_name_lower, self.field, self.name)
)
state.reload_model(app_label, self.model_name_lower, delay=delay)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
from_field = from_model._meta.get_field(self.name)
to_field = to_model._meta.get_field(self.name)
if not self.preserve_default:
to_field.default = self.field.default
schema_editor.alter_field(from_model, from_field, to_field)
if not self.preserve_default:
to_field.default = NOT_PROVIDED
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.database_forwards(app_label, schema_editor, from_state, to_state)
def describe(self):
return "Alter field %s on %s" % (self.name, self.model_name)
def reduce(self, operation, in_between, app_label=None):
if isinstance(operation, RemoveField) and self.is_same_field_operation(operation):
return [operation]
elif isinstance(operation, RenameField) and self.is_same_field_operation(operation):
return [
operation,
AlterField(
model_name=self.model_name,
name=operation.new_name,
field=self.field,
),
]
return super().reduce(operation, in_between, app_label=app_label)
class RenameField(FieldOperation):
"""Rename a field on the model. Might affect db_column too."""
def __init__(self, model_name, old_name, new_name):
self.old_name = old_name
self.new_name = new_name
super().__init__(model_name, old_name)
@cached_property
def old_name_lower(self):
return self.old_name.lower()
@cached_property
def new_name_lower(self):
return self.new_name.lower()
def deconstruct(self):
kwargs = {
'model_name': self.model_name,
'old_name': self.old_name,
'new_name': self.new_name,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.model_name_lower]
# Rename the field
fields = model_state.fields
for index, (name, field) in enumerate(fields):
if name == self.old_name:
fields[index] = (self.new_name, field)
# Delay rendering of relationships if it's not a relational
# field and not referenced by a foreign key.
delay = (
not field.is_relation and
not is_referenced_by_foreign_key(state, self.model_name_lower, field, self.name)
)
break
else:
raise FieldDoesNotExist(
"%s.%s has no field named '%s'" % (app_label, self.model_name, self.old_name)
)
# Fix index/unique_together to refer to the new field
options = model_state.options
for option in ('index_together', 'unique_together'):
if option in options:
options[option] = [
[self.new_name if n == self.old_name else n for n in together]
for together in options[option]
]
state.reload_model(app_label, self.model_name_lower, delay=delay)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
schema_editor.alter_field(
from_model,
from_model._meta.get_field(self.old_name),
to_model._meta.get_field(self.new_name),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.model_name)
schema_editor.alter_field(
from_model,
from_model._meta.get_field(self.new_name),
to_model._meta.get_field(self.old_name),
)
def describe(self):
return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name)
def references_field(self, model_name, name, app_label=None):
return self.references_model(model_name) and (
name.lower() == self.old_name_lower or
name.lower() == self.new_name_lower
)
def reduce(self, operation, in_between, app_label=None):
if (isinstance(operation, RenameField) and
self.is_same_model_operation(operation) and
self.new_name_lower == operation.old_name_lower):
return [
RenameField(
self.model_name,
self.old_name,
operation.new_name,
),
]
# Skip `FieldOperation.reduce` as we want to run `references_field`
# against self.new_name.
return (
super(FieldOperation, self).reduce(operation, in_between, app_label=app_label) or
not operation.references_field(self.model_name, self.new_name, app_label)
)
|
proversity-org/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/ace_common/apps.py
|
1
|
"""
Configuration for the ace_common Django app.
"""
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from openedx.core.djangoapps.plugins.constants import ProjectType, PluginSettings, SettingsType
class AceCommonConfig(AppConfig):
"""
Configuration class for the ace_common Django app.
"""
name = 'openedx.core.djangoapps.ace_common'
verbose_name = _('ACE Common')
plugin_app = {
PluginSettings.CONFIG: {
ProjectType.CMS: {
SettingsType.AWS: {PluginSettings.RELATIVE_PATH: u'settings.aws'},
SettingsType.COMMON: {PluginSettings.RELATIVE_PATH: u'settings.common'},
SettingsType.DEVSTACK: {PluginSettings.RELATIVE_PATH: u'settings.common'},
},
ProjectType.LMS: {
SettingsType.AWS: {PluginSettings.RELATIVE_PATH: u'settings.aws'},
SettingsType.COMMON: {PluginSettings.RELATIVE_PATH: u'settings.common'},
SettingsType.DEVSTACK: {PluginSettings.RELATIVE_PATH: u'settings.common'},
}
}
}
|
OmniLayer/omnicore
|
refs/heads/master
|
test/functional/wallet_multiwallet.py
|
3
|
#!/usr/bin/env python3
# Copyright (c) 2017-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiwallet.
Verify that a bitcoind node can load multiple wallet files
"""
from threading import Thread
import os
import shutil
import time
from test_framework.authproxy import JSONRPCException
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import ErrorMatch
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
get_rpc_proxy,
)
FEATURE_LATEST = 169900
got_loading_error = False
def test_load_unload(node, name):
global got_loading_error
for i in range(10):
if got_loading_error:
return
try:
node.loadwallet(name)
node.unloadwallet(name)
except JSONRPCException as e:
if e.error['code'] == -4 and 'Wallet already being loading' in e.error['message']:
got_loading_error = True
return
class MultiWalletTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.rpc_timeout = 120
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def add_options(self, parser):
parser.add_argument(
'--data_wallets_dir',
default=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/wallets/'),
help='Test data with wallet directories (default: %(default)s)',
)
def run_test(self):
node = self.nodes[0]
data_dir = lambda *p: os.path.join(node.datadir, self.chain, *p)
wallet_dir = lambda *p: data_dir('wallets', *p)
wallet = lambda name: node.get_wallet_rpc(name)
def wallet_file(name):
if os.path.isdir(wallet_dir(name)):
return wallet_dir(name, "wallet.dat")
return wallet_dir(name)
assert_equal(self.nodes[0].listwalletdir(), { 'wallets': [{ 'name': '' }] })
# check wallet.dat is created
self.stop_nodes()
assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True)
# create symlink to verify wallet directory path can be referenced
# through symlink
os.mkdir(wallet_dir('w7'))
os.symlink('w7', wallet_dir('w7_symlink'))
# rename wallet.dat to make sure plain wallet file paths (as opposed to
# directory paths) can be loaded
os.rename(wallet_dir("wallet.dat"), wallet_dir("w8"))
# create another dummy wallet for use in testing backups later
self.start_node(0, [])
self.stop_nodes()
empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat')
os.rename(wallet_dir("wallet.dat"), empty_wallet)
# restart node with a mix of wallet names:
# w1, w2, w3 - to verify new wallets created when non-existing paths specified
# w - to verify wallet name matching works when one wallet path is prefix of another
# sub/w5 - to verify relative wallet path is created correctly
# extern/w6 - to verify absolute wallet path is created correctly
# w7_symlink - to verify symlinked wallet path is initialized correctly
# w8 - to verify existing wallet file is loaded correctly
# '' - to verify default wallet file is created correctly
wallet_names = ['w1', 'w2', 'w3', 'w', 'sub/w5', os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', '']
extra_args = ['-wallet={}'.format(n) for n in wallet_names]
self.start_node(0, extra_args)
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8'])
assert_equal(set(node.listwallets()), set(wallet_names))
# check that all requested wallets were created
self.stop_node(0)
for wallet_name in wallet_names:
assert_equal(os.path.isfile(wallet_file(wallet_name)), True)
# should not initialize if wallet path can't be created
exp_stderr = "boost::filesystem::create_directory:"
self.nodes[0].assert_start_raises_init_error(['-wallet=wallet.dat/bad'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" does not exist')
self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" is a relative path', cwd=data_dir())
self.nodes[0].assert_start_raises_init_error(['-walletdir=debug.log'], 'Error: Specified -walletdir "debug.log" is not a directory', cwd=data_dir())
# should not initialize if there are duplicate wallets
self.nodes[0].assert_start_raises_init_error(['-wallet=w1', '-wallet=w1'], 'Error: Error loading wallet w1. Duplicate -wallet filename specified.')
# should not initialize if one wallet is a copy of another
shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy'))
exp_stderr = r"BerkeleyBatch: Can't open database w8_copy \(duplicates fileid \w+ from w8\)"
self.nodes[0].assert_start_raises_init_error(['-wallet=w8', '-wallet=w8_copy'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
# should not initialize if wallet file is a symlink
os.symlink('w8', wallet_dir('w8_symlink'))
self.nodes[0].assert_start_raises_init_error(['-wallet=w8_symlink'], r'Error: Invalid -wallet path \'w8_symlink\'\. .*', match=ErrorMatch.FULL_REGEX)
# should not initialize if the specified walletdir does not exist
self.nodes[0].assert_start_raises_init_error(['-walletdir=bad'], 'Error: Specified -walletdir "bad" does not exist')
# should not initialize if the specified walletdir is not a directory
not_a_dir = wallet_dir('notadir')
open(not_a_dir, 'a', encoding="utf8").close()
self.nodes[0].assert_start_raises_init_error(['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory')
self.log.info("Do not allow -zapwallettxes with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.log.info("Do not allow -salvagewallet with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-salvagewallet', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-salvagewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
self.log.info("Do not allow -upgradewallet with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-upgradewallet', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-upgradewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
# if wallets/ doesn't exist, datadir should be the default wallet dir
wallet_dir2 = data_dir('walletdir')
os.rename(wallet_dir(), wallet_dir2)
self.start_node(0, ['-wallet=w4', '-wallet=w5'])
assert_equal(set(node.listwallets()), {"w4", "w5"})
w5 = wallet("w5")
node.generatetoaddress(nblocks=1, address=w5.getnewaddress())
# now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded
os.rename(wallet_dir2, wallet_dir())
self.restart_node(0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()])
assert_equal(set(node.listwallets()), {"w4", "w5"})
w5 = wallet("w5")
w5_info = w5.getwalletinfo()
assert_equal(w5_info['immature_balance'], 50)
competing_wallet_dir = os.path.join(self.options.tmpdir, 'competing_walletdir')
os.mkdir(competing_wallet_dir)
self.restart_node(0, ['-walletdir=' + competing_wallet_dir])
exp_stderr = r"Error: Error initializing wallet database environment \"\S+competing_walletdir\"!"
self.nodes[1].assert_start_raises_init_error(['-walletdir=' + competing_wallet_dir], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
self.restart_node(0, extra_args)
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy'])
wallets = [wallet(w) for w in wallet_names]
wallet_bad = wallet("bad")
# check wallet names and balances
node.generatetoaddress(nblocks=1, address=wallets[0].getnewaddress())
for wallet_name, wallet in zip(wallet_names, wallets):
info = wallet.getwalletinfo()
assert_equal(info['immature_balance'], 50 if wallet is wallets[0] else 0)
assert_equal(info['walletname'], wallet_name)
# accessing invalid wallet fails
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", wallet_bad.getwalletinfo)
# accessing wallet RPC without using wallet endpoint fails
assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo)
w1, w2, w3, w4, *_ = wallets
node.generatetoaddress(nblocks=101, address=w1.getnewaddress())
assert_equal(w1.getbalance(), 100)
assert_equal(w2.getbalance(), 0)
assert_equal(w3.getbalance(), 0)
assert_equal(w4.getbalance(), 0)
w1.sendtoaddress(w2.getnewaddress(), 1)
w1.sendtoaddress(w3.getnewaddress(), 2)
w1.sendtoaddress(w4.getnewaddress(), 3)
node.generatetoaddress(nblocks=1, address=w1.getnewaddress())
assert_equal(w2.getbalance(), 1)
assert_equal(w3.getbalance(), 2)
assert_equal(w4.getbalance(), 3)
batch = w1.batch([w1.getblockchaininfo.get_request(), w1.getwalletinfo.get_request()])
assert_equal(batch[0]["result"]["chain"], self.chain)
assert_equal(batch[1]["result"]["walletname"], "w1")
self.log.info('Check for per-wallet settxfee call')
assert_equal(w1.getwalletinfo()['paytxfee'], 0)
assert_equal(w2.getwalletinfo()['paytxfee'], 0)
w2.settxfee(4.0)
assert_equal(w1.getwalletinfo()['paytxfee'], 0)
assert_equal(w2.getwalletinfo()['paytxfee'], 4.0)
self.log.info("Test dynamic wallet loading")
self.restart_node(0, ['-nowallet'])
assert_equal(node.listwallets(), [])
assert_raises_rpc_error(-32601, "Method not found", node.getwalletinfo)
self.log.info("Load first wallet")
loadwallet_name = node.loadwallet(wallet_names[0])
assert_equal(loadwallet_name['name'], wallet_names[0])
assert_equal(node.listwallets(), wallet_names[0:1])
node.getwalletinfo()
w1 = node.get_wallet_rpc(wallet_names[0])
w1.getwalletinfo()
self.log.info("Load second wallet")
loadwallet_name = node.loadwallet(wallet_names[1])
assert_equal(loadwallet_name['name'], wallet_names[1])
assert_equal(node.listwallets(), wallet_names[0:2])
assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo)
w2 = node.get_wallet_rpc(wallet_names[1])
w2.getwalletinfo()
self.log.info("Concurrent wallet loading")
threads = []
for _ in range(3):
n = node.cli if self.options.usecli else get_rpc_proxy(node.url, 1, timeout=600, coveragedir=node.coverage_dir)
t = Thread(target=test_load_unload, args=(n, wallet_names[2], ))
t.start()
threads.append(t)
for t in threads:
t.join()
global got_loading_error
assert_equal(got_loading_error, True)
self.log.info("Load remaining wallets")
for wallet_name in wallet_names[2:]:
loadwallet_name = self.nodes[0].loadwallet(wallet_name)
assert_equal(loadwallet_name['name'], wallet_name)
assert_equal(set(self.nodes[0].listwallets()), set(wallet_names))
# Fail to load if wallet doesn't exist
assert_raises_rpc_error(-18, 'Wallet wallets not found.', self.nodes[0].loadwallet, 'wallets')
# Fail to load duplicate wallets
assert_raises_rpc_error(-4, 'Wallet file verification failed: Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
# Fail to load duplicate wallets by different ways (directory and filepath)
assert_raises_rpc_error(-4, "Wallet file verification failed: Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
# Fail to load if one wallet is a copy of another
assert_raises_rpc_error(-4, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
# Fail to load if one wallet is a copy of another, test this twice to make sure that we don't re-introduce #14304
assert_raises_rpc_error(-4, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
# Fail to load if wallet file is a symlink
assert_raises_rpc_error(-4, "Wallet file verification failed: Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink')
# Fail to load if a directory is specified that doesn't contain a wallet
os.mkdir(wallet_dir('empty_wallet_dir'))
assert_raises_rpc_error(-18, "Directory empty_wallet_dir does not contain a wallet.dat file", self.nodes[0].loadwallet, 'empty_wallet_dir')
self.log.info("Test dynamic wallet creation.")
# Fail to create a wallet if it already exists.
assert_raises_rpc_error(-4, "Wallet w2 already exists.", self.nodes[0].createwallet, 'w2')
# Successfully create a wallet with a new name
loadwallet_name = self.nodes[0].createwallet('w9')
assert_equal(loadwallet_name['name'], 'w9')
w9 = node.get_wallet_rpc('w9')
assert_equal(w9.getwalletinfo()['walletname'], 'w9')
assert 'w9' in self.nodes[0].listwallets()
# Successfully create a wallet using a full path
new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir')
new_wallet_name = os.path.join(new_wallet_dir, 'w10')
loadwallet_name = self.nodes[0].createwallet(new_wallet_name)
assert_equal(loadwallet_name['name'], new_wallet_name)
w10 = node.get_wallet_rpc(new_wallet_name)
assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name)
assert new_wallet_name in self.nodes[0].listwallets()
self.log.info("Test dynamic wallet unloading")
# Test `unloadwallet` errors
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].unloadwallet)
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", self.nodes[0].unloadwallet, "dummy")
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", node.get_wallet_rpc("dummy").unloadwallet)
        assert_raises_rpc_error(-8, "Cannot unload the requested wallet", w1.unloadwallet, "w2")
# Successfully unload the specified wallet name
self.nodes[0].unloadwallet("w1")
assert 'w1' not in self.nodes[0].listwallets()
# Successfully unload the wallet referenced by the request endpoint
# Also ensure unload works during walletpassphrase timeout
w2.encryptwallet('test')
w2.walletpassphrase('test', 1)
w2.unloadwallet()
time.sleep(1.1)
assert 'w2' not in self.nodes[0].listwallets()
# Successfully unload all wallets
for wallet_name in self.nodes[0].listwallets():
self.nodes[0].unloadwallet(wallet_name)
assert_equal(self.nodes[0].listwallets(), [])
assert_raises_rpc_error(-32601, "Method not found (wallet method is disabled because no wallet is loaded)", self.nodes[0].getwalletinfo)
# Successfully load a previously unloaded wallet
self.nodes[0].loadwallet('w1')
assert_equal(self.nodes[0].listwallets(), ['w1'])
assert_equal(w1.getwalletinfo()['walletname'], 'w1')
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy', 'w9'])
# Test backing up and restoring wallets
self.log.info("Test wallet backup")
self.restart_node(0, ['-nowallet'])
for wallet_name in wallet_names:
self.nodes[0].loadwallet(wallet_name)
for wallet_name in wallet_names:
rpc = self.nodes[0].get_wallet_rpc(wallet_name)
addr = rpc.getnewaddress()
backup = os.path.join(self.options.tmpdir, 'backup.dat')
rpc.backupwallet(backup)
self.nodes[0].unloadwallet(wallet_name)
shutil.copyfile(empty_wallet, wallet_file(wallet_name))
self.nodes[0].loadwallet(wallet_name)
assert_equal(rpc.getaddressinfo(addr)['ismine'], False)
self.nodes[0].unloadwallet(wallet_name)
shutil.copyfile(backup, wallet_file(wallet_name))
self.nodes[0].loadwallet(wallet_name)
assert_equal(rpc.getaddressinfo(addr)['ismine'], True)
# Test .walletlock file is closed
self.start_node(1)
wallet = os.path.join(self.options.tmpdir, 'my_wallet')
self.nodes[0].createwallet(wallet)
assert_raises_rpc_error(-4, "Error initializing wallet database environment", self.nodes[1].loadwallet, wallet)
self.nodes[0].unloadwallet(wallet)
self.nodes[1].loadwallet(wallet)
# Fail to load if wallet is downgraded
shutil.copytree(os.path.join(self.options.data_wallets_dir, 'high_minversion'), wallet_dir('high_minversion'))
self.restart_node(0, extra_args=['-upgradewallet={}'.format(FEATURE_LATEST)])
assert {'name': 'high_minversion'} in self.nodes[0].listwalletdir()['wallets']
self.log.info("Fail -upgradewallet that results in downgrade")
assert_raises_rpc_error(
-4,
'Wallet loading failed: Error loading {}: Wallet requires newer version of {}'.format(
wallet_dir('high_minversion', 'wallet.dat'), self.config['environment']['PACKAGE_NAME']),
lambda: self.nodes[0].loadwallet(filename='high_minversion'),
)
if __name__ == '__main__':
MultiWalletTest().main()
|
HyperloopTeam/FullOpenMDAO
|
refs/heads/master
|
lib/python2.7/site-packages/pycrypto-2.3-py2.7-macosx-10.10-x86_64.egg/Crypto/Util/number.py
|
10
|
#
# number.py : Number-theoretic functions
#
# Part of the Python Cryptography Toolkit
#
# Written by Andrew M. Kuchling, Barry A. Warsaw, and others
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
#
__revision__ = "$Id$"
from Crypto.pct_warnings import GetRandomNumber_DeprecationWarning
import math
import warnings
bignum = long
try:
from Crypto.PublicKey import _fastmath
except ImportError:
_fastmath = None
# New functions
from _number_new import *
# Commented out and replaced with faster versions below
## def long2str(n):
## s=''
## while n>0:
## s=chr(n & 255)+s
## n=n>>8
## return s
## import types
## def str2long(s):
## if type(s)!=types.StringType: return s # Integers will be left alone
## return reduce(lambda x,y : x*256+ord(y), s, 0L)
def size (N):
"""size(N:long) : int
Returns the size of the number N in bits.
"""
bits = 0
while N >> bits:
bits += 1
return bits
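# Worked example: size(7) == 3 (0b111) and size(8) == 4 (0b1000); the loop
# simply counts how many right-shifts it takes to reach zero.  size(0) == 0.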
def getRandomNumber(N, randfunc=None):
"""Deprecated. Use getRandomInteger or getRandomNBitInteger instead."""
warnings.warn("Crypto.Util.number.getRandomNumber has confusing semantics and has been deprecated. Use getRandomInteger or getRandomNBitInteger instead.",
GetRandomNumber_DeprecationWarning)
return getRandomNBitInteger(N, randfunc)
def getRandomInteger(N, randfunc=None):
"""getRandomInteger(N:int, randfunc:callable):long
Return a random number with at most N bits.
If randfunc is omitted, then Random.new().read is used.
This function is for internal use only and may be renamed or removed in
the future.
"""
if randfunc is None:
_import_Random()
randfunc = Random.new().read
S = randfunc(N>>3)
odd_bits = N % 8
if odd_bits != 0:
char = ord(randfunc(1)) >> (8-odd_bits)
S = chr(char) + S
value = bytes_to_long(S)
return value
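# Example of the odd-bits handling above: getRandomInteger(12) reads 12>>3 == 1
# full random byte, then one extra byte shifted right by 8-4 == 4 so only four
# random bits survive, yielding a uniform value in 0..2**12-1.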
def getRandomRange(a, b, randfunc=None):
"""getRandomRange(a:int, b:int, randfunc:callable):long
Return a random number n so that a <= n < b.
If randfunc is omitted, then Random.new().read is used.
This function is for internal use only and may be renamed or removed in
the future.
"""
range_ = b - a - 1
bits = size(range_)
value = getRandomInteger(bits, randfunc)
while value > range_:
value = getRandomInteger(bits, randfunc)
return a + value
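# getRandomRange uses plain rejection sampling: it draws size(b-a-1)-bit
# integers until one falls inside 0..b-a-1, then shifts it up by a.  For
# example, getRandomRange(10, 20) draws size(9) == 4 bits (0..15), retries
# whenever the draw exceeds 9, and returns a value in 10..19.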
def getRandomNBitInteger(N, randfunc=None):
"""getRandomInteger(N:int, randfunc:callable):long
Return a random number with exactly N-bits, i.e. a random number
between 2**(N-1) and (2**N)-1.
If randfunc is omitted, then Random.new().read is used.
This function is for internal use only and may be renamed or removed in
the future.
"""
value = getRandomInteger (N-1, randfunc)
value |= 2L ** (N-1) # Ensure high bit is set
assert size(value) >= N
return value
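# E.g. getRandomNBitInteger(8) returns a value in 128..255: seven random low
# bits come from getRandomInteger(7), and bit 7 is forced on so the result
# always has exactly 8 bits.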
def GCD(x,y):
"""GCD(x:long, y:long): long
Return the GCD of x and y.
"""
x = abs(x) ; y = abs(y)
while x > 0:
x, y = y % x, x
return y
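# Worked example of the Euclidean loop: GCD(84, 30) iterates (x, y) as
# (84, 30) -> (30, 84) -> (24, 30) -> (6, 24) -> (0, 6), then returns y == 6.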
def inverse(u, v):
"""inverse(u:long, v:long):long
Return the inverse of u mod v.
"""
u3, v3 = long(u), long(v)
u1, v1 = 1L, 0L
while v3 > 0:
q=divmod(u3, v3)[0]
u1, v1 = v1, u1 - v1*q
u3, v3 = v3, u3 - v3*q
while u1<0:
u1 = u1 + v
return u1
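# Worked example: inverse(3, 7) == 5, since 3*5 == 15 == 2*7 + 1.  The
# extended-Euclid loop ends with u1 == -2, which the final while-loop
# normalises into the range 0..v-1 as -2 + 7 == 5.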
# Given a number of bits to generate and a random generation function,
# find a prime number of the appropriate size.
def getPrime(N, randfunc=None):
"""getPrime(N:int, randfunc:callable):long
Return a random N-bit prime number.
If randfunc is omitted, then Random.new().read is used.
"""
if randfunc is None:
_import_Random()
randfunc = Random.new().read
number=getRandomNBitInteger(N, randfunc) | 1
while (not isPrime(number, randfunc=randfunc)):
number=number+2
return number
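# Search strategy note: getPrime picks a random odd N-bit start and walks
# upward in steps of 2 until isPrime() succeeds; in rare cases the walk can
# cross 2**N, so the returned prime may end up one bit longer than requested.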
def _rabinMillerTest(n, rounds, randfunc=None):
"""_rabinMillerTest(n:long, rounds:int, randfunc:callable):int
Tests if n is prime.
    Returns 0 when n is definitely composite.
    Returns 1 when n is probably prime.
    Returns 2 when n is definitely prime.
If randfunc is omitted, then Random.new().read is used.
This function is for internal use only and may be renamed or removed in
the future.
"""
# check special cases (n==2, n even, n < 2)
if n < 3 or (n & 1) == 0:
return n == 2
# n might be very large so it might be beneficial to precalculate n-1
n_1 = n - 1
# determine m and b so that 2**b * m = n - 1 and b maximal
b = 0
m = n_1
while (m & 1) == 0:
b += 1
m >>= 1
tested = []
# we need to do at most n-2 rounds.
for i in xrange (min (rounds, n-2)):
# randomly choose a < n and make sure it hasn't been tested yet
a = getRandomRange (2, n, randfunc)
while a in tested:
a = getRandomRange (2, n, randfunc)
tested.append (a)
# do the rabin-miller test
z = pow (a, m, n) # (a**m) % n
if z == 1 or z == n_1:
continue
composite = 1
for r in xrange (b):
z = (z * z) % n
if z == 1:
return 0
elif z == n_1:
composite = 0
break
if composite:
return 0
return 1
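# Decomposition example: for n == 561, n - 1 == 560 == 2**4 * 35, so the loop
# above yields b == 4 and m == 35 before any witness is drawn.  561 == 3*11*17
# is a Carmichael number, a composite that the plain Fermat test passes for
# every base coprime to n, but that Rabin-Miller rejects.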
def getStrongPrime(N, e=0, false_positive_prob=1e-6, randfunc=None):
"""getStrongPrime(N:int, e:int, false_positive_prob:float, randfunc:callable):long
Return a random strong N-bit prime number.
In this context p is a strong prime if p-1 and p+1 have at
least one large prime factor.
    N should be a multiple of 128 and at least 512.
    If e is provided, p-1 of the returned prime p will be coprime to e,
    making the prime suitable for RSA where e is the public exponent.
    The optional false_positive_prob is the statistical probability that
    the returned number is in fact composite (a pseudo-prime).
    It defaults to 1e-6 (less than 1:1000000).
    Note that the real probability of a false positive is far less; this is
    just the mathematically provable limit.
randfunc should take a single int parameter and return that
many random bytes as a string.
If randfunc is omitted, then Random.new().read is used.
"""
# This function was implemented following the
# instructions found in the paper:
# "FAST GENERATION OF RANDOM, STRONG RSA PRIMES"
# by Robert D. Silverman
# RSA Laboratories
# May 17, 1997
# which by the time of writing could be freely downloaded here:
# http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.17.2713&rep=rep1&type=pdf
# Use the accelerator if available
if _fastmath is not None:
return _fastmath.getStrongPrime(long(N), long(e), false_positive_prob, randfunc)
if (N < 512) or ((N % 128) != 0):
        raise ValueError ("bits must be multiple of 128 and >= 512")
rabin_miller_rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4)))
# calculate range for X
# lower_bound = sqrt(2) * 2^{511 + 128*x}
# upper_bound = 2^{512 + 128*x} - 1
    x = (N - 512) >> 7
# We need to approximate the sqrt(2) in the lower_bound by an integer
# expression because floating point math overflows with these numbers
lower_bound = divmod(14142135623730950489L * (2L ** (511 + 128*x)),
10000000000000000000L)[0]
upper_bound = (1L << (512 + 128*x)) - 1
# Randomly choose X in calculated range
X = getRandomRange (lower_bound, upper_bound, randfunc)
# generate p1 and p2
p = [0, 0]
for i in (0, 1):
# randomly choose 101-bit y
y = getRandomNBitInteger (101, randfunc)
# initialize the field for sieving
field = [0] * 5 * len (sieve_base)
# sieve the field
for prime in sieve_base:
offset = y % prime
for j in xrange ((prime - offset) % prime, len (field), prime):
field[j] = 1
# look for suitable p[i] starting at y
result = 0
for j in range(len(field)):
composite = field[j]
            # look for the next candidate
if composite:
continue
tmp = y + j
result = _rabinMillerTest (tmp, rabin_miller_rounds)
if result > 0:
p[i] = tmp
break
if result == 0:
raise RuntimeError ("Couln't find prime in field. "
"Developer: Increase field_size")
# Calculate R
# R = (p2^{-1} mod p1) * p2 - (p1^{-1} mod p2) * p1
tmp1 = inverse (p[1], p[0]) * p[1] # (p2^-1 mod p1)*p2
tmp2 = inverse (p[0], p[1]) * p[0] # (p1^-1 mod p2)*p1
R = tmp1 - tmp2 # (p2^-1 mod p1)*p2 - (p1^-1 mod p2)*p1
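    # By construction R % p[0] == 1 and R % p[1] == p[1] - 1, so any X that is
    # congruent to R modulo p[0]*p[1] has p[0] dividing X-1 and p[1] dividing
    # X+1, which is exactly the strong-prime property being built here.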
# search for final prime number starting by Y0
# Y0 = X + (R - X mod p1p2)
increment = p[0] * p[1]
X = X + (R - (X % increment))
while 1:
is_possible_prime = 1
        # first check candidate against sieve_base
for prime in sieve_base:
if (X % prime) == 0:
is_possible_prime = 0
break
# if e is given make sure that e and X-1 are coprime
        # this is not necessarily a strong prime criterion but useful when
        # creating them for RSA, where p-1 and q-1 should be coprime to
# the public exponent e
if e and is_possible_prime:
if e & 1:
if GCD (e, X-1) != 1:
is_possible_prime = 0
else:
if GCD (e, (X-1)/2) != 1:
is_possible_prime = 0
# do some Rabin-Miller-Tests
if is_possible_prime:
result = _rabinMillerTest (X, rabin_miller_rounds)
if result > 0:
break
X += increment
# abort when X has more bits than requested
# TODO: maybe we shouldn't abort but rather start over.
if X >= 1L << N:
raise RuntimeError ("Couln't find prime in field. "
"Developer: Increase field_size")
return X
def isPrime(N, false_positive_prob=1e-6, randfunc=None):
"""isPrime(N:long, false_positive_prob:float, randfunc:callable):bool
Return true if N is prime.
    The optional false_positive_prob is the statistical probability that
    true is returned even though N is actually composite (a pseudo-prime).
    It defaults to 1e-6 (less than 1:1000000).
    Note that the real probability of a false positive is far less; this is
    just the mathematically provable limit.
If randfunc is omitted, then Random.new().read is used.
"""
if _fastmath is not None:
return _fastmath.isPrime(long(N), false_positive_prob, randfunc)
if N < 3 or N & 1 == 0:
return N == 2
for p in sieve_base:
if N == p:
return 1
if N % p == 0:
return 0
rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4)))
return _rabinMillerTest(N, rounds, randfunc)
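# For example, isPrime(65537) returns 1 directly from the sieve_base table
# below (65537 is the Fermat prime 2**16 + 1), while any even N > 2 is
# rejected immediately by the N & 1 check.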
# Improved conversion functions contributed by Barry Warsaw, after
# careful benchmarking
import struct
def long_to_bytes(n, blocksize=0):
"""long_to_bytes(n:long, blocksize:int) : string
Convert a long integer to a byte string.
If optional blocksize is given and greater than zero, pad the front of the
byte string with binary zeros so that the length is a multiple of
blocksize.
"""
# after much testing, this algorithm was deemed to be the fastest
s = ''
n = long(n)
pack = struct.pack
while n > 0:
s = pack('>I', n & 0xffffffffL) + s
n = n >> 32
# strip off leading zeros
for i in range(len(s)):
if s[i] != '\000':
break
else:
# only happens when n == 0
s = '\000'
i = 0
s = s[i:]
# add back some pad bytes. this could be done more efficiently w.r.t. the
# de-padding being done above, but sigh...
if blocksize > 0 and len(s) % blocksize:
s = (blocksize - len(s) % blocksize) * '\000' + s
return s
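# Worked example: long_to_bytes(65537) packs 0x00010001 and strips the leading
# zero byte, giving '\x01\x00\x01'; with blocksize=4 the zero byte is padded
# back on, giving '\x00\x01\x00\x01'.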
def bytes_to_long(s):
"""bytes_to_long(string) : long
Convert a byte string to a long integer.
This is (essentially) the inverse of long_to_bytes().
"""
acc = 0L
unpack = struct.unpack
length = len(s)
if length % 4:
extra = (4 - length % 4)
s = '\000' * extra + s
length = length + extra
for i in range(0, length, 4):
acc = (acc << 32) + unpack('>I', s[i:i+4])[0]
return acc
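# Round-trip example: bytes_to_long('\x01\x00\x01') == 65537L, inverting the
# long_to_bytes() call above; any leading '\x00' padding bytes are harmless.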
# For backwards compatibility...
def long2str(n, blocksize=0):
warnings.warn("long2str() has been replaced by long_to_bytes()")
return long_to_bytes(n, blocksize)
def str2long(s):
warnings.warn("str2long() has been replaced by bytes_to_long()")
return bytes_to_long(s)
def _import_Random():
# This is called in a function instead of at the module level in order to avoid problems with recursive imports
global Random, StrongRandom
from Crypto import Random
from Crypto.Random.random import StrongRandom
# The first 10000 primes used for checking primality.
# This should be enough to eliminate most of the odd
# numbers before needing to do a Rabin-Miller test at all.
sieve_base = (
2, 3, 5, 7, 11, 13, 17, 19, 23, 29,
31, 37, 41, 43, 47, 53, 59, 61, 67, 71,
73, 79, 83, 89, 97, 101, 103, 107, 109, 113,
127, 131, 137, 139, 149, 151, 157, 163, 167, 173,
179, 181, 191, 193, 197, 199, 211, 223, 227, 229,
233, 239, 241, 251, 257, 263, 269, 271, 277, 281,
283, 293, 307, 311, 313, 317, 331, 337, 347, 349,
353, 359, 367, 373, 379, 383, 389, 397, 401, 409,
419, 421, 431, 433, 439, 443, 449, 457, 461, 463,
467, 479, 487, 491, 499, 503, 509, 521, 523, 541,
547, 557, 563, 569, 571, 577, 587, 593, 599, 601,
607, 613, 617, 619, 631, 641, 643, 647, 653, 659,
661, 673, 677, 683, 691, 701, 709, 719, 727, 733,
739, 743, 751, 757, 761, 769, 773, 787, 797, 809,
811, 821, 823, 827, 829, 839, 853, 857, 859, 863,
877, 881, 883, 887, 907, 911, 919, 929, 937, 941,
947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013,
1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069,
1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151,
1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223,
1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291,
1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373,
1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451,
1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511,
1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583,
1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657,
1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733,
1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811,
1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889,
1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987,
1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053,
2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129,
2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213,
2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287,
2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357,
2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423,
2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531,
2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617,
2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687,
2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741,
2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819,
2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903,
2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999,
3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079,
3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181,
3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257,
3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331,
3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413,
3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511,
3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571,
3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643,
3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727,
3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821,
3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907,
3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989,
4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057,
4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139,
4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231,
4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297,
4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409,
4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493,
4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583,
4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657,
4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751,
4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831,
4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937,
4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003,
5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087,
5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179,
5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279,
5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387,
5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443,
5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521,
5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639,
5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693,
5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791,
5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857,
5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939,
5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053,
6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133,
6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221,
6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301,
6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367,
6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473,
6481, 6491, 6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571,
6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673,
6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761,
6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833,
6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917,
6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997,
7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103,
7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207,
7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297,
7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411,
7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499,
7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561,
7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643,
7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723,
7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829,
7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919,
7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017,
8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 8101, 8111,
8117, 8123, 8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219,
8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291,
8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387,
8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501,
8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597,
8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677,
8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741,
8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831,
8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929,
8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011,
9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109,
9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199,
9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283,
9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377,
9391, 9397, 9403, 9413, 9419, 9421, 9431, 9433, 9437, 9439,
9461, 9463, 9467, 9473, 9479, 9491, 9497, 9511, 9521, 9533,
9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631,
9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733,
9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811,
9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887,
9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007,
10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099,
10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177,
10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271,
10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343,
10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459,
10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567,
10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657,
10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739,
10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859,
10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949,
10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059,
11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149,
11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251,
11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329,
11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443,
11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527,
11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657,
11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777,
11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833,
11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933,
11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011,
12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109,
12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211,
12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289,
12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401,
12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487,
12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553,
12569, 12577, 12583, 12589, 12601, 12611, 12613, 12619, 12637, 12641,
12647, 12653, 12659, 12671, 12689, 12697, 12703, 12713, 12721, 12739,
12743, 12757, 12763, 12781, 12791, 12799, 12809, 12821, 12823, 12829,
12841, 12853, 12889, 12893, 12899, 12907, 12911, 12917, 12919, 12923,
12941, 12953, 12959, 12967, 12973, 12979, 12983, 13001, 13003, 13007,
13009, 13033, 13037, 13043, 13049, 13063, 13093, 13099, 13103, 13109,
13121, 13127, 13147, 13151, 13159, 13163, 13171, 13177, 13183, 13187,
13217, 13219, 13229, 13241, 13249, 13259, 13267, 13291, 13297, 13309,
13313, 13327, 13331, 13337, 13339, 13367, 13381, 13397, 13399, 13411,
13417, 13421, 13441, 13451, 13457, 13463, 13469, 13477, 13487, 13499,
13513, 13523, 13537, 13553, 13567, 13577, 13591, 13597, 13613, 13619,
13627, 13633, 13649, 13669, 13679, 13681, 13687, 13691, 13693, 13697,
13709, 13711, 13721, 13723, 13729, 13751, 13757, 13759, 13763, 13781,
13789, 13799, 13807, 13829, 13831, 13841, 13859, 13873, 13877, 13879,
13883, 13901, 13903, 13907, 13913, 13921, 13931, 13933, 13963, 13967,
13997, 13999, 14009, 14011, 14029, 14033, 14051, 14057, 14071, 14081,
14083, 14087, 14107, 14143, 14149, 14153, 14159, 14173, 14177, 14197,
14207, 14221, 14243, 14249, 14251, 14281, 14293, 14303, 14321, 14323,
14327, 14341, 14347, 14369, 14387, 14389, 14401, 14407, 14411, 14419,
14423, 14431, 14437, 14447, 14449, 14461, 14479, 14489, 14503, 14519,
14533, 14537, 14543, 14549, 14551, 14557, 14561, 14563, 14591, 14593,
14621, 14627, 14629, 14633, 14639, 14653, 14657, 14669, 14683, 14699,
14713, 14717, 14723, 14731, 14737, 14741, 14747, 14753, 14759, 14767,
14771, 14779, 14783, 14797, 14813, 14821, 14827, 14831, 14843, 14851,
14867, 14869, 14879, 14887, 14891, 14897, 14923, 14929, 14939, 14947,
14951, 14957, 14969, 14983, 15013, 15017, 15031, 15053, 15061, 15073,
15077, 15083, 15091, 15101, 15107, 15121, 15131, 15137, 15139, 15149,
15161, 15173, 15187, 15193, 15199, 15217, 15227, 15233, 15241, 15259,
15263, 15269, 15271, 15277, 15287, 15289, 15299, 15307, 15313, 15319,
15329, 15331, 15349, 15359, 15361, 15373, 15377, 15383, 15391, 15401,
15413, 15427, 15439, 15443, 15451, 15461, 15467, 15473, 15493, 15497,
15511, 15527, 15541, 15551, 15559, 15569, 15581, 15583, 15601, 15607,
15619, 15629, 15641, 15643, 15647, 15649, 15661, 15667, 15671, 15679,
15683, 15727, 15731, 15733, 15737, 15739, 15749, 15761, 15767, 15773,
15787, 15791, 15797, 15803, 15809, 15817, 15823, 15859, 15877, 15881,
15887, 15889, 15901, 15907, 15913, 15919, 15923, 15937, 15959, 15971,
15973, 15991, 16001, 16007, 16033, 16057, 16061, 16063, 16067, 16069,
16073, 16087, 16091, 16097, 16103, 16111, 16127, 16139, 16141, 16183,
16187, 16189, 16193, 16217, 16223, 16229, 16231, 16249, 16253, 16267,
16273, 16301, 16319, 16333, 16339, 16349, 16361, 16363, 16369, 16381,
16411, 16417, 16421, 16427, 16433, 16447, 16451, 16453, 16477, 16481,
16487, 16493, 16519, 16529, 16547, 16553, 16561, 16567, 16573, 16603,
16607, 16619, 16631, 16633, 16649, 16651, 16657, 16661, 16673, 16691,
16693, 16699, 16703, 16729, 16741, 16747, 16759, 16763, 16787, 16811,
16823, 16829, 16831, 16843, 16871, 16879, 16883, 16889, 16901, 16903,
16921, 16927, 16931, 16937, 16943, 16963, 16979, 16981, 16987, 16993,
17011, 17021, 17027, 17029, 17033, 17041, 17047, 17053, 17077, 17093,
17099, 17107, 17117, 17123, 17137, 17159, 17167, 17183, 17189, 17191,
17203, 17207, 17209, 17231, 17239, 17257, 17291, 17293, 17299, 17317,
17321, 17327, 17333, 17341, 17351, 17359, 17377, 17383, 17387, 17389,
17393, 17401, 17417, 17419, 17431, 17443, 17449, 17467, 17471, 17477,
17483, 17489, 17491, 17497, 17509, 17519, 17539, 17551, 17569, 17573,
17579, 17581, 17597, 17599, 17609, 17623, 17627, 17657, 17659, 17669,
17681, 17683, 17707, 17713, 17729, 17737, 17747, 17749, 17761, 17783,
17789, 17791, 17807, 17827, 17837, 17839, 17851, 17863, 17881, 17891,
17903, 17909, 17911, 17921, 17923, 17929, 17939, 17957, 17959, 17971,
17977, 17981, 17987, 17989, 18013, 18041, 18043, 18047, 18049, 18059,
18061, 18077, 18089, 18097, 18119, 18121, 18127, 18131, 18133, 18143,
18149, 18169, 18181, 18191, 18199, 18211, 18217, 18223, 18229, 18233,
18251, 18253, 18257, 18269, 18287, 18289, 18301, 18307, 18311, 18313,
18329, 18341, 18353, 18367, 18371, 18379, 18397, 18401, 18413, 18427,
18433, 18439, 18443, 18451, 18457, 18461, 18481, 18493, 18503, 18517,
18521, 18523, 18539, 18541, 18553, 18583, 18587, 18593, 18617, 18637,
18661, 18671, 18679, 18691, 18701, 18713, 18719, 18731, 18743, 18749,
18757, 18773, 18787, 18793, 18797, 18803, 18839, 18859, 18869, 18899,
18911, 18913, 18917, 18919, 18947, 18959, 18973, 18979, 19001, 19009,
19013, 19031, 19037, 19051, 19069, 19073, 19079, 19081, 19087, 19121,
19139, 19141, 19157, 19163, 19181, 19183, 19207, 19211, 19213, 19219,
19231, 19237, 19249, 19259, 19267, 19273, 19289, 19301, 19309, 19319,
19333, 19373, 19379, 19381, 19387, 19391, 19403, 19417, 19421, 19423,
19427, 19429, 19433, 19441, 19447, 19457, 19463, 19469, 19471, 19477,
19483, 19489, 19501, 19507, 19531, 19541, 19543, 19553, 19559, 19571,
19577, 19583, 19597, 19603, 19609, 19661, 19681, 19687, 19697, 19699,
19709, 19717, 19727, 19739, 19751, 19753, 19759, 19763, 19777, 19793,
19801, 19813, 19819, 19841, 19843, 19853, 19861, 19867, 19889, 19891,
19913, 19919, 19927, 19937, 19949, 19961, 19963, 19973, 19979, 19991,
19993, 19997, 20011, 20021, 20023, 20029, 20047, 20051, 20063, 20071,
20089, 20101, 20107, 20113, 20117, 20123, 20129, 20143, 20147, 20149,
20161, 20173, 20177, 20183, 20201, 20219, 20231, 20233, 20249, 20261,
20269, 20287, 20297, 20323, 20327, 20333, 20341, 20347, 20353, 20357,
20359, 20369, 20389, 20393, 20399, 20407, 20411, 20431, 20441, 20443,
20477, 20479, 20483, 20507, 20509, 20521, 20533, 20543, 20549, 20551,
20563, 20593, 20599, 20611, 20627, 20639, 20641, 20663, 20681, 20693,
20707, 20717, 20719, 20731, 20743, 20747, 20749, 20753, 20759, 20771,
20773, 20789, 20807, 20809, 20849, 20857, 20873, 20879, 20887, 20897,
20899, 20903, 20921, 20929, 20939, 20947, 20959, 20963, 20981, 20983,
21001, 21011, 21013, 21017, 21019, 21023, 21031, 21059, 21061, 21067,
21089, 21101, 21107, 21121, 21139, 21143, 21149, 21157, 21163, 21169,
21179, 21187, 21191, 21193, 21211, 21221, 21227, 21247, 21269, 21277,
21283, 21313, 21317, 21319, 21323, 21341, 21347, 21377, 21379, 21383,
21391, 21397, 21401, 21407, 21419, 21433, 21467, 21481, 21487, 21491,
21493, 21499, 21503, 21517, 21521, 21523, 21529, 21557, 21559, 21563,
21569, 21577, 21587, 21589, 21599, 21601, 21611, 21613, 21617, 21647,
21649, 21661, 21673, 21683, 21701, 21713, 21727, 21737, 21739, 21751,
21757, 21767, 21773, 21787, 21799, 21803, 21817, 21821, 21839, 21841,
21851, 21859, 21863, 21871, 21881, 21893, 21911, 21929, 21937, 21943,
21961, 21977, 21991, 21997, 22003, 22013, 22027, 22031, 22037, 22039,
22051, 22063, 22067, 22073, 22079, 22091, 22093, 22109, 22111, 22123,
22129, 22133, 22147, 22153, 22157, 22159, 22171, 22189, 22193, 22229,
22247, 22259, 22271, 22273, 22277, 22279, 22283, 22291, 22303, 22307,
22343, 22349, 22367, 22369, 22381, 22391, 22397, 22409, 22433, 22441,
22447, 22453, 22469, 22481, 22483, 22501, 22511, 22531, 22541, 22543,
22549, 22567, 22571, 22573, 22613, 22619, 22621, 22637, 22639, 22643,
22651, 22669, 22679, 22691, 22697, 22699, 22709, 22717, 22721, 22727,
22739, 22741, 22751, 22769, 22777, 22783, 22787, 22807, 22811, 22817,
22853, 22859, 22861, 22871, 22877, 22901, 22907, 22921, 22937, 22943,
22961, 22963, 22973, 22993, 23003, 23011, 23017, 23021, 23027, 23029,
23039, 23041, 23053, 23057, 23059, 23063, 23071, 23081, 23087, 23099,
23117, 23131, 23143, 23159, 23167, 23173, 23189, 23197, 23201, 23203,
23209, 23227, 23251, 23269, 23279, 23291, 23293, 23297, 23311, 23321,
23327, 23333, 23339, 23357, 23369, 23371, 23399, 23417, 23431, 23447,
23459, 23473, 23497, 23509, 23531, 23537, 23539, 23549, 23557, 23561,
23563, 23567, 23581, 23593, 23599, 23603, 23609, 23623, 23627, 23629,
23633, 23663, 23669, 23671, 23677, 23687, 23689, 23719, 23741, 23743,
23747, 23753, 23761, 23767, 23773, 23789, 23801, 23813, 23819, 23827,
23831, 23833, 23857, 23869, 23873, 23879, 23887, 23893, 23899, 23909,
23911, 23917, 23929, 23957, 23971, 23977, 23981, 23993, 24001, 24007,
24019, 24023, 24029, 24043, 24049, 24061, 24071, 24077, 24083, 24091,
24097, 24103, 24107, 24109, 24113, 24121, 24133, 24137, 24151, 24169,
24179, 24181, 24197, 24203, 24223, 24229, 24239, 24247, 24251, 24281,
24317, 24329, 24337, 24359, 24371, 24373, 24379, 24391, 24407, 24413,
24419, 24421, 24439, 24443, 24469, 24473, 24481, 24499, 24509, 24517,
24527, 24533, 24547, 24551, 24571, 24593, 24611, 24623, 24631, 24659,
24671, 24677, 24683, 24691, 24697, 24709, 24733, 24749, 24763, 24767,
24781, 24793, 24799, 24809, 24821, 24841, 24847, 24851, 24859, 24877,
24889, 24907, 24917, 24919, 24923, 24943, 24953, 24967, 24971, 24977,
24979, 24989, 25013, 25031, 25033, 25037, 25057, 25073, 25087, 25097,
25111, 25117, 25121, 25127, 25147, 25153, 25163, 25169, 25171, 25183,
25189, 25219, 25229, 25237, 25243, 25247, 25253, 25261, 25301, 25303,
25307, 25309, 25321, 25339, 25343, 25349, 25357, 25367, 25373, 25391,
25409, 25411, 25423, 25439, 25447, 25453, 25457, 25463, 25469, 25471,
25523, 25537, 25541, 25561, 25577, 25579, 25583, 25589, 25601, 25603,
25609, 25621, 25633, 25639, 25643, 25657, 25667, 25673, 25679, 25693,
25703, 25717, 25733, 25741, 25747, 25759, 25763, 25771, 25793, 25799,
25801, 25819, 25841, 25847, 25849, 25867, 25873, 25889, 25903, 25913,
25919, 25931, 25933, 25939, 25943, 25951, 25969, 25981, 25997, 25999,
26003, 26017, 26021, 26029, 26041, 26053, 26083, 26099, 26107, 26111,
26113, 26119, 26141, 26153, 26161, 26171, 26177, 26183, 26189, 26203,
26209, 26227, 26237, 26249, 26251, 26261, 26263, 26267, 26293, 26297,
26309, 26317, 26321, 26339, 26347, 26357, 26371, 26387, 26393, 26399,
26407, 26417, 26423, 26431, 26437, 26449, 26459, 26479, 26489, 26497,
26501, 26513, 26539, 26557, 26561, 26573, 26591, 26597, 26627, 26633,
26641, 26647, 26669, 26681, 26683, 26687, 26693, 26699, 26701, 26711,
26713, 26717, 26723, 26729, 26731, 26737, 26759, 26777, 26783, 26801,
26813, 26821, 26833, 26839, 26849, 26861, 26863, 26879, 26881, 26891,
26893, 26903, 26921, 26927, 26947, 26951, 26953, 26959, 26981, 26987,
26993, 27011, 27017, 27031, 27043, 27059, 27061, 27067, 27073, 27077,
27091, 27103, 27107, 27109, 27127, 27143, 27179, 27191, 27197, 27211,
27239, 27241, 27253, 27259, 27271, 27277, 27281, 27283, 27299, 27329,
27337, 27361, 27367, 27397, 27407, 27409, 27427, 27431, 27437, 27449,
27457, 27479, 27481, 27487, 27509, 27527, 27529, 27539, 27541, 27551,
27581, 27583, 27611, 27617, 27631, 27647, 27653, 27673, 27689, 27691,
27697, 27701, 27733, 27737, 27739, 27743, 27749, 27751, 27763, 27767,
27773, 27779, 27791, 27793, 27799, 27803, 27809, 27817, 27823, 27827,
27847, 27851, 27883, 27893, 27901, 27917, 27919, 27941, 27943, 27947,
27953, 27961, 27967, 27983, 27997, 28001, 28019, 28027, 28031, 28051,
28057, 28069, 28081, 28087, 28097, 28099, 28109, 28111, 28123, 28151,
28163, 28181, 28183, 28201, 28211, 28219, 28229, 28277, 28279, 28283,
28289, 28297, 28307, 28309, 28319, 28349, 28351, 28387, 28393, 28403,
28409, 28411, 28429, 28433, 28439, 28447, 28463, 28477, 28493, 28499,
28513, 28517, 28537, 28541, 28547, 28549, 28559, 28571, 28573, 28579,
28591, 28597, 28603, 28607, 28619, 28621, 28627, 28631, 28643, 28649,
28657, 28661, 28663, 28669, 28687, 28697, 28703, 28711, 28723, 28729,
28751, 28753, 28759, 28771, 28789, 28793, 28807, 28813, 28817, 28837,
28843, 28859, 28867, 28871, 28879, 28901, 28909, 28921, 28927, 28933,
28949, 28961, 28979, 29009, 29017, 29021, 29023, 29027, 29033, 29059,
29063, 29077, 29101, 29123, 29129, 29131, 29137, 29147, 29153, 29167,
29173, 29179, 29191, 29201, 29207, 29209, 29221, 29231, 29243, 29251,
29269, 29287, 29297, 29303, 29311, 29327, 29333, 29339, 29347, 29363,
29383, 29387, 29389, 29399, 29401, 29411, 29423, 29429, 29437, 29443,
29453, 29473, 29483, 29501, 29527, 29531, 29537, 29567, 29569, 29573,
29581, 29587, 29599, 29611, 29629, 29633, 29641, 29663, 29669, 29671,
29683, 29717, 29723, 29741, 29753, 29759, 29761, 29789, 29803, 29819,
29833, 29837, 29851, 29863, 29867, 29873, 29879, 29881, 29917, 29921,
29927, 29947, 29959, 29983, 29989, 30011, 30013, 30029, 30047, 30059,
30071, 30089, 30091, 30097, 30103, 30109, 30113, 30119, 30133, 30137,
30139, 30161, 30169, 30181, 30187, 30197, 30203, 30211, 30223, 30241,
30253, 30259, 30269, 30271, 30293, 30307, 30313, 30319, 30323, 30341,
30347, 30367, 30389, 30391, 30403, 30427, 30431, 30449, 30467, 30469,
30491, 30493, 30497, 30509, 30517, 30529, 30539, 30553, 30557, 30559,
30577, 30593, 30631, 30637, 30643, 30649, 30661, 30671, 30677, 30689,
30697, 30703, 30707, 30713, 30727, 30757, 30763, 30773, 30781, 30803,
30809, 30817, 30829, 30839, 30841, 30851, 30853, 30859, 30869, 30871,
30881, 30893, 30911, 30931, 30937, 30941, 30949, 30971, 30977, 30983,
31013, 31019, 31033, 31039, 31051, 31063, 31069, 31079, 31081, 31091,
31121, 31123, 31139, 31147, 31151, 31153, 31159, 31177, 31181, 31183,
31189, 31193, 31219, 31223, 31231, 31237, 31247, 31249, 31253, 31259,
31267, 31271, 31277, 31307, 31319, 31321, 31327, 31333, 31337, 31357,
31379, 31387, 31391, 31393, 31397, 31469, 31477, 31481, 31489, 31511,
31513, 31517, 31531, 31541, 31543, 31547, 31567, 31573, 31583, 31601,
31607, 31627, 31643, 31649, 31657, 31663, 31667, 31687, 31699, 31721,
31723, 31727, 31729, 31741, 31751, 31769, 31771, 31793, 31799, 31817,
31847, 31849, 31859, 31873, 31883, 31891, 31907, 31957, 31963, 31973,
31981, 31991, 32003, 32009, 32027, 32029, 32051, 32057, 32059, 32063,
32069, 32077, 32083, 32089, 32099, 32117, 32119, 32141, 32143, 32159,
32173, 32183, 32189, 32191, 32203, 32213, 32233, 32237, 32251, 32257,
32261, 32297, 32299, 32303, 32309, 32321, 32323, 32327, 32341, 32353,
32359, 32363, 32369, 32371, 32377, 32381, 32401, 32411, 32413, 32423,
32429, 32441, 32443, 32467, 32479, 32491, 32497, 32503, 32507, 32531,
32533, 32537, 32561, 32563, 32569, 32573, 32579, 32587, 32603, 32609,
32611, 32621, 32633, 32647, 32653, 32687, 32693, 32707, 32713, 32717,
32719, 32749, 32771, 32779, 32783, 32789, 32797, 32801, 32803, 32831,
32833, 32839, 32843, 32869, 32887, 32909, 32911, 32917, 32933, 32939,
32941, 32957, 32969, 32971, 32983, 32987, 32993, 32999, 33013, 33023,
33029, 33037, 33049, 33053, 33071, 33073, 33083, 33091, 33107, 33113,
33119, 33149, 33151, 33161, 33179, 33181, 33191, 33199, 33203, 33211,
33223, 33247, 33287, 33289, 33301, 33311, 33317, 33329, 33331, 33343,
33347, 33349, 33353, 33359, 33377, 33391, 33403, 33409, 33413, 33427,
33457, 33461, 33469, 33479, 33487, 33493, 33503, 33521, 33529, 33533,
33547, 33563, 33569, 33577, 33581, 33587, 33589, 33599, 33601, 33613,
33617, 33619, 33623, 33629, 33637, 33641, 33647, 33679, 33703, 33713,
33721, 33739, 33749, 33751, 33757, 33767, 33769, 33773, 33791, 33797,
33809, 33811, 33827, 33829, 33851, 33857, 33863, 33871, 33889, 33893,
33911, 33923, 33931, 33937, 33941, 33961, 33967, 33997, 34019, 34031,
34033, 34039, 34057, 34061, 34123, 34127, 34129, 34141, 34147, 34157,
34159, 34171, 34183, 34211, 34213, 34217, 34231, 34253, 34259, 34261,
34267, 34273, 34283, 34297, 34301, 34303, 34313, 34319, 34327, 34337,
34351, 34361, 34367, 34369, 34381, 34403, 34421, 34429, 34439, 34457,
34469, 34471, 34483, 34487, 34499, 34501, 34511, 34513, 34519, 34537,
34543, 34549, 34583, 34589, 34591, 34603, 34607, 34613, 34631, 34649,
34651, 34667, 34673, 34679, 34687, 34693, 34703, 34721, 34729, 34739,
34747, 34757, 34759, 34763, 34781, 34807, 34819, 34841, 34843, 34847,
34849, 34871, 34877, 34883, 34897, 34913, 34919, 34939, 34949, 34961,
34963, 34981, 35023, 35027, 35051, 35053, 35059, 35069, 35081, 35083,
35089, 35099, 35107, 35111, 35117, 35129, 35141, 35149, 35153, 35159,
35171, 35201, 35221, 35227, 35251, 35257, 35267, 35279, 35281, 35291,
35311, 35317, 35323, 35327, 35339, 35353, 35363, 35381, 35393, 35401,
35407, 35419, 35423, 35437, 35447, 35449, 35461, 35491, 35507, 35509,
35521, 35527, 35531, 35533, 35537, 35543, 35569, 35573, 35591, 35593,
35597, 35603, 35617, 35671, 35677, 35729, 35731, 35747, 35753, 35759,
35771, 35797, 35801, 35803, 35809, 35831, 35837, 35839, 35851, 35863,
35869, 35879, 35897, 35899, 35911, 35923, 35933, 35951, 35963, 35969,
35977, 35983, 35993, 35999, 36007, 36011, 36013, 36017, 36037, 36061,
36067, 36073, 36083, 36097, 36107, 36109, 36131, 36137, 36151, 36161,
36187, 36191, 36209, 36217, 36229, 36241, 36251, 36263, 36269, 36277,
36293, 36299, 36307, 36313, 36319, 36341, 36343, 36353, 36373, 36383,
36389, 36433, 36451, 36457, 36467, 36469, 36473, 36479, 36493, 36497,
36523, 36527, 36529, 36541, 36551, 36559, 36563, 36571, 36583, 36587,
36599, 36607, 36629, 36637, 36643, 36653, 36671, 36677, 36683, 36691,
36697, 36709, 36713, 36721, 36739, 36749, 36761, 36767, 36779, 36781,
36787, 36791, 36793, 36809, 36821, 36833, 36847, 36857, 36871, 36877,
36887, 36899, 36901, 36913, 36919, 36923, 36929, 36931, 36943, 36947,
36973, 36979, 36997, 37003, 37013, 37019, 37021, 37039, 37049, 37057,
37061, 37087, 37097, 37117, 37123, 37139, 37159, 37171, 37181, 37189,
37199, 37201, 37217, 37223, 37243, 37253, 37273, 37277, 37307, 37309,
37313, 37321, 37337, 37339, 37357, 37361, 37363, 37369, 37379, 37397,
37409, 37423, 37441, 37447, 37463, 37483, 37489, 37493, 37501, 37507,
37511, 37517, 37529, 37537, 37547, 37549, 37561, 37567, 37571, 37573,
37579, 37589, 37591, 37607, 37619, 37633, 37643, 37649, 37657, 37663,
37691, 37693, 37699, 37717, 37747, 37781, 37783, 37799, 37811, 37813,
37831, 37847, 37853, 37861, 37871, 37879, 37889, 37897, 37907, 37951,
37957, 37963, 37967, 37987, 37991, 37993, 37997, 38011, 38039, 38047,
38053, 38069, 38083, 38113, 38119, 38149, 38153, 38167, 38177, 38183,
38189, 38197, 38201, 38219, 38231, 38237, 38239, 38261, 38273, 38281,
38287, 38299, 38303, 38317, 38321, 38327, 38329, 38333, 38351, 38371,
38377, 38393, 38431, 38447, 38449, 38453, 38459, 38461, 38501, 38543,
38557, 38561, 38567, 38569, 38593, 38603, 38609, 38611, 38629, 38639,
38651, 38653, 38669, 38671, 38677, 38693, 38699, 38707, 38711, 38713,
38723, 38729, 38737, 38747, 38749, 38767, 38783, 38791, 38803, 38821,
38833, 38839, 38851, 38861, 38867, 38873, 38891, 38903, 38917, 38921,
38923, 38933, 38953, 38959, 38971, 38977, 38993, 39019, 39023, 39041,
39043, 39047, 39079, 39089, 39097, 39103, 39107, 39113, 39119, 39133,
39139, 39157, 39161, 39163, 39181, 39191, 39199, 39209, 39217, 39227,
39229, 39233, 39239, 39241, 39251, 39293, 39301, 39313, 39317, 39323,
39341, 39343, 39359, 39367, 39371, 39373, 39383, 39397, 39409, 39419,
39439, 39443, 39451, 39461, 39499, 39503, 39509, 39511, 39521, 39541,
39551, 39563, 39569, 39581, 39607, 39619, 39623, 39631, 39659, 39667,
39671, 39679, 39703, 39709, 39719, 39727, 39733, 39749, 39761, 39769,
39779, 39791, 39799, 39821, 39827, 39829, 39839, 39841, 39847, 39857,
39863, 39869, 39877, 39883, 39887, 39901, 39929, 39937, 39953, 39971,
39979, 39983, 39989, 40009, 40013, 40031, 40037, 40039, 40063, 40087,
40093, 40099, 40111, 40123, 40127, 40129, 40151, 40153, 40163, 40169,
40177, 40189, 40193, 40213, 40231, 40237, 40241, 40253, 40277, 40283,
40289, 40343, 40351, 40357, 40361, 40387, 40423, 40427, 40429, 40433,
40459, 40471, 40483, 40487, 40493, 40499, 40507, 40519, 40529, 40531,
40543, 40559, 40577, 40583, 40591, 40597, 40609, 40627, 40637, 40639,
40693, 40697, 40699, 40709, 40739, 40751, 40759, 40763, 40771, 40787,
40801, 40813, 40819, 40823, 40829, 40841, 40847, 40849, 40853, 40867,
40879, 40883, 40897, 40903, 40927, 40933, 40939, 40949, 40961, 40973,
40993, 41011, 41017, 41023, 41039, 41047, 41051, 41057, 41077, 41081,
41113, 41117, 41131, 41141, 41143, 41149, 41161, 41177, 41179, 41183,
41189, 41201, 41203, 41213, 41221, 41227, 41231, 41233, 41243, 41257,
41263, 41269, 41281, 41299, 41333, 41341, 41351, 41357, 41381, 41387,
41389, 41399, 41411, 41413, 41443, 41453, 41467, 41479, 41491, 41507,
41513, 41519, 41521, 41539, 41543, 41549, 41579, 41593, 41597, 41603,
41609, 41611, 41617, 41621, 41627, 41641, 41647, 41651, 41659, 41669,
41681, 41687, 41719, 41729, 41737, 41759, 41761, 41771, 41777, 41801,
41809, 41813, 41843, 41849, 41851, 41863, 41879, 41887, 41893, 41897,
41903, 41911, 41927, 41941, 41947, 41953, 41957, 41959, 41969, 41981,
41983, 41999, 42013, 42017, 42019, 42023, 42043, 42061, 42071, 42073,
42083, 42089, 42101, 42131, 42139, 42157, 42169, 42179, 42181, 42187,
42193, 42197, 42209, 42221, 42223, 42227, 42239, 42257, 42281, 42283,
42293, 42299, 42307, 42323, 42331, 42337, 42349, 42359, 42373, 42379,
42391, 42397, 42403, 42407, 42409, 42433, 42437, 42443, 42451, 42457,
42461, 42463, 42467, 42473, 42487, 42491, 42499, 42509, 42533, 42557,
42569, 42571, 42577, 42589, 42611, 42641, 42643, 42649, 42667, 42677,
42683, 42689, 42697, 42701, 42703, 42709, 42719, 42727, 42737, 42743,
42751, 42767, 42773, 42787, 42793, 42797, 42821, 42829, 42839, 42841,
42853, 42859, 42863, 42899, 42901, 42923, 42929, 42937, 42943, 42953,
42961, 42967, 42979, 42989, 43003, 43013, 43019, 43037, 43049, 43051,
43063, 43067, 43093, 43103, 43117, 43133, 43151, 43159, 43177, 43189,
43201, 43207, 43223, 43237, 43261, 43271, 43283, 43291, 43313, 43319,
43321, 43331, 43391, 43397, 43399, 43403, 43411, 43427, 43441, 43451,
43457, 43481, 43487, 43499, 43517, 43541, 43543, 43573, 43577, 43579,
43591, 43597, 43607, 43609, 43613, 43627, 43633, 43649, 43651, 43661,
43669, 43691, 43711, 43717, 43721, 43753, 43759, 43777, 43781, 43783,
43787, 43789, 43793, 43801, 43853, 43867, 43889, 43891, 43913, 43933,
43943, 43951, 43961, 43963, 43969, 43973, 43987, 43991, 43997, 44017,
44021, 44027, 44029, 44041, 44053, 44059, 44071, 44087, 44089, 44101,
44111, 44119, 44123, 44129, 44131, 44159, 44171, 44179, 44189, 44201,
44203, 44207, 44221, 44249, 44257, 44263, 44267, 44269, 44273, 44279,
44281, 44293, 44351, 44357, 44371, 44381, 44383, 44389, 44417, 44449,
44453, 44483, 44491, 44497, 44501, 44507, 44519, 44531, 44533, 44537,
44543, 44549, 44563, 44579, 44587, 44617, 44621, 44623, 44633, 44641,
44647, 44651, 44657, 44683, 44687, 44699, 44701, 44711, 44729, 44741,
44753, 44771, 44773, 44777, 44789, 44797, 44809, 44819, 44839, 44843,
44851, 44867, 44879, 44887, 44893, 44909, 44917, 44927, 44939, 44953,
44959, 44963, 44971, 44983, 44987, 45007, 45013, 45053, 45061, 45077,
45083, 45119, 45121, 45127, 45131, 45137, 45139, 45161, 45179, 45181,
45191, 45197, 45233, 45247, 45259, 45263, 45281, 45289, 45293, 45307,
45317, 45319, 45329, 45337, 45341, 45343, 45361, 45377, 45389, 45403,
45413, 45427, 45433, 45439, 45481, 45491, 45497, 45503, 45523, 45533,
45541, 45553, 45557, 45569, 45587, 45589, 45599, 45613, 45631, 45641,
45659, 45667, 45673, 45677, 45691, 45697, 45707, 45737, 45751, 45757,
45763, 45767, 45779, 45817, 45821, 45823, 45827, 45833, 45841, 45853,
45863, 45869, 45887, 45893, 45943, 45949, 45953, 45959, 45971, 45979,
45989, 46021, 46027, 46049, 46051, 46061, 46073, 46091, 46093, 46099,
46103, 46133, 46141, 46147, 46153, 46171, 46181, 46183, 46187, 46199,
46219, 46229, 46237, 46261, 46271, 46273, 46279, 46301, 46307, 46309,
46327, 46337, 46349, 46351, 46381, 46399, 46411, 46439, 46441, 46447,
46451, 46457, 46471, 46477, 46489, 46499, 46507, 46511, 46523, 46549,
46559, 46567, 46573, 46589, 46591, 46601, 46619, 46633, 46639, 46643,
46649, 46663, 46679, 46681, 46687, 46691, 46703, 46723, 46727, 46747,
46751, 46757, 46769, 46771, 46807, 46811, 46817, 46819, 46829, 46831,
46853, 46861, 46867, 46877, 46889, 46901, 46919, 46933, 46957, 46993,
46997, 47017, 47041, 47051, 47057, 47059, 47087, 47093, 47111, 47119,
47123, 47129, 47137, 47143, 47147, 47149, 47161, 47189, 47207, 47221,
47237, 47251, 47269, 47279, 47287, 47293, 47297, 47303, 47309, 47317,
47339, 47351, 47353, 47363, 47381, 47387, 47389, 47407, 47417, 47419,
47431, 47441, 47459, 47491, 47497, 47501, 47507, 47513, 47521, 47527,
47533, 47543, 47563, 47569, 47581, 47591, 47599, 47609, 47623, 47629,
47639, 47653, 47657, 47659, 47681, 47699, 47701, 47711, 47713, 47717,
47737, 47741, 47743, 47777, 47779, 47791, 47797, 47807, 47809, 47819,
47837, 47843, 47857, 47869, 47881, 47903, 47911, 47917, 47933, 47939,
47947, 47951, 47963, 47969, 47977, 47981, 48017, 48023, 48029, 48049,
48073, 48079, 48091, 48109, 48119, 48121, 48131, 48157, 48163, 48179,
48187, 48193, 48197, 48221, 48239, 48247, 48259, 48271, 48281, 48299,
48311, 48313, 48337, 48341, 48353, 48371, 48383, 48397, 48407, 48409,
48413, 48437, 48449, 48463, 48473, 48479, 48481, 48487, 48491, 48497,
48523, 48527, 48533, 48539, 48541, 48563, 48571, 48589, 48593, 48611,
48619, 48623, 48647, 48649, 48661, 48673, 48677, 48679, 48731, 48733,
48751, 48757, 48761, 48767, 48779, 48781, 48787, 48799, 48809, 48817,
48821, 48823, 48847, 48857, 48859, 48869, 48871, 48883, 48889, 48907,
48947, 48953, 48973, 48989, 48991, 49003, 49009, 49019, 49031, 49033,
49037, 49043, 49057, 49069, 49081, 49103, 49109, 49117, 49121, 49123,
49139, 49157, 49169, 49171, 49177, 49193, 49199, 49201, 49207, 49211,
49223, 49253, 49261, 49277, 49279, 49297, 49307, 49331, 49333, 49339,
49363, 49367, 49369, 49391, 49393, 49409, 49411, 49417, 49429, 49433,
49451, 49459, 49463, 49477, 49481, 49499, 49523, 49529, 49531, 49537,
49547, 49549, 49559, 49597, 49603, 49613, 49627, 49633, 49639, 49663,
49667, 49669, 49681, 49697, 49711, 49727, 49739, 49741, 49747, 49757,
49783, 49787, 49789, 49801, 49807, 49811, 49823, 49831, 49843, 49853,
49871, 49877, 49891, 49919, 49921, 49927, 49937, 49939, 49943, 49957,
49991, 49993, 49999, 50021, 50023, 50033, 50047, 50051, 50053, 50069,
50077, 50087, 50093, 50101, 50111, 50119, 50123, 50129, 50131, 50147,
50153, 50159, 50177, 50207, 50221, 50227, 50231, 50261, 50263, 50273,
50287, 50291, 50311, 50321, 50329, 50333, 50341, 50359, 50363, 50377,
50383, 50387, 50411, 50417, 50423, 50441, 50459, 50461, 50497, 50503,
50513, 50527, 50539, 50543, 50549, 50551, 50581, 50587, 50591, 50593,
50599, 50627, 50647, 50651, 50671, 50683, 50707, 50723, 50741, 50753,
50767, 50773, 50777, 50789, 50821, 50833, 50839, 50849, 50857, 50867,
50873, 50891, 50893, 50909, 50923, 50929, 50951, 50957, 50969, 50971,
50989, 50993, 51001, 51031, 51043, 51047, 51059, 51061, 51071, 51109,
51131, 51133, 51137, 51151, 51157, 51169, 51193, 51197, 51199, 51203,
51217, 51229, 51239, 51241, 51257, 51263, 51283, 51287, 51307, 51329,
51341, 51343, 51347, 51349, 51361, 51383, 51407, 51413, 51419, 51421,
51427, 51431, 51437, 51439, 51449, 51461, 51473, 51479, 51481, 51487,
51503, 51511, 51517, 51521, 51539, 51551, 51563, 51577, 51581, 51593,
51599, 51607, 51613, 51631, 51637, 51647, 51659, 51673, 51679, 51683,
51691, 51713, 51719, 51721, 51749, 51767, 51769, 51787, 51797, 51803,
51817, 51827, 51829, 51839, 51853, 51859, 51869, 51871, 51893, 51899,
51907, 51913, 51929, 51941, 51949, 51971, 51973, 51977, 51991, 52009,
52021, 52027, 52051, 52057, 52067, 52069, 52081, 52103, 52121, 52127,
52147, 52153, 52163, 52177, 52181, 52183, 52189, 52201, 52223, 52237,
52249, 52253, 52259, 52267, 52289, 52291, 52301, 52313, 52321, 52361,
52363, 52369, 52379, 52387, 52391, 52433, 52453, 52457, 52489, 52501,
52511, 52517, 52529, 52541, 52543, 52553, 52561, 52567, 52571, 52579,
52583, 52609, 52627, 52631, 52639, 52667, 52673, 52691, 52697, 52709,
52711, 52721, 52727, 52733, 52747, 52757, 52769, 52783, 52807, 52813,
52817, 52837, 52859, 52861, 52879, 52883, 52889, 52901, 52903, 52919,
52937, 52951, 52957, 52963, 52967, 52973, 52981, 52999, 53003, 53017,
53047, 53051, 53069, 53077, 53087, 53089, 53093, 53101, 53113, 53117,
53129, 53147, 53149, 53161, 53171, 53173, 53189, 53197, 53201, 53231,
53233, 53239, 53267, 53269, 53279, 53281, 53299, 53309, 53323, 53327,
53353, 53359, 53377, 53381, 53401, 53407, 53411, 53419, 53437, 53441,
53453, 53479, 53503, 53507, 53527, 53549, 53551, 53569, 53591, 53593,
53597, 53609, 53611, 53617, 53623, 53629, 53633, 53639, 53653, 53657,
53681, 53693, 53699, 53717, 53719, 53731, 53759, 53773, 53777, 53783,
53791, 53813, 53819, 53831, 53849, 53857, 53861, 53881, 53887, 53891,
53897, 53899, 53917, 53923, 53927, 53939, 53951, 53959, 53987, 53993,
54001, 54011, 54013, 54037, 54049, 54059, 54083, 54091, 54101, 54121,
54133, 54139, 54151, 54163, 54167, 54181, 54193, 54217, 54251, 54269,
54277, 54287, 54293, 54311, 54319, 54323, 54331, 54347, 54361, 54367,
54371, 54377, 54401, 54403, 54409, 54413, 54419, 54421, 54437, 54443,
54449, 54469, 54493, 54497, 54499, 54503, 54517, 54521, 54539, 54541,
54547, 54559, 54563, 54577, 54581, 54583, 54601, 54617, 54623, 54629,
54631, 54647, 54667, 54673, 54679, 54709, 54713, 54721, 54727, 54751,
54767, 54773, 54779, 54787, 54799, 54829, 54833, 54851, 54869, 54877,
54881, 54907, 54917, 54919, 54941, 54949, 54959, 54973, 54979, 54983,
55001, 55009, 55021, 55049, 55051, 55057, 55061, 55073, 55079, 55103,
55109, 55117, 55127, 55147, 55163, 55171, 55201, 55207, 55213, 55217,
55219, 55229, 55243, 55249, 55259, 55291, 55313, 55331, 55333, 55337,
55339, 55343, 55351, 55373, 55381, 55399, 55411, 55439, 55441, 55457,
55469, 55487, 55501, 55511, 55529, 55541, 55547, 55579, 55589, 55603,
55609, 55619, 55621, 55631, 55633, 55639, 55661, 55663, 55667, 55673,
55681, 55691, 55697, 55711, 55717, 55721, 55733, 55763, 55787, 55793,
55799, 55807, 55813, 55817, 55819, 55823, 55829, 55837, 55843, 55849,
55871, 55889, 55897, 55901, 55903, 55921, 55927, 55931, 55933, 55949,
55967, 55987, 55997, 56003, 56009, 56039, 56041, 56053, 56081, 56087,
56093, 56099, 56101, 56113, 56123, 56131, 56149, 56167, 56171, 56179,
56197, 56207, 56209, 56237, 56239, 56249, 56263, 56267, 56269, 56299,
56311, 56333, 56359, 56369, 56377, 56383, 56393, 56401, 56417, 56431,
56437, 56443, 56453, 56467, 56473, 56477, 56479, 56489, 56501, 56503,
56509, 56519, 56527, 56531, 56533, 56543, 56569, 56591, 56597, 56599,
56611, 56629, 56633, 56659, 56663, 56671, 56681, 56687, 56701, 56711,
56713, 56731, 56737, 56747, 56767, 56773, 56779, 56783, 56807, 56809,
56813, 56821, 56827, 56843, 56857, 56873, 56891, 56893, 56897, 56909,
56911, 56921, 56923, 56929, 56941, 56951, 56957, 56963, 56983, 56989,
56993, 56999, 57037, 57041, 57047, 57059, 57073, 57077, 57089, 57097,
57107, 57119, 57131, 57139, 57143, 57149, 57163, 57173, 57179, 57191,
57193, 57203, 57221, 57223, 57241, 57251, 57259, 57269, 57271, 57283,
57287, 57301, 57329, 57331, 57347, 57349, 57367, 57373, 57383, 57389,
57397, 57413, 57427, 57457, 57467, 57487, 57493, 57503, 57527, 57529,
57557, 57559, 57571, 57587, 57593, 57601, 57637, 57641, 57649, 57653,
57667, 57679, 57689, 57697, 57709, 57713, 57719, 57727, 57731, 57737,
57751, 57773, 57781, 57787, 57791, 57793, 57803, 57809, 57829, 57839,
57847, 57853, 57859, 57881, 57899, 57901, 57917, 57923, 57943, 57947,
57973, 57977, 57991, 58013, 58027, 58031, 58043, 58049, 58057, 58061,
58067, 58073, 58099, 58109, 58111, 58129, 58147, 58151, 58153, 58169,
58171, 58189, 58193, 58199, 58207, 58211, 58217, 58229, 58231, 58237,
58243, 58271, 58309, 58313, 58321, 58337, 58363, 58367, 58369, 58379,
58391, 58393, 58403, 58411, 58417, 58427, 58439, 58441, 58451, 58453,
58477, 58481, 58511, 58537, 58543, 58549, 58567, 58573, 58579, 58601,
58603, 58613, 58631, 58657, 58661, 58679, 58687, 58693, 58699, 58711,
58727, 58733, 58741, 58757, 58763, 58771, 58787, 58789, 58831, 58889,
58897, 58901, 58907, 58909, 58913, 58921, 58937, 58943, 58963, 58967,
58979, 58991, 58997, 59009, 59011, 59021, 59023, 59029, 59051, 59053,
59063, 59069, 59077, 59083, 59093, 59107, 59113, 59119, 59123, 59141,
59149, 59159, 59167, 59183, 59197, 59207, 59209, 59219, 59221, 59233,
59239, 59243, 59263, 59273, 59281, 59333, 59341, 59351, 59357, 59359,
59369, 59377, 59387, 59393, 59399, 59407, 59417, 59419, 59441, 59443,
59447, 59453, 59467, 59471, 59473, 59497, 59509, 59513, 59539, 59557,
59561, 59567, 59581, 59611, 59617, 59621, 59627, 59629, 59651, 59659,
59663, 59669, 59671, 59693, 59699, 59707, 59723, 59729, 59743, 59747,
59753, 59771, 59779, 59791, 59797, 59809, 59833, 59863, 59879, 59887,
59921, 59929, 59951, 59957, 59971, 59981, 59999, 60013, 60017, 60029,
60037, 60041, 60077, 60083, 60089, 60091, 60101, 60103, 60107, 60127,
60133, 60139, 60149, 60161, 60167, 60169, 60209, 60217, 60223, 60251,
60257, 60259, 60271, 60289, 60293, 60317, 60331, 60337, 60343, 60353,
60373, 60383, 60397, 60413, 60427, 60443, 60449, 60457, 60493, 60497,
60509, 60521, 60527, 60539, 60589, 60601, 60607, 60611, 60617, 60623,
60631, 60637, 60647, 60649, 60659, 60661, 60679, 60689, 60703, 60719,
60727, 60733, 60737, 60757, 60761, 60763, 60773, 60779, 60793, 60811,
60821, 60859, 60869, 60887, 60889, 60899, 60901, 60913, 60917, 60919,
60923, 60937, 60943, 60953, 60961, 61001, 61007, 61027, 61031, 61043,
61051, 61057, 61091, 61099, 61121, 61129, 61141, 61151, 61153, 61169,
61211, 61223, 61231, 61253, 61261, 61283, 61291, 61297, 61331, 61333,
61339, 61343, 61357, 61363, 61379, 61381, 61403, 61409, 61417, 61441,
61463, 61469, 61471, 61483, 61487, 61493, 61507, 61511, 61519, 61543,
61547, 61553, 61559, 61561, 61583, 61603, 61609, 61613, 61627, 61631,
61637, 61643, 61651, 61657, 61667, 61673, 61681, 61687, 61703, 61717,
61723, 61729, 61751, 61757, 61781, 61813, 61819, 61837, 61843, 61861,
61871, 61879, 61909, 61927, 61933, 61949, 61961, 61967, 61979, 61981,
61987, 61991, 62003, 62011, 62017, 62039, 62047, 62053, 62057, 62071,
62081, 62099, 62119, 62129, 62131, 62137, 62141, 62143, 62171, 62189,
62191, 62201, 62207, 62213, 62219, 62233, 62273, 62297, 62299, 62303,
62311, 62323, 62327, 62347, 62351, 62383, 62401, 62417, 62423, 62459,
62467, 62473, 62477, 62483, 62497, 62501, 62507, 62533, 62539, 62549,
62563, 62581, 62591, 62597, 62603, 62617, 62627, 62633, 62639, 62653,
62659, 62683, 62687, 62701, 62723, 62731, 62743, 62753, 62761, 62773,
62791, 62801, 62819, 62827, 62851, 62861, 62869, 62873, 62897, 62903,
62921, 62927, 62929, 62939, 62969, 62971, 62981, 62983, 62987, 62989,
63029, 63031, 63059, 63067, 63073, 63079, 63097, 63103, 63113, 63127,
63131, 63149, 63179, 63197, 63199, 63211, 63241, 63247, 63277, 63281,
63299, 63311, 63313, 63317, 63331, 63337, 63347, 63353, 63361, 63367,
63377, 63389, 63391, 63397, 63409, 63419, 63421, 63439, 63443, 63463,
63467, 63473, 63487, 63493, 63499, 63521, 63527, 63533, 63541, 63559,
63577, 63587, 63589, 63599, 63601, 63607, 63611, 63617, 63629, 63647,
63649, 63659, 63667, 63671, 63689, 63691, 63697, 63703, 63709, 63719,
63727, 63737, 63743, 63761, 63773, 63781, 63793, 63799, 63803, 63809,
63823, 63839, 63841, 63853, 63857, 63863, 63901, 63907, 63913, 63929,
63949, 63977, 63997, 64007, 64013, 64019, 64033, 64037, 64063, 64067,
64081, 64091, 64109, 64123, 64151, 64153, 64157, 64171, 64187, 64189,
64217, 64223, 64231, 64237, 64271, 64279, 64283, 64301, 64303, 64319,
64327, 64333, 64373, 64381, 64399, 64403, 64433, 64439, 64451, 64453,
64483, 64489, 64499, 64513, 64553, 64567, 64577, 64579, 64591, 64601,
64609, 64613, 64621, 64627, 64633, 64661, 64663, 64667, 64679, 64693,
64709, 64717, 64747, 64763, 64781, 64783, 64793, 64811, 64817, 64849,
64853, 64871, 64877, 64879, 64891, 64901, 64919, 64921, 64927, 64937,
64951, 64969, 64997, 65003, 65011, 65027, 65029, 65033, 65053, 65063,
65071, 65089, 65099, 65101, 65111, 65119, 65123, 65129, 65141, 65147,
65167, 65171, 65173, 65179, 65183, 65203, 65213, 65239, 65257, 65267,
65269, 65287, 65293, 65309, 65323, 65327, 65353, 65357, 65371, 65381,
65393, 65407, 65413, 65419, 65423, 65437, 65447, 65449, 65479, 65497,
65519, 65521, 65537, 65539, 65543, 65551, 65557, 65563, 65579, 65581,
65587, 65599, 65609, 65617, 65629, 65633, 65647, 65651, 65657, 65677,
65687, 65699, 65701, 65707, 65713, 65717, 65719, 65729, 65731, 65761,
65777, 65789, 65809, 65827, 65831, 65837, 65839, 65843, 65851, 65867,
65881, 65899, 65921, 65927, 65929, 65951, 65957, 65963, 65981, 65983,
65993, 66029, 66037, 66041, 66047, 66067, 66071, 66083, 66089, 66103,
66107, 66109, 66137, 66161, 66169, 66173, 66179, 66191, 66221, 66239,
66271, 66293, 66301, 66337, 66343, 66347, 66359, 66361, 66373, 66377,
66383, 66403, 66413, 66431, 66449, 66457, 66463, 66467, 66491, 66499,
66509, 66523, 66529, 66533, 66541, 66553, 66569, 66571, 66587, 66593,
66601, 66617, 66629, 66643, 66653, 66683, 66697, 66701, 66713, 66721,
66733, 66739, 66749, 66751, 66763, 66791, 66797, 66809, 66821, 66841,
66851, 66853, 66863, 66877, 66883, 66889, 66919, 66923, 66931, 66943,
66947, 66949, 66959, 66973, 66977, 67003, 67021, 67033, 67043, 67049,
67057, 67061, 67073, 67079, 67103, 67121, 67129, 67139, 67141, 67153,
67157, 67169, 67181, 67187, 67189, 67211, 67213, 67217, 67219, 67231,
67247, 67261, 67271, 67273, 67289, 67307, 67339, 67343, 67349, 67369,
67391, 67399, 67409, 67411, 67421, 67427, 67429, 67433, 67447, 67453,
67477, 67481, 67489, 67493, 67499, 67511, 67523, 67531, 67537, 67547,
67559, 67567, 67577, 67579, 67589, 67601, 67607, 67619, 67631, 67651,
67679, 67699, 67709, 67723, 67733, 67741, 67751, 67757, 67759, 67763,
67777, 67783, 67789, 67801, 67807, 67819, 67829, 67843, 67853, 67867,
67883, 67891, 67901, 67927, 67931, 67933, 67939, 67943, 67957, 67961,
67967, 67979, 67987, 67993, 68023, 68041, 68053, 68059, 68071, 68087,
68099, 68111, 68113, 68141, 68147, 68161, 68171, 68207, 68209, 68213,
68219, 68227, 68239, 68261, 68279, 68281, 68311, 68329, 68351, 68371,
68389, 68399, 68437, 68443, 68447, 68449, 68473, 68477, 68483, 68489,
68491, 68501, 68507, 68521, 68531, 68539, 68543, 68567, 68581, 68597,
68611, 68633, 68639, 68659, 68669, 68683, 68687, 68699, 68711, 68713,
68729, 68737, 68743, 68749, 68767, 68771, 68777, 68791, 68813, 68819,
68821, 68863, 68879, 68881, 68891, 68897, 68899, 68903, 68909, 68917,
68927, 68947, 68963, 68993, 69001, 69011, 69019, 69029, 69031, 69061,
69067, 69073, 69109, 69119, 69127, 69143, 69149, 69151, 69163, 69191,
69193, 69197, 69203, 69221, 69233, 69239, 69247, 69257, 69259, 69263,
69313, 69317, 69337, 69341, 69371, 69379, 69383, 69389, 69401, 69403,
69427, 69431, 69439, 69457, 69463, 69467, 69473, 69481, 69491, 69493,
69497, 69499, 69539, 69557, 69593, 69623, 69653, 69661, 69677, 69691,
69697, 69709, 69737, 69739, 69761, 69763, 69767, 69779, 69809, 69821,
69827, 69829, 69833, 69847, 69857, 69859, 69877, 69899, 69911, 69929,
69931, 69941, 69959, 69991, 69997, 70001, 70003, 70009, 70019, 70039,
70051, 70061, 70067, 70079, 70099, 70111, 70117, 70121, 70123, 70139,
70141, 70157, 70163, 70177, 70181, 70183, 70199, 70201, 70207, 70223,
70229, 70237, 70241, 70249, 70271, 70289, 70297, 70309, 70313, 70321,
70327, 70351, 70373, 70379, 70381, 70393, 70423, 70429, 70439, 70451,
70457, 70459, 70481, 70487, 70489, 70501, 70507, 70529, 70537, 70549,
70571, 70573, 70583, 70589, 70607, 70619, 70621, 70627, 70639, 70657,
70663, 70667, 70687, 70709, 70717, 70729, 70753, 70769, 70783, 70793,
70823, 70841, 70843, 70849, 70853, 70867, 70877, 70879, 70891, 70901,
70913, 70919, 70921, 70937, 70949, 70951, 70957, 70969, 70979, 70981,
70991, 70997, 70999, 71011, 71023, 71039, 71059, 71069, 71081, 71089,
71119, 71129, 71143, 71147, 71153, 71161, 71167, 71171, 71191, 71209,
71233, 71237, 71249, 71257, 71261, 71263, 71287, 71293, 71317, 71327,
71329, 71333, 71339, 71341, 71347, 71353, 71359, 71363, 71387, 71389,
71399, 71411, 71413, 71419, 71429, 71437, 71443, 71453, 71471, 71473,
71479, 71483, 71503, 71527, 71537, 71549, 71551, 71563, 71569, 71593,
71597, 71633, 71647, 71663, 71671, 71693, 71699, 71707, 71711, 71713,
71719, 71741, 71761, 71777, 71789, 71807, 71809, 71821, 71837, 71843,
71849, 71861, 71867, 71879, 71881, 71887, 71899, 71909, 71917, 71933,
71941, 71947, 71963, 71971, 71983, 71987, 71993, 71999, 72019, 72031,
72043, 72047, 72053, 72073, 72077, 72089, 72091, 72101, 72103, 72109,
72139, 72161, 72167, 72169, 72173, 72211, 72221, 72223, 72227, 72229,
72251, 72253, 72269, 72271, 72277, 72287, 72307, 72313, 72337, 72341,
72353, 72367, 72379, 72383, 72421, 72431, 72461, 72467, 72469, 72481,
72493, 72497, 72503, 72533, 72547, 72551, 72559, 72577, 72613, 72617,
72623, 72643, 72647, 72649, 72661, 72671, 72673, 72679, 72689, 72701,
72707, 72719, 72727, 72733, 72739, 72763, 72767, 72797, 72817, 72823,
72859, 72869, 72871, 72883, 72889, 72893, 72901, 72907, 72911, 72923,
72931, 72937, 72949, 72953, 72959, 72973, 72977, 72997, 73009, 73013,
73019, 73037, 73039, 73043, 73061, 73063, 73079, 73091, 73121, 73127,
73133, 73141, 73181, 73189, 73237, 73243, 73259, 73277, 73291, 73303,
73309, 73327, 73331, 73351, 73361, 73363, 73369, 73379, 73387, 73417,
73421, 73433, 73453, 73459, 73471, 73477, 73483, 73517, 73523, 73529,
73547, 73553, 73561, 73571, 73583, 73589, 73597, 73607, 73609, 73613,
73637, 73643, 73651, 73673, 73679, 73681, 73693, 73699, 73709, 73721,
73727, 73751, 73757, 73771, 73783, 73819, 73823, 73847, 73849, 73859,
73867, 73877, 73883, 73897, 73907, 73939, 73943, 73951, 73961, 73973,
73999, 74017, 74021, 74027, 74047, 74051, 74071, 74077, 74093, 74099,
74101, 74131, 74143, 74149, 74159, 74161, 74167, 74177, 74189, 74197,
74201, 74203, 74209, 74219, 74231, 74257, 74279, 74287, 74293, 74297,
74311, 74317, 74323, 74353, 74357, 74363, 74377, 74381, 74383, 74411,
74413, 74419, 74441, 74449, 74453, 74471, 74489, 74507, 74509, 74521,
74527, 74531, 74551, 74561, 74567, 74573, 74587, 74597, 74609, 74611,
74623, 74653, 74687, 74699, 74707, 74713, 74717, 74719, 74729, 74731,
74747, 74759, 74761, 74771, 74779, 74797, 74821, 74827, 74831, 74843,
74857, 74861, 74869, 74873, 74887, 74891, 74897, 74903, 74923, 74929,
74933, 74941, 74959, 75011, 75013, 75017, 75029, 75037, 75041, 75079,
75083, 75109, 75133, 75149, 75161, 75167, 75169, 75181, 75193, 75209,
75211, 75217, 75223, 75227, 75239, 75253, 75269, 75277, 75289, 75307,
75323, 75329, 75337, 75347, 75353, 75367, 75377, 75389, 75391, 75401,
75403, 75407, 75431, 75437, 75479, 75503, 75511, 75521, 75527, 75533,
75539, 75541, 75553, 75557, 75571, 75577, 75583, 75611, 75617, 75619,
75629, 75641, 75653, 75659, 75679, 75683, 75689, 75703, 75707, 75709,
75721, 75731, 75743, 75767, 75773, 75781, 75787, 75793, 75797, 75821,
75833, 75853, 75869, 75883, 75913, 75931, 75937, 75941, 75967, 75979,
75983, 75989, 75991, 75997, 76001, 76003, 76031, 76039, 76079, 76081,
76091, 76099, 76103, 76123, 76129, 76147, 76157, 76159, 76163, 76207,
76213, 76231, 76243, 76249, 76253, 76259, 76261, 76283, 76289, 76303,
76333, 76343, 76367, 76369, 76379, 76387, 76403, 76421, 76423, 76441,
76463, 76471, 76481, 76487, 76493, 76507, 76511, 76519, 76537, 76541,
76543, 76561, 76579, 76597, 76603, 76607, 76631, 76649, 76651, 76667,
76673, 76679, 76697, 76717, 76733, 76753, 76757, 76771, 76777, 76781,
76801, 76819, 76829, 76831, 76837, 76847, 76871, 76873, 76883, 76907,
76913, 76919, 76943, 76949, 76961, 76963, 76991, 77003, 77017, 77023,
77029, 77041, 77047, 77069, 77081, 77093, 77101, 77137, 77141, 77153,
77167, 77171, 77191, 77201, 77213, 77237, 77239, 77243, 77249, 77261,
77263, 77267, 77269, 77279, 77291, 77317, 77323, 77339, 77347, 77351,
77359, 77369, 77377, 77383, 77417, 77419, 77431, 77447, 77471, 77477,
77479, 77489, 77491, 77509, 77513, 77521, 77527, 77543, 77549, 77551,
77557, 77563, 77569, 77573, 77587, 77591, 77611, 77617, 77621, 77641,
77647, 77659, 77681, 77687, 77689, 77699, 77711, 77713, 77719, 77723,
77731, 77743, 77747, 77761, 77773, 77783, 77797, 77801, 77813, 77839,
77849, 77863, 77867, 77893, 77899, 77929, 77933, 77951, 77969, 77977,
77983, 77999, 78007, 78017, 78031, 78041, 78049, 78059, 78079, 78101,
78121, 78137, 78139, 78157, 78163, 78167, 78173, 78179, 78191, 78193,
78203, 78229, 78233, 78241, 78259, 78277, 78283, 78301, 78307, 78311,
78317, 78341, 78347, 78367, 78401, 78427, 78437, 78439, 78467, 78479,
78487, 78497, 78509, 78511, 78517, 78539, 78541, 78553, 78569, 78571,
78577, 78583, 78593, 78607, 78623, 78643, 78649, 78653, 78691, 78697,
78707, 78713, 78721, 78737, 78779, 78781, 78787, 78791, 78797, 78803,
78809, 78823, 78839, 78853, 78857, 78877, 78887, 78889, 78893, 78901,
78919, 78929, 78941, 78977, 78979, 78989, 79031, 79039, 79043, 79063,
79087, 79103, 79111, 79133, 79139, 79147, 79151, 79153, 79159, 79181,
79187, 79193, 79201, 79229, 79231, 79241, 79259, 79273, 79279, 79283,
79301, 79309, 79319, 79333, 79337, 79349, 79357, 79367, 79379, 79393,
79397, 79399, 79411, 79423, 79427, 79433, 79451, 79481, 79493, 79531,
79537, 79549, 79559, 79561, 79579, 79589, 79601, 79609, 79613, 79621,
79627, 79631, 79633, 79657, 79669, 79687, 79691, 79693, 79697, 79699,
79757, 79769, 79777, 79801, 79811, 79813, 79817, 79823, 79829, 79841,
79843, 79847, 79861, 79867, 79873, 79889, 79901, 79903, 79907, 79939,
79943, 79967, 79973, 79979, 79987, 79997, 79999, 80021, 80039, 80051,
80071, 80077, 80107, 80111, 80141, 80147, 80149, 80153, 80167, 80173,
80177, 80191, 80207, 80209, 80221, 80231, 80233, 80239, 80251, 80263,
80273, 80279, 80287, 80309, 80317, 80329, 80341, 80347, 80363, 80369,
80387, 80407, 80429, 80447, 80449, 80471, 80473, 80489, 80491, 80513,
80527, 80537, 80557, 80567, 80599, 80603, 80611, 80621, 80627, 80629,
80651, 80657, 80669, 80671, 80677, 80681, 80683, 80687, 80701, 80713,
80737, 80747, 80749, 80761, 80777, 80779, 80783, 80789, 80803, 80809,
80819, 80831, 80833, 80849, 80863, 80897, 80909, 80911, 80917, 80923,
80929, 80933, 80953, 80963, 80989, 81001, 81013, 81017, 81019, 81023,
81031, 81041, 81043, 81047, 81049, 81071, 81077, 81083, 81097, 81101,
81119, 81131, 81157, 81163, 81173, 81181, 81197, 81199, 81203, 81223,
81233, 81239, 81281, 81283, 81293, 81299, 81307, 81331, 81343, 81349,
81353, 81359, 81371, 81373, 81401, 81409, 81421, 81439, 81457, 81463,
81509, 81517, 81527, 81533, 81547, 81551, 81553, 81559, 81563, 81569,
81611, 81619, 81629, 81637, 81647, 81649, 81667, 81671, 81677, 81689,
81701, 81703, 81707, 81727, 81737, 81749, 81761, 81769, 81773, 81799,
81817, 81839, 81847, 81853, 81869, 81883, 81899, 81901, 81919, 81929,
81931, 81937, 81943, 81953, 81967, 81971, 81973, 82003, 82007, 82009,
82013, 82021, 82031, 82037, 82039, 82051, 82067, 82073, 82129, 82139,
82141, 82153, 82163, 82171, 82183, 82189, 82193, 82207, 82217, 82219,
82223, 82231, 82237, 82241, 82261, 82267, 82279, 82301, 82307, 82339,
82349, 82351, 82361, 82373, 82387, 82393, 82421, 82457, 82463, 82469,
82471, 82483, 82487, 82493, 82499, 82507, 82529, 82531, 82549, 82559,
82561, 82567, 82571, 82591, 82601, 82609, 82613, 82619, 82633, 82651,
82657, 82699, 82721, 82723, 82727, 82729, 82757, 82759, 82763, 82781,
82787, 82793, 82799, 82811, 82813, 82837, 82847, 82883, 82889, 82891,
82903, 82913, 82939, 82963, 82981, 82997, 83003, 83009, 83023, 83047,
83059, 83063, 83071, 83077, 83089, 83093, 83101, 83117, 83137, 83177,
83203, 83207, 83219, 83221, 83227, 83231, 83233, 83243, 83257, 83267,
83269, 83273, 83299, 83311, 83339, 83341, 83357, 83383, 83389, 83399,
83401, 83407, 83417, 83423, 83431, 83437, 83443, 83449, 83459, 83471,
83477, 83497, 83537, 83557, 83561, 83563, 83579, 83591, 83597, 83609,
83617, 83621, 83639, 83641, 83653, 83663, 83689, 83701, 83717, 83719,
83737, 83761, 83773, 83777, 83791, 83813, 83833, 83843, 83857, 83869,
83873, 83891, 83903, 83911, 83921, 83933, 83939, 83969, 83983, 83987,
84011, 84017, 84047, 84053, 84059, 84061, 84067, 84089, 84121, 84127,
84131, 84137, 84143, 84163, 84179, 84181, 84191, 84199, 84211, 84221,
84223, 84229, 84239, 84247, 84263, 84299, 84307, 84313, 84317, 84319,
84347, 84349, 84377, 84389, 84391, 84401, 84407, 84421, 84431, 84437,
84443, 84449, 84457, 84463, 84467, 84481, 84499, 84503, 84509, 84521,
84523, 84533, 84551, 84559, 84589, 84629, 84631, 84649, 84653, 84659,
84673, 84691, 84697, 84701, 84713, 84719, 84731, 84737, 84751, 84761,
84787, 84793, 84809, 84811, 84827, 84857, 84859, 84869, 84871, 84913,
84919, 84947, 84961, 84967, 84977, 84979, 84991, 85009, 85021, 85027,
85037, 85049, 85061, 85081, 85087, 85091, 85093, 85103, 85109, 85121,
85133, 85147, 85159, 85193, 85199, 85201, 85213, 85223, 85229, 85237,
85243, 85247, 85259, 85297, 85303, 85313, 85331, 85333, 85361, 85363,
85369, 85381, 85411, 85427, 85429, 85439, 85447, 85451, 85453, 85469,
85487, 85513, 85517, 85523, 85531, 85549, 85571, 85577, 85597, 85601,
85607, 85619, 85621, 85627, 85639, 85643, 85661, 85667, 85669, 85691,
85703, 85711, 85717, 85733, 85751, 85781, 85793, 85817, 85819, 85829,
85831, 85837, 85843, 85847, 85853, 85889, 85903, 85909, 85931, 85933,
85991, 85999, 86011, 86017, 86027, 86029, 86069, 86077, 86083, 86111,
86113, 86117, 86131, 86137, 86143, 86161, 86171, 86179, 86183, 86197,
86201, 86209, 86239, 86243, 86249, 86257, 86263, 86269, 86287, 86291,
86293, 86297, 86311, 86323, 86341, 86351, 86353, 86357, 86369, 86371,
86381, 86389, 86399, 86413, 86423, 86441, 86453, 86461, 86467, 86477,
86491, 86501, 86509, 86531, 86533, 86539, 86561, 86573, 86579, 86587,
86599, 86627, 86629, 86677, 86689, 86693, 86711, 86719, 86729, 86743,
86753, 86767, 86771, 86783, 86813, 86837, 86843, 86851, 86857, 86861,
86869, 86923, 86927, 86929, 86939, 86951, 86959, 86969, 86981, 86993,
87011, 87013, 87037, 87041, 87049, 87071, 87083, 87103, 87107, 87119,
87121, 87133, 87149, 87151, 87179, 87181, 87187, 87211, 87221, 87223,
87251, 87253, 87257, 87277, 87281, 87293, 87299, 87313, 87317, 87323,
87337, 87359, 87383, 87403, 87407, 87421, 87427, 87433, 87443, 87473,
87481, 87491, 87509, 87511, 87517, 87523, 87539, 87541, 87547, 87553,
87557, 87559, 87583, 87587, 87589, 87613, 87623, 87629, 87631, 87641,
87643, 87649, 87671, 87679, 87683, 87691, 87697, 87701, 87719, 87721,
87739, 87743, 87751, 87767, 87793, 87797, 87803, 87811, 87833, 87853,
87869, 87877, 87881, 87887, 87911, 87917, 87931, 87943, 87959, 87961,
87973, 87977, 87991, 88001, 88003, 88007, 88019, 88037, 88069, 88079,
88093, 88117, 88129, 88169, 88177, 88211, 88223, 88237, 88241, 88259,
88261, 88289, 88301, 88321, 88327, 88337, 88339, 88379, 88397, 88411,
88423, 88427, 88463, 88469, 88471, 88493, 88499, 88513, 88523, 88547,
88589, 88591, 88607, 88609, 88643, 88651, 88657, 88661, 88663, 88667,
88681, 88721, 88729, 88741, 88747, 88771, 88789, 88793, 88799, 88801,
88807, 88811, 88813, 88817, 88819, 88843, 88853, 88861, 88867, 88873,
88883, 88897, 88903, 88919, 88937, 88951, 88969, 88993, 88997, 89003,
89009, 89017, 89021, 89041, 89051, 89057, 89069, 89071, 89083, 89087,
89101, 89107, 89113, 89119, 89123, 89137, 89153, 89189, 89203, 89209,
89213, 89227, 89231, 89237, 89261, 89269, 89273, 89293, 89303, 89317,
89329, 89363, 89371, 89381, 89387, 89393, 89399, 89413, 89417, 89431,
89443, 89449, 89459, 89477, 89491, 89501, 89513, 89519, 89521, 89527,
89533, 89561, 89563, 89567, 89591, 89597, 89599, 89603, 89611, 89627,
89633, 89653, 89657, 89659, 89669, 89671, 89681, 89689, 89753, 89759,
89767, 89779, 89783, 89797, 89809, 89819, 89821, 89833, 89839, 89849,
89867, 89891, 89897, 89899, 89909, 89917, 89923, 89939, 89959, 89963,
89977, 89983, 89989, 90001, 90007, 90011, 90017, 90019, 90023, 90031,
90053, 90059, 90067, 90071, 90073, 90089, 90107, 90121, 90127, 90149,
90163, 90173, 90187, 90191, 90197, 90199, 90203, 90217, 90227, 90239,
90247, 90263, 90271, 90281, 90289, 90313, 90353, 90359, 90371, 90373,
90379, 90397, 90401, 90403, 90407, 90437, 90439, 90469, 90473, 90481,
90499, 90511, 90523, 90527, 90529, 90533, 90547, 90583, 90599, 90617,
90619, 90631, 90641, 90647, 90659, 90677, 90679, 90697, 90703, 90709,
90731, 90749, 90787, 90793, 90803, 90821, 90823, 90833, 90841, 90847,
90863, 90887, 90901, 90907, 90911, 90917, 90931, 90947, 90971, 90977,
90989, 90997, 91009, 91019, 91033, 91079, 91081, 91097, 91099, 91121,
91127, 91129, 91139, 91141, 91151, 91153, 91159, 91163, 91183, 91193,
91199, 91229, 91237, 91243, 91249, 91253, 91283, 91291, 91297, 91303,
91309, 91331, 91367, 91369, 91373, 91381, 91387, 91393, 91397, 91411,
91423, 91433, 91453, 91457, 91459, 91463, 91493, 91499, 91513, 91529,
91541, 91571, 91573, 91577, 91583, 91591, 91621, 91631, 91639, 91673,
91691, 91703, 91711, 91733, 91753, 91757, 91771, 91781, 91801, 91807,
91811, 91813, 91823, 91837, 91841, 91867, 91873, 91909, 91921, 91939,
91943, 91951, 91957, 91961, 91967, 91969, 91997, 92003, 92009, 92033,
92041, 92051, 92077, 92083, 92107, 92111, 92119, 92143, 92153, 92173,
92177, 92179, 92189, 92203, 92219, 92221, 92227, 92233, 92237, 92243,
92251, 92269, 92297, 92311, 92317, 92333, 92347, 92353, 92357, 92363,
92369, 92377, 92381, 92383, 92387, 92399, 92401, 92413, 92419, 92431,
92459, 92461, 92467, 92479, 92489, 92503, 92507, 92551, 92557, 92567,
92569, 92581, 92593, 92623, 92627, 92639, 92641, 92647, 92657, 92669,
92671, 92681, 92683, 92693, 92699, 92707, 92717, 92723, 92737, 92753,
92761, 92767, 92779, 92789, 92791, 92801, 92809, 92821, 92831, 92849,
92857, 92861, 92863, 92867, 92893, 92899, 92921, 92927, 92941, 92951,
92957, 92959, 92987, 92993, 93001, 93047, 93053, 93059, 93077, 93083,
93089, 93097, 93103, 93113, 93131, 93133, 93139, 93151, 93169, 93179,
93187, 93199, 93229, 93239, 93241, 93251, 93253, 93257, 93263, 93281,
93283, 93287, 93307, 93319, 93323, 93329, 93337, 93371, 93377, 93383,
93407, 93419, 93427, 93463, 93479, 93481, 93487, 93491, 93493, 93497,
93503, 93523, 93529, 93553, 93557, 93559, 93563, 93581, 93601, 93607,
93629, 93637, 93683, 93701, 93703, 93719, 93739, 93761, 93763, 93787,
93809, 93811, 93827, 93851, 93871, 93887, 93889, 93893, 93901, 93911,
93913, 93923, 93937, 93941, 93949, 93967, 93971, 93979, 93983, 93997,
94007, 94009, 94033, 94049, 94057, 94063, 94079, 94099, 94109, 94111,
94117, 94121, 94151, 94153, 94169, 94201, 94207, 94219, 94229, 94253,
94261, 94273, 94291, 94307, 94309, 94321, 94327, 94331, 94343, 94349,
94351, 94379, 94397, 94399, 94421, 94427, 94433, 94439, 94441, 94447,
94463, 94477, 94483, 94513, 94529, 94531, 94541, 94543, 94547, 94559,
94561, 94573, 94583, 94597, 94603, 94613, 94621, 94649, 94651, 94687,
94693, 94709, 94723, 94727, 94747, 94771, 94777, 94781, 94789, 94793,
94811, 94819, 94823, 94837, 94841, 94847, 94849, 94873, 94889, 94903,
94907, 94933, 94949, 94951, 94961, 94993, 94999, 95003, 95009, 95021,
95027, 95063, 95071, 95083, 95087, 95089, 95093, 95101, 95107, 95111,
95131, 95143, 95153, 95177, 95189, 95191, 95203, 95213, 95219, 95231,
95233, 95239, 95257, 95261, 95267, 95273, 95279, 95287, 95311, 95317,
95327, 95339, 95369, 95383, 95393, 95401, 95413, 95419, 95429, 95441,
95443, 95461, 95467, 95471, 95479, 95483, 95507, 95527, 95531, 95539,
95549, 95561, 95569, 95581, 95597, 95603, 95617, 95621, 95629, 95633,
95651, 95701, 95707, 95713, 95717, 95723, 95731, 95737, 95747, 95773,
95783, 95789, 95791, 95801, 95803, 95813, 95819, 95857, 95869, 95873,
95881, 95891, 95911, 95917, 95923, 95929, 95947, 95957, 95959, 95971,
95987, 95989, 96001, 96013, 96017, 96043, 96053, 96059, 96079, 96097,
96137, 96149, 96157, 96167, 96179, 96181, 96199, 96211, 96221, 96223,
96233, 96259, 96263, 96269, 96281, 96289, 96293, 96323, 96329, 96331,
96337, 96353, 96377, 96401, 96419, 96431, 96443, 96451, 96457, 96461,
96469, 96479, 96487, 96493, 96497, 96517, 96527, 96553, 96557, 96581,
96587, 96589, 96601, 96643, 96661, 96667, 96671, 96697, 96703, 96731,
96737, 96739, 96749, 96757, 96763, 96769, 96779, 96787, 96797, 96799,
96821, 96823, 96827, 96847, 96851, 96857, 96893, 96907, 96911, 96931,
96953, 96959, 96973, 96979, 96989, 96997, 97001, 97003, 97007, 97021,
97039, 97073, 97081, 97103, 97117, 97127, 97151, 97157, 97159, 97169,
97171, 97177, 97187, 97213, 97231, 97241, 97259, 97283, 97301, 97303,
97327, 97367, 97369, 97373, 97379, 97381, 97387, 97397, 97423, 97429,
97441, 97453, 97459, 97463, 97499, 97501, 97511, 97523, 97547, 97549,
97553, 97561, 97571, 97577, 97579, 97583, 97607, 97609, 97613, 97649,
97651, 97673, 97687, 97711, 97729, 97771, 97777, 97787, 97789, 97813,
97829, 97841, 97843, 97847, 97849, 97859, 97861, 97871, 97879, 97883,
97919, 97927, 97931, 97943, 97961, 97967, 97973, 97987, 98009, 98011,
98017, 98041, 98047, 98057, 98081, 98101, 98123, 98129, 98143, 98179,
98207, 98213, 98221, 98227, 98251, 98257, 98269, 98297, 98299, 98317,
98321, 98323, 98327, 98347, 98369, 98377, 98387, 98389, 98407, 98411,
98419, 98429, 98443, 98453, 98459, 98467, 98473, 98479, 98491, 98507,
98519, 98533, 98543, 98561, 98563, 98573, 98597, 98621, 98627, 98639,
98641, 98663, 98669, 98689, 98711, 98713, 98717, 98729, 98731, 98737,
98773, 98779, 98801, 98807, 98809, 98837, 98849, 98867, 98869, 98873,
98887, 98893, 98897, 98899, 98909, 98911, 98927, 98929, 98939, 98947,
98953, 98963, 98981, 98993, 98999, 99013, 99017, 99023, 99041, 99053,
99079, 99083, 99089, 99103, 99109, 99119, 99131, 99133, 99137, 99139,
99149, 99173, 99181, 99191, 99223, 99233, 99241, 99251, 99257, 99259,
99277, 99289, 99317, 99347, 99349, 99367, 99371, 99377, 99391, 99397,
99401, 99409, 99431, 99439, 99469, 99487, 99497, 99523, 99527, 99529,
99551, 99559, 99563, 99571, 99577, 99581, 99607, 99611, 99623, 99643,
99661, 99667, 99679, 99689, 99707, 99709, 99713, 99719, 99721, 99733,
99761, 99767, 99787, 99793, 99809, 99817, 99823, 99829, 99833, 99839,
99859, 99871, 99877, 99881, 99901, 99907, 99923, 99929, 99961, 99971,
99989, 99991, 100003, 100019, 100043, 100049, 100057, 100069, 100103, 100109,
100129, 100151, 100153, 100169, 100183, 100189, 100193, 100207, 100213, 100237,
100267, 100271, 100279, 100291, 100297, 100313, 100333, 100343, 100357, 100361,
100363, 100379, 100391, 100393, 100403, 100411, 100417, 100447, 100459, 100469,
100483, 100493, 100501, 100511, 100517, 100519, 100523, 100537, 100547, 100549,
100559, 100591, 100609, 100613, 100621, 100649, 100669, 100673, 100693, 100699,
100703, 100733, 100741, 100747, 100769, 100787, 100799, 100801, 100811, 100823,
100829, 100847, 100853, 100907, 100913, 100927, 100931, 100937, 100943, 100957,
100981, 100987, 100999, 101009, 101021, 101027, 101051, 101063, 101081, 101089,
101107, 101111, 101113, 101117, 101119, 101141, 101149, 101159, 101161, 101173,
101183, 101197, 101203, 101207, 101209, 101221, 101267, 101273, 101279, 101281,
101287, 101293, 101323, 101333, 101341, 101347, 101359, 101363, 101377, 101383,
101399, 101411, 101419, 101429, 101449, 101467, 101477, 101483, 101489, 101501,
101503, 101513, 101527, 101531, 101533, 101537, 101561, 101573, 101581, 101599,
101603, 101611, 101627, 101641, 101653, 101663, 101681, 101693, 101701, 101719,
101723, 101737, 101741, 101747, 101749, 101771, 101789, 101797, 101807, 101833,
101837, 101839, 101863, 101869, 101873, 101879, 101891, 101917, 101921, 101929,
101939, 101957, 101963, 101977, 101987, 101999, 102001, 102013, 102019, 102023,
102031, 102043, 102059, 102061, 102071, 102077, 102079, 102101, 102103, 102107,
102121, 102139, 102149, 102161, 102181, 102191, 102197, 102199, 102203, 102217,
102229, 102233, 102241, 102251, 102253, 102259, 102293, 102299, 102301, 102317,
102329, 102337, 102359, 102367, 102397, 102407, 102409, 102433, 102437, 102451,
102461, 102481, 102497, 102499, 102503, 102523, 102533, 102539, 102547, 102551,
102559, 102563, 102587, 102593, 102607, 102611, 102643, 102647, 102653, 102667,
102673, 102677, 102679, 102701, 102761, 102763, 102769, 102793, 102797, 102811,
102829, 102841, 102859, 102871, 102877, 102881, 102911, 102913, 102929, 102931,
102953, 102967, 102983, 103001, 103007, 103043, 103049, 103067, 103069, 103079,
103087, 103091, 103093, 103099, 103123, 103141, 103171, 103177, 103183, 103217,
103231, 103237, 103289, 103291, 103307, 103319, 103333, 103349, 103357, 103387,
103391, 103393, 103399, 103409, 103421, 103423, 103451, 103457, 103471, 103483,
103511, 103529, 103549, 103553, 103561, 103567, 103573, 103577, 103583, 103591,
103613, 103619, 103643, 103651, 103657, 103669, 103681, 103687, 103699, 103703,
103723, 103769, 103787, 103801, 103811, 103813, 103837, 103841, 103843, 103867,
103889, 103903, 103913, 103919, 103951, 103963, 103967, 103969, 103979, 103981,
103991, 103993, 103997, 104003, 104009, 104021, 104033, 104047, 104053, 104059,
104087, 104089, 104107, 104113, 104119, 104123, 104147, 104149, 104161, 104173,
104179, 104183, 104207, 104231, 104233, 104239, 104243, 104281, 104287, 104297,
104309, 104311, 104323, 104327, 104347, 104369, 104381, 104383, 104393, 104399,
104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543,
104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659,
104677, 104681, 104683, 104693, 104701, 104707, 104711, 104717, 104723, 104729,
)
|
kazemakase/scikit-learn
|
refs/heads/master
|
examples/cluster/plot_kmeans_silhouette_analysis.py
|
242
|
"""
===============================================================================
Selecting the number of clusters with silhouette analysis on KMeans clustering
===============================================================================
Silhouette analysis can be used to study the separation distance between the
resulting clusters. The silhouette plot displays a measure of how close each
point in one cluster is to points in the neighboring clusters and thus provides
a way to assess parameters like number of clusters visually. This measure has a
range of [-1, 1].
Silhouette coefficients (as these values are referred to) near +1 indicate
that the sample is far away from the neighboring clusters. A value of 0
indicates that the sample is on or very close to the decision boundary between
two neighboring clusters and negative values indicate that those samples might
have been assigned to the wrong cluster.
In this example the silhouette analysis is used to choose an optimal value for
``n_clusters``. The silhouette plot shows that ``n_clusters`` values of 3, 5
and 6 are bad picks for the given data due to the presence of clusters with
below average silhouette scores and also due to wide fluctuations in the size
of the silhouette plots. Silhouette analysis is more ambivalent in deciding
between 2 and 4.
The thickness of the silhouette plot also indicates the cluster size. The
silhouette plot for cluster 0 when ``n_clusters`` is equal to 2 is bigger
owing to the grouping of the 3 sub-clusters into one big cluster. However,
when ``n_clusters`` is equal to 4, all the plots are of more or less similar
thickness and hence of similar sizes, as can also be verified from the
labelled scatter plot on the right.
"""
from __future__ import print_function
from sklearn.datasets import make_blobs
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_samples, silhouette_score
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
print(__doc__)
# Generating the sample data from make_blobs
# This particular setting has one distinct cluster and 3 clusters placed close
# together.
X, y = make_blobs(n_samples=500,
n_features=2,
centers=4,
cluster_std=1,
center_box=(-10.0, 10.0),
shuffle=True,
random_state=1) # For reproducibility
range_n_clusters = [2, 3, 4, 5, 6]
for n_clusters in range_n_clusters:
# Create a subplot with 1 row and 2 columns
fig, (ax1, ax2) = plt.subplots(1, 2)
fig.set_size_inches(18, 7)
# The 1st subplot is the silhouette plot
# The silhouette coefficient can range from -1, 1 but in this example all
# lie within [-0.1, 1]
ax1.set_xlim([-0.1, 1])
# The (n_clusters+1)*10 is for inserting blank space between silhouette
# plots of individual clusters, to demarcate them clearly.
ax1.set_ylim([0, len(X) + (n_clusters + 1) * 10])
# Initialize the clusterer with n_clusters value and a random generator
# seed of 10 for reproducibility.
clusterer = KMeans(n_clusters=n_clusters, random_state=10)
cluster_labels = clusterer.fit_predict(X)
# The silhouette_score gives the average value for all the samples.
# This gives a perspective into the density and separation of the formed
# clusters
silhouette_avg = silhouette_score(X, cluster_labels)
print("For n_clusters =", n_clusters,
"The average silhouette_score is :", silhouette_avg)
# Compute the silhouette scores for each sample
sample_silhouette_values = silhouette_samples(X, cluster_labels)
y_lower = 10
for i in range(n_clusters):
# Aggregate the silhouette scores for samples belonging to
# cluster i, and sort them
ith_cluster_silhouette_values = \
sample_silhouette_values[cluster_labels == i]
ith_cluster_silhouette_values.sort()
size_cluster_i = ith_cluster_silhouette_values.shape[0]
y_upper = y_lower + size_cluster_i
color = cm.spectral(float(i) / n_clusters)
ax1.fill_betweenx(np.arange(y_lower, y_upper),
0, ith_cluster_silhouette_values,
facecolor=color, edgecolor=color, alpha=0.7)
# Label the silhouette plots with their cluster numbers at the middle
ax1.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
# Compute the new y_lower for next plot
y_lower = y_upper + 10 # 10 for the 0 samples
ax1.set_title("The silhouette plot for the various clusters.")
ax1.set_xlabel("The silhouette coefficient values")
ax1.set_ylabel("Cluster label")
    # The vertical line for the average silhouette score of all the values
ax1.axvline(x=silhouette_avg, color="red", linestyle="--")
ax1.set_yticks([]) # Clear the yaxis labels / ticks
ax1.set_xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
# 2nd Plot showing the actual clusters formed
colors = cm.spectral(cluster_labels.astype(float) / n_clusters)
ax2.scatter(X[:, 0], X[:, 1], marker='.', s=30, lw=0, alpha=0.7,
c=colors)
# Labeling the clusters
centers = clusterer.cluster_centers_
# Draw white circles at cluster centers
ax2.scatter(centers[:, 0], centers[:, 1],
marker='o', c="white", alpha=1, s=200)
for i, c in enumerate(centers):
ax2.scatter(c[0], c[1], marker='$%d$' % i, alpha=1, s=50)
ax2.set_title("The visualization of the clustered data.")
ax2.set_xlabel("Feature space for the 1st feature")
ax2.set_ylabel("Feature space for the 2nd feature")
plt.suptitle(("Silhouette analysis for KMeans clustering on sample data "
"with n_clusters = %d" % n_clusters),
fontsize=14, fontweight='bold')
plt.show()
|
syncboard/syncboard
|
refs/heads/master
|
src/gui.py
|
1
|
"""
Cross-platform clipboard syncing tool
Copyright (C) 2013 Syncboard
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
"""
This file is the user's entrance to the gui and implements the main frame
of the gui.
"""
import wx, info
from wx.lib.pubsub import Publisher
from gui_info import AboutDialog
from gui_status import StatusPanel
from gui_clipboard import ClipboardPanel
from gui_connections import ConnectionsPanel
from session import Session
FRAME_SIZE = (550, 510)
BGD_COLOR = (240, 240, 240)
class MainFrame(wx.Frame):
"""Main Frame of the app."""
def __init__(self, *args, **kwargs):
wx.Frame.__init__(self, *args, **kwargs)
self.session = Session()
self.SetBackgroundColour(BGD_COLOR)
# List of timers so we can stop them when we quit.
# Avoids PyDeadObjectError.
self.timers = set()
Publisher().subscribe(self.new_timer, "new_timer")
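        # Any child panel can register a timer here without holding a
        # reference to this frame, e.g. (illustrative call, not in this file):
        #     Publisher().sendMessage("new_timer", some_timer)
        # where some_timer is whatever wx.Timer the panel created.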
# Build the menu bar
menu_bar = wx.MenuBar()
file_menu = wx.Menu()
exit_item = file_menu.Append(wx.ID_EXIT, text="E&xit")
self.Bind(wx.EVT_MENU, self.on_quit, exit_item)
help_menu = wx.Menu()
about_item = help_menu.Append(wx.ID_ABOUT, text="&About",
help="Information about this program")
self.Bind(wx.EVT_MENU, self.on_about, about_item)
menu_bar.Append(file_menu, "&File")
menu_bar.Append(help_menu, "&Help")
self.SetMenuBar(menu_bar)
self.CreateStatusBar(style=0)
Publisher().subscribe(self.change_statusbar, "change_statusbar")
# Add panels
connections_panel = ConnectionsPanel(self, self.session, BGD_COLOR)
clipboard_panel = ClipboardPanel(self, self.session, BGD_COLOR)
status_panel = StatusPanel(self, BGD_COLOR)
new_btn = wx.Button(self, label="New Connection")
new_btn.Bind(wx.EVT_BUTTON, self.on_new)
auto_sync_cb = wx.CheckBox(self, id=wx.ID_ANY,
label="Automatically Sync")
self.Bind(wx.EVT_CHECKBOX, self.on_toggle_auto, auto_sync_cb)
auto_sync_cb.Bind(wx.EVT_ENTER_WINDOW, self.on_enter_auto)
auto_sync_cb.Bind(wx.EVT_LEAVE_WINDOW, self.on_leave_auto)
main_sizer = wx.BoxSizer(wx.VERTICAL)
top_row_sizer = wx.BoxSizer(wx.HORIZONTAL)
board_sizer = wx.BoxSizer(wx.VERTICAL)
btn_flags = wx.SizerFlags().Proportion(0).Border(wx.ALL, 5).Bottom()
status_flags = wx.SizerFlags().Proportion(1).Expand().Border(wx.ALL, 5).Top()
flags = wx.SizerFlags().Proportion(0).Expand().Border(wx.ALL, 5)
board_flags = wx.SizerFlags().Proportion(0).Border(wx.ALL, 5).Right()
top_flags = wx.SizerFlags().Proportion(0).Expand().Border(wx.ALL, 5)
conn_flags = wx.SizerFlags().Proportion(1).Expand().Border(wx.ALL, 5)
board_sizer.AddF(auto_sync_cb, flags)
board_sizer.AddF(clipboard_panel, flags)
top_row_sizer.AddF(new_btn, btn_flags)
top_row_sizer.AddF(status_panel, status_flags)
top_row_sizer.AddF(board_sizer, board_flags)
main_sizer.AddF(top_row_sizer, top_flags)
main_sizer.AddF(connections_panel, conn_flags)
self.SetSizer(main_sizer)
def new_timer(self, msg):
self.timers.add(msg.data)
def change_statusbar(self, msg):
self.SetStatusText(msg.data)
def on_new(self, event):
Publisher().sendMessage(("new_connection"))
def on_toggle_auto(self, event):
Publisher().sendMessage(("auto_toggle"), event.IsChecked())
def on_enter_auto(self, event):
Publisher().sendMessage(("change_statusbar"),
"Automatically copy/paste new items to/from your clipboard")
event.Skip()
def on_leave_auto(self, event):
Publisher().sendMessage(("change_statusbar"), "")
event.Skip()
def on_about(self, event):
aboutbox = AboutDialog(self)
aboutbox.ShowModal()
aboutbox.Destroy()
def on_quit(self, event):
        for timer in self.timers:
timer.Stop()
self.session.close()
self.Close()
if __name__ == '__main__':
app = wx.App(False)
frame = MainFrame(None, title=info.NAME, size=FRAME_SIZE,
style=wx.DEFAULT_FRAME_STYLE)# ^ wx.RESIZE_BORDER)
frame.Show()
app.MainLoop()
|
izonder/intellij-community
|
refs/heads/master
|
python/testData/inspections/PyCallingNonCallableInspection/structuralType.py
|
79
|
def f(x):
x.foo + x.bar
x(0)
|
AZMAG/smartpy_core
|
refs/heads/master
|
smartpy_core/gis.py
|
1
|
"""
This module (will) contain methods and utilities for working with GIS files,
totally independent of ESRI/arcpy. Instead it will rely on a combination of
GDAL, Fiona and Geopandas.
"""
import pandas as pd
import fiona
import geopandas as gpd
def open_gdb_table(gdb, table, flds=None, index_fld=None):
"""
    Opens a table within a file geodatabase and returns a
pandas.DataFrame.
Parameters:
-----------
gdb: str
Full path to the gdb file
table: str
Name of the table w/in the file gdb
flds: str or dict
Fields to pull. If a dict is provided
they will also be re-named.
index_fld: str, optional, default None
Name of the column to serve as the index.
Returns:
--------
pandas.DataFrame
"""
    with fiona.open(gdb, layer=table) as t:
        df = pd.DataFrame([row['properties'] for row in t])
    if flds is not None:
        if isinstance(flds, dict):
            # a dict both selects and renames (old name -> new name)
            df = df[list(flds)].rename(columns=flds)
        else:
            df = df[[flds]] if isinstance(flds, str) else df[list(flds)]
    if index_fld is not None:
        df.set_index(index_fld, inplace=True)
    return df
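# Example usage (hypothetical gdb path and field names, for illustration):
#     parcels = open_gdb_table('data/example.gdb', 'parcels',
#                              flds={'APN': 'parcel_id'},
#                              index_fld='parcel_id')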
|
cshields/satnogs-client
|
refs/heads/master
|
satnogsclient/__init__.py
|
1
|
import logging.config
from os import environ
from validators.url import url
from satnogsclient.settings import (SATNOGS_API_TOKEN, DEFAULT_LOGGING, SATNOGS_STATION_ID,
SATNOGS_STATION_LAT, SATNOGS_STATION_LON, SATNOGS_STATION_ELEV,
SATNOGS_NETWORK_API_URL)
# Avoid validation when building docs
if not environ.get('READTHEDOCS', False):
try:
url(SATNOGS_NETWORK_API_URL)
    except Exception:
raise Exception('Invalid SATNOGS_NETWORK_API_URL: {0}'.format(SATNOGS_NETWORK_API_URL))
if not SATNOGS_STATION_ID:
raise Exception('SATNOGS_STATION_ID not configured.')
if not SATNOGS_STATION_LAT:
raise Exception('SATNOGS_STATION_LAT not configured')
if not SATNOGS_STATION_LON:
raise Exception('SATNOGS_STATION_LON not configured')
if SATNOGS_STATION_ELEV is None:
raise Exception('SATNOGS_STATION_ELEV not configured')
if not SATNOGS_API_TOKEN:
raise Exception('SATNOGS_API_TOKEN not configured')
logging.config.dictConfig(DEFAULT_LOGGING)
|
antoinecarme/sklearn2sql_heroku
|
refs/heads/master
|
tests/classification/FourClass_500/ws_FourClass_500_MLPClassifier_hive_code_gen.py
|
1
|
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("MLPClassifier" , "FourClass_500" , "hive")
|
ortylp/scipy
|
refs/heads/master
|
scipy/special/tests/test_spfun_stats.py
|
127
|
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_array_equal, TestCase, run_module_suite, \
assert_array_almost_equal_nulp, assert_raises, assert_almost_equal
from scipy.special import gammaln, multigammaln
class TestMultiGammaLn(TestCase):
def test1(self):
# A test of the identity
# Gamma_1(a) = Gamma(a)
np.random.seed(1234)
a = np.abs(np.random.randn())
assert_array_equal(multigammaln(a, 1), gammaln(a))
def test2(self):
# A test of the identity
# Gamma_2(a) = sqrt(pi) * Gamma(a) * Gamma(a - 0.5)
a = np.array([2.5, 10.0])
result = multigammaln(a, 2)
expected = np.log(np.sqrt(np.pi)) + gammaln(a) + gammaln(a - 0.5)
assert_almost_equal(result, expected)
def test_bararg(self):
assert_raises(ValueError, multigammaln, 0.5, 1.2)
def _check_multigammaln_array_result(a, d):
# Test that the shape of the array returned by multigammaln
# matches the input shape, and that all the values match
# the value computed when multigammaln is called with a scalar.
result = multigammaln(a, d)
assert_array_equal(a.shape, result.shape)
a1 = a.ravel()
result1 = result.ravel()
for i in range(a.size):
assert_array_almost_equal_nulp(result1[i], multigammaln(a1[i], d))
def test_multigammaln_array_arg():
# Check that the array returned by multigammaln has the correct
# shape and contains the correct values. The cases have arrays
    # with several different shapes.
# The cases include a regression test for ticket #1849
# (a = np.array([2.0]), an array with a single element).
np.random.seed(1234)
cases = [
# a, d
(np.abs(np.random.randn(3, 2)) + 5, 5),
(np.abs(np.random.randn(1, 2)) + 5, 5),
(np.arange(10.0, 18.0).reshape(2, 2, 2), 3),
(np.array([2.0]), 3),
(np.float64(2.0), 3),
]
for a, d in cases:
yield _check_multigammaln_array_result, a, d
if __name__ == '__main__':
run_module_suite()
|
grap/odoo-script
|
refs/heads/master
|
data_integration/__init__.py
|
3
|
#! /usr/bin/env python
# -*- encoding: utf-8 -*-
|
OpenWinCon/OpenWinNet
|
refs/heads/master
|
agent/OpenWRT_agent/MP_sub.py
|
1
|
#!/usr/bin/python2.7
from os import system
import os
f = open("topology.out", 'w')
f.close()
system("iw dev wlan0 scan | grep -e wlan0 -e SSID -e signal > topology.out 2>&1")
system("/etc/config/time")
|
frankrousseau/weboob
|
refs/heads/master
|
modules/agendadulibre/__init__.py
|
7
|
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .module import AgendadulibreModule
__all__ = ['AgendadulibreModule']
|
lifedb/lifedb
|
refs/heads/master
|
setup.py
|
1
|
from setuptools import setup
setup(
name='lifedb',
version='0.0.1',
packages=['lifedb'],
url='https://lifedb.io',
license='MIT',
author='dsuo',
author_email='dsuo@post.harvard.edu',
description='',
install_requires=[
'cherrypy',
'google-api-python-client',
'fitbit',
'matplotlib',
'plaid-python'
]
)
|
openstreams/wflow
|
refs/heads/master
|
openda_bmi/openda/bmi/thrift/constants.py
|
3
|
#
# Autogenerated by Thrift Compiler (0.11.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
from .ttypes import *
|
ox-it/humfrey
|
refs/heads/master
|
humfrey/sparql/results.py
|
1
|
import types
import weakref
import rdflib
class SparqlResultBinding(dict):
def __init__(self, bindings):
if isinstance(bindings, (list, tuple, types.GeneratorType)):
bindings = dict(zip(self._fields, bindings))
for field in self._fields:
if field not in bindings:
bindings[field] = None
super(SparqlResultBinding, self).__init__(bindings)
def __iter__(self):
return (self[field] for field in self._fields)
def __getattr__(self, name):
return self[name]
@property
def fields(self):
return self._fields
def __reduce__(self):
return (Result, (self._fields, self._asdict()))
def _asdict(self):
return dict(self)
def Result(fields, bindings=None):
fields = tuple(fields)
if fields in Result._memo:
cls = Result._memo[fields]
else:
class cls(SparqlResultBinding):
_fields = fields
Result._memo[fields] = cls
if bindings is not None:
return cls(bindings)
else:
return cls
Result._memo = weakref.WeakValueDictionary()
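# Example with hypothetical field names: Row = Result(('name', 'age')) builds
# (and memoises) a binding class, and row = Row(('Alice', 30)) then acts both
# as a dict and as an attribute-style record, so row.name == row['name'].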
class SparqlResultList(list):
"""
A SPARQL resultset that has been turned into a list.
"""
def __init__(self, fields, arg=None):
self.fields = fields
if arg is not None:
list.__init__(self, arg)
def get_bindings(self):
return self
def get_fields(self):
return self.fields
|
forwis/KVMGT-kernel
|
refs/heads/master
|
tools/perf/tests/attr.py
|
3174
|
#! /usr/bin/python
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
import ConfigParser
class Fail(Exception):
def __init__(self, test, msg):
self.msg = msg
self.test = test
def getMsg(self):
return '\'%s\' - %s' % (self.test.path, self.msg)
class Unsup(Exception):
def __init__(self, test):
self.test = test
def getMsg(self):
return '\'%s\'' % self.test.path
class Event(dict):
terms = [
'cpu',
'flags',
'type',
'size',
'config',
'sample_period',
'sample_type',
'read_format',
'disabled',
'inherit',
'pinned',
'exclusive',
'exclude_user',
'exclude_kernel',
'exclude_hv',
'exclude_idle',
'mmap',
'comm',
'freq',
'inherit_stat',
'enable_on_exec',
'task',
'watermark',
'precise_ip',
'mmap_data',
'sample_id_all',
'exclude_host',
'exclude_guest',
'exclude_callchain_kernel',
'exclude_callchain_user',
'wakeup_events',
'bp_type',
'config1',
'config2',
'branch_sample_type',
'sample_regs_user',
'sample_stack_user',
]
def add(self, data):
for key, val in data:
log.debug(" %s = %s" % (key, val))
self[key] = val
def __init__(self, name, data, base):
log.debug(" Event %s" % name);
self.name = name;
self.group = ''
self.add(base)
self.add(data)
def compare_data(self, a, b):
# Allow multiple values in assignment separated by '|'
a_list = a.split('|')
b_list = b.split('|')
for a_item in a_list:
for b_item in b_list:
if (a_item == b_item):
return True
elif (a_item == '*') or (b_item == '*'):
return True
return False
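    # e.g. compare_data('0|1', '1') and compare_data('*', '5') both return
    # True, while compare_data('0', '1') returns False.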
def equal(self, other):
for t in Event.terms:
log.debug(" [%s] %s %s" % (t, self[t], other[t]));
if not self.has_key(t) or not other.has_key(t):
return False
if not self.compare_data(self[t], other[t]):
return False
return True
def diff(self, other):
for t in Event.terms:
if not self.has_key(t) or not other.has_key(t):
continue
if not self.compare_data(self[t], other[t]):
log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have the following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
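#
# An illustrative (not real) test file could look like:
#
#   [config]
#   command = record
#   args    = kill >/dev/null 2>&1
#   ret     = 1
#
#   [event:base-record]
#   fd=1
#   group_fd=-1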
class Test(object):
def __init__(self, path, options):
parser = ConfigParser.SafeConfigParser()
parser.read(path)
log.warning("running '%s'" % path)
self.path = path
self.test_dir = options.test_dir
self.perf = options.perf
self.command = parser.get('config', 'command')
self.args = parser.get('config', 'args')
try:
self.ret = parser.get('config', 'ret')
except:
self.ret = 0
self.expect = {}
self.result = {}
log.debug(" loading expected events");
self.load_events(path, self.expect)
def is_event(self, name):
if name.find("event") == -1:
return False
else:
return True
def load_events(self, path, events):
parser_event = ConfigParser.SafeConfigParser()
parser_event.read(path)
        # The event record section header contains the word 'event',
        # optionally followed by ':', which allows a 'parent event'
        # to be loaded first as a base
for section in filter(self.is_event, parser_event.sections()):
parser_items = parser_event.items(section);
base_items = {}
# Read parent event if there's any
if (':' in section):
base = section[section.index(':') + 1:]
parser_base = ConfigParser.SafeConfigParser()
parser_base.read(self.test_dir + '/' + base)
base_items = parser_base.items('event')
e = Event(section, parser_items, base_items)
events[section] = e
def run_cmd(self, tempdir):
cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
self.perf, self.command, tempdir, self.args)
ret = os.WEXITSTATUS(os.system(cmd))
log.info(" '%s' ret %d " % (cmd, ret))
if ret != int(self.ret):
raise Unsup(self)
def compare(self, expect, result):
match = {}
log.debug(" compare");
# For each expected event find all matching
# events in result. Fail if there's not any.
for exp_name, exp_event in expect.items():
exp_list = []
log.debug(" matching [%s]" % exp_name)
for res_name, res_event in result.items():
log.debug(" to [%s]" % res_name)
if (exp_event.equal(res_event)):
exp_list.append(res_name)
log.debug(" ->OK")
else:
log.debug(" ->FAIL");
log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list)))
            # we did not find any matching event - fail
if (not exp_list):
exp_event.diff(res_event)
raise Fail(self, 'match failure');
match[exp_name] = exp_list
# For each defined group in the expected events
# check we match the same group in the result.
for exp_name, exp_event in expect.items():
group = exp_event.group
if (group == ''):
continue
for res_name in match[exp_name]:
res_group = result[res_name].group
if res_group not in match[group]:
raise Fail(self, 'group failure')
log.debug(" group: [%s] matches group leader %s" %
(exp_name, str(match[group])))
log.debug(" matched")
def resolve_groups(self, events):
for name, event in events.items():
group_fd = event['group_fd'];
if group_fd == '-1':
continue;
for iname, ievent in events.items():
if (ievent['fd'] == group_fd):
event.group = iname
log.debug('[%s] has group leader [%s]' % (name, iname))
break;
def run(self):
tempdir = tempfile.mkdtemp();
try:
# run the test script
self.run_cmd(tempdir);
# load events expectation for the test
log.debug(" loading result events");
for f in glob.glob(tempdir + '/event*'):
self.load_events(f, self.result);
# resolve group_fd to event names
self.resolve_groups(self.expect);
self.resolve_groups(self.result);
# do the expectation - results matching - both ways
self.compare(self.expect, self.result)
self.compare(self.result, self.expect)
finally:
# cleanup
shutil.rmtree(tempdir)
def run_tests(options):
for f in glob.glob(options.test_dir + '/' + options.test):
try:
Test(f, options).run()
except Unsup, obj:
log.warning("unsupp %s" % obj.getMsg())
def setup_log(verbose):
global log
level = logging.CRITICAL
if verbose == 1:
level = logging.WARNING
if verbose == 2:
level = logging.INFO
if verbose >= 3:
level = logging.DEBUG
log = logging.getLogger('test')
log.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
parser = optparse.OptionParser(usage=USAGE)
parser.add_option("-t", "--test",
action="store", type="string", dest="test")
parser.add_option("-d", "--test-dir",
action="store", type="string", dest="test_dir")
parser.add_option("-p", "--perf",
action="store", type="string", dest="perf")
parser.add_option("-v", "--verbose",
action="count", dest="verbose")
options, args = parser.parse_args()
if args:
parser.error('FAILED wrong arguments %s' % ' '.join(args))
return -1
setup_log(options.verbose)
if not options.test_dir:
print 'FAILED no -d option specified'
sys.exit(-1)
if not options.test:
options.test = 'test*'
try:
run_tests(options)
except Fail, obj:
print "FAILED %s" % obj.getMsg();
sys.exit(-1)
sys.exit(0)
if __name__ == '__main__':
main()
|
iafan/zing
|
refs/heads/master
|
pootle/apps/pootle_statistics/migrations/0003_scorelog_translated_wordcount.py
|
10
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pootle_statistics', '0002_update_submission_ordering'),
]
operations = [
migrations.AddField(
model_name='scorelog',
name='translated_wordcount',
field=models.PositiveIntegerField(null=True),
),
]
|
SNoiraud/gramps
|
refs/heads/master
|
gramps/gen/filters/rules/event/_hassourcecount.py
|
5
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2007 Donald N. Allingham
# Copyright (C) 2007-2009 Brian G. Matherly
# Copyright (C) 2009 Benny Malengier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .._hassourcecountbase import HasSourceCountBase
#-------------------------------------------------------------------------
# "People having sources"
#-------------------------------------------------------------------------
class HasSourceCount(HasSourceCountBase):
"""Events with sources"""
name = _('Events with <count> sources')
description = _("Matches events with a certain number of sources connected to it")
|
Tisseo/navitia
|
refs/heads/dev
|
source/jormungandr/tests/isochrone_tests.py
|
2
|
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
import logging
from .tests_mechanism import AbstractTestFixture, dataset
from .check_utils import *
@dataset({"basic_routing_test": {}})
class TestIsochrone(AbstractTestFixture):
"""
Test the structure of the journeys response
"""
def test_from_isochrone_coord(self):
#NOTE: we query /v1/coverage/basic_routing_test/journeys and not directly /v1/journeys
#not to use the jormungandr database
query = "v1/coverage/basic_routing_test/journeys?from={}&datetime={}"
query = query.format(s_coord, "20120614T080000")
self.query(query)
def test_stop_point_isochrone_coord(self):
#NOTE: we query /v1/coverage/basic_routing_test/journeys and not directly /v1/journeys
#not to use the jormungandr database
query = "v1/coverage/basic_routing_test/stop_points/A/journeys?max_duration=400&datetime=20120614T080000"
response = self.query(query, display=True)
is_valid_isochrone_response(response, self.tester, query)
assert len(response["journeys"]) == 1
assert response["journeys"][0]["duration"] == 300
assert response["journeys"][0]["to"]["stop_point"]["id"] == "B"
assert response["journeys"][0]["from"]["id"] == "A"
query = "v1/coverage/basic_routing_test/stop_points/A/journeys?max_duration=25500&datetime=20120614T080000"
response = self.query(query)
is_valid_isochrone_response(response, self.tester, query)
assert len(response["journeys"]) == 2
assert response["journeys"][0]["duration"] == 300
assert response["journeys"][0]["to"]["stop_point"]["id"] == "B"
assert response["journeys"][0]["from"]["id"] == "A"
assert response["journeys"][1]["duration"] == 25200
assert response["journeys"][1]["to"]["stop_point"]["id"] == "D"
assert response["journeys"][1]["from"]["id"] == "A"
self.check_context(response)
assert len(response['disruptions']) == 0
def test_stop_point_isochrone_coord_no_transfers(self):
#same query as the test_stop_point_isochrone_coord test, but this time we forbid to do a transfers
#we should be able to touch only 'B'
query = "v1/coverage/basic_routing_test/stop_points/A/journeys?" \
"max_duration=25500&datetime=20120614T080000" \
"&max_nb_transfers=0"
response = self.query(query)
is_valid_isochrone_response(response, self.tester, query)
assert len(response["journeys"]) == 1
assert response["journeys"][0]["duration"] == 300
assert response["journeys"][0]["to"]["stop_point"]["id"] == "B"
assert response["journeys"][0]["from"]["id"] == "A"
def test_to_isochrone_coord(self):
query = "v1/coverage/basic_routing_test/journeys?from={}&datetime={}"
query = query.format(s_coord, "20120614T080000")
response = self.query(query)
#In the previous version, the result contained no_solution with error message : 'several errors occured: \n * '
#with status code 200.
#After this correction, since the EntryPoint is absent an error is raised.
    def test_isochrone_from_unknown_sa(self):
query = "v1/coverage/basic_routing_test/journeys?from={}&datetime={}"
query = query.format("bob", "20120614T080000")
normal_response, error_code = self.query_no_assert(query)
assert normal_response['error']['message'] == 'The entry point: bob is not valid'
assert error_code == 404
def test_isochrone_to_unknown_sa(self):
query = "v1/coverage/basic_routing_test/journeys?to={}&datetime={}&datetime_represents=arrival"
query = query.format("bob", "20120614T080000")
normal_response, error_code = self.query_no_assert(query)
assert normal_response['error']['message'] == 'The entry point: bob is not valid'
assert error_code == 404
def test_isochrone_from_point(self):
query = "v1/coverage/basic_routing_test/journeys?from={}&datetime={}"
query = query.format("A", "20120614T080000")
normal_response, error_code = self.query_no_assert(query)
assert error_code == 200
assert len(normal_response["journeys"]) == 1
def test_reverse_isochrone_coord(self):
q = "v1/coverage/basic_routing_test/journeys?max_duration=100000" \
"&datetime=20120615T200000&datetime_represents=arrival&to=D"
normal_response = self.query(q, display=True)
is_valid_isochrone_response(normal_response, self.tester, q)
assert len(normal_response["journeys"]) == 2
def test_reverse_isochrone_coord_clockwise(self):
q = "v1/coverage/basic_routing_test/journeys?datetime=20120614T080000&to=A"
normal_response, error_code = self.query_no_assert(q)
assert error_code == 404
assert 'reverse isochrone works only for anti-clockwise request' in normal_response['error']['message']
def test_isochrone_non_clockwise(self):
q = "v1/coverage/basic_routing_test/journeys?datetime=20120614T080000&from=A&datetime_represents=arrival"
normal_response, error_code = self.query_no_assert(q)
assert error_code == 404
assert 'isochrone works only for clockwise request' in normal_response['error']['message']
def test_isochrone_count(self):
query = "v1/coverage/basic_routing_test/stop_points/A/journeys?max_duration=25500&datetime=20120614T080000"
response = self.query(query)
assert len(response["journeys"]) == 2
is_valid_isochrone_response(response, self.tester, query)
query += "&count=1"
response = self.query(query)
assert len(response["journeys"]) == 1
is_valid_isochrone_response(response, self.tester, query)
def test_invalid_count(self):
query = "v1/coverage/basic_routing_test/stop_points/A/journeys?max_duration=25500&datetime=20120614T080000"
response = self.query(query)
assert len(response["journeys"]) == 2
# invalid count
query += "&count=toto"
response, code = self.query_no_assert(query)
assert code == 400
assert "invalid literal for int() with base 10: 'toto'" in response['message']
|
talha131/pelican-plugins
|
refs/heads/master
|
ctags_generator/__init__.py
|
9
|
from .ctags_generator import *
|
fdupoux/ansible-modules-extras
|
refs/heads/fdevel
|
cloud/cloudstack/cs_instancegroup.py
|
17
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: cs_instancegroup
short_description: Manages instance groups on Apache CloudStack based clouds.
description:
- Create and remove instance groups.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the instance group.
required: true
domain:
description:
- Domain the instance group is related to.
required: false
default: null
account:
description:
- Account the instance group is related to.
required: false
default: null
project:
description:
- Project the instance group is related to.
required: false
default: null
state:
description:
- State of the instance group.
required: false
default: 'present'
choices: [ 'present', 'absent' ]
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Create an instance group
- local_action:
module: cs_instancegroup
name: loadbalancers
# Remove an instance group
- local_action:
module: cs_instancegroup
name: loadbalancers
state: absent
'''
RETURN = '''
---
id:
description: UUID of the instance group.
returned: success
type: string
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
name:
description: Name of the instance group.
returned: success
type: string
sample: webservers
created:
description: Date when the instance group was created.
returned: success
type: string
sample: 2015-05-03T15:05:51+0200
domain:
description: Domain the instance group is related to.
returned: success
type: string
sample: example domain
account:
description: Account the instance group is related to.
returned: success
type: string
sample: example account
project:
description: Project the instance group is related to.
returned: success
type: string
sample: example project
'''
try:
from cs import CloudStack, CloudStackException, read_config
has_lib_cs = True
except ImportError:
has_lib_cs = False
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackInstanceGroup(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackInstanceGroup, self).__init__(module)
self.instance_group = None
def get_instance_group(self):
if self.instance_group:
return self.instance_group
name = self.module.params.get('name')
args = {}
args['account'] = self.get_account('name')
args['domainid'] = self.get_domain('id')
args['projectid'] = self.get_project('id')
instance_groups = self.cs.listInstanceGroups(**args)
if instance_groups:
for g in instance_groups['instancegroup']:
if name in [ g['name'], g['id'] ]:
self.instance_group = g
break
return self.instance_group
def present_instance_group(self):
instance_group = self.get_instance_group()
if not instance_group:
self.result['changed'] = True
args = {}
args['name'] = self.module.params.get('name')
args['account'] = self.get_account('name')
args['domainid'] = self.get_domain('id')
args['projectid'] = self.get_project('id')
if not self.module.check_mode:
res = self.cs.createInstanceGroup(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
instance_group = res['instancegroup']
return instance_group
def absent_instance_group(self):
instance_group = self.get_instance_group()
if instance_group:
self.result['changed'] = True
if not self.module.check_mode:
res = self.cs.deleteInstanceGroup(id=instance_group['id'])
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
return instance_group
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name = dict(required=True),
state = dict(default='present', choices=['present', 'absent']),
domain = dict(default=None),
account = dict(default=None),
project = dict(default=None),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
if not has_lib_cs:
module.fail_json(msg="python library cs required: pip install cs")
try:
acs_ig = AnsibleCloudStackInstanceGroup(module)
state = module.params.get('state')
if state in ['absent']:
instance_group = acs_ig.absent_instance_group()
else:
instance_group = acs_ig.present_instance_group()
result = acs_ig.get_result(instance_group)
except CloudStackException, e:
module.fail_json(msg='CloudStackException: %s' % str(e))
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
percy-g2/Novathor_xperia_u8500
|
refs/heads/master
|
6.2.A.1.100/external/webkit/Tools/Scripts/webkitpy/common/system/stack_utils.py
|
215
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Simple routines for logging, obtaining thread stack information."""
import sys
import traceback
def log_thread_state(logger, name, thread_id, msg=''):
"""Log information about the given thread state."""
stack = _find_thread_stack(thread_id)
assert(stack is not None)
logger("")
logger("%s (tid %d) %s" % (name, thread_id, msg))
_log_stack(logger, stack)
logger("")
def _find_thread_stack(thread_id):
"""Returns a stack object that can be used to dump a stack trace for
the given thread id (or None if the id is not found)."""
for tid, stack in sys._current_frames().items():
if tid == thread_id:
return stack
return None
def _log_stack(logger, stack):
"""Log a stack trace to the logger callback."""
for filename, lineno, name, line in traceback.extract_stack(stack):
logger('File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
logger(' %s' % line.strip())
def log_traceback(logger, tb):
stack = traceback.extract_tb(tb)
for frame_str in traceback.format_list(stack):
for line in frame_str.split('\n'):
if line:
logger(" %s" % line)
|
edisonlz/fruit
|
refs/heads/master
|
web_project/api/view/doc.py
|
1
|
#coding=utf-8
import tornado.web
import simplejson
from view.api_doc import *
import tornado
class ThirdPartLoginPage(tornado.web.RequestHandler):
def get(self):
self.render('templates/docs/websocket.html',**{})
class ApiDocHandler(tornado.web.RequestHandler):
def get(self):
api_type = self.get_argument('api_type', '1')
all_apis = api_manager.get_apis(name=self.get_argument('name', None), module=self.get_argument('module', None),
handler=self.get_argument('handler', None), api_type=api_type)
apis = {}
for api in all_apis:
if not apis.has_key(api.module):
apis[api.module] = []
apis[api.module].append(api)
App = type('App', (object,), {'name': "api",})
app = App()
self.render('templates/docs/api_docs.html', **{'tornado':tornado,'apis': apis, 'api_base': self.settings.get("api_base", ''),\
'test_app_key': "", 'test_app': app,
'test_user_name': self.settings.get("test_user_name", ''),"api_type":api_type})
class ApiMapHandler(tornado.web.RequestHandler):
def get(self):
all_apis = api_manager.get_apis(name=self.get_argument('name', None), module=self.get_argument('module', None),
handler=self.get_argument('handler', None))
apis = {}
for api in all_apis:
if not apis.has_key(api.module):
apis[api.module] = []
apis[api.module].append(api)
self.render('templates/docs/api_map.html', **{'apis': apis, 'api_base': self.settings.get("api_base", ''), })
class ApiLoggingDataHandler(tornado.web.RequestHandler):
def get(self):
        from api_doc import logging_data
        data = logging_data[::-1]
min_len = 15
if len(data) < min_len:
for i in xrange(min_len - len(data)):
data.append("<p class='text-success'>...</p>")
result = simplejson.dumps(data)
self.write(result)
class ApiClearCacheHandler(tornado.web.RequestHandler):
def get(self):
from util import settings
import pylibmc
mc = pylibmc.Client(settings.cache_servers0)
mc.flush_all()
mc = pylibmc.Client(settings.cache_servers1)
mc.flush_all()
self.write({"status":"ok"})
class ApiAppKeyHandler(tornado.web.RequestHandler):
def get(self):
app_keys = {}
self.write(simplejson.dumps(app_keys))
class ApiExampleHandler(tornado.web.RequestHandler):
def get(self):
id = self.get_argument('id')
parts = id.split('.')
data = {}
try:
for p in parts:
data = (type(data) is dict) and data[p] or getattr(data, p)
except Exception, e:
data = ''
if hasattr(data, 'val'):
v = data.val()
else:
v = data
if type(v) in (list, tuple, dict):
if v:
self.write(simplejson.dumps(v,indent=True))
else:
self.write('null')
else:
self.write(v)
class BenmarkUrl(tornado.web.RequestHandler):
def get(self):
api_type = self.get_argument('api_type', '1')
all_apis = api_manager.get_apis(name=self.get_argument('name', None), module=self.get_argument('module', None),
handler=self.get_argument('handler', None),
api_type=api_type)
apis = {}
for api in all_apis:
if not apis.has_key(api.module):
apis[api.module] = []
params = []
api_uri = api.uri
for p in api.params:
if p.name in (":id",":uid",":xid",":keyword"):
api_uri = api_uri.replace(p.name,str(p.example or p.default))
continue
if p.required or p.name =="_cookie":
if p.default:
params.append( "%s=%s" % (p.name,p.default))
elif str(p.example):
params.append( "%s=%s" % (p.name,p.example))
test_url = api_uri + "?" + '&'.join(params)
setattr(api,"test_url",test_url)
apis[api.module].append(api)
self.render('templates/docs/api_test_docs.html', **{'tornado':tornado,'apis': apis})
# import os
# url_dir_name = os.path.dirname(__file__)
#
# url_path = os.path.join(url_dir_name, "url.txt")
#
# url_file = open(url_path, 'r')
# url = url_file.read(1000000)
# return self.write(url)
|
kaushik94/unishark
|
refs/heads/master
|
tests/test_testprogram.py
|
1
|
import unittest
import unishark
import os
import shutil
class TestProgramTestCase(unittest.TestCase):
def setUp(self):
super(TestProgramTestCase, self).setUp()
self.dest = 'results'
if os.path.exists(self.dest):
shutil.rmtree(self.dest)
def tearDown(self):
if os.path.exists(self.dest):
shutil.rmtree(self.dest)
class DefaultTestProgramTestCase(TestProgramTestCase):
def test_sequential_run(self):
dict_conf = {
'suites': {
'my_suite_1': {
'package': 'tests.mock1',
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module1', 'test_module2']
}
}
},
'my_suite_2': {
'package': 'tests.mock2',
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module3']
}
}
}
},
'reporters': {
'html': {
'class': 'unishark.HtmlReporter',
'kwargs': {
'dest': self.dest
}
},
'xunit': {
'class': 'unishark.XUnitReporter',
'kwargs': {
'dest': self.dest
}
}
},
'test': {
'suites': ['my_suite_1', 'my_suite_2'],
'reporters': ['html', 'xunit']
}
}
exit_code = unishark.DefaultTestProgram(dict_conf).run()
self.assertEqual(exit_code, 1)
exp_filenames = ['index.html', 'overview.html', 'my_suite_1_result.html', 'my_suite_2_result.html',
'my_suite_1_xunit_result.xml', 'my_suite_2_xunit_result.xml', 'summary_xunit_result.xml']
filenames = os.listdir(os.path.join(self.dest))
self.assertSetEqual(set(filenames), set(exp_filenames))
def test_program_with_concurrency_on_suites(self):
dict_conf = {
'suites': {
'my_suite_1': {
'package': 'tests.mock1',
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module1', 'test_module2']
}
}
},
'my_suite_2': {
'package': 'tests.mock2',
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module3']
}
}
}
},
'reporters': {
'html': {
'class': 'unishark.HtmlReporter',
'kwargs': {
'dest': self.dest
}
},
'xunit': {
'class': 'unishark.XUnitReporter',
'kwargs': {
'dest': self.dest
}
}
},
'test': {
'suites': ['my_suite_1', 'my_suite_2'],
'reporters': ['html', 'xunit'],
'max_workers': 2
}
}
program = unishark.DefaultTestProgram(dict_conf)
exit_code = program.run()
self.assertEqual(exit_code, 1)
exp_filenames = ['index.html', 'overview.html', 'my_suite_1_result.html', 'my_suite_2_result.html',
'my_suite_1_xunit_result.xml', 'my_suite_2_xunit_result.xml', 'summary_xunit_result.xml']
filenames = os.listdir(os.path.join(self.dest))
self.assertSetEqual(set(filenames), set(exp_filenames))
def test_program_with_concurrency_on_classes(self):
dict_conf = {
'suites': {
'my_suite_1': {
'package': 'tests.mock1',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module1', 'test_module2']
}
}
},
'my_suite_2': {
'package': 'tests.mock2',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module3']
}
}
}
},
'reporters': {
'html': {
'class': 'unishark.HtmlReporter',
'kwargs': {
'dest': self.dest
}
},
'xunit': {
'class': 'unishark.XUnitReporter',
'kwargs': {
'dest': self.dest
}
}
},
'test': {
'suites': ['my_suite_1', 'my_suite_2'],
'reporters': ['html', 'xunit'],
'max_workers': 0
}
}
program = unishark.DefaultTestProgram(dict_conf)
exit_code = program.run()
self.assertEqual(exit_code, 1)
exp_filenames = ['index.html', 'overview.html', 'my_suite_1_result.html', 'my_suite_2_result.html',
'my_suite_1_xunit_result.xml', 'my_suite_2_xunit_result.xml', 'summary_xunit_result.xml']
filenames = os.listdir(os.path.join(self.dest))
self.assertSetEqual(set(filenames), set(exp_filenames))
def test_program_with_concurrency_on_both_suites_and_classes(self):
dict_conf = {
'suites': {
'my_suite_1': {
'package': 'tests.mock1',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module1', 'test_module2']
}
}
},
'my_suite_2': {
'package': 'tests.mock2',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module3']
}
}
}
},
'reporters': {
'html': {
'class': 'unishark.HtmlReporter',
'kwargs': {
'dest': self.dest
}
},
'xunit': {
'class': 'unishark.XUnitReporter',
'kwargs': {
'dest': self.dest
}
}
},
'test': {
'suites': ['my_suite_1', 'my_suite_2'],
'reporters': ['html', 'xunit'],
'max_workers': 2
}
}
program = unishark.DefaultTestProgram(dict_conf)
exit_code = program.run()
self.assertEqual(exit_code, 1)
exp_filenames = ['index.html', 'overview.html', 'my_suite_1_result.html', 'my_suite_2_result.html',
'my_suite_1_xunit_result.xml', 'my_suite_2_xunit_result.xml', 'summary_xunit_result.xml']
filenames = os.listdir(os.path.join(self.dest))
self.assertSetEqual(set(filenames), set(exp_filenames))
def test_program_with_no_suites(self):
dict_conf = {
'suites': {
'my_suite_1': {
'package': 'tests.mock1',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module1', 'test_module2']
}
}
},
'my_suite_2': {
'package': 'tests.mock2',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module3']
}
}
}
},
'reporters': {
'html': {
'class': 'unishark.HtmlReporter',
'kwargs': {
'dest': self.dest
}
},
'xunit': {
'class': 'unishark.XUnitReporter',
'kwargs': {
'dest': self.dest
}
}
},
'test': {
'suites': [],
'reporters': ['html', 'xunit'],
'max_workers': 2
}
}
program = unishark.DefaultTestProgram(dict_conf)
exit_code = program.run()
self.assertEqual(exit_code, 0)
exp_filenames = ['index.html', 'overview.html', 'summary_xunit_result.xml']
filenames = os.listdir(os.path.join(self.dest))
self.assertSetEqual(set(filenames), set(exp_filenames))
def test_program_with_no_reporters_1(self):
dict_conf = {
'suites': {
'my_suite_1': {
'package': 'tests.mock1',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module1', 'test_module2']
}
}
},
'my_suite_2': {
'package': 'tests.mock2',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module3']
}
}
}
},
'reporters': {
'html': {
'class': 'unishark.HtmlReporter',
'kwargs': {
'dest': self.dest
}
},
'xunit': {
'class': 'unishark.XUnitReporter',
'kwargs': {
'dest': self.dest
}
}
},
'test': {
'suites': ['my_suite_1', 'my_suite_2'],
'reporters': [],
'max_workers': 2
}
}
program = unishark.DefaultTestProgram(dict_conf)
exit_code = program.run()
self.assertEqual(exit_code, 1)
self.assertFalse(os.path.exists(self.dest))
def test_program_with_no_reporters_2(self):
dict_conf = {
'suites': {
'my_suite_1': {
'package': 'tests.mock1',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module1', 'test_module2']
}
}
},
'my_suite_2': {
'package': 'tests.mock2',
'max_workers': 4,
'groups': {
'g1': {
'granularity': 'module',
'modules': ['test_module3']
}
}
}
},
'reporters': {
'html': {
'class': 'unishark.HtmlReporter',
'kwargs': {
'dest': self.dest
}
},
'xunit': {
'class': 'unishark.XUnitReporter',
'kwargs': {
'dest': self.dest
}
}
},
'test': {
'suites': ['my_suite_1', 'my_suite_2'],
'max_workers': 2
}
}
program = unishark.DefaultTestProgram(dict_conf)
exit_code = program.run()
self.assertEqual(exit_code, 1)
self.assertFalse(os.path.exists(self.dest))
if __name__ == '__main__':
unittest.main(verbosity=2)
|
azumimuo/family-xbmc-addon
|
refs/heads/master
|
script.mrknow.urlresolver/lib/urlresolver9/plugins/vidag.py
|
4
|
'''
vid.ag urlresolver plugin
Copyright (C) 2014 Eldorado
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from lib import jsunpack
from urlresolver9 import common
from urlresolver9.resolver import UrlResolver, ResolverError
class VidAgResolver(UrlResolver):
name = "vid.ag"
domains = ["vid.ag"]
pattern = '(?://|\.)(vid\.ag)/(?:embed-)?([0-9A-Za-z]+)'
def __init__(self):
self.net = common.Net()
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
html = self.net.http_GET(web_url).content
for match in re.finditer('(eval\(function.*?)</script>', html, re.DOTALL):
js_data = jsunpack.unpack(match.group(1))
r = re.search('file\s*:\s*"([^"]+)', js_data)
if r:
return r.group(1)
r = re.search('file\s*:\s*"([^"]+)', html)
if r:
return r.group(1)
raise ResolverError('File Not Found or removed')
def get_url(self, host, media_id):
return 'http://vid.ag/embed-%s.html' % media_id
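# A minimal usage sketch (hypothetical media id; resolving requires network
# access to vid.ag, so this is illustration only):
#
#   resolver = VidAgResolver()
#   stream_url = resolver.get_media_url('vid.ag', 'abc123xyz0')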
|
csrocha/OpenUpgrade
|
refs/heads/8.0
|
openerp/addons/base/__openerp__.py
|
336
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Base',
'version': '1.3',
'category': 'Hidden',
'description': """
The kernel of OpenERP, needed for all installations.
====================================================
""",
'author': 'OpenERP SA',
'maintainer': 'OpenERP SA',
'website': 'http://www.openerp.com',
'depends': [],
'data': [
'base_data.xml',
'res/res_currency_data.xml',
'res/res_country_data.xml',
'security/base_security.xml',
'base_menu.xml',
'res/res_config.xml',
'res/res.country.state.csv',
'ir/ir_actions.xml',
'ir/ir_config_parameter_view.xml',
'ir/ir_cron_view.xml',
'ir/ir_filters.xml',
'ir/ir_mail_server_view.xml',
'ir/ir_model_view.xml',
'ir/ir_attachment_view.xml',
'ir/ir_rule_view.xml',
'ir/ir_sequence_view.xml',
'ir/ir_translation_view.xml',
'ir/ir_ui_menu_view.xml',
'ir/ir_ui_view_view.xml',
'ir/ir_values_view.xml',
'ir/osv_memory_autovacuum.xml',
'ir/ir_model_report.xml',
'ir/ir_logging_view.xml',
'ir/ir_qweb.xml',
'workflow/workflow_view.xml',
'module/module_view.xml',
'module/module_data.xml',
'module/module_report.xml',
'module/wizard/base_module_update_view.xml',
'module/wizard/base_language_install_view.xml',
'module/wizard/base_import_language_view.xml',
'module/wizard/base_module_upgrade_view.xml',
'module/wizard/base_module_configuration_view.xml',
'module/wizard/base_export_language_view.xml',
'module/wizard/base_update_translations_view.xml',
'module/wizard/base_module_immediate_install.xml',
'res/res_company_view.xml',
'res/res_request_view.xml',
'res/res_lang_view.xml',
'res/res_partner_report.xml',
'res/res_partner_view.xml',
'res/res_bank_view.xml',
'res/res_country_view.xml',
'res/res_currency_view.xml',
'res/res_users_view.xml',
'res/res_partner_data.xml',
'res/ir_property_view.xml',
'res/res_security.xml',
'security/ir.model.access.csv',
],
'demo': [
'base_demo.xml',
'res/res_partner_demo.xml',
'res/res_partner_demo.yml',
'res/res_partner_image_demo.xml',
],
'test': [
'tests/base_test.yml',
'tests/test_osv_expression.yml',
'tests/test_ir_rule.yml', # <-- These tests modify/add/delete ir_rules.
],
'installable': True,
'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
wong2/sentry
|
refs/heads/master
|
src/sentry/plugins/bases/tag.py
|
23
|
"""
sentry.plugins.bases.tag
~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from sentry.constants import MAX_TAG_VALUE_LENGTH
from sentry.plugins import Plugin2
class TagPlugin(Plugin2):
tag = None
project_default_enabled = True
def get_tag_values(self, event, **kwargs):
"""
Must return a list of values.
        >>> get_tag_values(event)
[tag1, tag2, tag3]
"""
raise NotImplementedError
def get_tags(self, event, **kwargs):
return [
(self.tag, v)
for v in self.get_tag_values(event)
if len(v) <= MAX_TAG_VALUE_LENGTH
]
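# A minimal sketch of a concrete subclass (hypothetical tag name and event
# field, for illustration only): subclasses set `tag` and override
# get_tag_values(); get_tags() then applies the length filter.
#
#   class BrowserTagPlugin(TagPlugin):
#       tag = 'browser'
#
#       def get_tag_values(self, event, **kwargs):
#           return [event.data.get('browser', 'unknown')]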
|
cgwire/zou
|
refs/heads/master
|
zou/app/models/serializer.py
|
1
|
import sqlalchemy.orm as orm
from sqlalchemy.inspection import inspect
from zou.app.utils.fields import serialize_value
class SerializerMixin(object):
"""
Helpers to facilitate JSON serialization of models.
"""
def is_join(self, attr):
return isinstance(
getattr(self.__class__, attr).impl,
orm.attributes.CollectionAttributeImpl,
)
def serialize(self, obj_type=None, relations=False):
attrs = inspect(self).attrs.keys()
if relations:
obj_dict = {
attr: serialize_value(getattr(self, attr)) for attr in attrs
}
else:
obj_dict = {
attr: serialize_value(getattr(self, attr))
for attr in attrs
if not self.is_join(attr)
}
obj_dict["type"] = obj_type or type(self).__name__
return obj_dict
@staticmethod
def serialize_list(models, obj_type=None, relations=False):
return [
model.serialize(obj_type=obj_type, relations=relations)
for model in models
]
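# A minimal usage sketch (hypothetical SQLAlchemy model, for illustration
# only): a declarative model that mixes in SerializerMixin gains
# serialize() and serialize_list().
#
#   class Project(BaseModel, SerializerMixin):
#       name = Column(String(80))
#
#   project.serialize()
#   # -> {"id": ..., "name": ..., "type": "Project"}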
|
mgr01/antfs-cli
|
refs/heads/master
|
scripts/40-convert_to_tcx.py
|
1
|
#!/usr/bin/python
#
# Script to run the FIT-to-TCX converter on every new FIT file that is being
# downloaded by antfs-cli
#
# Adjust the fittotcx path to point to where you have put the FIT-to-TCX files.
#
# Copyright (c) 2012, Gustav Tiger <gustav@tiger.name>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from __future__ import absolute_import, print_function
import errno
import os
import subprocess
import sys
fittotcx = "/path/to/FIT-to-TCX/fittotcx.py"
def main(action, filename, fit_type):
# Only new downloads which are activities
if action != "DOWNLOAD" or fit_type != "4":
return 0
basedir = os.path.split(os.path.dirname(filename))[0]
basefile = os.path.basename(filename)
# Create directory
targetdir = os.path.join(basedir, "activities_tcx")
try:
os.mkdir(targetdir)
    except OSError:
        pass  # directory may already exist
targetfile = os.path.splitext(basefile)[0] + ".tcx"
try:
# Run FIT-to-TCX
process = subprocess.Popen([fittotcx, filename], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(data, _) = process.communicate()
except OSError as e:
print("Could not run Convert to TCX -", fittotcx, \
"-", errno.errorcode[e.errno], os.strerror(e.errno))
return -1
if process.returncode != 0:
print("Convert to TCX exited with error code", process.returncode)
return -1
# Write result
f = open(os.path.join(targetdir, targetfile), 'w')
f.write(data)
f.close()
return 0
if __name__ == "__main__":
sys.exit(main(*sys.argv[1:]))
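# Invocation sketch (hypothetical FIT path), matching the (action, filename,
# fit_type) arguments antfs-cli passes to its scripts:
#
#   ./40-convert_to_tcx.py DOWNLOAD /path/to/activities/1234.fit 4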
|
pselle/calibre
|
refs/heads/master
|
src/calibre/ebooks/markdown/extensions/footnotes.py
|
46
|
"""
========================= FOOTNOTES =================================
This section adds footnote handling to markdown. It can be used as
an example for extending python-markdown with relatively complex
functionality. While in this case the extension is included inside
the module itself, it could just as easily be added from outside the
module. Note that all markdown classes above are ignorant about
footnotes. All footnote functionality is provided separately and
then added to the markdown instance at run time.
Footnote functionality is attached by calling the extendMarkdown()
method of FootnoteExtension. The method also registers the
extension to allow its state to be reset by a call to the reset()
method.
Example:
Footnotes[^1] have a label[^label] and a definition[^!DEF].
[^1]: This is a footnote
[^label]: A footnote on "label"
[^!DEF]: The footnote for definition
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..preprocessors import Preprocessor
from ..inlinepatterns import Pattern
from ..treeprocessors import Treeprocessor
from ..postprocessors import Postprocessor
from ..util import etree, text_type
from ..odict import OrderedDict
import re
FN_BACKLINK_TEXT = "zz1337820767766393qq"
NBSP_PLACEHOLDER = "qq3936677670287331zz"
DEF_RE = re.compile(r'[ ]{0,3}\[\^([^\]]*)\]:\s*(.*)')
TABBED_RE = re.compile(r'((\t)|( ))(.*)')
class FootnoteExtension(Extension):
""" Footnote Extension. """
def __init__ (self, configs):
""" Setup configs. """
self.config = {'PLACE_MARKER':
["///Footnotes Go Here///",
"The text string that marks where the footnotes go"],
'UNIQUE_IDS':
[False,
"Avoid name collisions across "
"multiple calls to reset()."],
"BACKLINK_TEXT":
["↩",
"The text string that links from the footnote to the reader's place."]
}
for key, value in configs:
self.config[key][0] = value
# In multiple invocations, emit links that don't get tangled.
self.unique_prefix = 0
self.reset()
def extendMarkdown(self, md, md_globals):
""" Add pieces to Markdown. """
md.registerExtension(self)
self.parser = md.parser
self.md = md
self.sep = ':'
if self.md.output_format in ['html5', 'xhtml5']:
self.sep = '-'
# Insert a preprocessor before ReferencePreprocessor
md.preprocessors.add("footnote", FootnotePreprocessor(self),
"<reference")
# Insert an inline pattern before ImageReferencePattern
FOOTNOTE_RE = r'\[\^([^\]]*)\]' # blah blah [^1] blah
md.inlinePatterns.add("footnote", FootnotePattern(FOOTNOTE_RE, self),
"<reference")
# Insert a tree-processor that would actually add the footnote div
# This must be before all other treeprocessors (i.e., inline and
        # codehilite) so they can run on the contents of the div.
md.treeprocessors.add("footnote", FootnoteTreeprocessor(self),
"_begin")
        # Insert a postprocessor after the amp_substitute processor
md.postprocessors.add("footnote", FootnotePostprocessor(self),
">amp_substitute")
def reset(self):
""" Clear the footnotes on reset, and prepare for a distinct document. """
self.footnotes = OrderedDict()
self.unique_prefix += 1
def findFootnotesPlaceholder(self, root):
""" Return ElementTree Element that contains Footnote placeholder. """
def finder(element):
for child in element:
if child.text:
if child.text.find(self.getConfig("PLACE_MARKER")) > -1:
return child, element, True
if child.tail:
if child.tail.find(self.getConfig("PLACE_MARKER")) > -1:
return child, element, False
finder(child)
return None
res = finder(root)
return res
def setFootnote(self, id, text):
""" Store a footnote for later retrieval. """
self.footnotes[id] = text
def makeFootnoteId(self, id):
""" Return footnote link id. """
if self.getConfig("UNIQUE_IDS"):
return 'fn%s%d-%s' % (self.sep, self.unique_prefix, id)
else:
return 'fn%s%s' % (self.sep, id)
def makeFootnoteRefId(self, id):
""" Return footnote back-link id. """
if self.getConfig("UNIQUE_IDS"):
return 'fnref%s%d-%s' % (self.sep, self.unique_prefix, id)
else:
return 'fnref%s%s' % (self.sep, id)
def makeFootnotesDiv(self, root):
""" Return div of footnotes as et Element. """
if not list(self.footnotes.keys()):
return None
div = etree.Element("div")
div.set('class', 'footnote')
etree.SubElement(div, "hr")
ol = etree.SubElement(div, "ol")
for id in self.footnotes.keys():
li = etree.SubElement(ol, "li")
li.set("id", self.makeFootnoteId(id))
self.parser.parseChunk(li, self.footnotes[id])
backlink = etree.Element("a")
backlink.set("href", "#" + self.makeFootnoteRefId(id))
if self.md.output_format not in ['html5', 'xhtml5']:
backlink.set("rev", "footnote") # Invalid in HTML5
backlink.set("class", "footnote-backref")
backlink.set("title", "Jump back to footnote %d in the text" % \
(self.footnotes.index(id)+1))
backlink.text = FN_BACKLINK_TEXT
if li.getchildren():
node = li[-1]
if node.tag == "p":
node.text = node.text + NBSP_PLACEHOLDER
node.append(backlink)
else:
p = etree.SubElement(li, "p")
p.append(backlink)
return div
class FootnotePreprocessor(Preprocessor):
""" Find all footnote references and store for later use. """
def __init__ (self, footnotes):
self.footnotes = footnotes
def run(self, lines):
"""
Loop through lines and find, set, and remove footnote definitions.
Keywords:
* lines: A list of lines of text
Return: A list of lines of text with footnote definitions removed.
"""
newlines = []
i = 0
while True:
m = DEF_RE.match(lines[i])
if m:
fn, _i = self.detectTabbed(lines[i+1:])
fn.insert(0, m.group(2))
i += _i-1 # skip past footnote
self.footnotes.setFootnote(m.group(1), "\n".join(fn))
else:
newlines.append(lines[i])
if len(lines) > i+1:
i += 1
else:
break
return newlines
def detectTabbed(self, lines):
""" Find indented text and remove indent before further proccesing.
Keyword arguments:
* lines: an array of strings
Returns: a list of post processed items and the index of last line.
"""
items = []
blank_line = False # have we encountered a blank line yet?
i = 0 # to keep track of where we are
def detab(line):
match = TABBED_RE.match(line)
if match:
return match.group(4)
for line in lines:
if line.strip(): # Non-blank line
detabbed_line = detab(line)
if detabbed_line:
items.append(detabbed_line)
i += 1
continue
elif not blank_line and not DEF_RE.match(line):
# not tabbed but still part of first par.
items.append(line)
i += 1
continue
else:
return items, i+1
else: # Blank line: _maybe_ we are done.
blank_line = True
i += 1 # advance
# Find the next non-blank line
for j in range(i, len(lines)):
if lines[j].strip():
next_line = lines[j]; break
else:
break # There is no more text; we are done.
# Check if the next non-blank line is tabbed
if detab(next_line): # Yes, more work to do.
items.append("")
continue
else:
break # No, we are done.
else:
i += 1
return items, i
class FootnotePattern(Pattern):
""" InlinePattern for footnote markers in a document's body text. """
def __init__(self, pattern, footnotes):
super(FootnotePattern, self).__init__(pattern)
self.footnotes = footnotes
def handleMatch(self, m):
id = m.group(2)
if id in self.footnotes.footnotes.keys():
sup = etree.Element("sup")
a = etree.SubElement(sup, "a")
sup.set('id', self.footnotes.makeFootnoteRefId(id))
a.set('href', '#' + self.footnotes.makeFootnoteId(id))
if self.footnotes.md.output_format not in ['html5', 'xhtml5']:
a.set('rel', 'footnote') # invalid in HTML5
a.set('class', 'footnote-ref')
a.text = text_type(self.footnotes.footnotes.index(id) + 1)
return sup
else:
return None
class FootnoteTreeprocessor(Treeprocessor):
""" Build and append footnote div to end of document. """
def __init__ (self, footnotes):
self.footnotes = footnotes
def run(self, root):
footnotesDiv = self.footnotes.makeFootnotesDiv(root)
if footnotesDiv:
result = self.footnotes.findFootnotesPlaceholder(root)
if result:
child, parent, isText = result
ind = parent.getchildren().index(child)
if isText:
parent.remove(child)
parent.insert(ind, footnotesDiv)
else:
parent.insert(ind + 1, footnotesDiv)
child.tail = None
else:
root.append(footnotesDiv)
class FootnotePostprocessor(Postprocessor):
""" Replace placeholders with html entities. """
def __init__(self, footnotes):
self.footnotes = footnotes
def run(self, text):
text = text.replace(FN_BACKLINK_TEXT, self.footnotes.getConfig("BACKLINK_TEXT"))
return text.replace(NBSP_PLACEHOLDER, " ")
def makeExtension(configs=[]):
""" Return an instance of the FootnoteExtension """
return FootnoteExtension(configs=configs)
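# A minimal usage sketch (assumes a stock python-markdown entry point that
# accepts extension instances; illustration only):
#
#   import markdown
#   md = markdown.Markdown(extensions=[makeExtension()])
#   html = md.convert('Body text.[^1]\n\n[^1]: The footnote definition.')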
|
dulems/hue
|
refs/heads/master
|
desktop/core/ext-py/avro-1.7.6/src/avro/ipc.py
|
42
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Support for inter-process calls.
"""
import httplib
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from avro import io
from avro import protocol
from avro import schema
#
# Constants
#
# Handshake schema is pulled in during build
HANDSHAKE_REQUEST_SCHEMA = schema.parse("""
{
"type": "record",
"name": "HandshakeRequest", "namespace":"org.apache.avro.ipc",
"fields": [
{"name": "clientHash",
"type": {"type": "fixed", "name": "MD5", "size": 16}},
{"name": "clientProtocol", "type": ["null", "string"]},
{"name": "serverHash", "type": "MD5"},
{"name": "meta", "type": ["null", {"type": "map", "values": "bytes"}]}
]
}
""")
HANDSHAKE_RESPONSE_SCHEMA = schema.parse("""
{
"type": "record",
"name": "HandshakeResponse", "namespace": "org.apache.avro.ipc",
"fields": [
{"name": "match",
"type": {"type": "enum", "name": "HandshakeMatch",
"symbols": ["BOTH", "CLIENT", "NONE"]}},
{"name": "serverProtocol",
"type": ["null", "string"]},
{"name": "serverHash",
"type": ["null", {"type": "fixed", "name": "MD5", "size": 16}]},
{"name": "meta",
"type": ["null", {"type": "map", "values": "bytes"}]}
]
}
""")
HANDSHAKE_REQUESTOR_WRITER = io.DatumWriter(HANDSHAKE_REQUEST_SCHEMA)
HANDSHAKE_REQUESTOR_READER = io.DatumReader(HANDSHAKE_RESPONSE_SCHEMA)
HANDSHAKE_RESPONDER_WRITER = io.DatumWriter(HANDSHAKE_RESPONSE_SCHEMA)
HANDSHAKE_RESPONDER_READER = io.DatumReader(HANDSHAKE_REQUEST_SCHEMA)
META_SCHEMA = schema.parse('{"type": "map", "values": "bytes"}')
META_WRITER = io.DatumWriter(META_SCHEMA)
META_READER = io.DatumReader(META_SCHEMA)
SYSTEM_ERROR_SCHEMA = schema.parse('["string"]')
# protocol cache
REMOTE_HASHES = {}
REMOTE_PROTOCOLS = {}
BIG_ENDIAN_INT_STRUCT = io.struct_class('!I')
BUFFER_HEADER_LENGTH = 4
BUFFER_SIZE = 8192
#
# Exceptions
#
class AvroRemoteException(schema.AvroException):
"""
Raised when an error message is sent by an Avro requestor or responder.
"""
def __init__(self, fail_msg=None):
schema.AvroException.__init__(self, fail_msg)
class ConnectionClosedException(schema.AvroException):
pass
#
# Base IPC Classes (Requestor/Responder)
#
class BaseRequestor(object):
"""Base class for the client side of a protocol interaction."""
def __init__(self, local_protocol, transceiver):
self._local_protocol = local_protocol
self._transceiver = transceiver
self._remote_protocol = None
self._remote_hash = None
self._send_protocol = None
# read-only properties
local_protocol = property(lambda self: self._local_protocol)
transceiver = property(lambda self: self._transceiver)
# read/write properties
def set_remote_protocol(self, new_remote_protocol):
self._remote_protocol = new_remote_protocol
REMOTE_PROTOCOLS[self.transceiver.remote_name] = self.remote_protocol
remote_protocol = property(lambda self: self._remote_protocol,
set_remote_protocol)
def set_remote_hash(self, new_remote_hash):
self._remote_hash = new_remote_hash
REMOTE_HASHES[self.transceiver.remote_name] = self.remote_hash
remote_hash = property(lambda self: self._remote_hash, set_remote_hash)
def set_send_protocol(self, new_send_protocol):
self._send_protocol = new_send_protocol
send_protocol = property(lambda self: self._send_protocol, set_send_protocol)
def request(self, message_name, request_datum):
"""
Writes a request message and reads a response or error message.
"""
# build handshake and call request
buffer_writer = StringIO()
buffer_encoder = io.BinaryEncoder(buffer_writer)
self.write_handshake_request(buffer_encoder)
self.write_call_request(message_name, request_datum, buffer_encoder)
# send the handshake and call request; block until call response
call_request = buffer_writer.getvalue()
return self.issue_request(call_request, message_name, request_datum)
def write_handshake_request(self, encoder):
local_hash = self.local_protocol.md5
remote_name = self.transceiver.remote_name
remote_hash = REMOTE_HASHES.get(remote_name)
if remote_hash is None:
remote_hash = local_hash
self.remote_protocol = self.local_protocol
request_datum = {}
request_datum['clientHash'] = local_hash
request_datum['serverHash'] = remote_hash
if self.send_protocol:
request_datum['clientProtocol'] = str(self.local_protocol)
HANDSHAKE_REQUESTOR_WRITER.write(request_datum, encoder)
def write_call_request(self, message_name, request_datum, encoder):
"""
The format of a call request is:
* request metadata, a map with values of type bytes
* the message name, an Avro string, followed by
* the message parameters. Parameters are serialized according to
the message's request declaration.
"""
# request metadata (not yet implemented)
request_metadata = {}
META_WRITER.write(request_metadata, encoder)
# message name
message = self.local_protocol.messages.get(message_name)
if message is None:
raise schema.AvroException('Unknown message: %s' % message_name)
encoder.write_utf8(message.name)
# message parameters
self.write_request(message.request, request_datum, encoder)
def write_request(self, request_schema, request_datum, encoder):
datum_writer = io.DatumWriter(request_schema)
datum_writer.write(request_datum, encoder)
def read_handshake_response(self, decoder):
handshake_response = HANDSHAKE_REQUESTOR_READER.read(decoder)
match = handshake_response.get('match')
if match == 'BOTH':
self.send_protocol = False
return True
elif match == 'CLIENT':
if self.send_protocol:
raise schema.AvroException('Handshake failure.')
self.remote_protocol = protocol.parse(
handshake_response.get('serverProtocol'))
self.remote_hash = handshake_response.get('serverHash')
self.send_protocol = False
return True
elif match == 'NONE':
if self.send_protocol:
raise schema.AvroException('Handshake failure.')
self.remote_protocol = protocol.parse(
handshake_response.get('serverProtocol'))
self.remote_hash = handshake_response.get('serverHash')
self.send_protocol = True
return False
else:
raise schema.AvroException('Unexpected match: %s' % match)
def read_call_response(self, message_name, decoder):
"""
The format of a call response is:
* response metadata, a map with values of type bytes
* a one-byte error flag boolean, followed by either:
o if the error flag is false,
the message response, serialized per the message's response schema.
o if the error flag is true,
the error, serialized per the message's error union schema.
"""
# response metadata
response_metadata = META_READER.read(decoder)
# remote response schema
remote_message_schema = self.remote_protocol.messages.get(message_name)
if remote_message_schema is None:
raise schema.AvroException('Unknown remote message: %s' % message_name)
# local response schema
local_message_schema = self.local_protocol.messages.get(message_name)
if local_message_schema is None:
raise schema.AvroException('Unknown local message: %s' % message_name)
# error flag
if not decoder.read_boolean():
writers_schema = remote_message_schema.response
readers_schema = local_message_schema.response
return self.read_response(writers_schema, readers_schema, decoder)
else:
writers_schema = remote_message_schema.errors
readers_schema = local_message_schema.errors
raise self.read_error(writers_schema, readers_schema, decoder)
def read_response(self, writers_schema, readers_schema, decoder):
datum_reader = io.DatumReader(writers_schema, readers_schema)
result = datum_reader.read(decoder)
return result
def read_error(self, writers_schema, readers_schema, decoder):
datum_reader = io.DatumReader(writers_schema, readers_schema)
return AvroRemoteException(datum_reader.read(decoder))
class Requestor(BaseRequestor):
def issue_request(self, call_request, message_name, request_datum):
call_response = self.transceiver.transceive(call_request)
# process the handshake and call response
buffer_decoder = io.BinaryDecoder(StringIO(call_response))
call_response_exists = self.read_handshake_response(buffer_decoder)
if call_response_exists:
return self.read_call_response(message_name, buffer_decoder)
else:
return self.request(message_name, request_datum)
class Responder(object):
"""Base class for the server side of a protocol interaction."""
def __init__(self, local_protocol):
self._local_protocol = local_protocol
self._local_hash = self.local_protocol.md5
self._protocol_cache = {}
self.set_protocol_cache(self.local_hash, self.local_protocol)
# read-only properties
local_protocol = property(lambda self: self._local_protocol)
local_hash = property(lambda self: self._local_hash)
protocol_cache = property(lambda self: self._protocol_cache)
# utility functions to manipulate protocol cache
def get_protocol_cache(self, hash):
return self.protocol_cache.get(hash)
def set_protocol_cache(self, hash, protocol):
self.protocol_cache[hash] = protocol
def respond(self, call_request):
"""
Called by a server to deserialize a request, compute and serialize
a response or error. Compare to 'handle()' in Thrift.
"""
buffer_reader = StringIO(call_request)
buffer_decoder = io.BinaryDecoder(buffer_reader)
buffer_writer = StringIO()
buffer_encoder = io.BinaryEncoder(buffer_writer)
error = None
response_metadata = {}
try:
remote_protocol = self.process_handshake(buffer_decoder, buffer_encoder)
# handshake failure
if remote_protocol is None:
return buffer_writer.getvalue()
# read request using remote protocol
request_metadata = META_READER.read(buffer_decoder)
remote_message_name = buffer_decoder.read_utf8()
# get remote and local request schemas so we can do
# schema resolution (one fine day)
remote_message = remote_protocol.messages.get(remote_message_name)
if remote_message is None:
fail_msg = 'Unknown remote message: %s' % remote_message_name
raise schema.AvroException(fail_msg)
local_message = self.local_protocol.messages.get(remote_message_name)
if local_message is None:
fail_msg = 'Unknown local message: %s' % remote_message_name
raise schema.AvroException(fail_msg)
writers_schema = remote_message.request
readers_schema = local_message.request
request = self.read_request(writers_schema, readers_schema,
buffer_decoder)
# perform server logic
try:
response = self.invoke(local_message, request)
except AvroRemoteException, e:
error = e
except Exception, e:
error = AvroRemoteException(str(e))
# write response using local protocol
META_WRITER.write(response_metadata, buffer_encoder)
buffer_encoder.write_boolean(error is not None)
if error is None:
writers_schema = local_message.response
self.write_response(writers_schema, response, buffer_encoder)
else:
writers_schema = local_message.errors
self.write_error(writers_schema, error, buffer_encoder)
except schema.AvroException, e:
error = AvroRemoteException(str(e))
      buffer_encoder = io.BinaryEncoder(buffer_writer)  # write the error into the buffer that is returned
META_WRITER.write(response_metadata, buffer_encoder)
buffer_encoder.write_boolean(True)
self.write_error(SYSTEM_ERROR_SCHEMA, error, buffer_encoder)
return buffer_writer.getvalue()
def process_handshake(self, decoder, encoder):
handshake_request = HANDSHAKE_RESPONDER_READER.read(decoder)
handshake_response = {}
# determine the remote protocol
client_hash = handshake_request.get('clientHash')
client_protocol = handshake_request.get('clientProtocol')
remote_protocol = self.get_protocol_cache(client_hash)
if remote_protocol is None and client_protocol is not None:
remote_protocol = protocol.parse(client_protocol)
self.set_protocol_cache(client_hash, remote_protocol)
# evaluate remote's guess of the local protocol
server_hash = handshake_request.get('serverHash')
if self.local_hash == server_hash:
if remote_protocol is None:
handshake_response['match'] = 'NONE'
else:
handshake_response['match'] = 'BOTH'
else:
if remote_protocol is None:
handshake_response['match'] = 'NONE'
else:
handshake_response['match'] = 'CLIENT'
if handshake_response['match'] != 'BOTH':
handshake_response['serverProtocol'] = str(self.local_protocol)
handshake_response['serverHash'] = self.local_hash
HANDSHAKE_RESPONDER_WRITER.write(handshake_response, encoder)
return remote_protocol
def invoke(self, local_message, request):
"""
    Actual work done by the server; cf. the handler in Thrift.
"""
pass
def read_request(self, writers_schema, readers_schema, decoder):
datum_reader = io.DatumReader(writers_schema, readers_schema)
return datum_reader.read(decoder)
def write_response(self, writers_schema, response_datum, encoder):
datum_writer = io.DatumWriter(writers_schema)
datum_writer.write(response_datum, encoder)
def write_error(self, writers_schema, error_exception, encoder):
datum_writer = io.DatumWriter(writers_schema)
datum_writer.write(str(error_exception), encoder)
#
# Utility classes
#
class FramedReader(object):
"""Wrapper around a file-like object to read framed data."""
def __init__(self, reader):
self._reader = reader
# read-only properties
reader = property(lambda self: self._reader)
def read_framed_message(self):
message = []
while True:
buffer = StringIO()
buffer_length = self._read_buffer_length()
if buffer_length == 0:
return ''.join(message)
while buffer.tell() < buffer_length:
chunk = self.reader.read(buffer_length - buffer.tell())
if chunk == '':
raise ConnectionClosedException("Reader read 0 bytes.")
buffer.write(chunk)
message.append(buffer.getvalue())
def _read_buffer_length(self):
read = self.reader.read(BUFFER_HEADER_LENGTH)
if read == '':
raise ConnectionClosedException("Reader read 0 bytes.")
return BIG_ENDIAN_INT_STRUCT.unpack(read)[0]
class FramedWriter(object):
"""Wrapper around a file-like object to write framed data."""
def __init__(self, writer):
self._writer = writer
# read-only properties
writer = property(lambda self: self._writer)
def write_framed_message(self, message):
message_length = len(message)
total_bytes_sent = 0
while message_length - total_bytes_sent > 0:
if message_length - total_bytes_sent > BUFFER_SIZE:
buffer_length = BUFFER_SIZE
else:
buffer_length = message_length - total_bytes_sent
self.write_buffer(message[total_bytes_sent:
(total_bytes_sent + buffer_length)])
total_bytes_sent += buffer_length
# A message is always terminated by a zero-length buffer.
self.write_buffer_length(0)
def write_buffer(self, chunk):
buffer_length = len(chunk)
self.write_buffer_length(buffer_length)
self.writer.write(chunk)
def write_buffer_length(self, n):
self.writer.write(BIG_ENDIAN_INT_STRUCT.pack(n))
#
# Transceiver Implementations
#
class HTTPTransceiver(object):
"""
A simple HTTP-based transceiver implementation.
Useful for clients but not for servers
"""
def __init__(self, host, port, req_resource='/'):
self.req_resource = req_resource
self.conn = httplib.HTTPConnection(host, port)
self.conn.connect()
# read-only properties
sock = property(lambda self: self.conn.sock)
remote_name = property(lambda self: self.sock.getsockname())
# read/write properties
def set_conn(self, new_conn):
self._conn = new_conn
conn = property(lambda self: self._conn, set_conn)
req_resource = '/'
def transceive(self, request):
self.write_framed_message(request)
result = self.read_framed_message()
return result
def read_framed_message(self):
response = self.conn.getresponse()
response_reader = FramedReader(response)
framed_message = response_reader.read_framed_message()
response.read() # ensure we're ready for subsequent requests
return framed_message
def write_framed_message(self, message):
req_method = 'POST'
req_headers = {'Content-Type': 'avro/binary'}
req_body_buffer = FramedWriter(StringIO())
req_body_buffer.write_framed_message(message)
req_body = req_body_buffer.writer.getvalue()
self.conn.request(req_method, self.req_resource, req_body, req_headers)
def close(self):
self.conn.close()
#
# Server Implementations (none yet)
#
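if __name__ == '__main__':
  # A minimal round-trip sketch of the framing format (illustration only,
  # not part of the original module): each buffer is prefixed with a 4-byte
  # big-endian length, and a zero-length buffer terminates the message.
  demo_buffer = StringIO()
  FramedWriter(demo_buffer).write_framed_message('hello avro')
  demo_buffer.seek(0)
  assert FramedReader(demo_buffer).read_framed_message() == 'hello avro'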
|
jlspyaozhongkai/Uter
|
refs/heads/master
|
third_party_build/Python-2.7.9/lib/python2.7/test/test_unary.py
|
137
|
"""Test compiler changes for unary ops (+, -, ~) introduced in Python 2.2"""
import unittest
from test.test_support import run_unittest, have_unicode
class UnaryOpTestCase(unittest.TestCase):
def test_negative(self):
self.assertTrue(-2 == 0 - 2)
self.assertTrue(-0 == 0)
self.assertTrue(--2 == 2)
self.assertTrue(-2L == 0 - 2L)
self.assertTrue(-2.0 == 0 - 2.0)
self.assertTrue(-2j == 0 - 2j)
def test_positive(self):
self.assertTrue(+2 == 2)
self.assertTrue(+0 == 0)
self.assertTrue(++2 == 2)
self.assertTrue(+2L == 2L)
self.assertTrue(+2.0 == 2.0)
self.assertTrue(+2j == 2j)
def test_invert(self):
        self.assertTrue(~2 == -(2 + 1))
        self.assertTrue(~0 == -1)
        self.assertTrue(~~2 == 2)
        self.assertTrue(~2L == -(2L + 1))
def test_no_overflow(self):
nines = "9" * 32
self.assertTrue(eval("+" + nines) == eval("+" + nines + "L"))
self.assertTrue(eval("-" + nines) == eval("-" + nines + "L"))
self.assertTrue(eval("~" + nines) == eval("~" + nines + "L"))
def test_negation_of_exponentiation(self):
# Make sure '**' does the right thing; these form a
# regression test for SourceForge bug #456756.
self.assertEqual(-2 ** 3, -8)
self.assertEqual((-2) ** 3, -8)
self.assertEqual(-2 ** 4, -16)
self.assertEqual((-2) ** 4, 16)
def test_bad_types(self):
for op in '+', '-', '~':
self.assertRaises(TypeError, eval, op + "'a'")
if have_unicode:
self.assertRaises(TypeError, eval, op + "u'a'")
self.assertRaises(TypeError, eval, "~2j")
self.assertRaises(TypeError, eval, "~2.0")
def test_main():
run_unittest(UnaryOpTestCase)
if __name__ == "__main__":
test_main()
|
vishdha/erpnext
|
refs/heads/develop
|
erpnext/stock/doctype/item/item.py
|
5
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import erpnext
import json
import itertools
from frappe import msgprint, _
from frappe.utils import (cstr, flt, cint, getdate, now_datetime, formatdate,
strip, get_timestamp, random_string)
from frappe.website.website_generator import WebsiteGenerator
from erpnext.setup.doctype.item_group.item_group import invalidate_cache_for, get_parent_item_groups
from frappe.website.render import clear_cache
from frappe.website.doctype.website_slideshow.website_slideshow import get_slideshow
from erpnext.controllers.item_variant import (get_variant, copy_attributes_to_variant,
make_variant_item_code, validate_item_variant_attributes, ItemVariantExistsError)
class DuplicateReorderRows(frappe.ValidationError): pass
class Item(WebsiteGenerator):
website = frappe._dict(
page_title_field = "item_name",
condition_field = "show_in_website",
template = "templates/generators/item.html",
no_cache = 1
)
def onload(self):
super(Item, self).onload()
self.set_onload('sle_exists', self.check_if_sle_exists())
if self.is_fixed_asset:
asset = frappe.db.get_all("Asset", filters={"item_code": self.name, "docstatus": 1}, limit=1)
self.set_onload("asset_exists", True if asset else False)
def autoname(self):
if frappe.db.get_default("item_naming_by")=="Naming Series":
if self.variant_of:
if not self.item_code:
template_item_name = frappe.db.get_value("Item", self.variant_of, "item_name")
self.item_code = make_variant_item_code(self.variant_of, template_item_name, self)
else:
from frappe.model.naming import make_autoname
self.item_code = make_autoname(self.naming_series+'.#####')
elif not self.item_code:
msgprint(_("Item Code is mandatory because Item is not automatically numbered"), raise_exception=1)
self.item_code = strip(self.item_code)
self.name = self.item_code
def before_insert(self):
if not self.description:
self.description = self.item_name
self.publish_in_hub = 1
def after_insert(self):
'''set opening stock and item price'''
if self.standard_rate:
self.add_price()
if self.opening_stock:
self.set_opening_stock()
def validate(self):
super(Item, self).validate()
if not self.item_name:
self.item_name = self.item_code
if not self.description:
self.description = self.item_name
self.validate_uom()
self.add_default_uom_in_conversion_factor_table()
self.validate_conversion_factor()
self.validate_item_type()
self.check_for_active_boms()
self.fill_customer_code()
self.check_item_tax()
self.validate_barcode()
self.cant_change()
self.validate_warehouse_for_reorder()
self.update_item_desc()
self.synced_with_hub = 0
self.validate_has_variants()
self.validate_attributes()
self.validate_variant_attributes()
self.validate_website_image()
self.make_thumbnail()
self.validate_fixed_asset()
if not self.get("__islocal"):
self.old_item_group = frappe.db.get_value(self.doctype, self.name, "item_group")
self.old_website_item_groups = frappe.db.sql_list("""select item_group
from `tabWebsite Item Group`
where parentfield='website_item_groups' and parenttype='Item' and parent=%s""", self.name)
def on_update(self):
invalidate_cache_for_item(self)
self.validate_name_with_item_group()
self.update_item_price()
self.update_template_item()
def add_price(self, price_list=None):
'''Add a new price'''
if not price_list:
price_list = (frappe.db.get_single_value('Selling Settings', 'selling_price_list')
or frappe.db.get_value('Price List', _('Standard Selling')))
if price_list:
item_price = frappe.get_doc({
"doctype": "Item Price",
"price_list": price_list,
"item_code": self.name,
"currency": erpnext.get_default_currency(),
"price_list_rate": self.standard_rate
})
item_price.insert()
def set_opening_stock(self):
'''set opening stock'''
if not self.is_stock_item or self.has_serial_no or self.has_batch_no:
return
if not self.valuation_rate and self.standard_rate:
self.valuation_rate = self.standard_rate
if not self.valuation_rate:
frappe.throw(_("Valuation Rate is mandatory if Opening Stock entered"))
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
# default warehouse, or Stores
default_warehouse = (frappe.db.get_single_value('Stock Settings', 'default_warehouse')
or frappe.db.get_value('Warehouse', {'warehouse_name': _('Stores')}))
if default_warehouse:
stock_entry = make_stock_entry(item_code=self.name, target=default_warehouse,
qty=self.opening_stock, rate=self.valuation_rate)
stock_entry.add_comment("Comment", _("Opening Stock"))
def make_route(self):
if not self.route:
return cstr(frappe.db.get_value('Item Group', self.item_group,
'route')) + '/' + self.scrub(self.item_name + '-' + random_string(5))
def validate_website_image(self):
"""Validate if the website image is a public file"""
auto_set_website_image = False
if not self.website_image and self.image:
auto_set_website_image = True
self.website_image = self.image
if not self.website_image:
return
# find if website image url exists as public
file_doc = frappe.get_all("File", filters={
"file_url": self.website_image
}, fields=["name", "is_private"], order_by="is_private asc", limit_page_length=1)
if file_doc:
file_doc = file_doc[0]
if not file_doc:
if not auto_set_website_image:
frappe.msgprint(_("Website Image {0} attached to Item {1} cannot be found")
.format(self.website_image, self.name))
self.website_image = None
elif file_doc.is_private:
if not auto_set_website_image:
frappe.msgprint(_("Website Image should be a public file or website URL"))
self.website_image = None
def make_thumbnail(self):
"""Make a thumbnail of `website_image`"""
import requests.exceptions
if not self.is_new() and self.website_image != frappe.db.get_value(self.doctype, self.name, "website_image"):
self.thumbnail = None
if self.website_image and not self.thumbnail:
file_doc = None
try:
file_doc = frappe.get_doc("File", {
"file_url": self.website_image,
"attached_to_doctype": "Item",
"attached_to_name": self.name
})
except frappe.DoesNotExistError:
pass
# cleanup
frappe.local.message_log.pop()
except requests.exceptions.HTTPError:
frappe.msgprint(_("Warning: Invalid attachment {0}").format(self.website_image))
self.website_image = None
except requests.exceptions.SSLError:
frappe.msgprint(_("Warning: Invalid SSL certificate on attachment {0}").format(self.website_image))
self.website_image = None
# for CSV import
if self.website_image and not file_doc:
try:
file_doc = frappe.get_doc({
"doctype": "File",
"file_url": self.website_image,
"attached_to_doctype": "Item",
"attached_to_name": self.name
}).insert()
except IOError:
self.website_image = None
if file_doc:
if not file_doc.thumbnail_url:
file_doc.make_thumbnail()
self.thumbnail = file_doc.thumbnail_url
def validate_fixed_asset(self):
if self.is_fixed_asset:
if self.is_stock_item:
frappe.throw(_("Fixed Asset Item must be a non-stock item."))
if not self.asset_category:
frappe.throw(_("Asset Category is mandatory for Fixed Asset item"))
def get_context(self, context):
		context.show_search = True
context.search_link = '/product_search'
context.parents = get_parent_item_groups(self.item_group)
self.set_variant_context(context)
self.set_attribute_context(context)
self.set_disabled_attributes(context)
return context
def set_variant_context(self, context):
if self.has_variants:
context.no_cache = True
# load variants
# also used in set_attribute_context
context.variants = frappe.get_all("Item",
filters={"variant_of": self.name, "show_variant_in_website": 1},
order_by="name asc")
variant = frappe.form_dict.variant
if not variant and context.variants:
# the case when the item is opened for the first time from its list
variant = context.variants[0]
if variant:
context.variant = frappe.get_doc("Item", variant)
for fieldname in ("website_image", "web_long_description", "description",
"website_specifications"):
if context.variant.get(fieldname):
value = context.variant.get(fieldname)
if isinstance(value, list):
value = [d.as_dict() for d in value]
context[fieldname] = value
if self.slideshow:
if context.variant and context.variant.slideshow:
context.update(get_slideshow(context.variant))
else:
context.update(get_slideshow(self))
def set_attribute_context(self, context):
if self.has_variants:
attribute_values_available = {}
context.attribute_values = {}
context.selected_attributes = {}
# load attributes
for v in context.variants:
v.attributes = frappe.get_all("Item Variant Attribute",
fields=["attribute", "attribute_value"], filters={"parent": v.name})
for attr in v.attributes:
values = attribute_values_available.setdefault(attr.attribute, [])
if attr.attribute_value not in values:
values.append(attr.attribute_value)
if v.name==context.variant.name:
context.selected_attributes[attr.attribute] = attr.attribute_value
# filter attributes, order based on attribute table
for attr in self.attributes:
values = context.attribute_values.setdefault(attr.attribute, [])
if cint(frappe.db.get_value("Item Attribute", attr.attribute, "numeric_values")):
for val in sorted(attribute_values_available.get(attr.attribute, []), key=flt):
values.append(val)
else:
# get list of values defined (for sequence)
for attr_value in frappe.db.get_all("Item Attribute Value",
fields=["attribute_value"], filters={"parent": attr.attribute}, order_by="idx asc"):
if attr_value.attribute_value in attribute_values_available.get(attr.attribute, []):
values.append(attr_value.attribute_value)
context.variant_info = json.dumps(context.variants)
def set_disabled_attributes(self, context):
"""Disable selection options of attribute combinations that do not result in a variant"""
if not self.attributes or not self.has_variants:
return
context.disabled_attributes = {}
attributes = [attr.attribute for attr in self.attributes]
def find_variant(combination):
for variant in context.variants:
if len(variant.attributes) < len(attributes):
continue
if "combination" not in variant:
ref_combination = []
for attr in variant.attributes:
idx = attributes.index(attr.attribute)
ref_combination.insert(idx, attr.attribute_value)
variant["combination"] = ref_combination
if not (set(combination) - set(variant["combination"])):
# check if the combination is a subset of a variant combination
# eg. [Blue, 0.5] is a possible combination if exists [Blue, Large, 0.5]
return True
for i, attr in enumerate(self.attributes):
if i==0:
continue
combination_source = []
# loop through previous attributes
for prev_attr in self.attributes[:i]:
combination_source.append([context.selected_attributes.get(prev_attr.attribute)])
combination_source.append(context.attribute_values[attr.attribute])
for combination in itertools.product(*combination_source):
if not find_variant(combination):
context.disabled_attributes.setdefault(attr.attribute, []).append(combination[-1])
def add_default_uom_in_conversion_factor_table(self):
uom_conv_list = [d.uom for d in self.get("uoms")]
if self.stock_uom not in uom_conv_list:
ch = self.append('uoms', {})
ch.uom = self.stock_uom
ch.conversion_factor = 1
to_remove = []
for d in self.get("uoms"):
if d.conversion_factor == 1 and d.uom != self.stock_uom:
to_remove.append(d)
		for d in to_remove:
			self.remove(d)
def update_template_tables(self):
template = frappe.get_doc("Item", self.variant_of)
# add item taxes from template
for d in template.get("taxes"):
self.append("taxes", {"tax_type": d.tax_type, "tax_rate": d.tax_rate})
# copy re-order table if empty
if not self.get("reorder_levels"):
for d in template.get("reorder_levels"):
n = {}
for k in ("warehouse", "warehouse_reorder_level",
"warehouse_reorder_qty", "material_request_type"):
n[k] = d.get(k)
self.append("reorder_levels", n)
def validate_conversion_factor(self):
check_list = []
for d in self.get('uoms'):
if cstr(d.uom) in check_list:
frappe.throw(_("Unit of Measure {0} has been entered more than once in Conversion Factor Table").format(d.uom))
else:
check_list.append(cstr(d.uom))
if d.uom and cstr(d.uom) == cstr(self.stock_uom) and flt(d.conversion_factor) != 1:
frappe.throw(_("Conversion factor for default Unit of Measure must be 1 in row {0}").format(d.idx))
def validate_item_type(self):
if self.has_serial_no == 1 and self.is_stock_item == 0:
msgprint(_("'Has Serial No' can not be 'Yes' for non-stock item"), raise_exception=1)
if self.has_serial_no == 0 and self.serial_no_series:
self.serial_no_series = None
def check_for_active_boms(self):
if self.default_bom:
bom_item = frappe.db.get_value("BOM", self.default_bom, "item")
if bom_item not in (self.name, self.variant_of):
frappe.throw(_("Default BOM ({0}) must be active for this item or its template").format(bom_item))
def fill_customer_code(self):
""" Append all the customer codes and insert into "customer_code" field of item table """
cust_code=[]
for d in self.get('customer_items'):
cust_code.append(d.ref_code)
self.customer_code=','.join(cust_code)
def check_item_tax(self):
"""Check whether Tax Rate is not entered twice for same Tax Type"""
check_list=[]
for d in self.get('taxes'):
if d.tax_type:
account_type = frappe.db.get_value("Account", d.tax_type, "account_type")
if account_type not in ['Tax', 'Chargeable', 'Income Account', 'Expense Account']:
frappe.throw(_("Item Tax Row {0} must have account of type Tax or Income or Expense or Chargeable").format(d.idx))
else:
if d.tax_type in check_list:
frappe.throw(_("{0} entered twice in Item Tax").format(d.tax_type))
else:
check_list.append(d.tax_type)
def validate_barcode(self):
if self.barcode:
duplicate = frappe.db.sql("""select name from tabItem where barcode = %s
and name != %s""", (self.barcode, self.name))
if duplicate:
frappe.throw(_("Barcode {0} already used in Item {1}").format(self.barcode, duplicate[0][0]))
def cant_change(self):
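		# Guard: once transactions exist against this item, critical
		# stock-related fields (serial/batch flags, valuation method,
		# stock/fixed-asset flags) must not be changed any more.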
if not self.get("__islocal"):
to_check = ("has_serial_no", "is_stock_item",
"valuation_method", "has_batch_no", "is_fixed_asset")
vals = frappe.db.get_value("Item", self.name, to_check, as_dict=True)
if not vals.get('valuation_method') and self.get('valuation_method'):
vals['valuation_method'] = frappe.db.get_single_value("Stock Settings", "valuation_method") or "FIFO"
if vals:
for key in to_check:
if cstr(self.get(key)) != cstr(vals.get(key)):
if not self.check_if_linked_document_exists(key):
break # no linked document, allowed
else:
frappe.throw(_("As there are existing transactions against item {0}, you can not change the value of {1}").format(self.name, frappe.bold(self.meta.get_label(key))))
if vals and not self.is_fixed_asset and self.is_fixed_asset != vals.is_fixed_asset:
asset = frappe.db.get_all("Asset", filters={"item_code": self.name, "docstatus": 1}, limit=1)
if asset:
frappe.throw(_('"Is Fixed Asset" cannot be unchecked, as Asset record exists against the item'))
def check_if_linked_document_exists(self, key):
linked_doctypes = ["Delivery Note Item", "Sales Invoice Item", "Purchase Receipt Item",
"Purchase Invoice Item", "Stock Entry Detail", "Stock Reconciliation Item"]
# For "Is Stock Item", following doctypes is important
# because reserved_qty, ordered_qty and requested_qty updated from these doctypes
if key == "is_stock_item":
linked_doctypes += ["Sales Order Item", "Purchase Order Item", "Material Request Item"]
for doctype in linked_doctypes:
if frappe.db.get_value(doctype, filters={"item_code": self.name, "docstatus": 1}) or \
frappe.db.get_value("Production Order",
filters={"production_item": self.name, "docstatus": 1}):
return True
for d in self.get("reorder_levels"):
if d.warehouse_reorder_level and not d.warehouse_reorder_qty:
frappe.throw(_("Row #{0}: Please set reorder quantity").format(d.idx))
def validate_warehouse_for_reorder(self):
warehouse = []
for i in self.get("reorder_levels"):
if i.get("warehouse") and i.get("warehouse") not in warehouse:
warehouse += [i.get("warehouse")]
else:
frappe.throw(_("Row {0}: An Reorder entry already exists for this warehouse {1}")
.format(i.idx, i.warehouse), DuplicateReorderRows)
def check_if_sle_exists(self):
sle = frappe.db.sql("""select name from `tabStock Ledger Entry`
where item_code = %s""", self.name)
		return 'exists' if sle else 'not exists'
def validate_name_with_item_group(self):
		# causes problems with tree build
if frappe.db.exists("Item Group", self.name):
frappe.throw(_("An Item Group exists with same name, please change the item name or rename the item group"))
def update_item_price(self):
frappe.db.sql("""update `tabItem Price` set item_name=%s,
item_description=%s, modified=NOW() where item_code=%s""",
(self.item_name, self.description, self.name))
def on_trash(self):
super(Item, self).on_trash()
frappe.db.sql("""delete from tabBin where item_code=%s""", self.item_code)
frappe.db.sql("delete from `tabItem Price` where item_code=%s", self.name)
		for variant in frappe.get_all("Item", filters={"variant_of": self.name}):
			frappe.delete_doc("Item", variant.name)
def before_rename(self, old_name, new_name, merge=False):
		if self.item_name == old_name:
frappe.db.set_value("Item", old_name, "item_name", new_name)
if merge:
# Validate properties before merging
if not frappe.db.exists("Item", new_name):
frappe.throw(_("Item {0} does not exist").format(new_name))
field_list = ["stock_uom", "is_stock_item", "has_serial_no", "has_batch_no"]
new_properties = [cstr(d) for d in frappe.db.get_value("Item", new_name, field_list)]
if new_properties != [cstr(self.get(fld)) for fld in field_list]:
frappe.throw(_("To merge, following properties must be same for both items")
+ ": \n" + ", ".join([self.meta.get_label(fld) for fld in field_list]))
def after_rename(self, old_name, new_name, merge):
if self.route:
invalidate_cache_for_item(self)
clear_cache(self.route)
frappe.db.set_value("Item", new_name, "item_code", new_name)
if merge:
self.set_last_purchase_rate(new_name)
self.recalculate_bin_qty(new_name)
for dt in ("Sales Taxes and Charges", "Purchase Taxes and Charges"):
for d in frappe.db.sql("""select name, item_wise_tax_detail from `tab{0}`
where ifnull(item_wise_tax_detail, '') != ''""".format(dt), as_dict=1):
item_wise_tax_detail = json.loads(d.item_wise_tax_detail)
if old_name in item_wise_tax_detail:
item_wise_tax_detail[new_name] = item_wise_tax_detail[old_name]
item_wise_tax_detail.pop(old_name)
frappe.db.set_value(dt, d.name, "item_wise_tax_detail",
json.dumps(item_wise_tax_detail), update_modified=False)
def set_last_purchase_rate(self, new_name):
last_purchase_rate = get_last_purchase_details(new_name).get("base_rate", 0)
frappe.db.set_value("Item", new_name, "last_purchase_rate", last_purchase_rate)
def recalculate_bin_qty(self, new_name):
from erpnext.stock.stock_balance import repost_stock
frappe.db.auto_commit_on_many_writes = 1
existing_allow_negative_stock = frappe.db.get_value("Stock Settings", None, "allow_negative_stock")
frappe.db.set_value("Stock Settings", None, "allow_negative_stock", 1)
repost_stock_for_warehouses = frappe.db.sql_list("""select distinct warehouse
from tabBin where item_code=%s""", new_name)
# Delete all existing bins to avoid duplicate bins for the same item and warehouse
frappe.db.sql("delete from `tabBin` where item_code=%s", new_name)
for warehouse in repost_stock_for_warehouses:
repost_stock(new_name, warehouse)
frappe.db.set_value("Stock Settings", None, "allow_negative_stock", existing_allow_negative_stock)
frappe.db.auto_commit_on_many_writes = 0
def copy_specification_from_item_group(self):
self.set("website_specifications", [])
if self.item_group:
for label, desc in frappe.db.get_values("Item Website Specification",
{"parent": self.item_group}, ["label", "description"]):
row = self.append("website_specifications")
row.label = label
row.description = desc
def update_item_desc(self):
		if frappe.db.get_value('BOM', self.name, 'description') != self.description:
			frappe.db.sql("""update `tabBOM` set description = %s where item = %s and docstatus < 2""", (self.description, self.name))
			frappe.db.sql("""update `tabBOM Item` set description = %s where
				item_code = %s and docstatus < 2""", (self.description, self.name))
			frappe.db.sql("""update `tabBOM Explosion Item` set description = %s where
				item_code = %s and docstatus < 2""", (self.description, self.name))
def update_template_item(self):
"""Set Show in Website for Template Item if True for its Variant"""
if self.variant_of and self.show_in_website:
self.show_variant_in_website = 1
self.show_in_website = 0
if self.show_variant_in_website:
# show template
template_item = frappe.get_doc("Item", self.variant_of)
if not template_item.show_in_website:
template_item.show_in_website = 1
template_item.flags.ignore_permissions = True
template_item.save()
def validate_has_variants(self):
if not self.has_variants and frappe.db.get_value("Item", self.name, "has_variants"):
if frappe.db.exists("Item", {"variant_of": self.name}):
frappe.throw(_("Item has variants."))
def validate_uom(self):
if not self.get("__islocal"):
check_stock_uom_with_bin(self.name, self.stock_uom)
if self.has_variants:
for d in frappe.db.get_all("Item", filters= {"variant_of": self.name}):
check_stock_uom_with_bin(d.name, self.stock_uom)
if self.variant_of:
template_uom = frappe.db.get_value("Item", self.variant_of, "stock_uom")
if template_uom != self.stock_uom:
frappe.throw(_("Default Unit of Measure for Variant '{0}' must be same as in Template '{1}'")
.format(self.stock_uom, template_uom))
def validate_attributes(self):
if (self.has_variants or self.variant_of) and self.variant_based_on=='Item Attribute':
attributes = []
if not self.attributes:
frappe.throw(_("Attribute table is mandatory"))
for d in self.attributes:
if d.attribute in attributes:
frappe.throw(_("Attribute {0} selected multiple times in Attributes Table".format(d.attribute)))
else:
attributes.append(d.attribute)
def validate_variant_attributes(self):
if self.variant_of and self.variant_based_on=='Item Attribute':
args = {}
for d in self.attributes:
if not d.attribute_value:
frappe.throw(_("Please specify Attribute Value for attribute {0}").format(d.attribute))
args[d.attribute] = d.attribute_value
variant = get_variant(self.variant_of, args, self.name)
if variant:
frappe.throw(_("Item variant {0} exists with same attributes")
.format(variant), ItemVariantExistsError)
validate_item_variant_attributes(self, args)
def get_timeline_data(doctype, name):
'''returns timeline data based on stock ledger entry'''
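	# out maps the unix timestamp of each posting_date (via get_timestamp)
	# to the number of Stock Ledger Entries posted that day, last 12 months only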
out = {}
items = dict(frappe.db.sql('''select posting_date, count(*)
from `tabStock Ledger Entry` where item_code=%s
and posting_date > date_sub(curdate(), interval 1 year)
group by posting_date''', name))
for date, count in items.iteritems():
timestamp = get_timestamp(date)
out.update({ timestamp: count })
return out
def validate_end_of_life(item_code, end_of_life=None, disabled=None, verbose=1):
if (not end_of_life) or (disabled is None):
end_of_life, disabled = frappe.db.get_value("Item", item_code, ["end_of_life", "disabled"])
if end_of_life and end_of_life!="0000-00-00" and getdate(end_of_life) <= now_datetime().date():
msg = _("Item {0} has reached its end of life on {1}").format(item_code, formatdate(end_of_life))
_msgprint(msg, verbose)
if disabled:
_msgprint(_("Item {0} is disabled").format(item_code), verbose)
def validate_is_stock_item(item_code, is_stock_item=None, verbose=1):
if not is_stock_item:
is_stock_item = frappe.db.get_value("Item", item_code, "is_stock_item")
if is_stock_item != 1:
msg = _("Item {0} is not a stock Item").format(item_code)
_msgprint(msg, verbose)
def validate_cancelled_item(item_code, docstatus=None, verbose=1):
if docstatus is None:
docstatus = frappe.db.get_value("Item", item_code, "docstatus")
if docstatus == 2:
msg = _("Item {0} is cancelled").format(item_code)
_msgprint(msg, verbose)
def _msgprint(msg, verbose):
if verbose:
msgprint(msg, raise_exception=True)
else:
raise frappe.ValidationError, msg
def get_last_purchase_details(item_code, doc_name=None, conversion_rate=1.0):
"""returns last purchase details in stock uom"""
# get last purchase order item details
last_purchase_order = frappe.db.sql("""\
select po.name, po.transaction_date, po.conversion_rate,
po_item.conversion_factor, po_item.base_price_list_rate,
po_item.discount_percentage, po_item.base_rate
from `tabPurchase Order` po, `tabPurchase Order Item` po_item
where po.docstatus = 1 and po_item.item_code = %s and po.name != %s and
po.name = po_item.parent
order by po.transaction_date desc, po.name desc
limit 1""", (item_code, cstr(doc_name)), as_dict=1)
# get last purchase receipt item details
last_purchase_receipt = frappe.db.sql("""\
select pr.name, pr.posting_date, pr.posting_time, pr.conversion_rate,
pr_item.conversion_factor, pr_item.base_price_list_rate, pr_item.discount_percentage,
pr_item.base_rate
from `tabPurchase Receipt` pr, `tabPurchase Receipt Item` pr_item
where pr.docstatus = 1 and pr_item.item_code = %s and pr.name != %s and
pr.name = pr_item.parent
order by pr.posting_date desc, pr.posting_time desc, pr.name desc
limit 1""", (item_code, cstr(doc_name)), as_dict=1)
purchase_order_date = getdate(last_purchase_order and last_purchase_order[0].transaction_date \
or "1900-01-01")
purchase_receipt_date = getdate(last_purchase_receipt and \
last_purchase_receipt[0].posting_date or "1900-01-01")
if (purchase_order_date > purchase_receipt_date) or \
(last_purchase_order and not last_purchase_receipt):
# use purchase order
last_purchase = last_purchase_order[0]
purchase_date = purchase_order_date
elif (purchase_receipt_date > purchase_order_date) or \
(last_purchase_receipt and not last_purchase_order):
# use purchase receipt
last_purchase = last_purchase_receipt[0]
purchase_date = purchase_receipt_date
else:
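		# neither a submitted Purchase Order nor a Purchase Receipt exists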
return frappe._dict()
conversion_factor = flt(last_purchase.conversion_factor)
out = frappe._dict({
"base_price_list_rate": flt(last_purchase.base_price_list_rate) / conversion_factor,
"base_rate": flt(last_purchase.base_rate) / conversion_factor,
"discount_percentage": flt(last_purchase.discount_percentage),
"purchase_date": purchase_date
})
conversion_rate = flt(conversion_rate) or 1.0
out.update({
"price_list_rate": out.base_price_list_rate / conversion_rate,
"rate": out.base_rate / conversion_rate,
"base_rate": out.base_rate
})
return out
def invalidate_cache_for_item(doc):
invalidate_cache_for(doc, doc.item_group)
website_item_groups = list(set((doc.get("old_website_item_groups") or [])
+ [d.item_group for d in doc.get({"doctype":"Website Item Group"}) if d.item_group]))
for item_group in website_item_groups:
invalidate_cache_for(doc, item_group)
if doc.get("old_item_group") and doc.get("old_item_group") != doc.item_group:
invalidate_cache_for(doc, doc.old_item_group)
def check_stock_uom_with_bin(item, stock_uom):
if stock_uom == frappe.db.get_value("Item", item, "stock_uom"):
return
	matched = True
ref_uom = frappe.db.get_value("Stock Ledger Entry",
{"item_code": item}, "stock_uom")
if ref_uom:
if cstr(ref_uom) != cstr(stock_uom):
matched = False
else:
bin_list = frappe.db.sql("select * from tabBin where item_code=%s", item, as_dict=1)
for bin in bin_list:
if (bin.reserved_qty > 0 or bin.ordered_qty > 0 or bin.indented_qty > 0 \
or bin.planned_qty > 0) and cstr(bin.stock_uom) != cstr(stock_uom):
matched = False
break
if matched and bin_list:
frappe.db.sql("""update tabBin set stock_uom=%s where item_code=%s""", (stock_uom, item))
if not matched:
frappe.throw(_("Default Unit of Measure for Item {0} cannot be changed directly because you have already made some transaction(s) with another UOM. You will need to create a new Item to use a different Default UOM.").format(item))
|
sfriesel/suds
|
refs/heads/master
|
suds/transport/__init__.py
|
2
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Contains transport interface (classes).
"""
class TransportError(Exception):
def __init__(self, reason, httpcode, fp=None):
Exception.__init__(self, reason)
self.httpcode = httpcode
self.fp = fp
class Request:
"""
A transport request
@ivar url: The url for the request.
@type url: str
@ivar message: The message to be sent in a POST request.
@type message: str
@ivar headers: The http headers to be used for the request.
@type headers: dict
"""
def __init__(self, url, message=None):
"""
@param url: The url for the request.
@type url: str
        @param message: The (optional) message to be sent in the request.
@type message: str
"""
self.url = url
self.headers = {}
self.message = message
def __str__(self):
s = []
        s.append('URL: %s' % self.url)
s.append('HEADERS: %s' % self.headers)
s.append('MESSAGE:')
s.append(str(self.message))
return '\n'.join(s)
class Reply:
"""
A transport reply
@ivar code: The http code returned.
@type code: int
    @ivar message: The message received in the reply.
    @type message: str
    @ivar headers: The http headers returned with the reply.
@type headers: dict
"""
def __init__(self, code, headers, message):
"""
@param code: The http code returned.
@type code: int
@param headers: The http returned headers.
@type headers: dict
@param message: The (optional) reply message received.
@type message: str
"""
self.code = code
self.headers = headers
self.message = message
def __str__(self):
s = []
s.append('CODE: %s' % self.code)
s.append('HEADERS: %s' % self.headers)
s.append('MESSAGE:')
s.append(str(self.message))
return '\n'.join(s)
class Transport:
"""
The transport I{interface}.
"""
def __init__(self):
"""
Constructor.
"""
from suds.transport.options import Options
self.options = Options()
del Options
def open(self, request):
"""
Open the url in the specified request.
@param request: A transport request.
@type request: L{Request}
@return: An input stream.
@rtype: stream
@raise TransportError: On all transport errors.
"""
        raise NotImplementedError()
def send(self, request):
"""
Send soap message. Implementations are expected to handle:
- proxies
- I{http} headers
- cookies
- sending message
- brokering exceptions into L{TransportError}
@param request: A transport request.
@type request: L{Request}
@return: The reply
@rtype: L{Reply}
@raise TransportError: On all transport errors.
"""
        raise NotImplementedError()
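# Illustrative sketch (not part of suds): a minimal Transport implementation
# of the interface above, e.g. for tests. The class name and behaviour are
# assumptions; it echoes the request message back instead of doing real HTTP.
class EchoTransport(Transport):
    def open(self, request):
        from StringIO import StringIO
        return StringIO('')  # an empty input stream for the opened url
    def send(self, request):
        # a real implementation would perform the POST here and broker
        # I/O failures into TransportError
        return Reply(200, dict(request.headers), request.message)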
|
fangxingli/hue
|
refs/heads/master
|
desktop/core/ext-py/Django-1.6.10/django/conf/locale/bs/formats.py
|
118
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. N Y.'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. N. Y. G:i T'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'Y M j'
# SHORT_DATETIME_FORMAT =
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
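# Illustrative usage (assumes a configured Django project with the "bs"
# locale active): django.utils.formats consumes these constants, e.g.
#   from django.utils import formats
#   formats.date_format(value)        # renders with DATE_FORMAT 'j. N Y.'
#   formats.number_format(1234.5, 1)  # uses ',' decimal / '.' thousand separators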
|
paweljasinski/ironpython3
|
refs/heads/master
|
Src/StdLib/Lib/timeit.py
|
72
|
#! /usr/bin/env python3
"""Tool for measuring execution time of small code snippets.
This module avoids a number of common traps for measuring execution
times. See also Tim Peters' introduction to the Algorithms chapter in
the Python Cookbook, published by O'Reilly.
Library usage: see the Timer class.
Command line usage:
python timeit.py [-n N] [-r N] [-s S] [-t] [-c] [-p] [-h] [--] [statement]
Options:
-n/--number N: how many times to execute 'statement' (default: see below)
-r/--repeat N: how many times to repeat the timer (default 3)
-s/--setup S: statement to be executed once initially (default 'pass')
-p/--process: use time.process_time() (default is time.perf_counter())
-t/--time: use time.time() (deprecated)
-c/--clock: use time.clock() (deprecated)
-v/--verbose: print raw timing results; repeat for more digits precision
-h/--help: print this usage message and exit
--: separate options from statement, use when statement starts with -
statement: statement to be timed (default 'pass')
A multi-line statement may be given by specifying each line as a
separate argument; indented lines are possible by enclosing an
argument in quotes and using leading spaces. Multiple -s options are
treated similarly.
If -n is not given, a suitable number of loops is calculated by trying
successive powers of 10 until the total time is at least 0.2 seconds.
Note: there is a certain baseline overhead associated with executing a
pass statement. It differs between versions. The code here doesn't try
to hide it, but you should be aware of it. The baseline overhead can be
measured by invoking the program without arguments.
Classes:
Timer
Functions:
timeit(string, string) -> float
repeat(string, string) -> list
default_timer() -> float
"""
import gc
import sys
import time
import itertools
__all__ = ["Timer", "timeit", "repeat", "default_timer"]
dummy_src_name = "<timeit-src>"
default_number = 1000000
default_repeat = 3
default_timer = time.perf_counter
# Don't change the indentation of the template; the reindent() calls
# in Timer.__init__() depend on setup being indented 4 spaces and stmt
# being indented 8 spaces.
template = """
def inner(_it, _timer):
{setup}
_t0 = _timer()
for _i in _it:
{stmt}
_t1 = _timer()
return _t1 - _t0
"""
def reindent(src, indent):
"""Helper to reindent a multi-line statement."""
return src.replace("\n", "\n" + " "*indent)
def _template_func(setup, func):
"""Create a timer function. Used if the "statement" is a callable."""
def inner(_it, _timer, _func=func):
setup()
_t0 = _timer()
for _i in _it:
_func()
_t1 = _timer()
return _t1 - _t0
return inner
class Timer:
"""Class for timing execution speed of small code snippets.
The constructor takes a statement to be timed, an additional
statement used for setup, and a timer function. Both statements
default to 'pass'; the timer function is platform-dependent (see
module doc string).
To measure the execution time of the first statement, use the
timeit() method. The repeat() method is a convenience to call
timeit() multiple times and return a list of results.
The statements may contain newlines, as long as they don't contain
multi-line string literals.
"""
def __init__(self, stmt="pass", setup="pass", timer=default_timer):
"""Constructor. See class doc string."""
self.timer = timer
ns = {}
if isinstance(stmt, str):
stmt = reindent(stmt, 8)
if isinstance(setup, str):
setup = reindent(setup, 4)
src = template.format(stmt=stmt, setup=setup)
elif callable(setup):
src = template.format(stmt=stmt, setup='_setup()')
ns['_setup'] = setup
else:
raise ValueError("setup is neither a string nor callable")
self.src = src # Save for traceback display
code = compile(src, dummy_src_name, "exec")
exec(code, globals(), ns)
self.inner = ns["inner"]
elif callable(stmt):
self.src = None
if isinstance(setup, str):
_setup = setup
def setup():
exec(_setup, globals(), ns)
elif not callable(setup):
raise ValueError("setup is neither a string nor callable")
self.inner = _template_func(setup, stmt)
else:
raise ValueError("stmt is neither a string nor callable")
def print_exc(self, file=None):
"""Helper to print a traceback from the timed code.
Typical use:
t = Timer(...) # outside the try/except
try:
t.timeit(...) # or t.repeat(...)
except:
t.print_exc()
The advantage over the standard traceback is that source lines
in the compiled template will be displayed.
The optional file argument directs where the traceback is
sent; it defaults to sys.stderr.
"""
import linecache, traceback
if self.src is not None:
linecache.cache[dummy_src_name] = (len(self.src),
None,
self.src.split("\n"),
dummy_src_name)
# else the source is already stored somewhere else
traceback.print_exc(file=file)
def timeit(self, number=default_number):
"""Time 'number' executions of the main statement.
To be precise, this executes the setup statement once, and
then returns the time it takes to execute the main statement
a number of times, as a float measured in seconds. The
argument is the number of times through the loop, defaulting
to one million. The main statement, the setup statement and
the timer function to be used are passed to the constructor.
"""
it = itertools.repeat(None, number)
gcold = gc.isenabled()
gc.disable()
try:
timing = self.inner(it, self.timer)
finally:
if gcold:
gc.enable()
return timing
def repeat(self, repeat=default_repeat, number=default_number):
"""Call timeit() a few times.
        This is a convenience function that calls timeit()
        repeatedly, returning a list of results.  The first argument
        specifies how many times to call timeit(), defaulting to 3;
        the second argument specifies the number argument, defaulting
        to one million.
Note: it's tempting to calculate mean and standard deviation
from the result vector and report these. However, this is not
very useful. In a typical case, the lowest value gives a
lower bound for how fast your machine can run the given code
snippet; higher values in the result vector are typically not
caused by variability in Python's speed, but by other
processes interfering with your timing accuracy. So the min()
of the result is probably the only number you should be
interested in. After that, you should look at the entire
vector and apply common sense rather than statistics.
"""
r = []
for i in range(repeat):
t = self.timeit(number)
r.append(t)
return r
def timeit(stmt="pass", setup="pass", timer=default_timer,
number=default_number):
"""Convenience function to create Timer object and call timeit method."""
return Timer(stmt, setup, timer).timeit(number)
def repeat(stmt="pass", setup="pass", timer=default_timer,
repeat=default_repeat, number=default_number):
"""Convenience function to create Timer object and call repeat method."""
return Timer(stmt, setup, timer).repeat(repeat, number)
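# Illustrative sketch (not part of the stdlib module): exercising the Timer
# API documented above. The statement, number, and repeat values are
# arbitrary choices for the example.
def _example_usage():
    t = Timer(stmt="sum(range(100))", setup="pass")
    # As Timer.repeat()'s docstring argues, min() of the result vector is
    # the meaningful statistic.
    return min(t.repeat(repeat=3, number=1000))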
def main(args=None, *, _wrap_timer=None):
"""Main program, used when run as a script.
The optional 'args' argument specifies the command line to be parsed,
defaulting to sys.argv[1:].
The return value is an exit code to be passed to sys.exit(); it
may be None to indicate success.
When an exception happens during timing, a traceback is printed to
stderr and the return value is 1. Exceptions at other times
(including the template compilation) are not caught.
'_wrap_timer' is an internal interface used for unit testing. If it
is not None, it must be a callable that accepts a timer function
and returns another timer function (used for unit testing).
"""
if args is None:
args = sys.argv[1:]
import getopt
try:
opts, args = getopt.getopt(args, "n:s:r:tcpvh",
["number=", "setup=", "repeat=",
"time", "clock", "process",
"verbose", "help"])
except getopt.error as err:
print(err)
print("use -h/--help for command line help")
return 2
timer = default_timer
stmt = "\n".join(args) or "pass"
number = 0 # auto-determine
setup = []
repeat = default_repeat
verbose = 0
precision = 3
for o, a in opts:
if o in ("-n", "--number"):
number = int(a)
if o in ("-s", "--setup"):
setup.append(a)
if o in ("-r", "--repeat"):
repeat = int(a)
if repeat <= 0:
repeat = 1
if o in ("-t", "--time"):
timer = time.time
if o in ("-c", "--clock"):
timer = time.clock
if o in ("-p", "--process"):
timer = time.process_time
if o in ("-v", "--verbose"):
if verbose:
precision += 1
verbose += 1
if o in ("-h", "--help"):
print(__doc__, end=' ')
return 0
setup = "\n".join(setup) or "pass"
# Include the current directory, so that local imports work (sys.path
# contains the directory of this script, rather than the current
# directory)
import os
sys.path.insert(0, os.curdir)
if _wrap_timer is not None:
timer = _wrap_timer(timer)
t = Timer(stmt, setup, timer)
if number == 0:
# determine number so that 0.2 <= total time < 2.0
for i in range(1, 10):
number = 10**i
try:
x = t.timeit(number)
except:
t.print_exc()
return 1
if verbose:
print("%d loops -> %.*g secs" % (number, precision, x))
if x >= 0.2:
break
try:
r = t.repeat(repeat, number)
except:
t.print_exc()
return 1
best = min(r)
if verbose:
print("raw times:", " ".join(["%.*g" % (precision, x) for x in r]))
print("%d loops," % number, end=' ')
usec = best * 1e6 / number
if usec < 1000:
print("best of %d: %.*g usec per loop" % (repeat, precision, usec))
else:
msec = usec / 1000
if msec < 1000:
print("best of %d: %.*g msec per loop" % (repeat, precision, msec))
else:
sec = msec / 1000
print("best of %d: %.*g sec per loop" % (repeat, precision, sec))
return None
if __name__ == "__main__":
sys.exit(main())
|
sahat/cloudbucket
|
refs/heads/master
|
py/pdfminer/pdffont.py
|
2
|
#!/usr/bin/env python2
import sys
import struct
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from cmapdb import CMapDB, CMapParser, FileUnicodeMap, CMap
from encodingdb import EncodingDB, name2unicode
from psparser import PSStackParser
from psparser import PSSyntaxError, PSEOF
from psparser import LIT, KWD, STRICT
from psparser import PSLiteral, literal_name
from pdftypes import PDFException, resolve1
from pdftypes import int_value, float_value, num_value
from pdftypes import str_value, list_value, dict_value, stream_value
from fontmetrics import FONT_METRICS
from utils import apply_matrix_norm, nunpack, choplist
def get_widths(seq):
widths = {}
r = []
for v in seq:
if isinstance(v, list):
if r:
char1 = r[-1]
for (i,w) in enumerate(v):
widths[char1+i] = w
r = []
elif isinstance(v, int):
r.append(v)
if len(r) == 3:
(char1,char2,w) = r
for i in xrange(char1, char2+1):
widths[i] = w
r = []
return widths
#assert get_widths([1]) == {}
#assert get_widths([1,2,3]) == {1:3, 2:3}
#assert get_widths([1,[2,3],6,[7,8]]) == {1:2,2:3, 6:7,7:8}
def get_widths2(seq):
widths = {}
r = []
for v in seq:
if isinstance(v, list):
if r:
char1 = r[-1]
for (i,(w,vx,vy)) in enumerate(choplist(3,v)):
widths[char1+i] = (w,(vx,vy))
r = []
elif isinstance(v, int):
r.append(v)
if len(r) == 5:
(char1,char2,w,vx,vy) = r
for i in xrange(char1, char2+1):
widths[i] = (w,(vx,vy))
r = []
return widths
#assert get_widths2([1]) == {}
#assert get_widths2([1,2,3,4,5]) == {1:(3,(4,5)), 2:(3,(4,5))}
#assert get_widths2([1,[2,3,4,5],6,[7,8,9]]) == {1:(2,(3,4)), 6:(7,(8,9))}
## FontMetricsDB
##
class FontMetricsDB(object):
@classmethod
def get_metrics(klass, fontname):
return FONT_METRICS[fontname]
## Type1FontHeaderParser
##
class Type1FontHeaderParser(PSStackParser):
KEYWORD_BEGIN = KWD('begin')
KEYWORD_END = KWD('end')
KEYWORD_DEF = KWD('def')
KEYWORD_PUT = KWD('put')
KEYWORD_DICT = KWD('dict')
KEYWORD_ARRAY = KWD('array')
KEYWORD_READONLY = KWD('readonly')
KEYWORD_FOR = KWD('for')
def __init__(self, data):
PSStackParser.__init__(self, data)
self._cid2unicode = {}
return
def get_encoding(self):
while 1:
try:
(cid,name) = self.nextobject()
except PSEOF:
break
try:
self._cid2unicode[cid] = name2unicode(name)
except KeyError:
pass
return self._cid2unicode
def do_keyword(self, pos, token):
if token is self.KEYWORD_PUT:
((_,key),(_,value)) = self.pop(2)
if (isinstance(key, int) and
isinstance(value, PSLiteral)):
self.add_results((key, literal_name(value)))
return
## CFFFont
## (Format specified in Adobe Technical Note: #5176
## "The Compact Font Format Specification")
##
NIBBLES = ('0','1','2','3','4','5','6','7','8','9','.','e','e-',None,'-')
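# getdict() decodes a CFF DICT byte string into {operator: [operands]}.
# Operand encoding, per the CFF spec cited above:
#   b0 in 0..21             -> operator; flush the operand stack into d[b0]
#   b0 == 28                -> 16-bit integer (b1 << 8 | b2)
#   b0 == 30                -> packed-BCD real, one NIBBLES entry per nibble,
#                              terminated by nibble 0xf
#   b0 in 32..246           -> single-byte integer, b0 - 139
#   b0 in 247..250/251..254 -> two-byte positive/negative integer
#   anything else (29)      -> 32-bit integer (the final else branch)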
def getdict(data):
d = {}
fp = StringIO(data)
stack = []
while 1:
c = fp.read(1)
if not c: break
b0 = ord(c)
if b0 <= 21:
d[b0] = stack
stack = []
continue
if b0 == 30:
s = ''
loop = True
while loop:
b = ord(fp.read(1))
for n in (b >> 4, b & 15):
if n == 15:
loop = False
else:
s += NIBBLES[n]
value = float(s)
        elif 32 <= b0 <= 246:
value = b0-139
else:
b1 = ord(fp.read(1))
            if 247 <= b0 <= 250:
                value = ((b0-247)<<8)+b1+108
            elif 251 <= b0 <= 254:
                value = -((b0-251)<<8)-b1-108
else:
b2 = ord(fp.read(1))
if 128 <= b1: b1 -= 256
if b0 == 28:
value = b1<<8 | b2
else:
value = b1<<24 | b2<<16 | struct.unpack('>H', fp.read(2))[0]
stack.append(value)
return d
class CFFFont(object):
STANDARD_STRINGS = (
'.notdef', 'space', 'exclam', 'quotedbl', 'numbersign',
'dollar', 'percent', 'ampersand', 'quoteright', 'parenleft',
'parenright', 'asterisk', 'plus', 'comma', 'hyphen', 'period',
'slash', 'zero', 'one', 'two', 'three', 'four', 'five', 'six',
'seven', 'eight', 'nine', 'colon', 'semicolon', 'less', 'equal',
'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F', 'G',
'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T',
'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash',
'bracketright', 'asciicircum', 'underscore', 'quoteleft', 'a',
'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
'braceleft', 'bar', 'braceright', 'asciitilde', 'exclamdown',
'cent', 'sterling', 'fraction', 'yen', 'florin', 'section',
'currency', 'quotesingle', 'quotedblleft', 'guillemotleft',
'guilsinglleft', 'guilsinglright', 'fi', 'fl', 'endash',
'dagger', 'daggerdbl', 'periodcentered', 'paragraph', 'bullet',
'quotesinglbase', 'quotedblbase', 'quotedblright',
'guillemotright', 'ellipsis', 'perthousand', 'questiondown',
'grave', 'acute', 'circumflex', 'tilde', 'macron', 'breve',
'dotaccent', 'dieresis', 'ring', 'cedilla', 'hungarumlaut',
'ogonek', 'caron', 'emdash', 'AE', 'ordfeminine', 'Lslash',
'Oslash', 'OE', 'ordmasculine', 'ae', 'dotlessi', 'lslash',
'oslash', 'oe', 'germandbls', 'onesuperior', 'logicalnot', 'mu',
'trademark', 'Eth', 'onehalf', 'plusminus', 'Thorn',
'onequarter', 'divide', 'brokenbar', 'degree', 'thorn',
'threequarters', 'twosuperior', 'registered', 'minus', 'eth',
'multiply', 'threesuperior', 'copyright', 'Aacute',
'Acircumflex', 'Adieresis', 'Agrave', 'Aring', 'Atilde',
'Ccedilla', 'Eacute', 'Ecircumflex', 'Edieresis', 'Egrave',
'Iacute', 'Icircumflex', 'Idieresis', 'Igrave', 'Ntilde',
'Oacute', 'Ocircumflex', 'Odieresis', 'Ograve', 'Otilde',
'Scaron', 'Uacute', 'Ucircumflex', 'Udieresis', 'Ugrave',
'Yacute', 'Ydieresis', 'Zcaron', 'aacute', 'acircumflex',
'adieresis', 'agrave', 'aring', 'atilde', 'ccedilla', 'eacute',
'ecircumflex', 'edieresis', 'egrave', 'iacute', 'icircumflex',
'idieresis', 'igrave', 'ntilde', 'oacute', 'ocircumflex',
'odieresis', 'ograve', 'otilde', 'scaron', 'uacute',
'ucircumflex', 'udieresis', 'ugrave', 'yacute', 'ydieresis',
'zcaron', 'exclamsmall', 'Hungarumlautsmall', 'dollaroldstyle',
'dollarsuperior', 'ampersandsmall', 'Acutesmall',
'parenleftsuperior', 'parenrightsuperior', 'twodotenleader',
'onedotenleader', 'zerooldstyle', 'oneoldstyle', 'twooldstyle',
'threeoldstyle', 'fouroldstyle', 'fiveoldstyle', 'sixoldstyle',
'sevenoldstyle', 'eightoldstyle', 'nineoldstyle',
'commasuperior', 'threequartersemdash', 'periodsuperior',
'questionsmall', 'asuperior', 'bsuperior', 'centsuperior',
'dsuperior', 'esuperior', 'isuperior', 'lsuperior', 'msuperior',
'nsuperior', 'osuperior', 'rsuperior', 'ssuperior', 'tsuperior',
'ff', 'ffi', 'ffl', 'parenleftinferior', 'parenrightinferior',
'Circumflexsmall', 'hyphensuperior', 'Gravesmall', 'Asmall',
'Bsmall', 'Csmall', 'Dsmall', 'Esmall', 'Fsmall', 'Gsmall',
'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall',
'Nsmall', 'Osmall', 'Psmall', 'Qsmall', 'Rsmall', 'Ssmall',
'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall',
'Zsmall', 'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall',
'exclamdownsmall', 'centoldstyle', 'Lslashsmall', 'Scaronsmall',
'Zcaronsmall', 'Dieresissmall', 'Brevesmall', 'Caronsmall',
'Dotaccentsmall', 'Macronsmall', 'figuredash', 'hypheninferior',
'Ogoneksmall', 'Ringsmall', 'Cedillasmall', 'questiondownsmall',
'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths',
'onethird', 'twothirds', 'zerosuperior', 'foursuperior',
'fivesuperior', 'sixsuperior', 'sevensuperior', 'eightsuperior',
'ninesuperior', 'zeroinferior', 'oneinferior', 'twoinferior',
'threeinferior', 'fourinferior', 'fiveinferior', 'sixinferior',
'seveninferior', 'eightinferior', 'nineinferior',
'centinferior', 'dollarinferior', 'periodinferior',
'commainferior', 'Agravesmall', 'Aacutesmall',
'Acircumflexsmall', 'Atildesmall', 'Adieresissmall',
'Aringsmall', 'AEsmall', 'Ccedillasmall', 'Egravesmall',
'Eacutesmall', 'Ecircumflexsmall', 'Edieresissmall',
'Igravesmall', 'Iacutesmall', 'Icircumflexsmall',
'Idieresissmall', 'Ethsmall', 'Ntildesmall', 'Ogravesmall',
'Oacutesmall', 'Ocircumflexsmall', 'Otildesmall',
'Odieresissmall', 'OEsmall', 'Oslashsmall', 'Ugravesmall',
'Uacutesmall', 'Ucircumflexsmall', 'Udieresissmall',
'Yacutesmall', 'Thornsmall', 'Ydieresissmall', '001.000',
'001.001', '001.002', '001.003', 'Black', 'Bold', 'Book',
'Light', 'Medium', 'Regular', 'Roman', 'Semibold',
)
class INDEX(object):
def __init__(self, fp):
self.fp = fp
self.offsets = []
(count, offsize) = struct.unpack('>HB', self.fp.read(3))
for i in xrange(count+1):
self.offsets.append(nunpack(self.fp.read(offsize)))
self.base = self.fp.tell()-1
self.fp.seek(self.base+self.offsets[-1])
return
def __repr__(self):
return '<INDEX: size=%d>' % len(self)
def __len__(self):
return len(self.offsets)-1
def __getitem__(self, i):
self.fp.seek(self.base+self.offsets[i])
return self.fp.read(self.offsets[i+1]-self.offsets[i])
def __iter__(self):
return iter( self[i] for i in xrange(len(self)) )
def __init__(self, name, fp):
self.name = name
self.fp = fp
# Header
(_major,_minor,hdrsize,offsize) = struct.unpack('BBBB', self.fp.read(4))
self.fp.read(hdrsize-4)
# Name INDEX
self.name_index = self.INDEX(self.fp)
# Top DICT INDEX
self.dict_index = self.INDEX(self.fp)
# String INDEX
self.string_index = self.INDEX(self.fp)
# Global Subr INDEX
self.subr_index = self.INDEX(self.fp)
# Top DICT DATA
self.top_dict = getdict(self.dict_index[0])
(charset_pos,) = self.top_dict.get(15, [0])
(encoding_pos,) = self.top_dict.get(16, [0])
(charstring_pos,) = self.top_dict.get(17, [0])
# CharStrings
self.fp.seek(charstring_pos)
self.charstring = self.INDEX(self.fp)
self.nglyphs = len(self.charstring)
# Encodings
self.code2gid = {}
self.gid2code = {}
self.fp.seek(encoding_pos)
format = self.fp.read(1)
if format == '\x00':
# Format 0
(n,) = struct.unpack('B', self.fp.read(1))
for (code,gid) in enumerate(struct.unpack('B'*n, self.fp.read(n))):
self.code2gid[code] = gid
self.gid2code[gid] = code
elif format == '\x01':
# Format 1
(n,) = struct.unpack('B', self.fp.read(1))
code = 0
for i in xrange(n):
(first,nleft) = struct.unpack('BB', self.fp.read(2))
for gid in xrange(first,first+nleft+1):
self.code2gid[code] = gid
self.gid2code[gid] = code
code += 1
else:
raise ValueError('unsupported encoding format: %r' % format)
# Charsets
self.name2gid = {}
self.gid2name = {}
self.fp.seek(charset_pos)
format = self.fp.read(1)
if format == '\x00':
# Format 0
n = self.nglyphs-1
for (gid,sid) in enumerate(struct.unpack('>'+'H'*n, self.fp.read(2*n))):
gid += 1
name = self.getstr(sid)
self.name2gid[name] = gid
self.gid2name[gid] = name
elif format == '\x01':
# Format 1
(n,) = struct.unpack('B', self.fp.read(1))
sid = 0
for i in xrange(n):
(first,nleft) = struct.unpack('BB', self.fp.read(2))
for gid in xrange(first,first+nleft+1):
name = self.getstr(sid)
self.name2gid[name] = gid
self.gid2name[gid] = name
sid += 1
elif format == '\x02':
# Format 2
assert 0
else:
raise ValueError('unsupported charset format: %r' % format)
#print self.code2gid
#print self.name2gid
#assert 0
return
def getstr(self, sid):
if sid < len(self.STANDARD_STRINGS):
return self.STANDARD_STRINGS[sid]
return self.string_index[sid-len(self.STANDARD_STRINGS)]
## TrueTypeFont
##
class TrueTypeFont(object):
class CMapNotFound(Exception): pass
def __init__(self, name, fp):
self.name = name
self.fp = fp
self.tables = {}
self.fonttype = fp.read(4)
(ntables, _1, _2, _3) = struct.unpack('>HHHH', fp.read(8))
for _ in xrange(ntables):
(name, tsum, offset, length) = struct.unpack('>4sLLL', fp.read(16))
self.tables[name] = (offset, length)
return
def create_unicode_map(self):
if 'cmap' not in self.tables:
raise TrueTypeFont.CMapNotFound
(base_offset, length) = self.tables['cmap']
fp = self.fp
fp.seek(base_offset)
(version, nsubtables) = struct.unpack('>HH', fp.read(4))
subtables = []
for i in xrange(nsubtables):
subtables.append(struct.unpack('>HHL', fp.read(8)))
char2gid = {}
# Only supports subtable type 0, 2 and 4.
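        # Format 0: byte encoding table (256 single-byte codes).
        # Format 2: high-byte mapping through sub-headers (mixed 8/16-bit codes).
        # Format 4: segment mapping to delta values (the common Unicode BMP case).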
for (_1, _2, st_offset) in subtables:
fp.seek(base_offset+st_offset)
(fmttype, fmtlen, fmtlang) = struct.unpack('>HHH', fp.read(6))
if fmttype == 0:
char2gid.update(enumerate(struct.unpack('>256B', fp.read(256))))
elif fmttype == 2:
subheaderkeys = struct.unpack('>256H', fp.read(512))
firstbytes = [0]*8192
for (i,k) in enumerate(subheaderkeys):
firstbytes[k/8] = i
nhdrs = max(subheaderkeys)/8 + 1
hdrs = []
for i in xrange(nhdrs):
(firstcode,entcount,delta,offset) = struct.unpack('>HHhH', fp.read(8))
hdrs.append((i,firstcode,entcount,delta,fp.tell()-2+offset))
for (i,firstcode,entcount,delta,pos) in hdrs:
if not entcount: continue
first = firstcode + (firstbytes[i] << 8)
fp.seek(pos)
for c in xrange(entcount):
                        gid = struct.unpack('>H', fp.read(2))[0]
if gid:
gid += delta
char2gid[first+c] = gid
elif fmttype == 4:
(segcount, _1, _2, _3) = struct.unpack('>HHHH', fp.read(8))
segcount /= 2
ecs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
fp.read(2)
scs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
idds = struct.unpack('>%dh' % segcount, fp.read(2*segcount))
pos = fp.tell()
idrs = struct.unpack('>%dH' % segcount, fp.read(2*segcount))
for (ec,sc,idd,idr) in zip(ecs, scs, idds, idrs):
if idr:
fp.seek(pos+idr)
for c in xrange(sc, ec+1):
char2gid[c] = (struct.unpack('>H', fp.read(2))[0] + idd) & 0xffff
else:
for c in xrange(sc, ec+1):
char2gid[c] = (c + idd) & 0xffff
else:
assert 0
# create unicode map
unicode_map = FileUnicodeMap()
for (char,gid) in char2gid.iteritems():
unicode_map.add_cid2unichr(gid, char)
return unicode_map
## Fonts
##
class PDFFontError(PDFException): pass
class PDFUnicodeNotDefined(PDFFontError): pass
LITERAL_STANDARD_ENCODING = LIT('StandardEncoding')
LITERAL_TYPE1C = LIT('Type1C')
# PDFFont
class PDFFont(object):
def __init__(self, descriptor, widths, default_width=None):
self.descriptor = descriptor
self.widths = widths
self.fontname = resolve1(descriptor.get('FontName', 'unknown'))
if isinstance(self.fontname, PSLiteral):
self.fontname = literal_name(self.fontname)
self.flags = int_value(descriptor.get('Flags', 0))
self.ascent = num_value(descriptor.get('Ascent', 0))
self.descent = num_value(descriptor.get('Descent', 0))
self.italic_angle = num_value(descriptor.get('ItalicAngle', 0))
self.default_width = default_width or num_value(descriptor.get('MissingWidth', 0))
self.leading = num_value(descriptor.get('Leading', 0))
self.bbox = list_value(descriptor.get('FontBBox', (0,0,0,0)))
self.hscale = self.vscale = .001
return
def __repr__(self):
return '<PDFFont>'
def is_vertical(self):
return False
def is_multibyte(self):
return False
def decode(self, bytes):
return map(ord, bytes)
def get_ascent(self):
return self.ascent * self.vscale
def get_descent(self):
return self.descent * self.vscale
def get_width(self):
w = self.bbox[2]-self.bbox[0]
if w == 0:
w = -self.default_width
return w * self.hscale
def get_height(self):
h = self.bbox[3]-self.bbox[1]
if h == 0:
h = self.ascent - self.descent
return h * self.vscale
def char_width(self, cid):
try:
return self.widths[cid] * self.hscale
except KeyError:
try:
return self.widths[self.to_unichr(cid)] * self.hscale
except (KeyError, PDFUnicodeNotDefined):
return self.default_width * self.hscale
def char_disp(self, cid):
return 0
def string_width(self, s):
return sum( self.char_width(cid) for cid in self.decode(s) )
# PDFSimpleFont
class PDFSimpleFont(PDFFont):
def __init__(self, descriptor, widths, spec):
# Font encoding is specified either by a name of
# built-in encoding or a dictionary that describes
# the differences.
if 'Encoding' in spec:
encoding = resolve1(spec['Encoding'])
else:
encoding = LITERAL_STANDARD_ENCODING
if isinstance(encoding, dict):
name = literal_name(encoding.get('BaseEncoding', LITERAL_STANDARD_ENCODING))
diff = list_value(encoding.get('Differences', None))
self.cid2unicode = EncodingDB.get_encoding(name, diff)
else:
self.cid2unicode = EncodingDB.get_encoding(literal_name(encoding))
self.unicode_map = None
if 'ToUnicode' in spec:
strm = stream_value(spec['ToUnicode'])
self.unicode_map = FileUnicodeMap()
CMapParser(self.unicode_map, StringIO(strm.get_data())).run()
PDFFont.__init__(self, descriptor, widths)
return
def to_unichr(self, cid):
if self.unicode_map:
try:
return self.unicode_map.get_unichr(cid)
except KeyError:
pass
try:
return self.cid2unicode[cid]
except KeyError:
raise PDFUnicodeNotDefined(None, cid)
# PDFType1Font
class PDFType1Font(PDFSimpleFont):
def __init__(self, rsrcmgr, spec):
try:
self.basefont = literal_name(spec['BaseFont'])
except KeyError:
if STRICT:
raise PDFFontError('BaseFont is missing')
self.basefont = 'unknown'
try:
(descriptor, widths) = FontMetricsDB.get_metrics(self.basefont)
except KeyError:
descriptor = dict_value(spec.get('FontDescriptor', {}))
firstchar = int_value(spec.get('FirstChar', 0))
lastchar = int_value(spec.get('LastChar', 255))
widths = list_value(spec.get('Widths', [0]*256))
widths = dict( (i+firstchar,w) for (i,w) in enumerate(widths) )
PDFSimpleFont.__init__(self, descriptor, widths, spec)
if 'Encoding' not in spec and 'FontFile' in descriptor:
# try to recover the missing encoding info from the font file.
self.fontfile = stream_value(descriptor.get('FontFile'))
length1 = int_value(self.fontfile['Length1'])
data = self.fontfile.get_data()[:length1]
parser = Type1FontHeaderParser(StringIO(data))
self.cid2unicode = parser.get_encoding()
return
def __repr__(self):
return '<PDFType1Font: basefont=%r>' % self.basefont
# PDFTrueTypeFont
class PDFTrueTypeFont(PDFType1Font):
def __repr__(self):
return '<PDFTrueTypeFont: basefont=%r>' % self.basefont
# PDFType3Font
class PDFType3Font(PDFSimpleFont):
def __init__(self, rsrcmgr, spec):
firstchar = int_value(spec.get('FirstChar', 0))
lastchar = int_value(spec.get('LastChar', 0))
widths = list_value(spec.get('Widths', [0]*256))
widths = dict( (i+firstchar,w) for (i,w) in enumerate(widths))
if 'FontDescriptor' in spec:
descriptor = dict_value(spec['FontDescriptor'])
else:
descriptor = {'Ascent':0, 'Descent':0,
'FontBBox':spec['FontBBox']}
PDFSimpleFont.__init__(self, descriptor, widths, spec)
self.matrix = tuple(list_value(spec.get('FontMatrix')))
(_,self.descent,_,self.ascent) = self.bbox
(self.hscale,self.vscale) = apply_matrix_norm(self.matrix, (1,1))
return
def __repr__(self):
return '<PDFType3Font>'
# PDFCIDFont
class PDFCIDFont(PDFFont):
def __init__(self, rsrcmgr, spec):
try:
self.basefont = literal_name(spec['BaseFont'])
except KeyError:
if STRICT:
raise PDFFontError('BaseFont is missing')
self.basefont = 'unknown'
self.cidsysteminfo = dict_value(spec.get('CIDSystemInfo', {}))
self.cidcoding = '%s-%s' % (self.cidsysteminfo.get('Registry', 'unknown'),
self.cidsysteminfo.get('Ordering', 'unknown'))
try:
name = literal_name(spec['Encoding'])
except KeyError:
if STRICT:
raise PDFFontError('Encoding is unspecified')
name = 'unknown'
try:
self.cmap = CMapDB.get_cmap(name)
except CMapDB.CMapNotFound, e:
if STRICT:
raise PDFFontError(e)
self.cmap = CMap()
try:
descriptor = dict_value(spec['FontDescriptor'])
except KeyError:
if STRICT:
raise PDFFontError('FontDescriptor is missing')
descriptor = {}
ttf = None
if 'FontFile2' in descriptor:
self.fontfile = stream_value(descriptor.get('FontFile2'))
ttf = TrueTypeFont(self.basefont,
StringIO(self.fontfile.get_data()))
self.unicode_map = None
if 'ToUnicode' in spec:
strm = stream_value(spec['ToUnicode'])
self.unicode_map = FileUnicodeMap()
CMapParser(self.unicode_map, StringIO(strm.get_data())).run()
elif self.cidcoding == 'Adobe-Identity':
if ttf:
try:
self.unicode_map = ttf.create_unicode_map()
except TrueTypeFont.CMapNotFound:
pass
else:
try:
self.unicode_map = CMapDB.get_unicode_map(self.cidcoding, self.cmap.is_vertical())
except CMapDB.CMapNotFound, e:
pass
self.vertical = self.cmap.is_vertical()
if self.vertical:
# writing mode: vertical
widths = get_widths2(list_value(spec.get('W2', [])))
self.disps = dict( (cid,(vx,vy)) for (cid,(_,(vx,vy))) in widths.iteritems() )
(vy,w) = spec.get('DW2', [880, -1000])
self.default_disp = (None,vy)
widths = dict( (cid,w) for (cid,(w,_)) in widths.iteritems() )
default_width = w
else:
# writing mode: horizontal
self.disps = {}
self.default_disp = 0
widths = get_widths(list_value(spec.get('W', [])))
default_width = spec.get('DW', 1000)
PDFFont.__init__(self, descriptor, widths, default_width=default_width)
return
def __repr__(self):
return '<PDFCIDFont: basefont=%r, cidcoding=%r>' % (self.basefont, self.cidcoding)
def is_vertical(self):
return self.vertical
def is_multibyte(self):
return True
def decode(self, bytes):
return self.cmap.decode(bytes)
def char_disp(self, cid):
"Returns an integer for horizontal fonts, a tuple for vertical fonts."
return self.disps.get(cid, self.default_disp)
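    # Illustrative values, derived from the defaults set in __init__ above: a
    # horizontal CID font falls back to default_disp == 0, while a vertical
    # one falls back to (None, vy), with vy defaulting to 880 from DW2.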
def to_unichr(self, cid):
try:
if not self.unicode_map: raise KeyError(cid)
return self.unicode_map.get_unichr(cid)
except KeyError:
raise PDFUnicodeNotDefined(self.cidcoding, cid)
# main
def main(argv):
for fname in argv[1:]:
fp = file(fname, 'rb')
#font = TrueTypeFont(fname, fp)
font = CFFFont(fname, fp)
print font
fp.close()
return
if __name__ == '__main__': sys.exit(main(sys.argv))
|
mavit/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/nxos/nxos_ping.py
|
37
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_ping
extends_documentation_fragment: nxos
version_added: "2.1"
short_description: Tests reachability using ping from Nexus switch.
description:
- Tests reachability using ping from switch to a remote destination.
- For a general purpose network module, see the M(net_ping) module.
- For Windows targets, use the M(win_ping) module instead.
- For targets running Python, use the M(ping) module instead.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
options:
dest:
description:
- IP address or hostname (resolvable by switch) of remote node.
required: true
count:
description:
- Number of packets to send.
default: 5
source:
description:
- Source IP Address or hostname (resolvable by switch)
vrf:
description:
- Outgoing VRF.
state:
description:
      - Determines whether the expected result is success or failure.
choices: [ absent, present ]
default: present
notes:
- For a general purpose network module, see the M(net_ping) module.
- For Windows targets, use the M(win_ping) module instead.
- For targets running Python, use the M(ping) module instead.
'''
EXAMPLES = '''
- name: Test reachability to 8.8.8.8 using mgmt vrf
nxos_ping:
dest: 8.8.8.8
vrf: management
host: 68.170.147.165
- name: Test reachability to a few different public IPs using mgmt vrf
nxos_ping:
    dest: "{{ item }}"
vrf: management
host: 68.170.147.165
with_items:
- 8.8.8.8
- 4.4.4.4
- 198.6.1.4
'''
RETURN = '''
commands:
description: Show the command sent
returned: always
type: list
sample: ["ping 8.8.8.8 count 2 vrf management"]
rtt:
description: Show RTT stats
returned: always
type: dict
sample: {"avg": 6.264, "max": 6.564, "min": 5.978}
packets_rx:
description: Packets successfully received
returned: always
type: int
sample: 2
packets_tx:
description: Packets successfully transmitted
returned: always
type: int
sample: 2
packet_loss:
description: Percentage of packets lost
returned: always
type: string
sample: "0.00%"
'''
from ansible.module_utils.network.nxos.nxos import run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def get_summary(results_list, reference_point):
summary_string = results_list[reference_point + 1]
summary_list = summary_string.split(',')
summary = dict(
packets_tx=int(summary_list[0].split('packets')[0].strip()),
packets_rx=int(summary_list[1].split('packets')[0].strip()),
packet_loss=summary_list[2].split('packet')[0].strip(),
)
if 'bytes from' not in results_list[reference_point - 2]:
ping_pass = False
else:
ping_pass = True
return summary, ping_pass
def get_rtt(results_list, packet_loss, location):
rtt = dict(min=None, avg=None, max=None)
if packet_loss != '100.00%':
rtt_string = results_list[location]
base = rtt_string.split('=')[1]
rtt_list = base.split('/')
rtt['min'] = float(rtt_list[0].lstrip())
rtt['avg'] = float(rtt_list[1])
rtt['max'] = float(rtt_list[2][:-3])
return rtt
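# Illustrative NX-OS ping output consumed by the parsers above (a sketch only;
# exact wording can vary across platforms and software versions):
#
#   --- 8.8.8.8 ping statistics ---
#   2 packets transmitted, 2 packets received, 0.00% packet loss
#   round-trip min/avg/max = 5.978/6.264/6.564 ms
#
# get_summary() reads the line right after the '---' marker, and get_rtt()
# reads the min/avg/max line two lines below it.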
def get_statistics_summary_line(response_as_list):
for each in response_as_list:
if '---' in each:
index = response_as_list.index(each)
return index
def get_ping_results(command, module):
cmd = {'command': command, 'output': 'text'}
ping = run_commands(module, [cmd])[0]
if not ping:
module.fail_json(msg="An unexpected error occurred. Check all params.",
command=command, destination=module.params['dest'],
vrf=module.params['vrf'],
source=module.params['source'])
elif "can't bind to address" in ping:
module.fail_json(msg="Can't bind to source address.", command=command)
elif "bad context" in ping:
module.fail_json(msg="Wrong VRF name inserted.", command=command,
vrf=module.params['vrf'])
else:
splitted_ping = ping.split('\n')
reference_point = get_statistics_summary_line(splitted_ping)
summary, ping_pass = get_summary(splitted_ping, reference_point)
rtt = get_rtt(splitted_ping, summary['packet_loss'], reference_point + 2)
return (summary, rtt, ping_pass)
def main():
argument_spec = dict(
dest=dict(required=True),
count=dict(required=False, default=5),
vrf=dict(required=False),
source=dict(required=False),
state=dict(required=False, choices=['present', 'absent'], default='present'),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
warnings = list()
check_args(module, warnings)
destination = module.params['dest']
count = module.params['count']
state = module.params['state']
ping_command = 'ping {0}'.format(destination)
for command in ['count', 'source', 'vrf']:
arg = module.params[command]
if arg:
ping_command += ' {0} {1}'.format(command, arg)
summary, rtt, ping_pass = get_ping_results(ping_command, module)
results = summary
results['rtt'] = rtt
results['commands'] = [ping_command]
if ping_pass and state == 'absent':
module.fail_json(msg="Ping succeeded unexpectedly")
elif not ping_pass and state == 'present':
module.fail_json(msg="Ping failed unexpectedly")
module.exit_json(**results)
if __name__ == '__main__':
main()
|
pongem/python-bot-project
|
refs/heads/master
|
storage/cloud-client/encryption_test.py
|
3
|
# Copyright 2016 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import tempfile
from google.cloud import storage
from google.cloud.storage import Blob
import pytest
import encryption
TEST_ENCRYPTION_KEY = 'brtJUWneL92g5q0N2gyDSnlPSYAiIVZ/cWgjyZNeMy0='
TEST_ENCRYPTION_KEY_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY)
TEST_ENCRYPTION_KEY_2 = 'o4OD7SWCaPjfeEGhAY+YCgMdY9UW+OJ8mvfWD9lNtO4='
TEST_ENCRYPTION_KEY_2_DECODED = base64.b64decode(TEST_ENCRYPTION_KEY_2)
def test_generate_encryption_key(capsys):
encryption.generate_encryption_key()
out, _ = capsys.readouterr()
encoded_key = out.split(':', 1).pop().strip()
key = base64.b64decode(encoded_key)
assert len(key) == 32, 'Returned key should be 32 bytes'
def test_upload_encrypted_blob(cloud_config):
with tempfile.NamedTemporaryFile() as source_file:
source_file.write(b'test')
encryption.upload_encrypted_blob(
cloud_config.storage_bucket,
source_file.name,
'test_encrypted_upload_blob',
TEST_ENCRYPTION_KEY)
@pytest.fixture
def test_blob(cloud_config):
"""Provides a pre-existing blob in the test bucket."""
bucket = storage.Client().bucket(cloud_config.storage_bucket)
blob = Blob('encryption_test_sigil',
bucket, encryption_key=TEST_ENCRYPTION_KEY_DECODED)
content = 'Hello, is it me you\'re looking for?'
blob.upload_from_string(content)
return blob.name, content
def test_download_blob(test_blob, cloud_config):
test_blob_name, test_blob_content = test_blob
with tempfile.NamedTemporaryFile() as dest_file:
encryption.download_encrypted_blob(
cloud_config.storage_bucket,
test_blob_name,
dest_file.name,
TEST_ENCRYPTION_KEY)
downloaded_content = dest_file.read().decode('utf-8')
assert downloaded_content == test_blob_content
def test_rotate_encryption_key(test_blob, cloud_config):
test_blob_name, test_blob_content = test_blob
encryption.rotate_encryption_key(
cloud_config.storage_bucket,
test_blob_name,
TEST_ENCRYPTION_KEY,
TEST_ENCRYPTION_KEY_2)
with tempfile.NamedTemporaryFile() as dest_file:
encryption.download_encrypted_blob(
cloud_config.storage_bucket,
test_blob_name,
dest_file.name,
TEST_ENCRYPTION_KEY_2)
downloaded_content = dest_file.read().decode('utf-8')
assert downloaded_content == test_blob_content
|
cshallue/models
|
refs/heads/master
|
research/minigo/evaluation.py
|
2
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluation of playing games between two neural nets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import go
from gtp_wrapper import MCTSPlayer
import sgf_wrapper
def play_match(params, black_net, white_net, games, readouts,
sgf_dir, verbosity):
"""Plays matches between two neural nets.
  The net that wins by a margin of more than params.eval_win_rate of the games is the winner.
Args:
params: An object of hyperparameters.
black_net: Instance of the DualNetRunner class to play as black.
white_net: Instance of the DualNetRunner class to play as white.
    games: Number of games to play, one after another.
readouts: Number of readouts to perform for each step in each game.
sgf_dir: Directory to write the sgf results.
verbosity: Verbosity to show evaluation process.
Returns:
    'B' if the winner is black_net, otherwise 'W'.
"""
  # Create a black and a white MCTS player; each game below reuses them.
black = MCTSPlayer(
params.board_size, black_net, verbosity=verbosity, two_player_mode=True,
num_parallel=params.simultaneous_leaves)
white = MCTSPlayer(
params.board_size, white_net, verbosity=verbosity, two_player_mode=True,
num_parallel=params.simultaneous_leaves)
black_name = os.path.basename(black_net.save_file)
white_name = os.path.basename(white_net.save_file)
black_win_counts = 0
white_win_counts = 0
for i in range(games):
num_move = 0 # The move number of the current game
black.initialize_game()
white.initialize_game()
while True:
start = time.time()
active = white if num_move % 2 else black
inactive = black if num_move % 2 else white
current_readouts = active.root.N
while active.root.N < current_readouts + readouts:
active.tree_search()
# print some stats on the search
if verbosity >= 3:
print(active.root.position)
# First, check the roots for hopeless games.
if active.should_resign(): # Force resign
active.set_result(-active.root.position.to_play, was_resign=True)
inactive.set_result(
active.root.position.to_play, was_resign=True)
if active.is_done():
fname = '{:d}-{:s}-vs-{:s}-{:d}.sgf'.format(
int(time.time()), white_name, black_name, i)
with open(os.path.join(sgf_dir, fname), 'w') as f:
sgfstr = sgf_wrapper.make_sgf(
params.board_size, active.position.recent, active.result_string,
black_name=black_name, white_name=white_name)
f.write(sgfstr)
print('Finished game', i, active.result_string)
if active.result_string is not None:
if active.result_string[0] == 'B':
black_win_counts += 1
elif active.result_string[0] == 'W':
white_win_counts += 1
break
move = active.pick_move()
active.play_move(move)
inactive.play_move(move)
dur = time.time() - start
num_move += 1
if (verbosity > 1) or (verbosity == 1 and num_move % 10 == 9):
timeper = (dur / readouts) * 100.0
print(active.root.position)
print('{:d}: {:d} readouts, {:.3f} s/100. ({:.2f} sec)'.format(
num_move, readouts, timeper, dur))
if (black_win_counts - white_win_counts) > params.eval_win_rate * games:
return go.BLACK_NAME
else:
return go.WHITE_NAME
|
R3v1L/pyevo
|
refs/heads/master
|
pyevo/checksum.py
|
2
|
# -*- coding: utf-8 -*-
"""
PyEVO checksum utility functions
===============================================
.. module:: pyevo.checksum
:platform: Unix, Windows
:synopsis: Checksum utility functions
.. moduleauthor:: (C) 2013 Oliver Gutiérrez
"""
# Python imports
import string
def mod10(value):
"""
Luhn (mod10) algorithm verification
:param value: Value to be checked
:type value: Numeric string or integer
    :returns: True if the value passes the Luhn check, False otherwise
    :rtype: bool
    """
    num = list(map(int, str(value)))
return sum(num[::-2] + [sum(divmod(d * 2, 10)) for d in num[-2::-2]]) % 10 == 0
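# A minimal usage sketch (79927398713 is the canonical Luhn test number; its
# weighted digit sum is 70, a multiple of 10):
#
#   >>> mod10(79927398713)
#   True
#   >>> mod10(79927398714)
#   False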
def spanish_dni_letter(dni):
"""
Generates spanish DNI letter
:param dni: DNI numeric part
:type dni: Numeric string or integer
:returns: DNI letter
:rtype: Single letter string
"""
    letters='TRWAGMYFPDXBNJZSQVHLCKE'
    return letters[int(str(dni)[:8])%23]
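# Illustrative usage: 12345678 % 23 == 14, which indexes 'Z' in the table:
#
#   >>> spanish_dni_letter('12345678')
#   'Z'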
def spanish_nie_letter(nie):
"""
Generates spanish NIE letter
    :param nie: NIE value (leading letter plus digits)
    :type nie: String starting with X, Y or Z
:returns: NIE letter
:rtype: Single letter string
"""
letters='XYZ'
return spanish_dni_letter(str(letters.index(nie[0]))+nie[1:])
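# Illustrative usage: the leading 'X' maps to '0', so the check runs over
# '01234567', and 1234567 % 23 == 19 indexes 'L':
#
#   >>> spanish_nie_letter('X1234567')
#   'L'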
def spanish_bank_account_control_digits(entity,office,account):
"""
Calculate spanish bank account control digits
:param entity: Four digit bank account entity code
:type entity: Numeric string or integer
    :param office: Four digit bank office code
:type office: Numeric string or integer
    :param account: Ten digit bank account number
:type account: Numeric string or integer
:returns: Spanish bank account control digits
:rtype: Two digits string
"""
    # normalize inputs to zero-padded digit strings (int() validates the
    # input; the zfill calls below require strings)
    entity=str(int(entity)).zfill(4)
    office=str(int(office)).zfill(4)
    account=str(int(account)).zfill(10)
    dcs=''
    mults=(10,9,7,3,6,1,2,4,8,5)
    numbers=[
        (entity+office).zfill(10),
        account
    ]
for num in numbers:
accum=0
for pos in range(10):
accum+=int(num[pos])*mults[pos]
mod=accum%11
if mod==10:
dcs+='1'
else:
dcs+=str(mod)
return dcs
def calculate_iban(countrycode,accountnum,printing=True):
"""
IBAN account number calculation
:param countrycode: Country code for the account
:type countrycode: String
:param accountnum: Account number
:type accountnum: String
:param printing: Return IBAN in printing format
:type printing: True or False
:returns: IBAN account number
:rtype: String
"""
countryval=''
for digit in countrycode.upper():
countryval+=str(string.ascii_uppercase.index(digit)+10)
countryval+='00'
    dc=str(98-(int(accountnum+countryval)%97)).zfill(2)
iban=countrycode.upper()+dc+accountnum
if printing:
piban=''
i=0
j=4
block=iban[i:j]
while block:
piban+=block+' '
i+=4
j+=4
block=iban[i:j]
return piban.strip()
else:
return iban
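# Illustrative call using the well-known Spanish sample account (entity 2100,
# office 0418, control digits 45, account 0200051332):
#
#   >>> calculate_iban('ES', '21000418450200051332')
#   'ES91 2100 0418 4502 0005 1332'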
|
marcore/edx-platform
|
refs/heads/master
|
lms/djangoapps/commerce/migrations/0004_auto_20160531_0950.py
|
50
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('commerce', '0003_auto_20160329_0709'),
]
operations = [
migrations.AddField(
model_name='commerceconfiguration',
name='cache_ttl',
field=models.PositiveIntegerField(default=0, help_text='Specified in seconds. Enable caching by setting this to a value greater than 0.', verbose_name='Cache Time To Live'),
),
migrations.AddField(
model_name='commerceconfiguration',
name='receipt_page',
field=models.CharField(default=b'/commerce/checkout/receipt/?orderNum=', help_text='Path to order receipt page.', max_length=255),
),
]
|
loco-odoo/localizacion_co
|
refs/heads/master
|
openerp/addons/warning/__openerp__.py
|
261
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Warning Messages and Alerts',
'version': '1.0',
'category': 'Tools',
'description': """
Module to trigger warnings in OpenERP objects.
==============================================
Warning messages can be displayed for objects like sale order, purchase order,
picking and invoice. The message is triggered by the form's onchange event.
""",
'author': 'OpenERP SA',
'depends': ['base', 'sale_stock', 'purchase'],
'data': ['warning_view.xml'],
'demo': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
gw0/myhdl
|
refs/heads/master
|
myhdl/test/bugs/test_issue_122.py
|
2
|
from __future__ import absolute_import
from myhdl import *
from myhdl.conversion import verify
def issue_122(dout, i):
d = i*10+1
@instance
def write():
# dout[i].next = int(i)
dout[i].next = i
yield delay(d)
print(int(dout[i]))
if i == 0:
return write
else:
inst = issue_122(dout, i-1)
return write, inst
def tb_issue_122():
n = 7
dout = [Signal(intbv(0, min=0, max=n+1)) for i in range(n+1)]
inst = issue_122(dout, n)
return inst
def test_issue_122():
assert verify(tb_issue_122) == 0
|
stonegithubs/micropython
|
refs/heads/master
|
tests/basics/bytearray_add.py
|
22
|
# test bytearray + bytearray
b = bytearray(2)
b[0] = 1
b[1] = 2
print(b + bytearray(2))
# inplace add
b += bytearray(3)
print(b)
# extend
b.extend(bytearray(4))
print(b)
|
quickresolve/accel.ai
|
refs/heads/master
|
flask-aws/lib/python2.7/site-packages/ebcli/objects/tier.py
|
5
|
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from ..objects.exceptions import NotFoundError
import re
class Tier():
def __init__(self, name, typ, version):
self.name = name
self.type = typ
self.version = version.strip()
self.string = self.__str__()
def to_struct(self):
strct = {
'Name': self.name,
'Type': self.type,
}
if self.version:
strct['Version'] = self.version
return strct
def __str__(self):
s = self.name + '-' + self.type
if self.version:
s += '-' + self.version
return s
def __eq__(self, other):
if not isinstance(other, Tier):
return False
return self.string.lower() == other.string.lower()
@staticmethod
def get_all_tiers():
lst = [
Tier('WebServer', 'Standard', '1.0'),
Tier('Worker', 'SQS/HTTP', '1.0'),
Tier('Worker', 'SQS/HTTP', '1.1'),
Tier('Worker', 'SQS/HTTP', ''),
]
return lst
@staticmethod
def parse_tier(string):
if string.lower() == 'web' or string.lower() == 'webserver':
return Tier('WebServer', 'Standard', '1.0')
if string.lower() == 'worker':
return Tier('Worker', 'SQS/HTTP', '')
params = string.split('-')
if len(params) == 3:
name, typ, version = string.split('-')
elif len(params) == 2:
name, typ = string.split('-')
            if re.match(r'\d+[.]\d+', typ):
version = typ
else:
version = ''
else:
raise NotFoundError('Tier Not found')
# we want to return the Proper, uppercase version
if name.lower() == 'webserver' or name.lower() == 'web':
return Tier('WebServer', 'Standard', version)
elif name.lower() == 'worker':
return Tier('Worker', 'SQS/HTTP', version)
# tier not found
raise NotFoundError('Tier Not found')
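# Illustrative parses following the branches above:
#
#   >>> str(Tier.parse_tier('web'))
#   'WebServer-Standard-1.0'
#   >>> str(Tier.parse_tier('worker'))
#   'Worker-SQS/HTTP'
#   >>> str(Tier.parse_tier('Worker-SQS/HTTP-1.1'))
#   'Worker-SQS/HTTP-1.1'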
|
slorg1/heroku-buildpack-python
|
refs/heads/master
|
vendor/pip-pop/pip/_vendor/requests/packages/chardet/big5prober.py
|
2930
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import Big5SMModel
class Big5Prober(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(Big5SMModel)
self._mDistributionAnalyzer = Big5DistributionAnalysis()
self.reset()
def get_charset_name(self):
return "Big5"
|
RobinD42/pyside
|
refs/heads/master
|
tests/QtDeclarative/bug_997.py
|
6
|
from PySide import QtCore, QtDeclarative
import unittest
from helper import adjust_filename, UsesQApplication
class TestBug(UsesQApplication):
def testQMLFunctionCall(self):
ownerData = QtDeclarative.QDeclarativePropertyMap()
ownerData.insert('name', 'John Smith')
ownerData.insert('phone', '555-5555')
ownerData.insert('newValue', '')
view = QtDeclarative.QDeclarativeView()
ctxt = view.rootContext()
ctxt.setContextProperty('owner', ownerData)
view.setSource(QtCore.QUrl.fromLocalFile(adjust_filename('bug_997.qml', __file__)))
view.show()
QtCore.QTimer.singleShot(1000, self.app.quit)
self.app.exec_()
self.assertEqual(ownerData.value('newName'), ownerData.value('name'))
if __name__ == '__main__':
unittest.main()
|
joone/chromium-crosswalk
|
refs/heads/2016.04.css-round-display-edtior-draft-1
|
tools/telemetry/third_party/pyfakefs/pyfakefs/fake_filesystem_unittest_test.py
|
21
|
#! /usr/bin/env python
#
# Copyright 2014 Altera Corporation. All Rights Reserved.
# Author: John McGehee
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test the :py:class`pyfakefs.fake_filesystem_unittest.TestCase` base class.
"""
import os
import glob
import shutil
import tempfile
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
import fake_filesystem_unittest
import pytest
class TestPyfakefsUnittest(fake_filesystem_unittest.TestCase): # pylint: disable=R0904
'''Test the pyfakefs.fake_filesystem_unittest.TestCase` base class.'''
def setUp(self):
'''Set up the fake file system'''
self.setUpPyfakefs()
def tearDown(self):
'''Tear down the fake file system'''
self.tearDownPyfakefs()
    @unittest.skipIf(sys.version_info >= (3,), "file() was removed in Python 3")
def test_file(self):
'''Fake `file()` function is bound'''
self.assertFalse(os.path.exists('/fake_file.txt'))
with file('/fake_file.txt', 'w') as f:
f.write("This test file was created using the file() function.\n")
self.assertTrue(self.fs.Exists('/fake_file.txt'))
with file('/fake_file.txt') as f:
content = f.read()
self.assertEqual(content,
'This test file was created using the file() function.\n')
def test_open(self):
'''Fake `open()` function is bound'''
self.assertFalse(os.path.exists('/fake_file.txt'))
with open('/fake_file.txt', 'w') as f:
f.write("This test file was created using the open() function.\n")
self.assertTrue(self.fs.Exists('/fake_file.txt'))
with open('/fake_file.txt') as f:
content = f.read()
self.assertEqual(content,
'This test file was created using the open() function.\n')
def test_os(self):
'''Fake os module is bound'''
self.assertFalse(self.fs.Exists('/test/dir1/dir2'))
os.makedirs('/test/dir1/dir2')
self.assertTrue(self.fs.Exists('/test/dir1/dir2'))
def test_glob(self):
'''Fake glob module is bound'''
self.assertCountEqual(glob.glob('/test/dir1/dir*'),
[])
self.fs.CreateDirectory('/test/dir1/dir2a')
self.assertCountEqual(glob.glob('/test/dir1/dir*'),
['/test/dir1/dir2a'])
self.fs.CreateDirectory('/test/dir1/dir2b')
self.assertCountEqual(glob.glob('/test/dir1/dir*'),
['/test/dir1/dir2a', '/test/dir1/dir2b'])
def test_shutil(self):
'''Fake shutil module is bound'''
self.fs.CreateDirectory('/test/dir1/dir2a')
self.fs.CreateDirectory('/test/dir1/dir2b')
self.assertTrue(self.fs.Exists('/test/dir1/dir2b'))
self.assertTrue(self.fs.Exists('/test/dir1/dir2a'))
shutil.rmtree('/test/dir1')
self.assertFalse(self.fs.Exists('/test/dir1'))
def test_tempfile(self):
'''Fake tempfile module is bound'''
with tempfile.NamedTemporaryFile() as tf:
tf.write(b'Temporary file contents\n')
name = tf.name
self.assertTrue(self.fs.Exists(tf.name))
def test_pytest(self):
'''Compatibility with the :py:module:`pytest` module.'''
pass
if __name__ == "__main__":
unittest.main()
|
NaturalGIS/naturalgis_qgis
|
refs/heads/master
|
tests/src/python/test_qgsbinarywidget.py
|
45
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for binary editor widgets.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '11/11/2018'
__copyright__ = 'Copyright 2018, The QGIS Project'
import qgis # NOQA
from qgis.PyQt.QtCore import QByteArray
from qgis.core import QgsFeature, QgsGeometry, QgsPointXY, QgsVectorLayer, NULL
from qgis.gui import QgsGui
from qgis.testing import start_app, unittest
start_app()
class TestQgsBinaryWidget(unittest.TestCase):
@classmethod
def setUpClass(cls):
QgsGui.editorWidgetRegistry().initEditors()
def setUp(self):
"""
create a layer with one feature
"""
self.layer = QgsVectorLayer("Point?crs=EPSG:21781&field=fldint:integer&field=fldbin:binary",
"addfeat", "memory")
self.assertTrue(self.layer.isValid())
f = QgsFeature()
bin_1 = b'xxx'
bin_val1 = QByteArray(bin_1)
f.setAttributes([123, bin_val1])
        f.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(600000, 200000)))
        # the docstring promises one feature, so actually add it to the layer
        self.layer.dataProvider().addFeatures([f])
def __createBinaryWidget(self):
"""
create a binary widget
"""
reg = QgsGui.editorWidgetRegistry()
configWdg = reg.createConfigWidget('Binary', self.layer, 1, None)
config = configWdg.config()
binary_widget = reg.create('Binary', self.layer, 1, config, None, None)
return binary_widget
def testValue(self):
widget = self.__createBinaryWidget()
self.assertTrue(widget.widget())
self.assertFalse(widget.value())
bin_2 = b'yyy'
bin_val2 = QByteArray(bin_2)
widget.setValue(bin_val2)
self.assertEqual(widget.value(), bin_val2)
widget.setValue(NULL)
self.assertEqual(widget.value(), QByteArray())
if __name__ == '__main__':
unittest.main()
|
Varentsov/servo
|
refs/heads/master
|
tests/wpt/web-platform-tests/websockets/handlers/wrong_accept_key_wsh.py
|
242
|
#!/usr/bin/python
import sys, urllib, time
from mod_pywebsocket import common, msgutil, util
def web_socket_do_extra_handshake(request):
request.connection.write('HTTP/1.1 101 Switching Protocols:\x0D\x0AConnection: Upgrade\x0D\x0AUpgrade: WebSocket\x0D\x0ASec-WebSocket-Origin: '+request.ws_origin+'\x0D\x0ASec-WebSocket-Accept: thisisawrongacceptkey\x0D\x0A\x0D\x0A')
return
def web_socket_transfer_data(request):
while True:
request.ws_stream.send_message('test', binary=False)
return
|
simone/web-scuola-janua
|
refs/heads/master
|
mezzajanua/settings.py
|
1
|
from __future__ import absolute_import, unicode_literals
import os
from django import VERSION as DJANGO_VERSION
from django.utils.translation import ugettext_lazy as _
######################
# MEZZANINE SETTINGS #
######################
# The following settings are already defined with default values in
# the ``defaults.py`` module within each of Mezzanine's apps, but are
# common enough to be put here, commented out, for conveniently
# overriding. Please consult the settings documentation for a full list
# of settings Mezzanine implements:
# http://mezzanine.jupo.org/docs/configuration.html#default-settings
# Controls the ordering and grouping of the admin menu.
#
# ADMIN_MENU_ORDER = (
# ("Content", ("pages.Page", "blog.BlogPost",
# "generic.ThreadedComment", (_("Media Library"), "media-library"),)),
# ("Site", ("sites.Site", "redirects.Redirect", "conf.Setting")),
# ("Users", ("auth.User", "auth.Group",)),
# )
# A three item sequence, each containing a sequence of template tags
# used to render the admin dashboard.
#
# DASHBOARD_TAGS = (
# ("blog_tags.quick_blog", "mezzanine_tags.app_list"),
# ("comment_tags.recent_comments",),
# ("mezzanine_tags.recent_actions",),
# )
# A sequence of templates used by the ``page_menu`` template tag. Each
# item in the sequence is a three item sequence, containing a unique ID
# for the template, a label for the template, and the template path.
# These templates are then available for selection when editing which
# menus a page should appear in. Note that if a menu template is used
# that doesn't appear in this setting, all pages will appear in it.
PAGE_MENU_TEMPLATES = (
#(0, _("Top navigation bar"), "pages/menus/topbar.html"),
(1, _("Main Menu"), "pages/menus/dropdown.html"),
(2, _("Slides"), "pages/menus/slides.html"),
(3, _("Banner Menu"), "pages/menus/banner.html"),
#(4, _("Footer"), "pages/menus/footer.html"),
(5, _("Static"), "pages/menus/static.html"),
)
# A sequence of fields that will be injected into Mezzanine's (or any
# library's) models. Each item in the sequence is a four item sequence.
# The first two items are the dotted path to the model and its field
# name to be added, and the dotted path to the field class to use for
# the field. The third and fourth items are a sequence of positional
# args and a dictionary of keyword args, to use when creating the
# field instance. When specifying the field class, the path
# ``django.models.db.`` can be omitted for regular Django model fields.
#
# EXTRA_MODEL_FIELDS = (
# (
# # Dotted path to field.
# "mezzanine.blog.models.BlogPost.image",
# # Dotted path to field class.
# "somelib.fields.ImageField",
# # Positional args for field class.
# (_("Image"),),
# # Keyword args for field class.
# {"blank": True, "upload_to": "blog"},
# ),
# # Example of adding a field to *all* of Mezzanine's content types:
# (
# "mezzanine.pages.models.Page.another_field",
# "IntegerField", # 'django.db.models.' is implied if path is omitted.
# (_("Another name"),),
# {"blank": True, "default": 1},
# ),
# )
# Setting to turn on featured images for blog posts. Defaults to False.
#
# BLOG_USE_FEATURED_IMAGE = True
# If True, the django-modeltranslation will be added to the
# INSTALLED_APPS setting.
USE_MODELTRANSLATION = True
########################
# MAIN DJANGO SETTINGS #
########################
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Rome'
#EVENT_TIME_ZONE = 'Europe/Rome'
#EVENT_SLUG="events"
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = True
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = "it"
MODELTRANSLATION_DEFAULT_LANGUAGE = 'it'
# Supported languages
LANGUAGES = (
('it', _('Italian')),
('en', _('English')),
('es', _('Spanish')),
('fr', _('French')),
('de', _('German')),
)
# A boolean that turns on/off debug mode. When set to ``True``, stack traces
# are displayed for error pages. Should always be set to ``False`` in
# production. Best set to ``True`` in local_settings.py
DEBUG = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
AUTHENTICATION_BACKENDS = ("mezzanine.core.auth_backends.MezzanineBackend",)
# The numeric mode to set newly-uploaded files to. The value should be
# a mode you'd pass directly to os.chmod.
FILE_UPLOAD_PERMISSIONS = 0o644
FILE_UPLOAD_MAX_MEMORY_SIZE = 10485760
FILEBROWSER_MAX_UPLOAD_SIZE = 10485760
#############
# DATABASES #
#############
DATABASES = {
"default": {
# Add "postgresql_psycopg2", "mysql", "sqlite3" or "oracle".
"ENGINE": "django.db.backends.",
# DB name or path to database file if using sqlite3.
"NAME": "",
# Not used with sqlite3.
"USER": "",
# Not used with sqlite3.
"PASSWORD": "",
# Set to empty string for localhost. Not used with sqlite3.
"HOST": "",
# Set to empty string for default. Not used with sqlite3.
"PORT": "",
}
}
#########
# PATHS #
#########
# Full filesystem path to the project.
PROJECT_APP_PATH = os.path.dirname(os.path.abspath(__file__))
PROJECT_APP = os.path.basename(PROJECT_APP_PATH)
PROJECT_ROOT = BASE_DIR = os.path.dirname(PROJECT_APP_PATH)
# Every cache key will get prefixed with this value - here we set it to
# the name of the directory the project is in to try and use something
# project specific.
CACHE_MIDDLEWARE_KEY_PREFIX = PROJECT_APP
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = "/static/"
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, STATIC_URL.strip("/"))
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = STATIC_URL + "media/"
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, *MEDIA_URL.strip("/").split("/"))
# Package/module name to import the root urlpatterns from for the project.
ROOT_URLCONF = "%s.urls" % PROJECT_APP
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [
os.path.join(PROJECT_ROOT, "templates")
],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
"django.template.context_processors.debug",
"django.template.context_processors.i18n",
"django.template.context_processors.static",
"django.template.context_processors.media",
"django.template.context_processors.request",
"django.template.context_processors.tz",
"mezzanine.conf.context_processors.settings",
"mezzanine.pages.context_processors.page",
],
"builtins": [
"mezzanine.template.loader_tags",
],
},
},
]
if DJANGO_VERSION < (1, 9):
del TEMPLATES[0]["OPTIONS"]["builtins"]
#TEMPLATE_ACCESSIBLE_SETTINGS
################
# APPLICATIONS #
################
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.redirects",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.sitemaps",
"django.contrib.staticfiles",
"grafica",
"mezzanine.boot",
"mezzanine.conf",
"mezzanine.core",
"mezzanine.generic",
"mezzanine.pages",
"mezzanine.blog",
"mezzanine.forms",
"mezzanine.galleries",
"mezzanine.twitter",
"widget_tweaks", # x bootstrap forms
"post_office", # send email async - require uwsgi cron
#"mezzanine.accounts",
#"mezzanine.mobile",
# -e git+git@github.com:yomguy/mezzanine-agenda.git@de0e4d9103be73c8865919cf47960d9bd505fc5b#egg=mezzanine_agenda
# "mezzanine_agenda"
)
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE_CLASSES = (
"mezzanine.core.middleware.UpdateCacheMiddleware",
'django.contrib.sessions.middleware.SessionMiddleware',
# Uncomment if using internationalisation or localisation
# 'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
"mezzanine.core.request.CurrentRequestMiddleware",
"mezzanine.core.middleware.RedirectFallbackMiddleware",
"mezzanine.core.middleware.TemplateForDeviceMiddleware",
"mezzanine.core.middleware.TemplateForHostMiddleware",
"mezzanine.core.middleware.AdminLoginInterfaceSelectorMiddleware",
"mezzanine.core.middleware.SitePermissionMiddleware",
# Uncomment the following if using any of the SSL settings:
# "mezzanine.core.middleware.SSLRedirectMiddleware",
"mezzanine.pages.middleware.PageMiddleware",
"mezzanine.core.middleware.FetchFromCacheMiddleware",
)
# Store these package names here as they may change in the future since
# at the moment we are using custom forks of them.
PACKAGE_NAME_FILEBROWSER = "filebrowser_safe"
PACKAGE_NAME_GRAPPELLI = "grappelli_safe"
#########################
# OPTIONAL APPLICATIONS #
#########################
# These will be added to ``INSTALLED_APPS``, only if available.
OPTIONAL_APPS = (
"debug_toolbar",
"django_extensions",
"compressor",
PACKAGE_NAME_FILEBROWSER,
PACKAGE_NAME_GRAPPELLI,
)
RICHTEXT_FILTER_LEVEL = 2
EMAIL_BACKEND = 'post_office.EmailBackend'
##################
# LOCAL SETTINGS #
##################
# Allow any settings to be defined in local_settings.py which should be
# ignored in your version control system allowing for settings to be
# defined per machine.
# Instead of doing "from .local_settings import *", we use exec so that
# local_settings has full access to everything defined in this module.
# Also force into sys.modules so it's visible to Django's autoreload.
f = os.path.join(PROJECT_APP_PATH, "local_settings.py")
if os.path.exists(f):
import sys
import imp
module_name = "%s.local_settings" % PROJECT_APP
module = imp.new_module(module_name)
module.__file__ = f
sys.modules[module_name] = module
exec(open(f, "rb").read())
SITE_NAME = SITE_TITLE = "Scuola Janua"
SITE_URL = "http://scuola.janua.it"
#UPLOAD_DIRECTORY = os.path.join(MEDIA_ROOT, 'uploads')
EXTRA_MODEL_FIELDS = (
("mezzanine.pages.models.Page.subtitle", "CharField", (_("Subtitle"),), {"blank": True, "max_length": 400}),
# ("mezzanine.pages.models.Page.banner", "ImageField", (_("Banner"),), {"blank": True, "upload_to": UPLOAD_DIRECTORY})
)
MIGRATION_MODULES = {
"blog": "grafica.migrations.blog_migration",
"conf": "grafica.migrations.conf_migration",
"forms": "grafica.migrations.forms_migration",
"galleries": "grafica.migrations.galleries_migration",
"pages": "grafica.migrations.pages_migration",
}
####################
# DYNAMIC SETTINGS #
####################
# set_dynamic_settings() will rewrite globals based on what has been
# defined so far, in order to provide some better defaults where
# applicable. We also allow this settings module to be imported
# without Mezzanine installed, as the case may be when using the
# fabfile, where setting the dynamic settings below isn't strictly
# required.
try:
from mezzanine.utils.conf import set_dynamic_settings
except ImportError:
pass
else:
set_dynamic_settings(globals())
|
dagwieers/ansible
|
refs/heads/devel
|
lib/ansible/modules/cloud/cloudstack/cs_vpc_offering.py
|
25
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017, David Passante (@dpassante)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_vpc_offering
short_description: Manages vpc offerings on Apache CloudStack based clouds.
description:
- Create, update, enable, disable and remove CloudStack VPC offerings.
version_added: '2.5'
author: David Passante (@dpassante)
options:
name:
description:
- The name of the vpc offering
type: str
required: true
state:
description:
- State of the vpc offering.
type: str
choices: [ enabled, present, disabled, absent ]
default: present
display_text:
description:
- Display text of the vpc offerings
type: str
service_capabilities:
description:
- Desired service capabilities as part of vpc offering.
type: list
aliases: [ service_capability ]
service_offering:
description:
- The name or ID of the service offering for the VPC router appliance.
type: str
supported_services:
description:
- Services supported by the vpc offering
type: list
aliases: [ supported_service ]
service_providers:
description:
      - Provider to service mapping. If not specified, the provider for the service will be mapped to the default provider on the physical network.
type: list
aliases: [ service_provider ]
poll_async:
description:
- Poll async jobs until job has finished.
default: yes
type: bool
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Create a vpc offering and enable it
cs_vpc_offering:
name: my_vpc_offering
display_text: vpc offering description
state: enabled
supported_services: [ Dns, Dhcp ]
service_providers:
- {service: 'dns', provider: 'VpcVirtualRouter'}
- {service: 'dhcp', provider: 'VpcVirtualRouter'}
delegate_to: localhost
- name: Create a vpc offering with redundant router
cs_vpc_offering:
name: my_vpc_offering
display_text: vpc offering description
supported_services: [ Dns, Dhcp, SourceNat ]
service_providers:
- {service: 'dns', provider: 'VpcVirtualRouter'}
- {service: 'dhcp', provider: 'VpcVirtualRouter'}
- {service: 'SourceNat', provider: 'VpcVirtualRouter'}
service_capabilities:
- {service: 'SourceNat', capabilitytype: 'RedundantRouter', capabilityvalue: true}
delegate_to: localhost
- name: Create a region level vpc offering with distributed router
cs_vpc_offering:
name: my_vpc_offering
display_text: vpc offering description
state: present
supported_services: [ Dns, Dhcp, SourceNat ]
service_providers:
- {service: 'dns', provider: 'VpcVirtualRouter'}
- {service: 'dhcp', provider: 'VpcVirtualRouter'}
- {service: 'SourceNat', provider: 'VpcVirtualRouter'}
service_capabilities:
- {service: 'Connectivity', capabilitytype: 'DistributedRouter', capabilityvalue: true}
- {service: 'Connectivity', capabilitytype: 'RegionLevelVPC', capabilityvalue: true}
delegate_to: localhost
- name: Remove a vpc offering
cs_vpc_offering:
name: my_vpc_offering
state: absent
delegate_to: localhost
'''
RETURN = '''
---
id:
description: UUID of the vpc offering.
returned: success
type: str
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
name:
description: The name of the vpc offering
returned: success
type: str
sample: MyCustomVPCOffering
display_text:
description: The display text of the vpc offering
returned: success
type: str
sample: My vpc offering
state:
description: The state of the vpc offering
returned: success
type: str
sample: Enabled
service_offering_id:
description: The service offering ID.
returned: success
type: str
sample: c5f7a5fc-43f8-11e5-a151-feff819cdc9f
is_default:
description: Whether VPC offering is the default offering or not.
returned: success
type: bool
sample: false
region_level:
description: Indicated if the offering can support region level vpc.
returned: success
type: bool
sample: false
distributed:
description: Indicates if the vpc offering supports distributed router for one-hop forwarding.
returned: success
type: bool
sample: false
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together,
)
class AnsibleCloudStackVPCOffering(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackVPCOffering, self).__init__(module)
self.returns = {
'serviceofferingid': 'service_offering_id',
'isdefault': 'is_default',
'distributedvpcrouter': 'distributed',
'supportsregionLevelvpc': 'region_level',
}
self.vpc_offering = None
def get_vpc_offering(self):
if self.vpc_offering:
return self.vpc_offering
args = {
'name': self.module.params.get('name'),
}
vo = self.query_api('listVPCOfferings', **args)
if vo:
for vpc_offer in vo['vpcoffering']:
if args['name'] == vpc_offer['name']:
self.vpc_offering = vpc_offer
return self.vpc_offering
def get_service_offering_id(self):
service_offering = self.module.params.get('service_offering')
if not service_offering:
return None
args = {
'issystem': True
}
service_offerings = self.query_api('listServiceOfferings', **args)
if service_offerings:
for s in service_offerings['serviceoffering']:
if service_offering in [s['name'], s['id']]:
return s['id']
self.fail_json(msg="Service offering '%s' not found" % service_offering)
def create_or_update(self):
vpc_offering = self.get_vpc_offering()
if not vpc_offering:
vpc_offering = self.create_vpc_offering()
return self.update_vpc_offering(vpc_offering)
def create_vpc_offering(self):
vpc_offering = None
self.result['changed'] = True
args = {
'name': self.module.params.get('name'),
'state': self.module.params.get('state'),
'displaytext': self.module.params.get('display_text'),
'supportedservices': self.module.params.get('supported_services'),
'serviceproviderlist': self.module.params.get('service_providers'),
'serviceofferingid': self.get_service_offering_id(),
'servicecapabilitylist': self.module.params.get('service_capabilities'),
}
required_params = [
'display_text',
'supported_services',
]
self.module.fail_on_missing_params(required_params=required_params)
if not self.module.check_mode:
res = self.query_api('createVPCOffering', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
vpc_offering = self.poll_job(res, 'vpcoffering')
return vpc_offering
def delete_vpc_offering(self):
vpc_offering = self.get_vpc_offering()
if vpc_offering:
self.result['changed'] = True
args = {
'id': vpc_offering['id'],
}
if not self.module.check_mode:
res = self.query_api('deleteVPCOffering', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
vpc_offering = self.poll_job(res, 'vpcoffering')
return vpc_offering
def update_vpc_offering(self, vpc_offering):
if not vpc_offering:
return vpc_offering
args = {
'id': vpc_offering['id'],
'state': self.module.params.get('state'),
'name': self.module.params.get('name'),
'displaytext': self.module.params.get('display_text'),
}
if args['state'] in ['enabled', 'disabled']:
args['state'] = args['state'].title()
else:
del args['state']
if self.has_changed(args, vpc_offering):
self.result['changed'] = True
if not self.module.check_mode:
res = self.query_api('updateVPCOffering', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
vpc_offering = self.poll_job(res, 'vpcoffering')
return vpc_offering
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name=dict(required=True),
display_text=dict(),
state=dict(choices=['enabled', 'present', 'disabled', 'absent'], default='present'),
service_capabilities=dict(type='list', aliases=['service_capability']),
service_offering=dict(),
supported_services=dict(type='list', aliases=['supported_service']),
service_providers=dict(type='list', aliases=['service_provider']),
poll_async=dict(type='bool', default=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
acs_vpc_offering = AnsibleCloudStackVPCOffering(module)
state = module.params.get('state')
if state in ['absent']:
vpc_offering = acs_vpc_offering.delete_vpc_offering()
else:
vpc_offering = acs_vpc_offering.create_or_update()
result = acs_vpc_offering.get_result(vpc_offering)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
liyichao/spark
|
refs/heads/master
|
sql/hive/src/test/resources/data/scripts/cat.py
|
105
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import print_function
import sys
import os
table_name = None
if 'hive_streaming_tablename' in os.environ:
table_name = os.environ['hive_streaming_tablename']
for line in sys.stdin:
print(line)
print("dummy", file=sys.stderr)
|
jmcarp/django
|
refs/heads/master
|
tests/migrations/test_migrations_no_default/0001_initial.py
|
381
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='SillyModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('silly_field', models.BooleanField(default=False)),
],
options={
},
bases=(models.Model,),
),
]
|
belvedere-trading/ChronosES
|
refs/heads/master
|
Chronos/tests/unit/test_Core.py
|
2
|
#pylint: skip-file
import mock
import unittest
import sys
from nose_parameterized import parameterized
class CoreBaseClassTest(unittest.TestCase):
def setUp(self):
googleMock = mock.MagicMock()
protobufMock = googleMock.protobuf
protobufMock.message = mock.MagicMock()
protobufMock.message.Message = mock.MagicMock
self.patcher = mock.patch.dict('sys.modules',
{'Chronos.EventLogger': mock.MagicMock(),
'Chronos.Map': mock.MagicMock(),
'Chronos.Chronos_pb2': mock.MagicMock(),
'google': googleMock,
'google.protobuf': protobufMock,
'ordereddict': mock.MagicMock()})
self.patcher.start()
global Aggregate, Event, MockProto, ChronosSemanticException, ChronosCoreException
mockDescriptor = mock.MagicMock()
fieldMock1 = mock.MagicMock()
fieldMock1.name = 'indexTest1'
fieldMock2 = mock.MagicMock()
fieldMock2.name = 'indexTest2'
mockDescriptor.fields = [fieldMock1, fieldMock2]
class MockProto(mock.MagicMock):
DESCRIPTOR = mockDescriptor
indexTest1 = 'test1'
indexTest2 = 'test2'
SerializeToString = mock.MagicMock()
ParseFromString = mock.MagicMock()
MockProto.SerializeToString.return_value = 'protostring'
MockProto.ParseFromString.return_value = MockProto()
from Chronos.Core import Aggregate, Event, ChronosSemanticException, ChronosCoreException
def tearDown(self):
self.patcher.stop()
def test_MultipleAggregateCreation(self):
try:
class TestAggregate(Aggregate):
Proto = MockProto
class AnotherAggregate(Aggregate):
Proto = MockProto
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_AggregateWithNoProto(self):
try:
class TestAggregate(Aggregate):
pass
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_AggregateWithBadProto(self):
try:
class TestAggregate(Aggregate):
Proto = object
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_AggregateGetIndices(self):
class TestAggregate(Aggregate):
Proto = MockProto
aggregate = TestAggregate(1, 1)
aggregate.IndexedAttributes = set(['a','b','c'])
aggregate.NoCaseAttributes = set()
aggregate.a, aggregate.b, aggregate.c = 1, 2, 3
self.assertEqual(aggregate.GetIndices(), {'a':'1', 'b':'2', 'c':'3'})
def test_AggregateDivergence(self):
class TestAggregate(Aggregate):
Proto = MockProto
aggregate = TestAggregate(1, 1)
aggregate.IndexedAttributes = set(['a','b','c'])
aggregate.NoCaseAttributes = set()
aggregate.a, aggregate.b, aggregate.c = 1, 2, 3
aggregate2 = TestAggregate(1, 1)
aggregate2.IndexedAttributes = set(['a','b','c'])
aggregate2.NoCaseAttributes = set()
aggregate2.a, aggregate2.b, aggregate2.c = 1, 2, 3
self.assertFalse(aggregate2.HasDivergedFrom(aggregate))
aggregate.a = 'not1'
self.assertTrue(aggregate2.HasDivergedFrom(aggregate))
@mock.patch('Chronos.Core.json')
@mock.patch('Chronos.Core.json_format')
def test_ToRESTDict(self, jsonFormat, json):
class TestAggregate(Aggregate):
Proto = MockProto
aggregate = TestAggregate(1, 1)
json.loads = mock.MagicMock(return_value = 'json')
jsonFormat.MessageToJson = mock.MagicMock(return_value = 'json')
self.assertEqual(aggregate.ToRESTDict(), {'aggregateId': 1, 'version': 1, 'proto': 'json'})
def test_AggregateToDict(self):
class TestAggregate(Aggregate):
Proto = MockProto
self.assertEqual(TestAggregate(1, 1).ToDict(),
{'aggregateId': 1, 'version': 1, 'proto': 'protostring'})
self.assertEqual(MockProto.SerializeToString.call_count, 1)
self.assertEqual(TestAggregate(2, 5).ToDict(),
{'aggregateId': 2, 'version': 5, 'proto': 'protostring'})
self.assertEqual(MockProto.SerializeToString.call_count, 2)
def test_AggregateFromDict(self):
class TestAggregate(Aggregate):
Proto = MockProto
aggregate = TestAggregate.FromDict({'aggregateId': 1L, 'version': 1L, 'proto': 'anything'})
self.assertEqual(aggregate.aggregateId, 1L)
self.assertEqual(aggregate.version, 1L)
self.assertTrue(isinstance(aggregate.proto, MockProto))
self.assertEqual(MockProto.ParseFromString.call_count, 1)
def test_EventWithNoMembers(self):
try:
class TestEvent(Event):
pass
def RaiseFor(self, aggregate):
pass
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_EventWithNoProto(self):
class TestAggregate(Aggregate):
Proto = MockProto
try:
class TestEvent(Event):
Aggregate = TestAggregate
def RaiseFor(self, aggregate):
pass
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_EventWithNoAggregate(self):
try:
class TestEvent(Event):
Proto = MockProto
def RaiseFor(self, aggregate):
pass
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_EventWithBadAggregate(self):
try:
class TestEvent(Event):
Proto = MockProto
Aggregate = object
def RaiseFor(self, aggregate):
pass
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_EventWithAggregate(self):
try:
class TestEvent(Event):
Proto = MockProto
Aggregate = Aggregate
def RaiseFor(self, aggregate):
pass
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_EventWithNoRaiseFor(self):
class TestAggregate(Aggregate):
Proto = MockProto
try:
class TestEvent(Event):
Aggregate = TestAggregate
Proto = MockProto
except ChronosSemanticException:
pass
else:
self.fail('Expected ChronosSemanticException')
def test_EventRaiseFor(self):
class TestAggregate(Aggregate):
Proto = MockProto
class TestEvent(Event):
Aggregate = TestAggregate
Proto = MockProto
def RaiseFor(self, aggregate):
aggregate.indexTest1 = 'modified'
aggregate = TestAggregate(1, 1)
TestEvent(1, 1).RaiseFor(aggregate)
self.assertEqual(aggregate.indexTest1, 'modified')
def test_EventFullName(self):
class TestAggregate(Aggregate):
Proto = MockProto
class TestEvent(Event):
Aggregate = TestAggregate
Proto = MockProto
def RaiseFor(self, aggregate):
aggregate.indexTest1 = 'modified'
self.assertEqual(TestEvent.fullName, 'unit.test_Core.TestEvent')
class AggregateRepositoryTest(unittest.TestCase):
def setUp(self):
googleMock = mock.MagicMock()
protobufMock = googleMock.protobuf
protobufMock.message = mock.MagicMock()
protobufMock.message.Message = mock.MagicMock
self.patcher = mock.patch.dict('sys.modules',
{'Chronos.EventLogger': mock.MagicMock(),
'Chronos.Chronos_pb2': mock.MagicMock(),
'google': googleMock,
'google.protobuf': protobufMock,
'ordereddict': mock.MagicMock()})
self.patcher.start()
mockDescriptor = mock.MagicMock()
mockDescriptor.fields = []
class MockProto(mock.MagicMock):
DESCRIPTOR = mockDescriptor
global ChronosSemanticException, ChronosCoreException, TestAggregate
from Chronos.Core import Aggregate, ChronosCoreException, AggregateRepository
class TestAggregate(Aggregate):
Proto = MockProto
self.mockEventReader = mock.MagicMock()
self.aggregateRepository = AggregateRepository(TestAggregate, self.mockEventReader)
self.aggregateRepository.repository = {}
def tearDown(self):
self.patcher.stop()
def test_Create(self):
self.mockEventReader.GetNextAggregateId.return_value = 5
aggregate = self.aggregateRepository.Create()
self.assertEqual(aggregate.aggregateId, 5)
self.assertEqual(aggregate.version, 1)
self.assertEqual(self.aggregateRepository.repository, {})
def test_Transaction_rollback(self):
self.mockEventReader.TryGetSnapshot.return_value = None
self.mockEventReader.GetNextAggregateId.side_effect = [1, 2, 3, 4]
aggregate = self.aggregateRepository.Create()
rolledBack = self.aggregateRepository.Create()
rolledBack2 = self.aggregateRepository.Create()
self.aggregateRepository.Emplace(aggregate)
self.aggregateRepository.Begin()
self.aggregateRepository.Emplace(rolledBack)
self.aggregateRepository.Emplace(rolledBack2)
self.assertEqual(self.aggregateRepository.Get(2), rolledBack)
self.aggregateRepository.Rollback()
self.assertRaises(ChronosCoreException, self.aggregateRepository.Get, 2)
def test_Transaction_commit(self):
self.mockEventReader.GetNextAggregateId.side_effect = [1, 2, 3, 4]
aggregate = self.aggregateRepository.Create()
rolledBack = self.aggregateRepository.Create()
rolledBack2 = self.aggregateRepository.Create()
self.aggregateRepository.Emplace(aggregate)
self.aggregateRepository.Begin()
self.aggregateRepository.Emplace(rolledBack)
self.aggregateRepository.Emplace(rolledBack2)
self.assertEqual(self.aggregateRepository.Get(2), rolledBack)
self.aggregateRepository.Commit()
self.aggregateRepository.Rollback()
self.assertEqual(self.aggregateRepository.Get(3), rolledBack2)
def test_TransactionInProgress(self):
self.aggregateRepository.transactionRepository = 1
self.assertTrue(self.aggregateRepository._isTransactionInProgress())
self.aggregateRepository.transactionRepository = None
self.assertFalse(self.aggregateRepository._isTransactionInProgress())
def test_NoBeginTransactionInProgress(self):
self.aggregateRepository.transactionRepository = {'not':'cleared'}
self.aggregateRepository.transactionEvent.clear = mock.MagicMock()
self.aggregateRepository.Begin()
self.assertEquals(self.aggregateRepository.transactionRepository, {'not':'cleared'})
self.assertFalse(self.aggregateRepository.transactionEvent.clear.called)
def test_NoCommitTransactionInProgress(self):
self.aggregateRepository.transactionRepository = {'not':'cleared'}
self.aggregateRepository.transactionEvent.set = mock.MagicMock()
        # Even though the transaction repo is non-None, we force _isTransactionInProgress to return False in order to prove it is neither committed nor set
self.aggregateRepository._isTransactionInProgress = mock.MagicMock(return_value = False)
self.aggregateRepository.Commit()
self.assertEquals(self.aggregateRepository.transactionRepository, {'not':'cleared'})
self.assertFalse(self.aggregateRepository.transactionEvent.set.called)
def test_GetWithInvalidAggregateId(self):
self.assertRaises(ChronosCoreException, self.aggregateRepository.Get, 0)
def test_GetWithUncachedAggregateId(self):
self.mockEventReader.TryGetSnapshot.return_value = 'snapshot'
mockAggregateClass = mock.MagicMock()
mockAggregateClass.FromDict.return_value = 'snapshot'
self.aggregateRepository.aggregateClass = mockAggregateClass
aggregate = self.aggregateRepository.Get(1)
self.assertEqual(aggregate, 'snapshot')
def test_GetWithCachedAggregateId(self):
self.aggregateRepository.repository[2] = 'cached'
aggregate = self.aggregateRepository.Get(2)
self.assertEqual(aggregate, 'cached')
def test_GetShouldCacheAggregate(self):
self.mockEventReader.TryGetSnapshot.return_value = 'snapshot'
mockAggregateClass = mock.MagicMock()
mockAggregateClass.FromDict.return_value = 'snapshot'
self.aggregateRepository.aggregateClass = mockAggregateClass
self.aggregateRepository.Get(1)
aggregate = self.aggregateRepository.Get(1)
self.assertEqual(aggregate, 'snapshot')
self.assertEqual(self.mockEventReader.TryGetSnapshot.call_count, 1)
def test_GetWithMissingAggregateIdShouldThrowException(self):
self.mockEventReader.TryGetSnapshot.return_value = None
self.assertRaises(ChronosCoreException, self.aggregateRepository.Get, 1)
def test_GetAll(self):
self.mockEventReader.GetAllSnapshots.return_value = 'aggregates'
self.assertEqual(self.aggregateRepository.GetAll(), 'aggregates')
self.mockEventReader.GetAllSnapshots.assert_called_once()
class EventProcessorTest(unittest.TestCase):
def setUp(self):
googleMock = mock.MagicMock()
protobufMock = googleMock.protobuf
protobufMock.message = mock.MagicMock()
protobufMock.message.Message = mock.MagicMock
self.patcher = mock.patch.dict('sys.modules',
{'Chronos.EventLogger': mock.MagicMock(),
'Chronos.Chronos_pb2': mock.MagicMock(),
'google': googleMock,
'google.protobuf': protobufMock,
'ordereddict': mock.MagicMock()})
self.patcher.start()
global ChronosCoreException, PersistenceBufferItem, PersistenceBufferManagementItem, TestAggregate
from Chronos.Core import (EventProcessor, ChronosCoreException, PersistenceBufferItem,
AggregateLogicCompiler, PersistenceBufferManagementItem, Aggregate)
from Chronos.Infrastructure import AbstractEventPersister, AbstractLogicStore, AbstractEventReader
mockDescriptor = mock.MagicMock()
mockDescriptor.fields = []
class MockProto(mock.MagicMock):
DESCRIPTOR = mockDescriptor
class TestAggregate(Aggregate):
Proto = MockProto
self.mockEventPersister = mock.MagicMock(spec=AbstractEventPersister)
self.eventProcessor = EventProcessor(TestAggregate, self.mockEventPersister)
def tearDown(self):
self.patcher.stop()
@mock.patch('time.time')
def test_ProcessSuccess(self, mockTime):
mockEvent = mock.MagicMock()
mockEvent.version = 1
mockAggregate = mock.MagicMock()
mockAggregate.version = 1
mockTime.return_value = 3
logicId = 1
self.eventProcessor.Process(mock.MagicMock(receivedTimestamp=1000), mockAggregate, logicId, mockEvent)
mockEvent.RaiseFor.assert_called_once_with(mockAggregate)
self.assertEqual(mockAggregate.version, 2)
def test_ProcessWithMismatchingVersionsShouldFail(self):
mockEvent = mock.MagicMock()
mockEvent.version = 1
mockAggregate = mock.MagicMock()
mockAggregate.version = 2
        logicId = 1
self.assertRaises(ChronosCoreException, self.eventProcessor.Process, mock.MagicMock(receivedTimestamp=1000),
mockAggregate, logicId, mockEvent)
@mock.patch('Chronos.Core.PersistenceBufferManagementItem')
def test_ProcessIndexDivergence(self, bufferItem):
self.eventProcessor.ProcessIndexDivergence(123)
item, = self.eventProcessor.persistenceBuffer
self.assertEqual(item, bufferItem().Serialize())
def test_FlushPersistenceBufferWithEmptyBufferDoesNothing(self):
self.eventProcessor.FlushPersistenceBuffer()
self.assertFalse(self.eventProcessor.persistenceBuffer)
@mock.patch('time.time')
def test_FlushPersistenceBufferWithBufferedItemsAndOverrideRemovesAll(self, mockTime):
self.eventProcessor.EnqueueForPersistence(mock.MagicMock())
self.eventProcessor.FlushPersistenceBuffer(shouldForce=True)
self.assertFalse(self.eventProcessor.persistenceBuffer)
@mock.patch('time.time')
def test_FlushPersistenceBufferWithoutOverideDoesNotRemoveItems(self, mockTime):
self.eventProcessor.EnqueueForPersistence(mock.MagicMock())
self.eventProcessor.FlushPersistenceBuffer()
self.assertEqual(len(self.eventProcessor.persistenceBuffer), 1)
@mock.patch('time.time')
def test_FlushPersistenceBufferBeyondLimitWithoutOverrideRemovesAll(self, mockTime):
for _ in xrange(20):
self.eventProcessor.EnqueueForPersistence(mock.MagicMock())
self.eventProcessor.FlushPersistenceBuffer()
self.assertFalse(self.eventProcessor.persistenceBuffer)
@mock.patch('time.time')
def test_Transaction_rollback(self, mockTime):
self.eventProcessor.Begin()
self.eventProcessor.ProcessIndexDivergence(1)
self.eventProcessor.ProcessFailure('req', 2)
self.eventProcessor.ProcessTag(3, 'tag', 'expr', 1)
self.eventProcessor.EnqueueForPersistence(4)
self.eventProcessor.Rollback()
self.eventProcessor.FlushPersistenceBuffer(shouldForce=True)
self.mockEventPersister.PersistEvents.assert_called_once_with([])
@mock.patch('Chronos.Core.PersistenceBufferTagItem')
@mock.patch('Chronos.Core.PersistenceBufferFailureItem')
@mock.patch('Chronos.Core.PersistenceBufferManagementItem')
@mock.patch('time.time')
def test_Transaction_commit(self, mockTime, managementItem, failureItem, tagItem):
self.eventProcessor.Begin()
self.eventProcessor.ProcessIndexDivergence(1)
self.eventProcessor.ProcessFailure('req', 2)
self.eventProcessor.ProcessTag(3, 'tag', 'expr', 1)
self.eventProcessor.EnqueueForPersistence(mock.MagicMock())
self.assertEqual(len(self.eventProcessor.transactionBuffer), 4)
self.eventProcessor.Commit()
self.assertEqual(len(self.eventProcessor.persistenceBuffer), 4)
self.eventProcessor.FlushPersistenceBuffer(shouldForce=True)
self.assertEqual(len(self.eventProcessor.persistenceBuffer), 0)
def test_UpsertAggregateSnapshot(self):
aggregate = 'fake_aggregate'
self.mockEventPersister.UpsertAggregateSnapshot = mock.MagicMock()
self.eventProcessor.UpsertAggregateSnapshot(aggregate)
self.mockEventPersister.UpsertAggregateSnapshot.assert_called_once_with('fake_aggregate')
self.assertEquals(self.eventProcessor.aggregateSnapshotBuffer, ['fake_aggregate'])
self.assertEquals(self.eventProcessor.aggregateSnapshotTransactionBuffer, [])
def test_UpsertAggregateSnapshotLostConnection(self):
aggregate = 'fake_aggregate'
self.mockEventPersister.UpsertAggregateSnapshot = mock.MagicMock()
self.eventProcessor._ensureAggregateSnapshotConsistency = mock.MagicMock()
self.eventProcessor.lostConnection = True
self.eventProcessor.UpsertAggregateSnapshot(aggregate)
self.eventProcessor._ensureAggregateSnapshotConsistency.assert_called_once()
self.mockEventPersister.UpsertAggregateSnapshot.assert_called_once_with('fake_aggregate')
self.assertEquals(self.eventProcessor.aggregateSnapshotBuffer, ['fake_aggregate'])
self.assertEquals(self.eventProcessor.aggregateSnapshotTransactionBuffer, [])
def test_UpsertAggregateSnapshotTransactionInProgress(self):
aggregate = 'fake_aggregate'
self.mockEventPersister.UpsertAggregateSnapshot = mock.MagicMock()
self.eventProcessor._isTransactionInProgress = mock.MagicMock(return_value = True)
self.eventProcessor.UpsertAggregateSnapshot(aggregate)
self.mockEventPersister.UpsertAggregateSnapshot.assert_called_once_with('fake_aggregate')
self.assertEquals(self.eventProcessor.aggregateSnapshotBuffer, [])
self.assertEquals(self.eventProcessor.aggregateSnapshotTransactionBuffer, ['fake_aggregate'])
# How to test recursion?
# def test_UpsertBadAggregateSnapshot(self):
# aggregate = 'fake_aggregate'
# self.mockEventPersister.UpsertAggregateSnapshot = mock.MagicMock()
# self.eventProcessor._isTransactionInProgress = mock.MagicMock(return_value = True)
# self.eventProcessor.UpsertAggregateSnapshot(aggregate)
# self.mockEventPersister.UpsertAggregateSnapshot.assert_called_once_with('fake_aggregate')
# self.assertEquals(self.eventProcessor.aggregateSnapshotBuffer, [])
# self.assertEquals(self.eventProcessor.aggregateSnapshotTransactionBuffer, ['fake_aggregate'])
# self.assertTrue(self.eventProcessor.lostConnection)
def test_EnsureAggregateSnapshotConsistency(self):
self.eventProcessor.aggregateSnapshotBuffer = ['aggregate1', 'aggregate2']
self.eventProcessor.aggregateSnapshotTransactionBuffer = ['aggregate3', 'aggregate4']
calls = [mock.call('aggregate3'), mock.call('aggregate4'),
mock.call('aggregate1'), mock.call('aggregate2')]
self.eventProcessor._ensureAggregateSnapshotConsistency()
        self.mockEventPersister.UpsertAggregateSnapshot.assert_has_calls(calls)
self.assertFalse(self.eventProcessor.lostConnection)
class ChronosConstraintTest(unittest.TestCase):
def setUp(self):
googleMock = mock.MagicMock()
protobufMock = googleMock.protobuf
protobufMock.message = mock.MagicMock()
protobufMock.message.Message = mock.MagicMock
self.patcher = mock.patch.dict('sys.modules',
{'Chronos.EventLogger': mock.MagicMock(),
'Chronos.Chronos_pb2': mock.MagicMock(),
'google': googleMock,
'google.protobuf': protobufMock,
'ordereddict': mock.MagicMock(),
're': mock.MagicMock()})
self.patcher.start()
global FakeConstraint, ChronosConstraint, ChronosCoreException
from Chronos.Core import ChronosConstraint, ChronosCoreException
class FakeConstraint(ChronosConstraint):
def Create():
pass
def Drop():
pass
self.mockRegEx = mock.MagicMock()
def tearDown(self):
self.patcher.stop()
@mock.patch('Chronos.Core.re')
def test_Construction(self, re):
self.mockRegEx.match.return_value = True
re.compile.return_value = self.mockRegEx
constraint = FakeConstraint('attribute1', 'attribute2', name='valid')
self.assertEqual(constraint.name, 'valid')
self.assertEqual(constraint.attributes, set(['attribute1', 'attribute2']))
re.compile.assert_called_once_with(r'^[a-zA-Z0-9_]+$')
def test_ConstraintNoName(self):
self.assertRaises(ChronosCoreException, FakeConstraint)
self.assertRaises(ChronosCoreException, FakeConstraint, None)
@mock.patch('Chronos.Core.re')
def test_ConstraintInvalidName(self, re):
self.mockRegEx.match.return_value = False
re.compile.return_value = self.mockRegEx
self.assertRaises(ChronosCoreException, FakeConstraint, name='not_valid')
@mock.patch('Chronos.Core.re')
    def test_ConstraintValidName(self, re):
self.mockRegEx.match.return_value = True
re.compile.return_value = self.mockRegEx
constraint = FakeConstraint('attribute1', 'attribute2', name='valid')
@mock.patch('Chronos.Core.re')
def test_ConstraintEqualityName(self, re):
self.mockRegEx.match.return_value = True
re.compile.return_value = self.mockRegEx
a1 = FakeConstraint('attribute1', 'attribute2', name='same')
a2 = FakeConstraint('attribute1', 'attribute2', name='same')
a3 = FakeConstraint('attribute1', 'attribute2', name='different')
self.assertEqual(a1, a2)
self.assertNotEqual(a1, a3)
@mock.patch('Chronos.Core.re')
def test_ConstraintEqualityAttributes(self, re):
self.mockRegEx.match.return_value = True
re.compile.return_value = self.mockRegEx
b1 = FakeConstraint('attribute1', 'attribute2', name='same')
b2 = FakeConstraint('attribute1', 'attribute2', name='same')
b3 = FakeConstraint('attribute1', 'attribute3', name='same')
self.assertEqual(b1, b2)
self.assertNotEqual(b1, b3)
class DummyConstraint(object):
def __init__(self, name):
self.name = name
class ConstraintComparerTest(unittest.TestCase):
def setUp(self):
googleMock = mock.MagicMock()
protobufMock = googleMock.protobuf
protobufMock.message = mock.MagicMock()
protobufMock.message.Message = mock.MagicMock
self.patcher = mock.patch.dict('sys.modules',
{'Chronos.EventLogger': mock.MagicMock(),
'Chronos.Chronos_pb2': mock.MagicMock(),
'google': googleMock,
'google.protobuf': protobufMock,
'ordereddict': mock.MagicMock()})
self.patcher.start()
global ConstraintComparer
from Chronos.Core import ConstraintComparer
self.a = DummyConstraint("a")
self.b = DummyConstraint("b")
self.c = DummyConstraint("c")
self.d = DummyConstraint("d")
self.e = DummyConstraint("e")
def tearDown(self):
self.patcher.stop()
@parameterized.expand([
([],[],'','', [], []),
([],[],'new','new', [], []),
        (['not'],['same'],'a','a', [], []), # identical serialized forms, so nothing changes; in practice the serialized forms would never match while the constraints differ
(['a'],[],'not','same', ['a'], []),
(['a','b','c'],[],'not','same', ['a','b','c'], []),
([],['a'],'not','same', [], ['a']),
([],['a','b','c'],'not','same', [], ['a','b','c']),
])
def test_basic_comparison(self, newConstraints, oldConstraints, serializedNew, serializedOld, expectedAdd, expectedRemove):
comparer = ConstraintComparer(newConstraints, oldConstraints, serializedNew, serializedOld)
divergence = comparer.UpdateConstraints()
self.assertEqual(expectedAdd, divergence.constraintsToAdd)
self.assertEqual(expectedRemove, divergence.constraintsToRemove)
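    # For reference (an added note, not part of the original suite): each row
    # above is (newConstraints, oldConstraints, serializedNew, serializedOld,
    # expectedAdd, expectedRemove). For example, (['a'], [], 'not', 'same',
    # ['a'], []) means one new constraint with differing serialized logic, so
    # 'a' should land in divergence.constraintsToAdd.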
def test_index_divergence_all_new(self):
comparer = ConstraintComparer([self.a, self.b], [self.c, self.d], "not", "same")
divergence = comparer.UpdateConstraints()
self.assertEqual(set([self.a, self.b]), set(divergence.constraintsToAdd))
self.assertEqual(set([self.c, self.d]), set(divergence.constraintsToRemove))
def test_index_divergence_some_new(self):
comparer = ConstraintComparer([self.a, self.b], [self.b, self.c, self.d], "not", "same")
divergence = comparer.UpdateConstraints()
self.assertEqual(set([self.a]), set(divergence.constraintsToAdd))
self.assertEqual(set([self.c, self.d]), set(divergence.constraintsToRemove))
def test_index_divergence_one_modified(self):
newB = DummyConstraint("b")
comparer = ConstraintComparer([self.a, newB], [self.b, self.c, self.d], "not", "same")
divergence = comparer.UpdateConstraints()
self.assertEqual(set([self.a, newB]), set(divergence.constraintsToAdd))
self.assertEqual(set([self.b, self.c, self.d]), set(divergence.constraintsToRemove))
def test_index_divergence_many_modified(self):
newA = DummyConstraint("a")
newC = DummyConstraint("c")
newE = DummyConstraint("e")
comparer = ConstraintComparer([newA, newC, newE, self.b], [self.a, self.c, self.d, self.e,], "not", "same")
divergence = comparer.UpdateConstraints()
self.assertEqual(set([newA, newC, newE, self.b]), set(divergence.constraintsToAdd))
self.assertEqual(set([self.a, self.c, self.d, self.e]), set(divergence.constraintsToRemove))
|
peterm-itr/edx-platform
|
refs/heads/master
|
lms/djangoapps/instructor_task/subtasks.py
|
11
|
"""
This module contains celery task functions for handling the management of subtasks.
"""
from time import time
import json
from uuid import uuid4
import psutil
from contextlib import contextmanager
from celery.utils.log import get_task_logger
from celery.states import SUCCESS, READY_STATES, RETRY
import dogstats_wrapper as dog_stats_api
from django.db import transaction, DatabaseError
from django.core.cache import cache
from instructor_task.models import InstructorTask, PROGRESS, QUEUING
TASK_LOG = get_task_logger(__name__)
# Lock expiration should be long enough to allow a subtask to complete.
SUBTASK_LOCK_EXPIRE = 60 * 10 # Lock expires in 10 minutes
# Number of times to retry if a subtask update encounters a lock on the InstructorTask.
# (These are recursive retries, so don't make this number too large.)
MAX_DATABASE_LOCK_RETRIES = 5
class DuplicateTaskException(Exception):
"""Exception indicating that a task already exists or has already completed."""
pass
def _get_number_of_subtasks(total_num_items, items_per_task):
"""
Determines number of subtasks that would be generated by _generate_items_for_subtask.
This needs to be calculated before the query is executed so that the list of all subtasks can be
stored in the InstructorTask before any subtasks are started.
The number of subtask_id values returned by this should match the number of chunks returned
by the generate_items_for_subtask generator.
"""
num_subtasks, remainder = divmod(total_num_items, items_per_task)
if remainder:
num_subtasks += 1
return num_subtasks
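# Worked example of the arithmetic above (illustrative only): for 250 items at
# 100 items per task, divmod(250, 100) == (2, 50); the nonzero remainder adds
# one extra chunk, so 3 subtasks are generated. An even split such as
# divmod(200, 100) == (2, 0) yields exactly 2.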
@contextmanager
def track_memory_usage(metric, course_id):
"""
Context manager to track how much memory (in bytes) a given process uses.
Metrics will look like: 'course_email.subtask_generation.memory.rss'
or 'course_email.subtask_generation.memory.vms'.
"""
memory_types = ['rss', 'vms']
process = psutil.Process()
baseline_memory_info = process.get_memory_info()
baseline_usages = [getattr(baseline_memory_info, memory_type) for memory_type in memory_types]
yield
for memory_type, baseline_usage in zip(memory_types, baseline_usages):
total_memory_info = process.get_memory_info()
total_usage = getattr(total_memory_info, memory_type)
memory_used = total_usage - baseline_usage
dog_stats_api.increment(
metric + "." + memory_type,
memory_used,
tags=["course_id:{}".format(course_id)],
)
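# Usage sketch (illustrative; mirrors the call in _generate_items_for_subtask
# below, where build_chunks() stands in for the real work):
#
#     with track_memory_usage('course_email.subtask_generation.memory', course_id):
#         build_chunks()
#
# This emits 'course_email.subtask_generation.memory.rss' and '...memory.vms',
# each tagged with the course_id.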
def _generate_items_for_subtask(
item_queryset,
item_fields,
total_num_items,
items_per_task,
total_num_subtasks,
course_id,
):
"""
Generates a chunk of "items" that should be passed into a subtask.
Arguments:
`item_queryset` : a query set that defines the "items" that should be passed to subtasks.
`item_fields` : the fields that should be included in the dict that is returned.
These are in addition to the 'pk' field.
`total_num_items` : the result of item_queryset.count().
    `items_per_task` : maximum number of items to include in the chunk handed to each subtask.
    `total_num_subtasks` : number of subtasks to generate, as computed by _get_number_of_subtasks().
`course_id` : course_id of the course. Only needed for the track_memory_usage context manager.
Returns: yields a list of dicts, where each dict contains the fields in `item_fields`, plus the 'pk' field.
Warning: if the algorithm here changes, the _get_number_of_subtasks() method should similarly be changed.
"""
num_items_queued = 0
all_item_fields = list(item_fields)
all_item_fields.append('pk')
num_subtasks = 0
items_for_task = []
with track_memory_usage('course_email.subtask_generation.memory', course_id):
for item in item_queryset.values(*all_item_fields).iterator():
if len(items_for_task) == items_per_task and num_subtasks < total_num_subtasks - 1:
yield items_for_task
num_items_queued += items_per_task
items_for_task = []
num_subtasks += 1
items_for_task.append(item)
# yield remainder items for task, if any
if items_for_task:
yield items_for_task
num_items_queued += len(items_for_task)
# Note, depending on what kind of DB is used, it's possible for the queryset
# we iterate over to change in the course of the query. Therefore it's
# possible that there are more (or fewer) items queued than were initially
# calculated. It also means it's possible that the last task contains
        # more items than items_per_task allows. We expect the discrepancy to be
        # small enough to be negligible.
if num_items_queued != total_num_items:
TASK_LOG.info("Number of items generated by chunking %s not equal to original total %s", num_items_queued, total_num_items)
class SubtaskStatus(object):
"""
Create and return a dict for tracking the status of a subtask.
SubtaskStatus values are:
'task_id' : id of subtask. This is used to pass task information across retries.
'attempted' : number of attempts -- should equal succeeded plus failed
'succeeded' : number that succeeded in processing
'skipped' : number that were not processed.
'failed' : number that failed during processing
'retried_nomax' : number of times the subtask has been retried for conditions that
should not have a maximum count applied
'retried_withmax' : number of times the subtask has been retried for conditions that
should have a maximum count applied
'state' : celery state of the subtask (e.g. QUEUING, PROGRESS, RETRY, FAILURE, SUCCESS)
Object is not JSON-serializable, so to_dict and from_dict methods are provided so that
it can be passed as a serializable argument to tasks (and be reconstituted within such tasks).
In future, we may want to include specific error information
indicating the reason for failure.
Also, we should count up "not attempted" separately from attempted/failed.
"""
def __init__(self, task_id, attempted=None, succeeded=0, failed=0, skipped=0, retried_nomax=0, retried_withmax=0, state=None):
"""Construct a SubtaskStatus object."""
self.task_id = task_id
if attempted is not None:
self.attempted = attempted
else:
self.attempted = succeeded + failed
self.succeeded = succeeded
self.failed = failed
self.skipped = skipped
self.retried_nomax = retried_nomax
self.retried_withmax = retried_withmax
self.state = state if state is not None else QUEUING
@classmethod
    def from_dict(cls, d):
        """Construct a SubtaskStatus object from a dict representation."""
        options = dict(d)
        task_id = options.pop('task_id')
        return cls.create(task_id, **options)
@classmethod
    def create(cls, task_id, **options):
        """Construct a SubtaskStatus object."""
        return cls(task_id, **options)
def to_dict(self):
"""
Output a dict representation of a SubtaskStatus object.
Use for creating a JSON-serializable representation for use by tasks.
"""
return self.__dict__
def increment(self, succeeded=0, failed=0, skipped=0, retried_nomax=0, retried_withmax=0, state=None):
"""
Update the result of a subtask with additional results.
Kwarg arguments are incremented to the existing values.
The exception is for `state`, which if specified is used to override the existing value.
"""
self.attempted += (succeeded + failed)
self.succeeded += succeeded
self.failed += failed
self.skipped += skipped
self.retried_nomax += retried_nomax
self.retried_withmax += retried_withmax
if state is not None:
self.state = state
def get_retry_count(self):
"""Returns the number of retries of any kind."""
return self.retried_nomax + self.retried_withmax
def __repr__(self):
"""Return print representation of a SubtaskStatus object."""
return 'SubtaskStatus<%r>' % (self.to_dict(),)
def __unicode__(self):
"""Return unicode version of a SubtaskStatus object representation."""
return unicode(repr(self))
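# Round-trip sketch (illustrative): a SubtaskStatus travels between tasks as a
# plain dict, e.g.
#
#     status = SubtaskStatus.create('some-task-id', succeeded=3, failed=1)
#     payload = status.to_dict()                  # JSON-serializable
#     restored = SubtaskStatus.from_dict(payload)
#     assert restored.attempted == 4              # succeeded + failed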
def initialize_subtask_info(entry, action_name, total_num, subtask_id_list):
"""
Store initial subtask information to InstructorTask object.
The InstructorTask's "task_output" field is initialized. This is a JSON-serialized dict.
Counters for 'attempted', 'succeeded', 'failed', 'skipped' keys are initialized to zero,
as is the 'duration_ms' value. A 'start_time' is stored for later duration calculations,
and the total number of "things to do" is set, so the user can be told how much needs to be
done overall. The `action_name` is also stored, to help with constructing more readable
task_progress messages.
The InstructorTask's "subtasks" field is also initialized. This is also a JSON-serialized dict.
Keys include 'total', 'succeeded', 'retried', 'failed', which are counters for the number of
subtasks. 'Total' is set here to the total number, while the other three are initialized to zero.
Once the counters for 'succeeded' and 'failed' match the 'total', the subtasks are done and
the InstructorTask's "status" will be changed to SUCCESS.
The "subtasks" field also contains a 'status' key, that contains a dict that stores status
information for each subtask. The value for each subtask (keyed by its task_id)
is its subtask status, as defined by SubtaskStatus.to_dict().
This information needs to be set up in the InstructorTask before any of the subtasks start
running. If not, there is a chance that the subtasks could complete before the parent task
is done creating subtasks. Doing so also simplifies the save() here, as it avoids the need
for locking.
Monitoring code should assume that if an InstructorTask has subtask information, that it should
rely on the status stored in the InstructorTask object, rather than status stored in the
corresponding AsyncResult.
"""
task_progress = {
'action_name': action_name,
'attempted': 0,
'failed': 0,
'skipped': 0,
'succeeded': 0,
'total': total_num,
'duration_ms': int(0),
'start_time': time()
}
entry.task_output = InstructorTask.create_output_for_success(task_progress)
entry.task_state = PROGRESS
# Write out the subtasks information.
num_subtasks = len(subtask_id_list)
    # Note that it may not be necessary to store the initial value with all those zeroes!
# Write out as a dict, so it will go more smoothly into json.
subtask_status = {subtask_id: (SubtaskStatus.create(subtask_id)).to_dict() for subtask_id in subtask_id_list}
subtask_dict = {
'total': num_subtasks,
'succeeded': 0,
'failed': 0,
'status': subtask_status
}
entry.subtasks = json.dumps(subtask_dict)
# and save the entry immediately, before any subtasks actually start work:
entry.save_now()
return task_progress
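# Shape of the persisted 'subtasks' JSON right after initialization
# (illustrative, with two subtasks):
#
#     {"total": 2, "succeeded": 0, "failed": 0,
#      "status": {"<subtask_id_1>": {"task_id": "<subtask_id_1>", "attempted": 0,
#                                    "succeeded": 0, "failed": 0, "skipped": 0,
#                                    "retried_nomax": 0, "retried_withmax": 0,
#                                    "state": <QUEUING>},
#                 "<subtask_id_2>": {...}}}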
def queue_subtasks_for_query(entry, action_name, create_subtask_fcn, item_queryset, item_fields, items_per_task):
"""
Generates and queues subtasks to each execute a chunk of "items" generated by a queryset.
Arguments:
`entry` : the InstructorTask object for which subtasks are being queued.
`action_name` : a past-tense verb that can be used for constructing readable status messages.
`create_subtask_fcn` : a function of two arguments that constructs the desired kind of subtask object.
Arguments are the list of items to be processed by this subtask, and a SubtaskStatus
object reflecting initial status (and containing the subtask's id).
`item_queryset` : a query set that defines the "items" that should be passed to subtasks.
`item_fields` : the fields that should be included in the dict that is returned.
These are in addition to the 'pk' field.
`items_per_task` : maximum size of chunks to break each query chunk into for use by a subtask.
Returns: the task progress as stored in the InstructorTask object.
"""
task_id = entry.task_id
total_num_items = item_queryset.count()
# Calculate the number of tasks that will be created, and create a list of ids for each task.
total_num_subtasks = _get_number_of_subtasks(total_num_items, items_per_task)
subtask_id_list = [str(uuid4()) for _ in range(total_num_subtasks)]
# Update the InstructorTask with information about the subtasks we've defined.
TASK_LOG.info(
"Task %s: updating InstructorTask %s with subtask info for %s subtasks to process %s items.",
task_id,
entry.id,
total_num_subtasks,
total_num_items,
) # pylint: disable=no-member
progress = initialize_subtask_info(entry, action_name, total_num_items, subtask_id_list)
# Construct a generator that will return the recipients to use for each subtask.
# Pass in the desired fields to fetch for each recipient.
item_list_generator = _generate_items_for_subtask(
item_queryset,
item_fields,
total_num_items,
items_per_task,
total_num_subtasks,
entry.course_id,
)
# Now create the subtasks, and start them running.
TASK_LOG.info(
"Task %s: creating %s subtasks to process %s items.",
task_id,
total_num_subtasks,
total_num_items,
)
num_subtasks = 0
for item_list in item_list_generator:
subtask_id = subtask_id_list[num_subtasks]
num_subtasks += 1
subtask_status = SubtaskStatus.create(subtask_id)
new_subtask = create_subtask_fcn(item_list, subtask_status)
new_subtask.apply_async()
# Subtasks have been queued so no exceptions should be raised after this point.
# Return the task progress as stored in the InstructorTask object.
return progress
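# Sketch of a create_subtask_fcn argument (illustrative; send_course_email is a
# hypothetical celery task -- any callable returning an object that supports
# apply_async() works):
#
#     def make_email_subtask(item_list, subtask_status):
#         return send_course_email.subtask(
#             (entry.id, email_id, item_list, subtask_status.to_dict()),
#             task_id=subtask_status.task_id,
#         )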
def _acquire_subtask_lock(task_id):
"""
Mark the specified task_id as being in progress.
This is used to make sure that the same task is not worked on by more than one worker
at the same time. This can occur when tasks are requeued by Celery in response to
loss of connection to the task broker. Most of the time, such duplicate tasks are
run sequentially, but they can overlap in processing as well.
Returns true if the task_id was not already locked; false if it was.
"""
# cache.add fails if the key already exists
key = "subtask-{}".format(task_id)
succeeded = cache.add(key, 'true', SUBTASK_LOCK_EXPIRE)
if not succeeded:
TASK_LOG.warning("task_id '%s': already locked. Contains value '%s'", task_id, cache.get(key))
return succeeded
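# The lock hinges on cache.add() being atomic: only the first caller for a key
# wins. Typical pattern (illustrative; process() is a hypothetical stand-in):
#
#     if _acquire_subtask_lock(task_id):
#         try:
#             process(task_id)
#         finally:
#             _release_subtask_lock(task_id)   # defined below
#     else:
#         pass  # duplicate delivery detected; refuse to run the work twice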
def _release_subtask_lock(task_id):
"""
Unmark the specified task_id as being no longer in progress.
This is most important to permit a task to be retried.
"""
# According to Celery task cookbook, "Memcache delete is very slow, but we have
# to use it to take advantage of using add() for atomic locking."
key = "subtask-{}".format(task_id)
cache.delete(key)
def check_subtask_is_valid(entry_id, current_task_id, new_subtask_status):
"""
Confirms that the current subtask is known to the InstructorTask and hasn't already been completed.
    Problems can occur when the parent task has been run twice, resulting in duplicate
    subtasks being created for the same InstructorTask entry. This can happen when Celery
    loses its connection to its broker and any current tasks get requeued.
If a parent task gets requeued, then the same InstructorTask may have a different set of
subtasks defined (to do the same thing), so the subtasks from the first queuing would not
    be known to the InstructorTask. We raise an exception in this case.
If a subtask gets requeued, then the first time the subtask runs it should run fine to completion.
However, we want to prevent it from running again, so we check here to see what the existing
subtask's status is. If it is complete, we raise an exception. We also take a lock on the task,
so that we can detect if another worker has started work but has not yet completed that work.
The other worker is allowed to finish, and this raises an exception.
Raises a DuplicateTaskException exception if it's not a task that should be run.
If this succeeds, it requires that update_subtask_status() is called to release the lock on the
task.
"""
# Confirm that the InstructorTask actually defines subtasks.
entry = InstructorTask.objects.get(pk=entry_id)
if len(entry.subtasks) == 0:
format_str = "Unexpected task_id '{}': unable to find subtasks of instructor task '{}': rejecting task {}"
msg = format_str.format(current_task_id, entry, new_subtask_status)
TASK_LOG.warning(msg)
dog_stats_api.increment('instructor_task.subtask.duplicate.nosubtasks', tags=[entry.course_id])
raise DuplicateTaskException(msg)
# Confirm that the InstructorTask knows about this particular subtask.
subtask_dict = json.loads(entry.subtasks)
subtask_status_info = subtask_dict['status']
if current_task_id not in subtask_status_info:
format_str = "Unexpected task_id '{}': unable to find status for subtask of instructor task '{}': rejecting task {}"
msg = format_str.format(current_task_id, entry, new_subtask_status)
TASK_LOG.warning(msg)
dog_stats_api.increment('instructor_task.subtask.duplicate.unknown', tags=[entry.course_id])
raise DuplicateTaskException(msg)
# Confirm that the InstructorTask doesn't think that this subtask has already been
# performed successfully.
subtask_status = SubtaskStatus.from_dict(subtask_status_info[current_task_id])
subtask_state = subtask_status.state
if subtask_state in READY_STATES:
format_str = "Unexpected task_id '{}': already completed - status {} for subtask of instructor task '{}': rejecting task {}"
msg = format_str.format(current_task_id, subtask_status, entry, new_subtask_status)
TASK_LOG.warning(msg)
dog_stats_api.increment('instructor_task.subtask.duplicate.completed', tags=[entry.course_id])
raise DuplicateTaskException(msg)
# Confirm that the InstructorTask doesn't think that this subtask is already being
# retried by another task.
if subtask_state == RETRY:
# Check to see if the input number of retries is less than the recorded number.
# If so, then this is an earlier version of the task, and a duplicate.
new_retry_count = new_subtask_status.get_retry_count()
current_retry_count = subtask_status.get_retry_count()
if new_retry_count < current_retry_count:
format_str = "Unexpected task_id '{}': already retried - status {} for subtask of instructor task '{}': rejecting task {}"
msg = format_str.format(current_task_id, subtask_status, entry, new_subtask_status)
TASK_LOG.warning(msg)
dog_stats_api.increment('instructor_task.subtask.duplicate.retried', tags=[entry.course_id])
raise DuplicateTaskException(msg)
# Now we are ready to start working on this. Try to lock it.
# If it fails, then it means that another worker is already in the
# middle of working on this.
if not _acquire_subtask_lock(current_task_id):
format_str = "Unexpected task_id '{}': already being executed - for subtask of instructor task '{}'"
msg = format_str.format(current_task_id, entry)
TASK_LOG.warning(msg)
dog_stats_api.increment('instructor_task.subtask.duplicate.locked', tags=[entry.course_id])
raise DuplicateTaskException(msg)
def update_subtask_status(entry_id, current_task_id, new_subtask_status, retry_count=0):
"""
Update the status of the subtask in the parent InstructorTask object tracking its progress.
Because select_for_update is used to lock the InstructorTask object while it is being updated,
multiple subtasks updating at the same time may time out while waiting for the lock.
The actual update operation is surrounded by a try/except/else that permits the update to be
retried if the transaction times out.
The subtask lock acquired in the call to check_subtask_is_valid() is released here, only when
the attempting of retries has concluded.
"""
try:
_update_subtask_status(entry_id, current_task_id, new_subtask_status)
except DatabaseError:
# If we fail, try again recursively.
retry_count += 1
if retry_count < MAX_DATABASE_LOCK_RETRIES:
TASK_LOG.info("Retrying to update status for subtask %s of instructor task %d with status %s: retry %d",
current_task_id, entry_id, new_subtask_status, retry_count)
dog_stats_api.increment('instructor_task.subtask.retry_after_failed_update')
update_subtask_status(entry_id, current_task_id, new_subtask_status, retry_count)
else:
TASK_LOG.info("Failed to update status after %d retries for subtask %s of instructor task %d with status %s",
retry_count, current_task_id, entry_id, new_subtask_status)
dog_stats_api.increment('instructor_task.subtask.failed_after_update_retries')
raise
finally:
# Only release the lock on the subtask when we're done trying to update it.
# Note that this will be called each time a recursive call to update_subtask_status()
# returns. Fortunately, it's okay to release a lock that has already been released.
_release_subtask_lock(current_task_id)
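# Retry behaviour sketch (an added note): a DatabaseError triggers up to
# MAX_DATABASE_LOCK_RETRIES - 1 recursive re-attempts (retry_count 1 through 4);
# the final failure is re-raised, and the finally block releases the subtask
# lock once per frame as the recursion unwinds -- releasing an already-released
# lock is harmless.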
@transaction.commit_manually
def _update_subtask_status(entry_id, current_task_id, new_subtask_status):
"""
Update the status of the subtask in the parent InstructorTask object tracking its progress.
Uses select_for_update to lock the InstructorTask object while it is being updated.
    The operation is surrounded by a try/except/else that permits the manual transaction to be
committed on completion, or rolled back on error.
The InstructorTask's "task_output" field is updated. This is a JSON-serialized dict.
Accumulates values for 'attempted', 'succeeded', 'failed', 'skipped' from `new_subtask_status`
into the corresponding values in the InstructorTask's task_output. Also updates the 'duration_ms'
value with the current interval since the original InstructorTask started. Note that this
value is only approximate, since the subtask may be running on a different server than the
original task, so is subject to clock skew.
The InstructorTask's "subtasks" field is also updated. This is also a JSON-serialized dict.
Keys include 'total', 'succeeded', 'retried', 'failed', which are counters for the number of
subtasks. 'Total' is expected to have been set at the time the subtasks were created.
The other three counters are incremented depending on the value of `status`. Once the counters
for 'succeeded' and 'failed' match the 'total', the subtasks are done and the InstructorTask's
"status" is changed to SUCCESS.
The "subtasks" field also contains a 'status' key, that contains a dict that stores status
information for each subtask. At the moment, the value for each subtask (keyed by its task_id)
is the value of the SubtaskStatus.to_dict(), but could be expanded in future to store information
about failure messages, progress made, etc.
"""
TASK_LOG.info("Preparing to update status for subtask %s for instructor task %d with status %s",
current_task_id, entry_id, new_subtask_status)
try:
entry = InstructorTask.objects.select_for_update().get(pk=entry_id)
subtask_dict = json.loads(entry.subtasks)
subtask_status_info = subtask_dict['status']
if current_task_id not in subtask_status_info:
# unexpected error -- raise an exception
format_str = "Unexpected task_id '{}': unable to update status for subtask of instructor task '{}'"
msg = format_str.format(current_task_id, entry_id)
TASK_LOG.warning(msg)
raise ValueError(msg)
# Update status:
subtask_status_info[current_task_id] = new_subtask_status.to_dict()
# Update the parent task progress.
# Set the estimate of duration, but only if it
# increases. Clock skew between time() returned by different machines
# may result in non-monotonic values for duration.
task_progress = json.loads(entry.task_output)
start_time = task_progress['start_time']
prev_duration = task_progress['duration_ms']
new_duration = int((time() - start_time) * 1000)
task_progress['duration_ms'] = max(prev_duration, new_duration)
# Update counts only when subtask is done.
# In future, we can make this more responsive by updating status
# between retries, by comparing counts that change from previous
# retry.
new_state = new_subtask_status.state
        if new_state in READY_STATES:
for statname in ['attempted', 'succeeded', 'failed', 'skipped']:
task_progress[statname] += getattr(new_subtask_status, statname)
# Figure out if we're actually done (i.e. this is the last task to complete).
# This is easier if we just maintain a counter, rather than scanning the
# entire new_subtask_status dict.
if new_state == SUCCESS:
subtask_dict['succeeded'] += 1
elif new_state in READY_STATES:
subtask_dict['failed'] += 1
num_remaining = subtask_dict['total'] - subtask_dict['succeeded'] - subtask_dict['failed']
# If we're done with the last task, update the parent status to indicate that.
# At present, we mark the task as having succeeded. In future, we should see
# if there was a catastrophic failure that occurred, and figure out how to
# report that here.
if num_remaining <= 0:
entry.task_state = SUCCESS
entry.subtasks = json.dumps(subtask_dict)
entry.task_output = InstructorTask.create_output_for_success(task_progress)
TASK_LOG.debug("about to save....")
entry.save()
TASK_LOG.info("Task output updated to %s for subtask %s of instructor task %d",
entry.task_output, current_task_id, entry_id)
except Exception:
TASK_LOG.exception("Unexpected error while updating InstructorTask.")
transaction.rollback()
dog_stats_api.increment('instructor_task.subtask.update_exception')
raise
else:
TASK_LOG.debug("about to commit....")
transaction.commit()
|
pgaref/HTTP_Request_Randomizer
|
refs/heads/master
|
tests/__init__.py
|
12133432
| |
Jorge-Rodriguez/ansible
|
refs/heads/devel
|
test/integration/targets/module_utils/other_mu_dir/a/__init__.py
|
12133432
| |
erikr/django
|
refs/heads/master
|
tests/sites_tests/__init__.py
|
12133432
|