repo_name
stringlengths 5
100
| ref
stringlengths 12
67
| path
stringlengths 4
244
| copies
stringlengths 1
8
| content
stringlengths 0
1.05M
⌀ |
|---|---|---|---|---|
miloszz/DIRAC
|
refs/heads/integration
|
DataManagementSystem/Client/FailoverTransfer.py
|
3
|
""" Failover Transfer
The failover transfer client exposes the following methods:
- transferAndRegisterFile()
- transferAndRegisterFileFailover()
Initially these methods were developed inside workflow modules but
have evolved to a generic 'transfer file with failover' client.
The transferAndRegisterFile() method will correctly set registration
requests in case of failure.
The transferAndRegisterFileFailover() method will attempt to upload
a file to a list of alternative SEs and set appropriate replication
to the original target SE as well as the removal request for the
temporary replica.
"""
__RCSID__ = "$Id$"
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.DataManagementSystem.Client.DataManager import DataManager
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getRegistrationProtocols
from DIRAC.Resources.Storage.StorageElement import StorageElement
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.Client.Operation import Operation
from DIRAC.RequestManagementSystem.Client.File import File
from DIRAC.RequestManagementSystem.private.RequestValidator import RequestValidator
from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
class FailoverTransfer( object ):
  """ .. class:: FailoverTransfer

      Client for uploading a file with failover.  transferAndRegisterFile()
      tries each SE in a list in turn and converts a failed catalog
      registration into a RegisterFile operation on the accumulated request;
      transferAndRegisterFileFailover() uploads to a failover SE and records
      a ReplicateAndRegister operation to the final target SE plus a
      RemoveReplica operation for the temporary replica.  The accumulated
      Request is obtained with getRequest() and shipped with commitRequest().
  """

  #############################################################################
  def __init__( self, requestObject = None, log = None, defaultChecksumType = 'ADLER32' ):
    """ Constructor function, can specify request object to instantiate
        FailoverTransfer or a new request object is created.

        :param requestObject: optional existing Request to append operations to
        :param log: optional logger; a "FailoverTransfer" sub-logger is created if None
        :param str defaultChecksumType: checksum type used when the file
               metadata does not provide one
    """
    self.log = log
    if not self.log:
      self.log = gLogger.getSubLogger( "FailoverTransfer" )

    self.request = requestObject
    if not self.request:
      self.request = Request()
      self.request.RequestName = 'noname_request'
      self.request.SourceComponent = 'FailoverTransfer'

    self.defaultChecksumType = defaultChecksumType
    self.registrationProtocols = getRegistrationProtocols()

  #############################################################################
  def transferAndRegisterFile( self,
                               fileName,
                               localPath,
                               lfn,
                               destinationSEList,
                               fileMetaDict,
                               fileCatalog = None,
                               masterCatalogOnly = False ):
    """Performs the transfer and register operation with failover.

       Each SE in destinationSEList is tried in order.  On full success
       returns S_OK with 'uploadedSE' and 'lfn'.  If the upload succeeded but
       the registration failed, a RegisterFile operation is added to the
       accumulated request and S_OK is returned with 'registration':'request'.
       Returns S_ERROR when every SE failed.
    """
    errorList = []
    fileGUID = fileMetaDict.get( "GUID", None )

    for se in destinationSEList:
      self.log.info( "Attempting dm.putAndRegister('%s','%s','%s',guid='%s',catalog='%s')" % ( lfn,
                                                                                               localPath,
                                                                                               se,
                                                                                               fileGUID,
                                                                                               fileCatalog ) )
      result = DataManager( catalogs = fileCatalog, masterCatalogOnly = masterCatalogOnly ).putAndRegister( lfn, localPath, se, guid = fileGUID )
      self.log.verbose( result )
      if not result['OK']:
        self.log.error( 'dm.putAndRegister failed with message', result['Message'] )
        errorList.append( result['Message'] )
        continue

      # An empty 'Failed' dict means both the upload and the registration worked.
      if not result['Value']['Failed']:
        self.log.info( 'dm.putAndRegister successfully uploaded and registered %s to %s' % ( fileName, se ) )
        return S_OK( {'uploadedSE':se, 'lfn':lfn} )

      # Now we know something went wrong
      self.log.warn( "Didn't manage to do everything, now adding requests for the missing operation" )

      errorDict = result['Value']['Failed'][lfn]
      if 'register' not in errorDict:
        self.log.error( 'dm.putAndRegister failed with unknown error', str( errorDict ) )
        errorList.append( 'Unknown error while attempting upload to %s' % se )
        continue

      # fileDict = errorDict['register']
      # Therefore the registration failed but the upload was successful
      if not fileCatalog:
        fileCatalog = ''
        if masterCatalogOnly:
          fileCatalog = FileCatalog().getMasterCatalogNames()['Value']
      # NOTE(review): the reassignment above mutates the fileCatalog argument
      # and therefore affects subsequent loop iterations -- confirm intended.

      result = self._setRegistrationRequest( lfn, se, fileMetaDict, fileCatalog )
      if not result['OK']:
        self.log.error( 'Failed to set registration request', 'SE %s and metadata: \n%s' % ( se, fileMetaDict ) )
        errorList.append( 'Failed to set registration request for: SE %s and metadata: \n%s' % ( se, fileMetaDict ) )
        continue
      else:
        self.log.info( 'Successfully set registration request for: SE %s and metadata: \n%s' % ( se, fileMetaDict ) )
        metadata = {}
        metadata['filedict'] = fileMetaDict
        metadata['uploadedSE'] = se
        metadata['lfn'] = lfn
        # Flag telling the caller that registration is deferred to the request.
        metadata['registration'] = 'request'
        return S_OK( metadata )

    self.log.error( 'Failed to upload output data file', 'Encountered %s errors' % len( errorList ) )
    return S_ERROR( 'Failed to upload output data file' )

  #############################################################################
  def transferAndRegisterFileFailover( self,
                                       fileName,
                                       localPath,
                                       lfn,
                                       targetSE,
                                       failoverSEList,
                                       fileMetaDict,
                                       fileCatalog = None,
                                       masterCatalogOnly = False ):
    """Performs the transfer and register operation to failover storage and sets the
       necessary replication and removal requests to recover.

       The file is uploaded to one of failoverSEList; then a replication to
       targetSE and a removal of the temporary failover replica are appended
       to the accumulated request.
    """
    failover = self.transferAndRegisterFile( fileName, localPath, lfn, failoverSEList, fileMetaDict, fileCatalog, masterCatalogOnly = masterCatalogOnly )
    if not failover['OK']:
      self.log.error( 'Could not upload file to failover SEs', failover['Message'] )
      return failover

    # set removal requests and replication requests
    result = self._setFileReplicationRequest( lfn, targetSE, fileMetaDict, sourceSE = failover['Value']['uploadedSE'] )
    if not result['OK']:
      self.log.error( 'Could not set file replication request', result['Message'] )
      return result

    lfn = failover['Value']['lfn']
    failoverSE = failover['Value']['uploadedSE']
    self.log.info( 'Attempting to set replica removal request for LFN %s at failover SE %s' % ( lfn, failoverSE ) )
    result = self._setReplicaRemovalRequest( lfn, failoverSE )
    if not result['OK']:
      self.log.error( 'Could not set removal request', result['Message'] )
      return result

    return S_OK( {'uploadedSE':failoverSE, 'lfn':lfn} )

  def getRequest( self ):
    """ get the accumulated request object
    """
    return self.request

  def commitRequest( self ):
    """ Send request to the Request Management Service

        Returns S_OK immediately for an empty request; otherwise validates the
        request and puts it with ReqClient.
    """
    if self.request.isEmpty():
      return S_OK()

    isValid = RequestValidator().validate( self.request )
    if not isValid["OK"]:
      return S_ERROR( "Failover request is not valid: %s" % isValid["Message"] )
    else:
      requestClient = ReqClient()
      result = requestClient.putRequest( self.request )
      return result

  #############################################################################
  def _setFileReplicationRequest( self, lfn, targetSE, fileMetaDict, sourceSE = '' ):
    """ Appends a ReplicateAndRegister operation for lfn to the accumulated request.

        :param str lfn: LFN to replicate
        :param str targetSE: destination SE
        :param dict fileMetaDict: file metadata (Checksum, ChecksumType, Size, GUID)
        :param str sourceSE: optional source SE
    """
    self.log.info( 'Setting ReplicateAndRegister request for %s to %s' % ( lfn, targetSE ) )

    transfer = Operation()
    transfer.Type = "ReplicateAndRegister"
    transfer.TargetSE = targetSE
    if sourceSE:
      transfer.SourceSE = sourceSE

    trFile = File()
    trFile.LFN = lfn

    cksm = fileMetaDict.get( "Checksum", None )
    cksmType = fileMetaDict.get( "ChecksumType", self.defaultChecksumType )
    if cksm and cksmType:
      trFile.Checksum = cksm
      trFile.ChecksumType = cksmType
    size = fileMetaDict.get( "Size", 0 )
    if size:
      trFile.Size = size
    guid = fileMetaDict.get( "GUID", "" )
    if guid:
      trFile.GUID = guid

    transfer.addFile( trFile )

    self.request.addOperation( transfer )

    return S_OK()

  #############################################################################
  def _setRegistrationRequest( self, lfn, targetSE, fileDict, catalog ):
    """ Appends one RegisterFile operation per catalog to the accumulated request.

        :param str lfn: LFN
        :param str targetSE: the SE the file was uploaded to
        :param dict fileDict: file metadata (Checksum, ChecksumType, Size, GUID)
        :param catalog: list (or single string) of catalogs to register in
    """
    self.log.info( 'Setting registration request for %s at %s.' % ( lfn, targetSE ) )

    if not type( catalog ) == type( [] ):
      catalog = [catalog]

    for cat in catalog:

      register = Operation()
      register.Type = "RegisterFile"
      register.Catalog = cat
      register.TargetSE = targetSE

      regFile = File()
      regFile.LFN = lfn
      regFile.Checksum = fileDict.get( "Checksum", "" )
      regFile.ChecksumType = fileDict.get( "ChecksumType", self.defaultChecksumType )
      regFile.Size = fileDict.get( "Size", 0 )
      regFile.GUID = fileDict.get( "GUID", "" )

      # The registration needs the physical URL at the target SE.
      se = StorageElement( targetSE )
      pfn = se.getURL( lfn, self.registrationProtocols )
      if not pfn["OK"] or lfn not in pfn["Value"]['Successful']:
        self.log.error( "Unable to get PFN for LFN", "%s" % pfn.get( 'Message', pfn.get( 'Value', {} ).get( 'Failed', {} ).get( lfn ) ) )
        return pfn
      regFile.PFN = pfn["Value"]['Successful'][lfn]

      register.addFile( regFile )
      self.request.addOperation( register )

    return S_OK()

  #############################################################################
  def _setReplicaRemovalRequest( self, lfn, se ):
    """ Appends a RemoveReplica operation to the accumulated request.

        :param str lfn: LFN
        :param se: SE name, or comma-separated string of SE names
    """
    if type( se ) == str:
      # Normalise a comma-separated SE string (strip blanks, drop empties).
      se = ",".join( [ se.strip() for se in se.split( "," ) if se.strip() ] )

    removeReplica = Operation()

    removeReplica.Type = "RemoveReplica"
    removeReplica.TargetSE = se

    replicaToRemove = File()
    replicaToRemove.LFN = lfn

    removeReplica.addFile( replicaToRemove )

    self.request.addOperation( removeReplica )

    return S_OK()

  #############################################################################
  def _setFileRemovalRequest( self, lfn, se = '', pfn = '' ):
    """ Appends a RemoveFile operation (all replicas) to the accumulated request.

        :param str lfn: LFN
        :param str se: optional target SE
        :param str pfn: optional physical file name
    """
    remove = Operation()
    remove.Type = "RemoveFile"
    if se:
      remove.TargetSE = se

    rmFile = File()
    rmFile.LFN = lfn
    if pfn:
      rmFile.PFN = pfn

    remove.addFile( rmFile )

    self.request.addOperation( remove )

    return S_OK()
|
jd/hyde
|
refs/heads/master
|
hyde/ext/plugins/meta.py
|
6
|
# -*- coding: utf-8 -*-
"""
Contains classes and utilities related to meta data in hyde.
"""
from collections import namedtuple
from functools import partial
from itertools import ifilter
from operator import attrgetter
import re
import sys
from hyde.exceptions import HydeException
from hyde.model import Expando
from hyde.plugin import Plugin
from hyde.site import Node, Resource
from hyde.util import add_method, add_property, pairwalk
from fswrap import File, Folder
import yaml
#
# Metadata
#
class Metadata(Expando):
    """
    Container class for yaml meta data.

    Wraps an Expando; an optional parent's attributes are copied in first,
    then the given data is merged on top.
    """

    def __init__(self, data, parent=None):
        super(Metadata, self).__init__({})
        for source in (parent.__dict__ if parent else None, data):
            if source:
                self.update(source)

    def update(self, data):
        """
        Updates the metadata with new stuff.  Strings are parsed as YAML
        first; anything else is merged as-is.
        """
        # yaml.load kept (not safe_load) for behavioural parity; unsafe on
        # untrusted input.
        parsed = yaml.load(data) if isinstance(data, basestring) else data
        super(Metadata, self).update(parsed)
class MetaPlugin(Plugin):
    """
    Metadata plugin for hyde. Loads meta data in the following order:

    1. meta.yaml: files in any folder
    2. frontmatter: any text file with content enclosed within three dashes
       or three equals signs.

       Example:

       ---
       abc: def
       ---

    Supports YAML syntax.
    """

    def __init__(self, site):
        super(MetaPlugin, self).__init__(site)
        # Matches a frontmatter block delimited by --- or === lines and
        # captures the YAML text between the delimiters.
        self.yaml_finder = re.compile(
            r"^\s*(?:---|===)\s*\n((?:.|\n)+?)\n\s*(?:---|===)\s*\n*",
            re.MULTILINE)

    def begin_site(self):
        """
        Initialize site meta data.

        Go through all the nodes and resources to initialize
        meta data at each level.
        """
        config = self.site.config
        metadata = config.meta if hasattr(config, 'meta') else {}
        self.site.meta = Metadata(metadata)
        # Name of the per-node metadata file; overridable via site meta.
        self.nodemeta = 'nodemeta.yaml'
        if hasattr(self.site.meta, 'nodemeta'):
            self.nodemeta = self.site.meta.nodemeta
        for node in self.site.content.walk():
            self.__read_node__(node)
            for resource in node.resources:
                if not hasattr(resource, 'meta'):
                    # Resources without their own meta inherit the node's.
                    resource.meta = Metadata({}, node.meta)
                if resource.source_file.is_text and not resource.simple_copy:
                    self.__read_resource__(
                        resource, resource.source_file.read_all())

    def __read_resource__(self, resource, text):
        """
        Reads the resource metadata and assigns it to
        the resource. Load meta data by looking for the marker.
        Once loaded, remove the meta area from the text.

        Returns the text stripped of its frontmatter; returns ' ' instead of
        an empty string so callers still get truthy text.
        """
        self.logger.debug(
            "Trying to load metadata from resource [%s]" % resource)
        match = re.match(self.yaml_finder, text)
        if not match:
            self.logger.debug("No metadata found in resource [%s]" % resource)
            data = {}
        else:
            # Drop the frontmatter from the returned text; keep the captured
            # YAML for the metadata update below.
            text = text[match.end():]
            data = match.group(1)
        if not hasattr(resource, 'meta') or not resource.meta:
            if not hasattr(resource.node, 'meta'):
                resource.node.meta = Metadata({})
            resource.meta = Metadata(data, resource.node.meta)
        else:
            resource.meta.update(data)
        self.__update_standard_attributes__(resource)
        self.logger.debug("Successfully loaded metadata from resource [%s]"
                          % resource)
        return text or ' '

    def __update_standard_attributes__(self, obj):
        """
        Updates standard attributes on the resource and
        page based on the provided meta data.

        Only 'is_processable' and 'uses_template' are promoted from meta to
        the object itself.
        """
        if not hasattr(obj, 'meta'):
            return
        standard_attributes = ['is_processable', 'uses_template']
        for attr in standard_attributes:
            if hasattr(obj.meta, attr):
                setattr(obj, attr, getattr(obj.meta, attr))

    def __read_node__(self, node):
        """
        Look for nodemeta.yaml (or configured name). Load and assign it
        to the node.  Node meta inherits from the parent node (or site meta
        at the root).
        """
        nodemeta = node.get_resource(self.nodemeta)
        parent_meta = node.parent.meta if node.parent else self.site.meta
        if nodemeta:
            # The meta file itself must not be generated as content.
            nodemeta.is_processable = False
            metadata = nodemeta.source_file.read_all()
            if hasattr(node, 'meta') and node.meta:
                node.meta.update(metadata)
            else:
                node.meta = Metadata(metadata, parent=parent_meta)
        else:
            node.meta = Metadata({}, parent=parent_meta)
        self.__update_standard_attributes__(node)

    def begin_node(self, node):
        """
        Read node meta data.
        """
        self.__read_node__(node)

    def begin_text_resource(self, resource, text):
        """
        Update the meta data again, just in case it
        has changed. Return text without meta data.
        """
        return self.__read_resource__(resource, text)
#
# Auto Extend
#
class AutoExtendPlugin(Plugin):
    """
    The plugin class for extending templates using metadata.
    """

    def __init__(self, site):
        super(AutoExtendPlugin, self).__init__(site)

    def begin_text_resource(self, resource, text):
        """
        If the meta data for the resource contains a layout attribute,
        and there is no extends statement, this plugin automatically adds
        an extends statement to the top of the file.
        """
        if not resource.uses_template:
            return text

        meta = getattr(resource, 'meta', None)
        layout = getattr(meta, 'extends', None)
        block = getattr(meta, 'default_block', None)

        if not layout:
            return text

        self.logger.debug("Autoextending %s with %s" % (
            resource.relative_path, layout))
        extends_pattern = self.template.patterns['extends']

        # Already has an extends statement: leave the text untouched.
        if re.search(extends_pattern, text):
            return text

        extended_text = self.template.get_extends_statement(layout)
        extended_text += '\n'
        if block:
            extended_text += ('%s\n%s\n%s' %
                              (self.t_block_open_tag(block),
                               text,
                               self.t_block_close_tag(block)))
        else:
            extended_text += text
        return extended_text
#
# Tagging
#
class Tag(Expando):
    """
    A simple object that represents a tag.
    """

    def __init__(self, name):
        """
        Initialize the tag with a name and an empty resource list.
        """
        self.name = name
        self.resources = []

    def __repr__(self):
        return self.name

    # str() and repr() both render as the bare tag name.
    __str__ = __repr__
def get_tagger_sort_method(site):
    """Return the content walker used to iterate resources for tagging.

    When the config defines `tagger.sorter`, the sorted walker
    `walk_resources_sorted_by_<sorter>` is used; otherwise the plain
    `walk_resources`.  A missing sorted walker is reported through
    HydeException.reraise.
    """
    config = site.config
    content = site.content
    sorter = None
    walker_name = 'walk_resources'
    try:
        sorter = config.tagger.sorter
        walker_name = 'walk_resources_sorted_by_%s' % sorter
    except AttributeError:
        # No sorter configured: fall back to the unsorted walker.
        pass
    try:
        walker = getattr(content, walker_name)
    except AttributeError:
        HydeException.reraise(
            "Cannot find the sorter: %s" % sorter,
            sys.exc_info())
    return walker
def walk_resources_tagged_with(node, tag):
    """Yield the site's resources carrying every tag in `tag`.

    `tag` may be a single tag or several joined with '+'; a resource matches
    only when its `meta.tags` contains all of them.
    """
    wanted = set(unicode(tag).split('+'))
    walk = get_tagger_sort_method(node.site)
    for resource in walk():
        try:
            resource_tags = set(attrgetter("meta.tags")(resource))
        except AttributeError:
            # Resource has no meta.tags: it cannot match.
            continue
        if wanted.issubset(resource_tags):
            yield resource
class TaggerPlugin(Plugin):
    """
    Tagger plugin for hyde. Adds the ability to do tag resources and search
    based on the tags.

    Configuration example
    ---------------------
    #yaml
    sorter:
        kind:
            attr: source.kind
    tagger:
        sorter: kind # How to sort the resources in a tag
        archives:
            blog:
                template: tagged_posts.j2
                source: blog
                target: blog/tags
                archive_extension: html
    """

    def __init__(self, site):
        super(TaggerPlugin, self).__init__(site)

    def begin_site(self):
        """
        Initialize plugin. Add tag to the site context variable
        and methods for walking tagged resources.
        """
        self.logger.debug("Adding tags from metadata")
        # *F841 local variable 'config' is assigned to but never used
        # config = self.site.config
        # *F841 local variable 'content' is assigned to but never used
        # content = self.site.content
        tags = {}
        # Generic helper: node.walk_resources_tagged_with('a+b')
        add_method(Node,
                   'walk_resources_tagged_with', walk_resources_tagged_with)
        walker = get_tagger_sort_method(self.site)
        for resource in walker():
            self._process_tags_in_resource(resource, tags)
        self._process_tag_metadata(tags)
        self.site.tagger = Expando(dict(tags=tags))
        self._generate_archives()

    def _process_tag_metadata(self, tags):
        """
        Parses and adds metadata to the tagger object, if the tagger
        configuration contains metadata.
        """
        try:
            tag_meta = self.site.config.tagger.tags.to_dict()
        except AttributeError:
            tag_meta = {}
        for tagname, meta in tag_meta.iteritems():
            # Don't allow name and resources in meta
            if 'resources' in meta:
                del(meta['resources'])
            if 'name' in meta:
                del(meta['name'])
            if tagname in tags:
                tags[tagname].update(meta)

    def _process_tags_in_resource(self, resource, tags):
        """
        Reads the tags associated with this resource and
        adds them to the tag list if needed.
        """
        try:
            taglist = attrgetter("meta.tags")(resource)
        except AttributeError:
            # Untagged resource: nothing to do.
            return
        for tagname in taglist:
            if tagname not in tags:
                tag = Tag(tagname)
                tags[tagname] = tag
                tag.resources.append(resource)
                # Per-tag walker: node.walk_resources_tagged_with_<tag>
                add_method(Node,
                           'walk_resources_tagged_with_%s' % tagname,
                           walk_resources_tagged_with,
                           tag=tag)
            else:
                tags[tagname].resources.append(resource)
            # Back-reference from the resource to its Tag objects.
            if not hasattr(resource, 'tags'):
                setattr(resource, 'tags', [])
            resource.tags.append(tags[tagname])

    def _generate_archives(self):
        """
        Generates archives if the configuration demands.
        """
        archive_config = None
        try:
            archive_config = attrgetter("tagger.archives")(self.site.config)
        except AttributeError:
            # No archives configured.
            return
        self.logger.debug("Generating archives for tags")
        for name, config in archive_config.to_dict().iteritems():
            self._create_tag_archive(config)

    def _create_tag_archive(self, config):
        """
        Generates archives for each tag based on the given configuration.

        Writes one generated source file per tag under the configured target
        folder and registers it as a site resource.
        """
        if 'template' not in config:
            raise HydeException(
                "No Template specified in tagger configuration.")
        content = self.site.content.source_folder
        source = Folder(config.get('source', ''))
        target = content.child_folder(config.get('target', 'tags'))
        if not target.exists:
            target.make()
        # Write meta data for the configuration
        meta = config.get('meta', {})
        meta_text = u''
        if meta:
            import yaml  # NOTE(review): shadows the module-level import; harmless
            meta_text = yaml.dump(meta, default_flow_style=False)
        extension = config.get('extension', 'html')
        template = config['template']
        # Frontmatter + jinja preamble for each generated archive page.
        archive_text = u"""
---
extends: false
%(meta)s
---
{%% set tag = site.tagger.tags['%(tag)s'] %%}
{%% set source = site.content.node_from_relative_path('%(node)s') %%}
{%% set walker = source['walk_resources_tagged_with_%(tag)s'] %%}
{%% extends "%(template)s" %%}
"""
        for tagname, tag in self.site.tagger.tags.to_dict().iteritems():
            tag_data = {
                "tag": tagname,
                "node": source.name,
                "template": template,
                "meta": meta_text
            }
            text = archive_text % tag_data
            archive_file = File(target.child("%s.%s" % (tagname, extension)))
            # Overwrite any archive left from a previous generation.
            archive_file.delete()
            archive_file.write(text.strip())
            self.site.content.add_resource(archive_file)
#
# Sorting
#
def filter_method(item, settings=None):
    """
    Returns True if all the filters in the
    given settings evaluate to True for `item`.

    :param item: object whose (dotted) attributes are compared
    :param settings: object with an optional `filters` attribute whose
        __dict__ maps dotted attribute paths to expected values
    :return: bool -- True when every filter matches (or there are none)
    """
    filters = {}
    if hasattr(settings, 'filters'):
        filters.update(settings.filters.__dict__)
    for field, value in filters.items():
        try:
            res = attrgetter(field)(item)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate; attrgetter may raise AttributeError (missing
            # attribute) or TypeError (bad path) -- treat both as "no value".
            res = None
        if res != value:
            return False
    return True
def attributes_checker(item, attributes=None):
    """
    Checks if the given list of (dotted) attribute paths all exist on item.
    Returns True when every path resolves, False on a missing attribute.
    """
    getter = attrgetter(*attributes)
    try:
        getter(item)
    except AttributeError:
        return False
    return True
def sort_method(node, settings=None):
    """
    Sorts the resources in the given node based on the
    given settings (`attr` sort key(s), `reverse` flag, `filters`).
    Resources missing the sort attribute(s) or failing the filters are
    excluded.
    """
    sort_attr = 'name'
    if settings and getattr(settings, 'attr', None):
        sort_attr = settings.attr
    reverse = settings.reverse if settings and hasattr(settings, 'reverse') else False
    attrs = sort_attr if isinstance(sort_attr, list) else [sort_attr]
    passes_filters = partial(filter_method, settings=settings)
    has_attributes = partial(attributes_checker, attributes=attrs)
    candidates = ifilter(lambda res: has_attributes(res) and passes_filters(res),
                         node.walk_resources())
    return sorted(candidates,
                  key=attrgetter(*attrs),
                  reverse=reverse)
class SorterPlugin(Plugin):
    """
    Sorter plugin for hyde. Adds the ability to do
    sophisticated sorting by expanding the site objects
    to support prebuilt sorting methods. These methods
    can be used in the templates directly.

    Configuration example
    ---------------------
    #yaml
    sorter:
        kind:
            # Sorts by this attribute name
            # Uses `attrgetter` on the resource object
            attr: source_file.kind

            # The filters to be used before sorting
            # This can be used to remove all the items
            # that do not apply. For example,
            # filtering non html content
            filters:
                source_file.kind: html
                is_processable: True
                meta.is_listable: True
    """

    def __init__(self, site):
        super(SorterPlugin, self).__init__(site)

    def begin_site(self):
        """
        Initialize plugin. Add a sort and match method
        for every configuration mentioned in site settings
        """
        config = self.site.config
        if not hasattr(config, 'sorter'):
            return
        for name, settings in config.sorter.__dict__.items():
            # node.walk_resources_sorted_by_<name>() -> sorted resources
            sort_method_name = 'walk_resources_sorted_by_%s' % name
            self.logger.debug("Adding sort methods for [%s]" % name)
            add_method(Node, sort_method_name, sort_method, settings=settings)
            # resource.is_<name>() -> True when the resource passes the filters
            match_method_name = 'is_%s' % name
            # NOTE(review): settings is passed positionally here but by
            # keyword above -- confirm add_method handles both identically.
            add_method(Resource, match_method_name, filter_method, settings)
            # Thread prev/next links through the sorted sequence so templates
            # can navigate between neighbouring resources.
            prev_att = 'prev_by_%s' % name
            next_att = 'next_by_%s' % name
            setattr(Resource, prev_att, None)
            setattr(Resource, next_att, None)
            walker = getattr(self.site.content,
                             sort_method_name,
                             self.site.content.walk_resources)
            first, last = None, None
            for prev, next in pairwalk(walker()):
                if not first:
                    first = prev
                last = next
                setattr(prev, next_att, next)
                setattr(next, prev_att, prev)
            # Optionally close the chain into a ring.
            try:
                circular = settings.circular
            except AttributeError:
                circular = False
            if circular and first:
                setattr(first, prev_att, last)
                setattr(last, next_att, first)
#
# Grouping
#
# Pairs a group with the lazily-walked resources belonging to it.
Grouper = namedtuple('Grouper', ['group', 'resources'])
class Group(Expando):
    """
    A wrapper class for groups. Adds methods for
    grouping resources.

    Building the root Group also installs walker methods/properties on Node
    and Resource named after the root group's name.
    """

    def __init__(self, grouping, parent=None):
        self.name = 'groups'
        self.parent = parent
        self.root = self
        # Re-point root at the top of the hierarchy when nested.
        self.root = parent.root if parent else self
        self.groups = []
        self.sorter = getattr(grouping, 'sorter', None)
        if hasattr(parent, 'sorter'):
            # Child groups inherit the parent's sorter.
            self.sorter = parent.sorter
        # Expando.__init__ copies the grouping's attributes (including its
        # real name) and routes the nested 'groups' key through set_expando.
        super(Group, self).__init__(grouping)
        add_method(Node,
                   'walk_%s_groups' % self.name,
                   Group.walk_groups_in_node,
                   group=self)
        add_method(Node,
                   'walk_resources_grouped_by_%s' % self.name,
                   Group.walk_resources,
                   group=self)
        add_property(Resource,
                     '%s_group' % self.name,
                     Group.get_resource_group,
                     group=self)
        add_method(Resource,
                   'walk_%s_groups' % self.name,
                   Group.walk_resource_groups,
                   group=self)

    def set_expando(self, key, value):
        """
        If the key is groups, creates group objects instead of
        regular expando objects.
        """
        if key == "groups":
            self.groups = [Group(group, parent=self) for group in value]
        else:
            return super(Group, self).set_expando(key, value)

    @staticmethod
    def get_resource_group(resource, group):
        """
        This method gets attached to the resource object.

        Returns the group (within `group`'s hierarchy) that the resource
        declares in its metadata, or None.
        """
        try:
            group_name = getattr(resource.meta, group.root.name)
        except AttributeError:
            group_name = None

        return next((g for g in group.walk_groups()
                     if g.name == group_name), None) \
            if group_name \
            else None

    @staticmethod
    def walk_resource_groups(resource, group):
        """
        This method gets attached to the resource object.

        Returns group and its ancestors that the resource
        belongs to, in that order (root first); empty list if untagged.
        """
        try:
            group_name = getattr(resource.meta, group.root.name)
        except AttributeError:
            group_name = None
        if group_name:
            for g in group.walk_groups():
                if g.name == group_name:
                    return reversed(list(g.walk_hierarchy()))
        return []

    @staticmethod
    def walk_resources(node, group):
        """
        The method that gets attached to the node
        object for walking the resources in the node
        that belong to this group (including its subgroups).
        """
        for group in group.walk_groups():
            for resource in group.walk_resources_in_node(node):
                yield resource

    @staticmethod
    def walk_groups_in_node(node, group):
        """
        The method that gets attached to the node
        object for walking the groups in the node.

        Yields a Grouper per group with its (lazy) resource walker.
        """
        walker = group.walk_groups()
        for g in walker:
            lister = g.walk_resources_in_node(node)
            yield Grouper(group=g, resources=lister)

    def walk_hierarchy(self):
        """
        Walks the group hierarchy starting from
        this group, yielding self first, then each ancestor.
        """
        g = self
        yield g
        while g.parent:
            yield g.parent
            g = g.parent

    def walk_groups(self):
        """
        Walks the groups in the current group: self first,
        then all descendants depth-first.
        """
        yield self
        for group in self.groups:
            for child in group.walk_groups():
                yield child

    def walk_resources_in_node(self, node):
        """
        Walks the resources in the given node
        sorted based on sorter configuration in this
        group.  Only resources whose metadata names this group are yielded.
        """
        walker = 'walk_resources'
        if hasattr(self, 'sorter') and self.sorter:
            walker = 'walk_resources_sorted_by_' + self.sorter
        walker = getattr(node, walker, getattr(node, 'walk_resources'))
        for resource in walker():
            try:
                group_value = getattr(resource.meta, self.root.name)
            except AttributeError:
                continue
            if group_value == self.name:
                yield resource
class GrouperPlugin(Plugin):
    """
    Grouper plugin for hyde. Adds the ability to do
    group resources and nodes in an arbitrary
    hierarchy.

    Configuration example
    ---------------------
    #yaml
    sorter:
        kind:
            attr: source.kind
    grouper:
       hyde:
           # Categorizes the nodes and resources
           # based on the groups specified here.
           # The node and resource should be tagged
           # with the categories in their metadata
           sorter: kind # A reference to the sorter
           description: Articles about hyde
           groups:
                -
                    name: announcements
                    description: Hyde release announcements
                -
                    name: making of
                    description: Articles about hyde design decisions
                -
                    name: tips and tricks
                    description: >
                        Helpful snippets and tweaks to
                        make hyde more awesome.
    """

    def __init__(self, site):
        super(GrouperPlugin, self).__init__(site)

    def begin_site(self):
        """
        Initialize plugin. Add the specified groups to the
        site context variable.
        """
        config = self.site.config
        if not hasattr(config, 'grouper'):
            return
        if not hasattr(self.site, 'grouper'):
            self.site.grouper = {}
        for name, grouping in self.site.config.grouper.__dict__.items():
            grouping.name = name
            # Per-grouping prev/next navigation attributes on resources.
            prev_att = 'prev_in_%s' % name
            next_att = 'next_in_%s' % name
            setattr(Resource, prev_att, None)
            setattr(Resource, next_att, None)
            # Building the Group also installs the walker methods on
            # Node/Resource (see Group.__init__).
            self.site.grouper[name] = Group(grouping)
            walker = Group.walk_resources(
                self.site.content, self.site.grouper[name])
            # Link each resource to its neighbours in group order.
            for prev, next in pairwalk(walker):
                setattr(next, prev_att, prev)
                setattr(prev, next_att, next)
|
tecan/xchat-rt
|
refs/heads/master
|
plugins/scripts/Supybot-0.83.4.1-bitcoinotc-bot/build/lib/supybot/utils/transaction.py
|
8
|
###
# Copyright (c) 2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Defines a Transaction class for multi-file transactions.
"""
import os
import shutil
import os.path
import error
import python
import file as File
# 'txn' is used as an abbreviation for 'transaction' in the following source.
class FailedAcquisition(error.Error):
    """Raised when the transaction directory cannot be acquired."""

    def __init__(self, txnDir, e=None):
        self.txnDir = txnDir
        error.Error.__init__(
            self,
            'Could not acquire transaction directory: %s.' % self.txnDir,
            e)
class InProgress(error.Error):
    """Raised when a transaction working directory already exists."""

    def __init__(self, inProgress, e=None):
        self.inProgress = inProgress
        error.Error.__init__(
            self,
            'Transaction appears to be in progress already: %s exists.' %
            self.inProgress,
            e)
class TransactionMixin(python.Object):
    """Shared layout/helpers for Transaction and Rollback.

    The transaction working directory (txnDir + INPROGRESS) contains:
      journal      -- one journalled command per line
      originals    -- pristine copies of files before modification
      replacements -- new file contents to be installed on commit
      cwd          -- the working directory recorded at transaction start
    """
    JOURNAL = 'journal'
    ORIGINALS = 'originals'
    INPROGRESS = '.inProgress'
    REPLACEMENTS = 'replacements'
    # expects a self.dir. used by Transaction and Rollback.
    def __init__(self, txnDir):
        """txnDir is the base directory; the active working directory is
        txnDir + '.inProgress'."""
        self.txnDir = txnDir
        self.dir = self.txnDir + self.INPROGRESS
        self._journalName = self.dirize(self.JOURNAL)

    def escape(self, filename):
        # Absolute path with its leading separator stripped, so it can be
        # mirrored under the transaction directory.
        return os.path.abspath(filename)[1:]

    def dirize(self, *args):
        """Join *args under the transaction working directory."""
        return os.path.join(self.dir, *args)

    def _original(self, filename):
        """Path of the saved pristine copy of filename."""
        return self.dirize(self.ORIGINALS, self.escape(filename))

    def _replacement(self, filename):
        """Path of the pending replacement contents for filename."""
        return self.dirize(self.REPLACEMENTS, self.escape(filename))

    def _checkCwd(self):
        # All journal paths are interpreted relative to the cwd recorded at
        # start time, so every operation must run from that directory.
        # NOTE(review): InvalidCwd is not defined in this chunk -- presumably
        # declared elsewhere in the module; verify.
        expected = File.contents(self.dirize('cwd'))
        if os.getcwd() != expected:
            raise InvalidCwd(expected)

    def _journalCommands(self):
        """Yield (command, args) tuples parsed from the journal file."""
        journal = file(self._journalName)
        for line in journal:
            line = line.rstrip('\n')
            (command, rest) = line.split(None, 1)
            args = rest.split()
            yield (command, args)
class Transaction(TransactionMixin):
    # XXX Transaction needs to be made threadsafe.
    def __init__(self, *args, **kwargs):
        """Transaction(txnDir) -> None

        txnDir is the directory that will hold the transaction's working files
        and such. If it can't be renamed, there is probably an active
        transaction.
        """
        TransactionMixin.__init__(self, *args, **kwargs)
        if os.path.exists(self.dir):
            raise InProgress(self.dir)
        if not os.path.exists(self.txnDir):
            raise FailedAcquisition(self.txnDir)
        try:
            # Renaming txnDir to the .inProgress dir is the atomic
            # acquisition step.
            os.rename(self.txnDir, self.dir)
        except EnvironmentError, e:
            raise FailedAcquisition(self.txnDir, e)
        os.mkdir(self.dirize(self.ORIGINALS))
        os.mkdir(self.dirize(self.REPLACEMENTS))
        self._journal = file(self._journalName, 'a')
        # Record the cwd so _checkCwd can verify later calls are made from
        # the same directory.
        cwd = file(self.dirize('cwd'), 'w')
        cwd.write(os.getcwd())
        cwd.close()

    def _journalCommand(self, command, *args):
        """Append a command line to the journal and flush it to disk."""
        File.writeLine(self._journal,
                       '%s %s' % (command, ' '.join(map(str, args))))
        self._journal.flush()

    def _makeOriginal(self, filename):
        """Save a pristine copy of filename for possible rollback."""
        File.copy(filename, self._original(filename))

    # XXX There needs to be a way, given a transaction, to get a
    # "sub-transaction", which:
    #
    # 1. Doesn't try to grab the txnDir and move it, but instead is just
    # given the actual directory being used and uses that.
    # 2. Acquires the lock of the original transaction, only releasing it
    # when its .commit method is called (assuming Transaction is
    # threadsafe).
    # 3. Has a no-op .commit method (i.e., doesn't commit).
    #
    # This is so that, for instance, an object with an active Transaction
    # can give other objects a Transaction-ish object without worrying that
    # the transaction will be committed, while still allowing those objects
    # to work properly with real transactions (i.e., they still call
    # as they would on a normal Transaction, it just has no effect with a
    # sub-transaction).
    # The method that returns a subtransaction should be called "child."
    def child(self):
        raise NotImplementedError

    # XXX create, replace, etc. return file objects. This class should keep a
    # list of such file descriptors and only allow a commit if all of them
    # are closed. Trying to commit with open file objects should raise an
    # exception.
    def create(self, filename):
        """
        Returns a file object for a filename that should be created (with
        the contents as they were written to the filename) when the transaction
        is committed.
        """
        raise NotImplementedError # XXX.

    def mkdir(self, filename):
        raise NotImplementedError # XXX

    def delete(self, filename):
        raise NotImplementedError # XXX

    def replace(self, filename):
        """
        Returns a file object for a filename that should be replaced by the
        contents written to the file object when the transaction is committed.
        """
        self._checkCwd()
        self._makeOriginal(filename)
        self._journalCommand('replace', filename)
        return File.open(self._replacement(filename))

    def append(self, filename):
        """Return a file object (append mode) whose writes are applied to
        filename when the transaction is committed."""
        self._checkCwd()
        length = os.stat(filename).st_size
        self._journalCommand('append', filename, length)
        replacement = self._replacement(filename)
        File.copy(filename, replacement)
        return file(replacement, 'a')

    def commit(self, removeWhenComplete=True):
        """Replay the journal, dispatching each command to commit<Command>.

        Touches 'commit' before replay and 'committed' after, so a crash
        mid-commit is detectable.
        """
        self._journal.close()
        self._checkCwd()
        File.touch(self.dirize('commit'))
        for (command, args) in self._journalCommands():
            methodName = 'commit%s' % command.capitalize()
            getattr(self, methodName)(*args)
        File.touch(self.dirize('committed'))
        if removeWhenComplete:
            shutil.rmtree(self.dir)

    def commitReplace(self, filename):
        # Install the replacement contents over the original file.
        shutil.copy(self._replacement(filename), filename)

    def commitAppend(self, filename, length):
        # NOTE(review): `length` is journalled but unused here; the whole
        # replacement file is copied over the original -- confirm intended.
        shutil.copy(self._replacement(filename), filename)
# XXX need to be able to rename files transactionally. (hard; especially
# with renames that depend on one another. It might be easier to do
# rename separate from relocate.)
class Rollback(TransactionMixin):
    """Re-attaches to an interrupted transaction directory and undoes it."""
    def rollback(self, removeWhenComplete=True):
        """Undo every journalled command, then optionally remove the dir.

        If the 'commit' marker is absent, the commit never began, so the
        on-disk files were never touched and nothing needs undoing.
        """
        self._checkCwd()
        if not os.path.exists(self.dirize('commit')):
            return # No action taken; commit hadn't begun.
        for (command, args) in self._journalCommands():
            # e.g. 'replace' -> rollbackReplace, 'append' -> rollbackAppend.
            methodName = 'rollback%s' % command.capitalize()
            getattr(self, methodName)(*args)
        if removeWhenComplete:
            shutil.rmtree(self.dir)
    def rollbackReplace(self, filename):
        # Restore the saved pre-transaction copy over the modified file.
        shutil.copy(self._original(filename), filename)
    def rollbackAppend(self, filename, length):
        # Truncate back to the journalled pre-append size.
        fd = file(filename, 'a')
        fd.truncate(int(length))
        fd.close()
# vim:set shiftwidth=4 softtabstop=8 expandtab textwidth=78:
|
kawamon/hue
|
refs/heads/master
|
desktop/core/ext-py/cx_Oracle-6.4.1/test/DateTimeVar.py
|
2
|
#------------------------------------------------------------------------------
# Copyright 2016, 2017, Oracle and/or its affiliates. All rights reserved.
#
# Portions Copyright 2007-2015, Anthony Tuininga. All rights reserved.
#
# Portions Copyright 2001-2007, Computronix (Canada) Ltd., Edmonton, Alberta,
# Canada. All rights reserved.
#------------------------------------------------------------------------------
"""Module for testing date/time variables."""
import datetime
import time
class TestDateTimeVar(BaseTestCase):
    """Tests for binding and fetching DATE/TIMESTAMP values via cx_Oracle.

    setUp() mirrors the contents of the TestDates table as rows of
    (IntCol, DateCol, NullableCol); NullableCol is populated only for odd
    IntCol values.
    """

    def setUp(self):
        BaseTestCase.setUp(self)
        self.rawData = []
        self.dataByKey = {}
        for i in range(1, 11):
            timeTuple = (2002, 12, 9, 0, 0, 0, 0, 0, -1)
            timeInTicks = time.mktime(timeTuple) + i * 86400 + i * 8640
            dateCol = cx_Oracle.TimestampFromTicks(int(timeInTicks))
            if i % 2:
                timeInTicks = time.mktime(timeTuple) + i * 86400 * 2 + \
                        i * 12960
                nullableCol = cx_Oracle.TimestampFromTicks(int(timeInTicks))
            else:
                nullableCol = None
            # Fix: this local was named "tuple", shadowing the builtin of
            # the same name for the remainder of the loop body.
            row = (i, dateCol, nullableCol)
            self.rawData.append(row)
            self.dataByKey[i] = row

    def testBindDate(self):
        "test binding in a date"
        self.cursor.execute("""
                select * from TestDates
                where DateCol = :value""",
                value = cx_Oracle.Timestamp(2002, 12, 13, 9, 36, 0))
        self.assertEqual(self.cursor.fetchall(), [self.dataByKey[4]])

    def testBindDateTime(self):
        "test binding in a Python 2.3 and higher date time"
        self.cursor.execute("""
                select * from TestDates
                where DateCol = :value""",
                value = datetime.datetime(2002, 12, 13, 9, 36, 0))
        self.assertEqual(self.cursor.fetchall(), [self.dataByKey[4]])

    def testBindDateInDateTimeVar(self):
        "test binding date in a datetime variable"
        var = self.cursor.var(cx_Oracle.DATETIME)
        dateVal = datetime.date.today()
        var.setvalue(0, dateVal)
        self.assertEqual(var.getvalue().date(), dateVal)

    def testBindDateAfterString(self):
        "test binding in a date after setting input sizes to a string"
        self.cursor.setinputsizes(value = 15)
        self.cursor.execute("""
                select * from TestDates
                where DateCol = :value""",
                value = cx_Oracle.Timestamp(2002, 12, 14, 12, 0, 0))
        self.assertEqual(self.cursor.fetchall(), [self.dataByKey[5]])

    def testBindNull(self):
        "test binding in a null"
        self.cursor.setinputsizes(value = cx_Oracle.DATETIME)
        self.cursor.execute("""
                select * from TestDates
                where DateCol = :value""",
                value = None)
        self.assertEqual(self.cursor.fetchall(), [])

    def testBindDateArrayDirect(self):
        "test binding in a date array"
        returnValue = self.cursor.var(cx_Oracle.NUMBER)
        array = [r[1] for r in self.rawData]
        statement = """
                begin
                  :returnValue := pkg_TestDateArrays.TestInArrays(
                      :startValue, :baseDate, :array);
                end;"""
        self.cursor.execute(statement,
                returnValue = returnValue,
                startValue = 5,
                baseDate = cx_Oracle.Date(2002, 12, 12),
                array = array)
        self.assertEqual(returnValue.getvalue(), 35.5)
        array = array + array[:5]
        self.cursor.execute(statement,
                startValue = 7,
                baseDate = cx_Oracle.Date(2002, 12, 13),
                array = array)
        self.assertEqual(returnValue.getvalue(), 24.0)

    def testBindDateArrayBySizes(self):
        "test binding in a date array (with setinputsizes)"
        returnValue = self.cursor.var(cx_Oracle.NUMBER)
        self.cursor.setinputsizes(array = [cx_Oracle.DATETIME, 10])
        array = [r[1] for r in self.rawData]
        self.cursor.execute("""
                begin
                  :returnValue := pkg_TestDateArrays.TestInArrays(
                      :startValue, :baseDate, :array);
                end;""",
                returnValue = returnValue,
                startValue = 6,
                baseDate = cx_Oracle.Date(2002, 12, 13),
                array = array)
        self.assertEqual(returnValue.getvalue(), 26.5)

    def testBindDateArrayByVar(self):
        "test binding in a date array (with arrayvar)"
        returnValue = self.cursor.var(cx_Oracle.NUMBER)
        array = self.cursor.arrayvar(cx_Oracle.DATETIME, 10, 20)
        array.setvalue(0, [r[1] for r in self.rawData])
        self.cursor.execute("""
                begin
                  :returnValue := pkg_TestDateArrays.TestInArrays(
                      :startValue, :baseDate, :array);
                end;""",
                returnValue = returnValue,
                startValue = 7,
                baseDate = cx_Oracle.Date(2002, 12, 14),
                array = array)
        self.assertEqual(returnValue.getvalue(), 17.5)

    def testBindInOutDateArrayByVar(self):
        "test binding in/out a date array (with arrayvar)"
        array = self.cursor.arrayvar(cx_Oracle.DATETIME, 10, 100)
        originalData = [r[1] for r in self.rawData]
        array.setvalue(0, originalData)
        self.cursor.execute("""
                begin
                  pkg_TestDateArrays.TestInOutArrays(:numElems, :array);
                end;""",
                numElems = 5,
                array = array)
        # Only the first numElems entries are modified by the procedure.
        self.assertEqual(array.getvalue(),
                [ cx_Oracle.Timestamp(2002, 12, 17, 2, 24, 0),
                  cx_Oracle.Timestamp(2002, 12, 18, 4, 48, 0),
                  cx_Oracle.Timestamp(2002, 12, 19, 7, 12, 0),
                  cx_Oracle.Timestamp(2002, 12, 20, 9, 36, 0),
                  cx_Oracle.Timestamp(2002, 12, 21, 12, 0, 0) ] + \
                originalData[5:])

    def testBindOutDateArrayByVar(self):
        "test binding out a date array (with arrayvar)"
        array = self.cursor.arrayvar(cx_Oracle.DATETIME, 6, 100)
        self.cursor.execute("""
                begin
                  pkg_TestDateArrays.TestOutArrays(:numElems, :array);
                end;""",
                numElems = 6,
                array = array)
        self.assertEqual(array.getvalue(),
                [ cx_Oracle.Timestamp(2002, 12, 13, 4, 48, 0),
                  cx_Oracle.Timestamp(2002, 12, 14, 9, 36, 0),
                  cx_Oracle.Timestamp(2002, 12, 15, 14, 24, 0),
                  cx_Oracle.Timestamp(2002, 12, 16, 19, 12, 0),
                  cx_Oracle.Timestamp(2002, 12, 18, 0, 0, 0),
                  cx_Oracle.Timestamp(2002, 12, 19, 4, 48, 0) ])

    def testBindOutSetInputSizes(self):
        "test binding out with set input sizes defined"
        vars = self.cursor.setinputsizes(value = cx_Oracle.DATETIME)
        self.cursor.execute("""
                begin
                  :value := to_date(20021209, 'YYYYMMDD');
                end;""")
        self.assertEqual(vars["value"].getvalue(),
               cx_Oracle.Timestamp(2002, 12, 9))

    def testBindInOutSetInputSizes(self):
        "test binding in/out with set input sizes defined"
        vars = self.cursor.setinputsizes(value = cx_Oracle.DATETIME)
        self.cursor.execute("""
                begin
                  :value := :value + 5.25;
                end;""",
                value = cx_Oracle.Timestamp(2002, 12, 12, 10, 0, 0))
        self.assertEqual(vars["value"].getvalue(),
                cx_Oracle.Timestamp(2002, 12, 17, 16, 0, 0))

    def testBindOutVar(self):
        "test binding out with cursor.var() method"
        var = self.cursor.var(cx_Oracle.DATETIME)
        self.cursor.execute("""
                begin
                  :value := to_date('20021231 12:31:00',
                      'YYYYMMDD HH24:MI:SS');
                end;""",
                value = var)
        self.assertEqual(var.getvalue(),
               cx_Oracle.Timestamp(2002, 12, 31, 12, 31, 0))

    def testBindInOutVarDirectSet(self):
        "test binding in/out with cursor.var() method"
        var = self.cursor.var(cx_Oracle.DATETIME)
        var.setvalue(0, cx_Oracle.Timestamp(2002, 12, 9, 6, 0, 0))
        self.cursor.execute("""
                begin
                  :value := :value + 5.25;
                end;""",
                value = var)
        self.assertEqual(var.getvalue(),
                cx_Oracle.Timestamp(2002, 12, 14, 12, 0, 0))

    def testCursorDescription(self):
        "test cursor description is accurate"
        self.cursor.execute("select * from TestDates")
        self.assertEqual(self.cursor.description,
                [ ('INTCOL', cx_Oracle.NUMBER, 10, None, 9, 0, 0),
                  ('DATECOL', cx_Oracle.DATETIME, 23, None, None, None, 0),
                  ('NULLABLECOL', cx_Oracle.DATETIME, 23, None, None, None,
                        1) ])

    def testFetchAll(self):
        "test that fetching all of the data returns the correct results"
        self.cursor.execute("select * From TestDates order by IntCol")
        self.assertEqual(self.cursor.fetchall(), self.rawData)
        self.assertEqual(self.cursor.fetchall(), [])

    def testFetchMany(self):
        "test that fetching data in chunks returns the correct results"
        self.cursor.execute("select * From TestDates order by IntCol")
        self.assertEqual(self.cursor.fetchmany(3), self.rawData[0:3])
        self.assertEqual(self.cursor.fetchmany(2), self.rawData[3:5])
        self.assertEqual(self.cursor.fetchmany(4), self.rawData[5:9])
        self.assertEqual(self.cursor.fetchmany(3), self.rawData[9:])
        self.assertEqual(self.cursor.fetchmany(3), [])

    def testFetchOne(self):
        "test that fetching a single row returns the correct results"
        self.cursor.execute("""
                select *
                from TestDates
                where IntCol in (3, 4)
                order by IntCol""")
        self.assertEqual(self.cursor.fetchone(), self.dataByKey[3])
        self.assertEqual(self.cursor.fetchone(), self.dataByKey[4])
        self.assertEqual(self.cursor.fetchone(), None)
|
RKrahl/pytest-dependency
|
refs/heads/develop
|
doc/examples/scope_module.py
|
1
|
import pytest
@pytest.mark.dependency()
@pytest.mark.xfail(reason="deliberate fail")
def test_a():
    """Deliberately failing test; per pytest-dependency, dependents skip."""
    assert False
@pytest.mark.dependency()
def test_b():
    """Always passes, so tests depending on it may run."""
    pass
@pytest.mark.dependency(depends=["test_a"], scope='module')
def test_c():
    """Depends on test_a (module scope); test_a fails, so this is skipped."""
    pass
@pytest.mark.dependency(depends=["test_b"], scope='module')
def test_d():
    """Depends on test_b (module scope), which passes, so this runs."""
    pass
@pytest.mark.dependency(depends=["test_b", "test_c"], scope='module')
def test_e():
    """Depends on test_b and test_c; test_c is skipped, so this skips too."""
    pass
|
toladata/TolaTables
|
refs/heads/master
|
reports/urls.py
|
1
|
import reports.views
from django.conf.urls import *
# place app url patterns here
urlpatterns = [
    #display public custom dashboard
    url(r'^table_list/$', reports.views.list_table_dashboards,
        name='table_dashboard_list'),
    # Single dashboard detail; <id> is one or more word characters.
    url(r'^table_dashboard/(?P<id>\w+)/$',
        reports.views.table_dashboard, name='table_dashboard'),
]
|
bobbzorzen/InteractiveExaltedSheets
|
refs/heads/master
|
InteractiveExalted/char_sheet/views.py
|
1
|
from django.shortcuts import render
def index(request):
    """Render the character-sheet landing page."""
    template_name = 'char_sheet/home.html'
    return render(request, template_name)
def cards(request):
    """Render the card-view page."""
    template_name = 'char_sheet/cards.html'
    return render(request, template_name)
|
googleapis/python-pubsub
|
refs/heads/master
|
tests/unit/pubsub_v1/publisher/batch/test_thread.py
|
1
|
# Copyright 2017, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import threading
import time
import mock
import pytest
import google.api_core.exceptions
from google.api_core import gapic_v1
from google.auth import credentials
from google.cloud.pubsub_v1 import publisher
from google.cloud.pubsub_v1 import types
from google.cloud.pubsub_v1.publisher import exceptions
from google.cloud.pubsub_v1.publisher._batch.base import BatchStatus
from google.cloud.pubsub_v1.publisher._batch.base import BatchCancellationReason
from google.cloud.pubsub_v1.publisher._batch import thread
from google.cloud.pubsub_v1.publisher._batch.thread import Batch
from google.pubsub_v1 import types as gapic_types
def create_client():
    """Return a publisher client backed by mock credentials."""
    fake_credentials = mock.Mock(spec=credentials.Credentials)
    return publisher.Client(credentials=fake_credentials)
def create_batch(
    topic="topic_name",
    batch_done_callback=None,
    commit_when_full=True,
    commit_retry=gapic_v1.method.DEFAULT,
    commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT,
    **batch_settings
):
    """Return a batch object suitable for testing.
    Args:
        topic (str): Topic name.
        batch_done_callback (Callable[bool]): A callable that is called when
            the batch is done, either with a success or a failure flag.
        commit_when_full (bool): Whether to commit the batch when the batch
            has reached byte-size or number-of-messages limits.
        commit_retry (Optional[google.api_core.retry.Retry]): The retry settings
            for the batch commit call.
        commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`):
            The timeout to apply to the batch commit call.
        batch_settings (Mapping[str, str]): Arguments passed on to the
            :class:``~.pubsub_v1.types.BatchSettings`` constructor.
    Returns:
        ~.pubsub_v1.publisher.batch.thread.Batch: A batch object.
    """
    client = create_client()
    # Any unexpected keyword in batch_settings surfaces here, not in Batch.
    settings = types.BatchSettings(**batch_settings)
    return Batch(
        client,
        topic,
        settings,
        batch_done_callback=batch_done_callback,
        commit_when_full=commit_when_full,
        commit_retry=commit_retry,
        commit_timeout=commit_timeout,
    )
@mock.patch.object(threading, "Lock")
def test_make_lock(Lock):
    """Batch.make_lock must delegate to threading.Lock."""
    lock = Batch.make_lock()
    assert lock is Lock.return_value
    Lock.assert_called_once_with()
def test_client():
    """The batch exposes the exact client instance it was built with."""
    client = create_client()
    settings = types.BatchSettings()
    batch = Batch(client, "topic_name", settings)
    assert batch.client is client
def test_commit():
    """commit() starts a commit thread and flips the batch out of ACCEPTING."""
    batch = create_batch()
    with mock.patch.object(
        Batch, "_start_commit_thread", autospec=True
    ) as _start_commit_thread:
        batch.commit()
        _start_commit_thread.assert_called_once()
    # The batch's status needs to be something other than "accepting messages",
    # since the commit started.
    assert batch.status != BatchStatus.ACCEPTING_MESSAGES
    assert batch.status == BatchStatus.STARTING
def test_commit_no_op():
    """commit() on an already in-progress batch must not spawn a thread."""
    batch = create_batch()
    batch._status = BatchStatus.IN_PROGRESS
    with mock.patch.object(threading, "Thread", autospec=True) as Thread:
        batch.commit()
        # Make sure a thread was not created.
        Thread.assert_not_called()
    # Check that batch status is unchanged.
    assert batch.status == BatchStatus.IN_PROGRESS
def test_blocking__commit():
    """_commit() publishes all queued messages and resolves their futures."""
    batch = create_batch()
    futures = (
        batch.publish({"data": b"This is my message."}),
        batch.publish({"data": b"This is another message."}),
    )
    # Set up the underlying API publish method to return a PublishResponse.
    publish_response = gapic_types.PublishResponse(message_ids=["a", "b"])
    patch = mock.patch.object(
        type(batch.client.api), "publish", return_value=publish_response
    )
    with patch as publish:
        batch._commit()
    # Establish that the underlying API call was made with expected
    # arguments.
    publish.assert_called_once_with(
        topic="topic_name",
        messages=[
            gapic_types.PubsubMessage(data=b"This is my message."),
            gapic_types.PubsubMessage(data=b"This is another message."),
        ],
        retry=gapic_v1.method.DEFAULT,
        timeout=gapic_v1.method.DEFAULT,
    )
    # Establish that all of the futures are done, and that they have the
    # expected values.
    assert futures[0].done()
    assert futures[0].result() == "a"
    assert futures[1].done()
    assert futures[1].result() == "b"
def test_blocking__commit_custom_retry():
    """A retry object passed at batch creation must reach the publish call."""
    batch = create_batch(commit_retry=mock.sentinel.custom_retry)
    batch.publish({"data": b"This is my message."})
    # Set up the underlying API publish method to return a PublishResponse.
    publish_response = gapic_types.PublishResponse(message_ids=["a"])
    patch = mock.patch.object(
        type(batch.client.api), "publish", return_value=publish_response
    )
    with patch as publish:
        batch._commit()
    # Establish that the underlying API call was made with expected
    # arguments.
    publish.assert_called_once_with(
        topic="topic_name",
        messages=[gapic_types.PubsubMessage(data=b"This is my message.")],
        retry=mock.sentinel.custom_retry,
        timeout=gapic_v1.method.DEFAULT,
    )
def test_blocking__commit_custom_timeout():
    """A timeout passed at batch creation must reach the publish call."""
    batch = create_batch(commit_timeout=mock.sentinel.custom_timeout)
    batch.publish({"data": b"This is my message."})
    # Set up the underlying API publish method to return a PublishResponse.
    publish_response = gapic_types.PublishResponse(message_ids=["a"])
    patch = mock.patch.object(
        type(batch.client.api), "publish", return_value=publish_response
    )
    with patch as publish:
        batch._commit()
    # Establish that the underlying API call was made with expected
    # arguments.
    publish.assert_called_once_with(
        topic="topic_name",
        messages=[gapic_types.PubsubMessage(data=b"This is my message.")],
        retry=gapic_v1.method.DEFAULT,
        timeout=mock.sentinel.custom_timeout,
    )
def test_client_api_publish_not_blocking_additional_publish_calls():
    """publish() must not block while a prior commit's API call is in flight."""
    batch = create_batch(max_messages=1)
    api_publish_called = threading.Event()
    def api_publish_delay(topic="", messages=(), retry=None, timeout=None):
        # Simulate a slow backend: signal the call, then stall for 1 second.
        api_publish_called.set()
        time.sleep(1.0)
        message_ids = [str(i) for i in range(len(messages))]
        return gapic_types.PublishResponse(message_ids=message_ids)
    api_publish_patch = mock.patch.object(
        type(batch.client.api), "publish", side_effect=api_publish_delay
    )
    with api_publish_patch:
        batch.publish({"data": b"first message"})
        start = datetime.datetime.now()
        event_set = api_publish_called.wait(timeout=1.0)
        if not event_set:  # pragma: NO COVER
            pytest.fail("API publish was not called in time")
        batch.publish({"data": b"second message"})
        end = datetime.datetime.now()
    # While a batch commit in progress, waiting for the API publish call to
    # complete should not unnecessariliy delay other calls to batch.publish().
    assert (end - start).total_seconds() < 1.0
@mock.patch.object(thread, "_LOGGER")
def test_blocking__commit_starting(_LOGGER):
    """An empty STARTING batch commits immediately as SUCCESS with a log."""
    batch = create_batch()
    batch._status = BatchStatus.STARTING
    batch._commit()
    assert batch._status == BatchStatus.SUCCESS
    _LOGGER.debug.assert_called_once_with("No messages to publish, exiting commit")
@mock.patch.object(thread, "_LOGGER")
def test_blocking__commit_already_started(_LOGGER):
    """_commit() exits early (status unchanged) if a commit is in progress."""
    batch = create_batch()
    batch._status = BatchStatus.IN_PROGRESS
    batch._commit()
    assert batch._status == BatchStatus.IN_PROGRESS
    _LOGGER.debug.assert_called_once_with(
        "Batch is already in progress or has been cancelled, exiting commit"
    )
def test_blocking__commit_no_messages():
    """_commit() with nothing queued must not call the publish API at all."""
    batch = create_batch()
    with mock.patch.object(type(batch.client.api), "publish") as publish:
        batch._commit()
    assert publish.call_count == 0
def test_blocking__commit_wrong_messageid_length():
    """Fewer message IDs than messages must fail futures with PublishError."""
    batch = create_batch()
    futures = (
        batch.publish({"data": b"blah blah blah"}),
        batch.publish({"data": b"blah blah blah blah"}),
    )
    # Set up a PublishResponse that only returns one message ID.
    publish_response = gapic_types.PublishResponse(message_ids=["a"])
    patch = mock.patch.object(
        type(batch.client.api), "publish", return_value=publish_response
    )
    with patch:
        batch._commit()
    for future in futures:
        assert future.done()
        assert isinstance(future.exception(), exceptions.PublishError)
# NOTE(review): "commmit" in the name is a typo; kept to preserve identity.
def test_block__commmit_api_error():
    """An API error during publish must be set on every pending future."""
    batch = create_batch()
    futures = (
        batch.publish({"data": b"blah blah blah"}),
        batch.publish({"data": b"blah blah blah blah"}),
    )
    # Make the API throw an error when publishing.
    error = google.api_core.exceptions.InternalServerError("uh oh")
    patch = mock.patch.object(type(batch.client.api), "publish", side_effect=error)
    with patch:
        batch._commit()
    for future in futures:
        assert future.done()
        assert future.exception() == error
# NOTE(review): "commmit" in the name is a typo; kept to preserve identity.
def test_block__commmit_retry_error():
    """A RetryError during publish must be set on every pending future."""
    batch = create_batch()
    futures = (
        batch.publish({"data": b"blah blah blah"}),
        batch.publish({"data": b"blah blah blah blah"}),
    )
    # Make the API throw an error when publishing.
    error = google.api_core.exceptions.RetryError("uh oh", None)
    patch = mock.patch.object(type(batch.client.api), "publish", side_effect=error)
    with patch:
        batch._commit()
    for future in futures:
        assert future.done()
        assert future.exception() == error
def test_publish_updating_batch_size():
    """batch.size must track each message's PublishRequest byte contribution."""
    batch = create_batch(topic="topic_foo")
    messages = (
        gapic_types.PubsubMessage(data=b"foobarbaz"),
        gapic_types.PubsubMessage(data=b"spameggs"),
        gapic_types.PubsubMessage(data=b"1335020400"),
    )
    # Publish each of the messages, which should save them to the batch.
    futures = [batch.publish(message) for message in messages]
    # There should be three messages on the batch, and three futures.
    assert len(batch.messages) == 3
    assert batch._futures == futures
    # The size should have been incremented by the sum of the size
    # contributions of each message to the PublishRequest.
    base_request_size = gapic_types.PublishRequest(topic="topic_foo")._pb.ByteSize()
    expected_request_size = base_request_size + sum(
        gapic_types.PublishRequest(messages=[msg])._pb.ByteSize() for msg in messages
    )
    assert batch.size == expected_request_size
    assert batch.size > 0  # I do not always trust protobuf.
def test_publish():
    """A published message is queued and yields a tracking future."""
    batch = create_batch()
    message = gapic_types.PubsubMessage()
    future = batch.publish(message)
    assert len(batch.messages) == 1
    assert batch._futures == [future]
def test_publish_max_messages_zero():
    """With max_messages=0 the first publish still queues, then commits."""
    batch = create_batch(topic="topic_foo", max_messages=0)
    message = gapic_types.PubsubMessage(data=b"foobarbaz")
    with mock.patch.object(batch, "commit") as commit:
        future = batch.publish(message)
    assert future is not None
    assert len(batch.messages) == 1
    assert batch._futures == [future]
    commit.assert_called_once()
def test_publish_max_messages_enforced():
    """A publish beyond max_messages is rejected (returns None, not queued)."""
    batch = create_batch(topic="topic_foo", max_messages=1)
    message = gapic_types.PubsubMessage(data=b"foobarbaz")
    message2 = gapic_types.PubsubMessage(data=b"foobarbaz2")
    future = batch.publish(message)
    future2 = batch.publish(message2)
    assert future is not None
    assert future2 is None
    assert len(batch.messages) == 1
    assert len(batch._futures) == 1
def test_publish_max_bytes_enforced():
    """A publish that would exceed max_bytes is rejected (returns None)."""
    batch = create_batch(topic="topic_foo", max_bytes=15)
    message = gapic_types.PubsubMessage(data=b"foobarbaz")
    message2 = gapic_types.PubsubMessage(data=b"foobarbaz2")
    future = batch.publish(message)
    future2 = batch.publish(message2)
    assert future is not None
    assert future2 is None
    assert len(batch.messages) == 1
    assert len(batch._futures) == 1
def test_publish_exceed_max_messages():
    """Hitting max_messages triggers commit; the overflow publish returns None."""
    max_messages = 4
    batch = create_batch(max_messages=max_messages)
    messages = (
        gapic_types.PubsubMessage(data=b"foobarbaz"),
        gapic_types.PubsubMessage(data=b"spameggs"),
        gapic_types.PubsubMessage(data=b"1335020400"),
    )
    # Publish each of the messages, which should save them to the batch.
    with mock.patch.object(batch, "commit") as commit:
        futures = [batch.publish(message) for message in messages]
        assert batch._futures == futures
        assert len(futures) == max_messages - 1
        # Commit should not yet have been called.
        assert commit.call_count == 0
        # When a fourth message is published, commit should be called.
        # No future will be returned in this case.
        future = batch.publish(gapic_types.PubsubMessage(data=b"last one"))
        commit.assert_called_once_with()
        assert future is None
    assert batch._futures == futures
@mock.patch.object(thread, "_SERVER_PUBLISH_MAX_BYTES", 1000)
def test_publish_single_message_size_exceeds_server_size_limit():
    """One message larger than the server limit raises MessageTooLargeError."""
    batch = create_batch(
        topic="topic_foo",
        max_messages=1000,
        max_bytes=1000 * 1000,  # way larger than (mocked) server side limit
    )
    big_message = gapic_types.PubsubMessage(data=b"x" * 984)
    request_size = gapic_types.PublishRequest(
        topic="topic_foo", messages=[big_message]
    )._pb.ByteSize()
    assert request_size == 1001  # sanity check, just above the (mocked) server limit
    with pytest.raises(exceptions.MessageTooLargeError):
        batch.publish(big_message)
@mock.patch.object(thread, "_SERVER_PUBLISH_MAX_BYTES", 1000)
def test_publish_total_messages_size_exceeds_server_size_limit():
    """Aggregate size crossing the server limit forces a commit, even when
    still under the client-side BatchSettings.max_bytes."""
    batch = create_batch(topic="topic_foo", max_messages=10, max_bytes=1500)
    messages = (
        gapic_types.PubsubMessage(data=b"x" * 500),
        gapic_types.PubsubMessage(data=b"x" * 600),
    )
    # Sanity check - request size is still below BatchSettings.max_bytes,
    # but it exceeds the server-side size limit.
    request_size = gapic_types.PublishRequest(
        topic="topic_foo", messages=messages
    )._pb.ByteSize()
    assert 1000 < request_size < 1500
    with mock.patch.object(batch, "commit") as fake_commit:
        batch.publish(messages[0])
        batch.publish(messages[1])
    # The server side limit should kick in and cause a commit.
    fake_commit.assert_called_once()
def test_publish_dict():
    """A dict payload is converted into an equivalent PubsubMessage."""
    batch = create_batch()
    future = batch.publish({"data": b"foobarbaz", "attributes": {"spam": "eggs"}})
    # There should be one message on the batch.
    expected_message = gapic_types.PubsubMessage(
        data=b"foobarbaz", attributes={"spam": "eggs"}
    )
    assert batch.messages == [expected_message]
    assert batch._futures == [future]
def test_cancel():
    """cancel() fails every pending future with the cancellation reason."""
    batch = create_batch()
    futures = (
        batch.publish({"data": b"This is my message."}),
        batch.publish({"data": b"This is another message."}),
    )
    batch.cancel(BatchCancellationReason.PRIOR_ORDERED_MESSAGE_FAILED)
    # Assert all futures are cancelled with an error.
    for future in futures:
        exc = future.exception()
        assert type(exc) is RuntimeError
        assert exc.args[0] == BatchCancellationReason.PRIOR_ORDERED_MESSAGE_FAILED.value
def test_do_not_commit_when_full_when_flag_is_off():
    """With commit_when_full=False a full batch rejects but never commits."""
    max_messages = 4
    # Set commit_when_full flag to False
    batch = create_batch(max_messages=max_messages, commit_when_full=False)
    messages = (
        gapic_types.PubsubMessage(data=b"foobarbaz"),
        gapic_types.PubsubMessage(data=b"spameggs"),
        gapic_types.PubsubMessage(data=b"1335020400"),
    )
    with mock.patch.object(batch, "commit") as commit:
        # Publish 3 messages.
        futures = [batch.publish(message) for message in messages]
        assert len(futures) == 3
        # When a fourth message is published, commit should not be called.
        future = batch.publish(gapic_types.PubsubMessage(data=b"last one"))
        assert commit.call_count == 0
        assert future is None
class BatchDoneCallbackTracker(object):
    """Callable sentinel recording whether and how it was invoked."""

    def __init__(self):
        # Not invoked yet; outcome unknown.
        self.called = False
        self.success = None

    def __call__(self, success):
        # Remember both the fact of the call and its outcome.
        self.called, self.success = True, success
def test_batch_done_callback_called_on_success():
    """A clean publish invokes the done callback with success=True."""
    batch_done_callback_tracker = BatchDoneCallbackTracker()
    batch = create_batch(batch_done_callback=batch_done_callback_tracker)
    # Ensure messages exist.
    message = gapic_types.PubsubMessage(data=b"foobarbaz")
    batch.publish(message)
    # One response for one published message.
    publish_response = gapic_types.PublishResponse(message_ids=["a"])
    with mock.patch.object(
        type(batch.client.api), "publish", return_value=publish_response
    ):
        batch._commit()
    assert batch_done_callback_tracker.called
    assert batch_done_callback_tracker.success
def test_batch_done_callback_called_on_publish_failure():
    """A publish error invokes the done callback with success=False."""
    batch_done_callback_tracker = BatchDoneCallbackTracker()
    batch = create_batch(batch_done_callback=batch_done_callback_tracker)
    # Ensure messages exist.
    message = gapic_types.PubsubMessage(data=b"foobarbaz")
    batch.publish(message)
    # One response for one published message.
    publish_response = gapic_types.PublishResponse(message_ids=["a"])
    # Induce publish error.
    error = google.api_core.exceptions.InternalServerError("uh oh")
    with mock.patch.object(
        type(batch.client.api),
        "publish",
        # NOTE: side_effect takes precedence, so return_value is never used.
        return_value=publish_response,
        side_effect=error,
    ):
        batch._commit()
    assert batch_done_callback_tracker.called
    assert not batch_done_callback_tracker.success
def test_batch_done_callback_called_on_publish_response_invalid():
    """A response missing message IDs invokes the callback with success=False."""
    batch_done_callback_tracker = BatchDoneCallbackTracker()
    batch = create_batch(batch_done_callback=batch_done_callback_tracker)
    # Ensure messages exist.
    message = gapic_types.PubsubMessage(data=b"foobarbaz")
    batch.publish(message)
    # No message ids returned in successful publish response -> invalid.
    publish_response = gapic_types.PublishResponse(message_ids=[])
    with mock.patch.object(
        type(batch.client.api), "publish", return_value=publish_response
    ):
        batch._commit()
    assert batch_done_callback_tracker.called
    assert not batch_done_callback_tracker.success
|
zsiciarz/django
|
refs/heads/master
|
tests/string_lookup/models.py
|
106
|
from django.db import models
class Foo(models.Model):
    """Model with a name and an optional friend (plain char field, not a FK)."""
    name = models.CharField(max_length=50)
    friend = models.CharField(max_length=50, blank=True)

    def __str__(self):
        return "Foo %s" % self.name
class Bar(models.Model):
    """Model with three foreign keys (one forward-declared, one lazy)."""
    name = models.CharField(max_length=50)
    normal = models.ForeignKey(Foo, models.CASCADE, related_name='normal_foo')
    fwd = models.ForeignKey("Whiz", models.CASCADE)
    back = models.ForeignKey("Foo", models.CASCADE)

    def __str__(self):
        # Bug fix: this previously read self.place.name, but Bar has no
        # 'place' field, so __str__ always raised AttributeError.
        # NOTE(review): self.name matches the sibling models' convention —
        # confirm 'name' (and not e.g. normal.name) was the intent.
        return "Bar %s" % self.name
class Whiz(models.Model):
    """Target of Bar.fwd's forward (string) foreign-key reference."""
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Whiz %s" % self.name
class Child(models.Model):
    """One-to-one child of Base, referenced lazily by name."""
    parent = models.OneToOneField('Base', models.CASCADE)
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Child %s" % self.name
class Base(models.Model):
    """Parent side of Child's one-to-one relation."""
    name = models.CharField(max_length=50)

    def __str__(self):
        return "Base %s" % self.name
class Article(models.Model):
    """Model exercising text and (nullable) generic IP-address fields."""
    name = models.CharField(max_length=50)
    text = models.TextField()
    submitted_from = models.GenericIPAddressField(blank=True, null=True)

    def __str__(self):
        return "Article %s" % self.name
|
jk1/intellij-community
|
refs/heads/master
|
python/testData/refactoring/move/qualifiedReferenceInDestinationModule/after/src/a.py
|
31
|
# NOTE(review): fixture data for an IDE move-refactoring test; the value
# appears arbitrary — confirm before reusing.
something_else = 2
|
levkar/odoo
|
refs/heads/10.0
|
addons/payment_buckaroo/models/__init__.py
|
163
|
# -*- coding: utf-8 -*-
import payment
|
karst87/ml
|
refs/heads/master
|
dev/statistical_ml/logistic_regression/logistic_regression.py
|
1
|
# -*- coding: utf-8 -*-
# logistic_regression.py
"""
Created by jin.xia on May 09 2017
@author: jin.xia
"""
import numpy as np
import matplotlib.pyplot as plt
class logistic_regression():
"""docstring for logistic_regression"""
    def __init__(self, arg):
        # Store the sole constructor argument on the instance.
        # NOTE(review): explicit two-argument super() kept for Python 2/3
        # compatibility; it is a no-op for the implicit object base.
        super(logistic_regression, self).__init__()
        self.arg = arg
def sigmod(self, x):
return 1 / (1 + np.exp(-x))
def learning(training_X, training_Y):
self.training_X = training_X
self.training_Y = training_Y
self.training_num = self.training_X.shape[0]
self.feature_num = self.training_X.shape[1]
w = np.ones(feature_num)
while True:
x = np.dot(training_X, w)
y = sigmod(x)
diff = training_Y - y
|
gnieboer/gnuradio
|
refs/heads/android
|
gr-blocks/python/blocks/qa_cpp_py_binding.py
|
37
|
#!/usr/bin/env python
#
# Copyright 2012,2013,2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
#
# This program tests mixed python and c++ ctrlport exports in a single app
#
import sys, time, random, numpy, re
from gnuradio import gr, gr_unittest, blocks
from gnuradio.ctrlport import GNURadio
from gnuradio import ctrlport
import os
def get1():
    """Probe callback: always reports the string "success"."""
    result = "success"
    return result
def get2():
    """Probe callback: always reports the string "failure"."""
    result = "failure"
    return result
class inc_class:
    """Stateful counter whose pp() bumps the internal value and reports it."""
    def __init__(self):
        # Counter starts at 1, so the first pp() call yields 2.
        self.val = 1
    def pp(self):
        self.val += 1
        return self.val
# Module-level instance; its bound pp method is what gets exported as a knob.
get3 = inc_class()
def get4():
    """Return the first random.random() draw from a PRNG freshly seeded with 0.

    Re-seeding on every call makes the value deterministic across calls.
    """
    random.seed(0)
    return random.random()
def get5():
    """Return the log-power spectrum (dB) of a fixed 24-sample complex noise burst.

    Seeding numpy's PRNG with 0 makes the returned list deterministic.
    """
    numpy.random.seed(0)
    noise = numpy.random.randn(24) + 1j * numpy.random.randn(24)
    spectrum = numpy.fft.fft(noise)
    return list(20 * numpy.log10(numpy.abs(spectrum)))
def get6():
    """Return a fixed 1024-sample complex Gaussian noise vector as a list.

    Seeding numpy's PRNG with 0 makes the returned list deterministic.
    """
    numpy.random.seed(0)
    noise = numpy.random.randn(1024) + 1j * numpy.random.randn(1024)
    return list(noise)
class test_cpp_py_binding(gr_unittest.TestCase):
    """Verify that Python- and C++-registered ControlPort knobs coexist in one app."""
    def setUp(self):
        # ControlPort must be enabled in the environment before blocks are
        # instantiated, or nothing gets exported.
        self.tb = gr.top_block()
        os.environ['GR_CONF_CONTROLPORT_ON'] = 'True'
    def tearDown(self):
        # Drop the flowgraph so each test starts from a clean slate.
        self.tb = None
    def test_001(self):
        """Export Python callables as typed RPC knobs; check get() round-trips."""
        v1 = gr.RPC_get_string("pyland", "v1", "unit_1_string",
                               "Python Exported String", "", "", "",
                               gr.DISPNULL)
        v1.activate(get1)
        v2 = gr.RPC_get_string("pyland", "v2", "unit_2_string",
                               "Python Exported String", "", "", "",
                               gr.DISPNULL)
        v2.activate(get2)
        v3 = gr.RPC_get_int("pyland", "v3", "unit_3_int",
                            "Python Exported Int", 0, 100, 1,
                            gr.DISPNULL)
        v3.activate(get3.pp)
        v4 = gr.RPC_get_double("pyland", "time", "unit_4_time_double",
                               "Python Exported Double", 0, 1000, 1,
                               gr.DISPNULL)
        v4.activate(get4)
        v5 = gr.RPC_get_vector_float("pyland", "fvec", "unit_5_float_vector",
                                     "Python Exported Float Vector", [], [], [],
                                     gr.DISPTIME | gr.DISPOPTCPLX)
        v5.activate(get5)
        v6 = gr.RPC_get_vector_gr_complex("pyland", "cvec", "unit_6_gr_complex_vector",
                                          "Python Exported Complex Vector", [], [], [],
                                          gr.DISPXY | gr.DISPOPTSCATTER)
        v6.activate(get6)
        # Compare each knob's reported value with a direct call to the
        # callable it wraps.
        val = get1()
        rval = v1.get()
        self.assertEqual(val, rval)
        val = get2()
        rval = v2.get()
        self.assertEqual(val, rval)
        # get3.pp() increments on every call; v3.get() presumably invokes
        # pp() again after this local call, hence the off-by-one check.
        val = get3.pp()
        rval = v3.get()
        self.assertEqual(val+1, rval)
        val = get4()
        rval = v4.get()
        self.assertEqual(val, rval)
        val = get5()
        rval = v5.get()
        self.assertComplexTuplesAlmostEqual(val, rval, 5)
        val = get6()
        rval = v6.get()
        self.assertComplexTuplesAlmostEqual(val, rval, 5)
    def test_002(self):
        """Read a C++-exported probe knob through a real ControlPort client."""
        data = range(1,9)
        self.src = blocks.vector_source_c(data)
        self.p1 = blocks.ctrlport_probe_c("aaa","C++ exported variable")
        self.p2 = blocks.ctrlport_probe_c("bbb","C++ exported variable")
        probe_name = self.p2.alias()
        self.tb.connect(self.src, self.p1)
        self.tb.connect(self.src, self.p2)
        self.tb.start()
        # Probes return complex values as list of floats with re, im
        # Imaginary parts of this data set are 0.
        expected_result = [1, 2, 3, 4,
                           5, 6, 7, 8]
        # Make sure we have time for flowgraph to run
        time.sleep(0.1)
        # Get available endpoint; it is advertised as a command-line-style
        # string ("-h <host> -p <port>"), so parse it with regexes.
        ep = gr.rpcmanager_get().endpoints()[0]
        hostname = re.search("-h (\S+|\d+\.\d+\.\d+\.\d+)", ep).group(1)
        portnum = re.search("-p (\d+)", ep).group(1)
        argv = [None, hostname, portnum]
        # Initialize a simple ControlPort client from endpoint
        from gnuradio.ctrlport.GNURadioControlPortClient import GNURadioControlPortClient
        radiosys = GNURadioControlPortClient(argv=argv, rpcmethod='thrift')
        radio = radiosys.client
        # Get all exported knobs and check the probe saw the source data.
        ret = radio.getKnobs([probe_name + "::bbb"])
        for name in ret.keys():
            result = ret[name].value
            self.assertEqual(result, expected_result)
        self.tb.stop()
if __name__ == '__main__':
    # Run the suite under GNU Radio's unittest wrapper, emitting an XML report.
    gr_unittest.run(test_cpp_py_binding, "test_cpp_py_binding.xml")
|
cloudera/hue
|
refs/heads/master
|
desktop/core/ext-py/gunicorn-19.9.0/examples/readline.py
|
7
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
#
# Simple example of readline, reading from a stream then echoing the response
#
# Usage:
#
# Launch a server with the app in a terminal
#
# $ gunicorn -w3 readline:app
#
# Then in another terminal launch the following command:
#
# $ curl -XPOST -d'test\r\ntest2\r\n' -H"Transfer-Encoding: Chunked" http://localhost:8000
from gunicorn import __version__
def app(environ, start_response):
    """WSGI application that echoes back each line read from the request body."""
    headers = [
        ('Content-type', 'text/plain'),
        ('Transfer-Encoding', "chunked"),
        ('X-Gunicorn-Version', __version__),
        # ("Test", "test тест"),
    ]
    start_response('200 OK', headers)
    stream = environ['wsgi.input']
    echoed = []
    # readline() yields b"" at end of stream, which terminates the loop.
    line = stream.readline()
    while line != b"":
        print(line)
        echoed.append(line)
        line = stream.readline()
    return iter(echoed)
|
nurmd2/nurmd
|
refs/heads/master
|
openerp/addons/base/ir/ir_exports.py
|
45
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import fields,osv
class ir_exports(osv.osv):
    """Saved export configuration: a named list of fields for one model."""
    _name = "ir.exports"
    _order = 'name'
    _columns = {
        # Human-readable name of the saved export.
        'name': fields.char('Export Name'),
        # Technical name of the model being exported (e.g. 'res.partner').
        'resource': fields.char('Resource', select=True),
        # The field lines making up this export; copied along with the record.
        'export_fields': fields.one2many('ir.exports.line', 'export_id',
                                         'Export ID', copy=True),
    }
class ir_exports_line(osv.osv):
    """One field entry of a saved export; deleted along with its parent."""
    _name = 'ir.exports.line'
    _order = 'id'
    _columns = {
        # Dotted field path to export (e.g. 'partner_id.name').
        'name': fields.char('Field Name'),
        # Parent export; cascade delete removes lines with their export.
        'export_id': fields.many2one('ir.exports', 'Export', select=True, ondelete='cascade'),
    }
|
thanatoskira/AndroGuard
|
refs/heads/master
|
androguard/core/bytecodes/dvm_permissions.py
|
7
|
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
# frameworks/base/core/res/AndroidManifest.xml
########################################## PERMISSIONS ########################################################
# Android permission metadata table (derived from
# frameworks/base/core/res/AndroidManifest.xml).
# "MANIFEST_PERMISSION" maps each permission name to
# [protection level, short label, long description];
# "MANIFEST_PERMISSION_GROUP" maps each permission group to its description.
DVM_PERMISSIONS = {
    "MANIFEST_PERMISSION" : {
        "SEND_SMS" : [ "dangerous" , "send SMS messages" , "Allows application to send SMS messages. Malicious applications may cost you money by sending messages without your confirmation." ],
        "SEND_SMS_NO_CONFIRMATION" : [ "dangerous", "send SMS messages", "send SMS messages via the Messaging app with no user input or confirmation" ],
        "CALL_PHONE" : [ "dangerous" , "directly call phone numbers" , "Allows the application to call phone numbers without your intervention. Malicious applications may cause unexpected calls on your phone bill. Note that this does not allow the application to call emergency numbers." ],
        "RECEIVE_SMS" : [ "dangerous" , "receive SMS" , "Allows application to receive and process SMS messages. Malicious applications may monitor your messages or delete them without showing them to you." ],
        "RECEIVE_MMS" : [ "dangerous" , "receive MMS" , "Allows application to receive and process MMS messages. Malicious applications may monitor your messages or delete them without showing them to you." ],
        "READ_SMS" : [ "dangerous" , "read SMS or MMS" , "Allows application to read SMS messages stored on your phone or SIM card. Malicious applications may read your confidential messages." ],
        "WRITE_SMS" : [ "dangerous" , "edit SMS or MMS" , "Allows application to write to SMS messages stored on your phone or SIM card. Malicious applications may delete your messages." ],
        "RECEIVE_WAP_PUSH" : [ "dangerous" , "receive WAP" , "Allows application to receive and process WAP messages. Malicious applications may monitor your messages or delete them without showing them to you." ],
        "READ_CONTACTS" : [ "dangerous" , "read contact data" , "Allows an application to read all of the contact (address) data stored on your phone. Malicious applications can use this to send your data to other people." ],
        "WRITE_CONTACTS" : [ "dangerous" , "write contact data" , "Allows an application to modify the contact (address) data stored on your phone. Malicious applications can use this to erase or modify your contact data." ],
        "READ_PROFILE" : [ "dangerous", "read the user's personal profile data", "Allows an application to read the user's personal profile data."],
        "WRITE_PROFILE" : [ "dangerous", "write the user's personal profile data", "Allows an application to write (but not read) the user's personal profile data."],
        "READ_SOCIAL_STREAM" : [ "dangerous", "read from the user's social stream", "Allows an application to read from the user's social stream." ],
        "WRITE_SOCIAL_STREAM" : [ "dangerous", "write the user's social stream", "Allows an application to write (but not read) the user's social stream data." ],
        "READ_CALENDAR" : [ "dangerous" , "read calendar events" , "Allows an application to read all of the calendar events stored on your phone. Malicious applications can use this to send your calendar events to other people." ],
        "WRITE_CALENDAR" : [ "dangerous" , "add or modify calendar events and send emails to guests" , "Allows an application to add or change the events on your calendar, which may send emails to guests. Malicious applications can use this to erase or modify your calendar events or to send emails to guests." ],
        "READ_USER_DICTIONARY" : [ "dangerous" , "read user-defined dictionary" , "Allows an application to read any private words, names and phrases that the user may have stored in the user dictionary." ],
        "WRITE_USER_DICTIONARY" : [ "normal" , "write to user-defined dictionary" , "Allows an application to write new words into the user dictionary." ],
        "READ_HISTORY_BOOKMARKS" : [ "dangerous" , "read Browser\'s history and bookmarks" , "Allows the application to read all the URLs that the browser has visited and all of the browser\'s bookmarks." ],
        "WRITE_HISTORY_BOOKMARKS" : [ "dangerous" , "write Browser\'s history and bookmarks" , "Allows an application to modify the browser\'s history or bookmarks stored on your phone. Malicious applications can use this to erase or modify your browser\'s data." ],
        "SET_ALARM" : [ "normal" , "set alarm in alarm clock" , "Allows the application to set an alarm in an installed alarm clock application. Some alarm clock applications may not implement this feature." ],
        "ACCESS_FINE_LOCATION" : [ "dangerous" , "fine (GPS) location" , "Access fine location sources, such as the Global Positioning System on the phone, where available. Malicious applications can use this to determine where you are and may consume additional battery power." ],
        "ACCESS_COARSE_LOCATION" : [ "dangerous" , "coarse (network-based) location" , "Access coarse location sources, such as the mobile network database, to determine an approximate phone location, where available. Malicious applications can use this to determine approximately where you are." ],
        "ACCESS_MOCK_LOCATION" : [ "dangerous" , "mock location sources for testing" , "Create mock location sources for testing. Malicious applications can use this to override the location and/or status returned by real-location sources such as GPS or Network providers." ],
        "ACCESS_LOCATION_EXTRA_COMMANDS" : [ "normal" , "access extra location provider commands" , "Access extra location provider commands. Malicious applications could use this to interfere with the operation of the GPS or other location sources." ],
        "INSTALL_LOCATION_PROVIDER" : [ "signatureOrSystem" , "permission to install a location provider" , "Create mock location sources for testing. Malicious applications can use this to override the location and/or status returned by real-location sources such as GPS or Network providers, or monitor and report your location to an external source." ],
        "INTERNET" : [ "dangerous" , "full Internet access" , "Allows an application to create network sockets." ],
        "ACCESS_NETWORK_STATE" : [ "normal" , "view network status" , "Allows an application to view the status of all networks." ],
        "ACCESS_WIFI_STATE" : [ "normal" , "view Wi-Fi status" , "Allows an application to view the information about the status of Wi-Fi." ],
        "BLUETOOTH" : [ "dangerous" , "create Bluetooth connections" , "Allows an application to view configuration of the local Bluetooth phone and to make and accept connections with paired devices." ],
        "NFC" : [ "dangerous" , "control Near-Field Communication" , "Allows an application to communicate with Near-Field Communication (NFC) tags, cards and readers." ],
        "USE_SIP" : [ "dangerous" , "make/receive Internet calls" , "Allows an application to use the SIP service to make/receive Internet calls." ],
        "ACCOUNT_MANAGER" : [ "signature" , "act as the Account Manager Service" , "Allows an application to make calls to Account Authenticators" ],
        "GET_ACCOUNTS" : [ "normal" , "discover known accounts" , "Allows an application to access the list of accounts known by the phone." ],
        "AUTHENTICATE_ACCOUNTS" : [ "dangerous" , "act as an account authenticator" , "Allows an application to use the account authenticator capabilities of the Account Manager, including creating accounts as well as obtaining and setting their passwords." ],
        "USE_CREDENTIALS" : [ "dangerous" , "use the authentication credentials of an account" , "Allows an application to request authentication tokens." ],
        "MANAGE_ACCOUNTS" : [ "dangerous" , "manage the accounts list" , "Allows an application to perform operations like adding and removing accounts and deleting their password." ],
        "MODIFY_AUDIO_SETTINGS" : [ "dangerous" , "change your audio settings" , "Allows application to modify global audio settings, such as volume and routing." ],
        "RECORD_AUDIO" : [ "dangerous" , "record audio" , "Allows application to access the audio record path." ],
        "CAMERA" : [ "dangerous" , "take pictures and videos" , "Allows application to take pictures and videos with the camera. This allows the application to collect images that the camera is seeing at any time." ],
        "VIBRATE" : [ "normal" , "control vibrator" , "Allows the application to control the vibrator." ],
        "FLASHLIGHT" : [ "normal" , "control flashlight" , "Allows the application to control the flashlight." ],
        "ACCESS_USB" : [ "signatureOrSystem" , "access USB devices" , "Allows the application to access USB devices." ],
        "HARDWARE_TEST" : [ "signature" , "test hardware" , "Allows the application to control various peripherals for the purpose of hardware testing." ],
        "PROCESS_OUTGOING_CALLS" : [ "dangerous" , "intercept outgoing calls" , "Allows application to process outgoing calls and change the number to be dialled. Malicious applications may monitor, redirect or prevent outgoing calls." ],
        "MODIFY_PHONE_STATE" : [ "signatureOrSystem" , "modify phone status" , "Allows the application to control the phone features of the device. An application with this permission can switch networks, turn the phone radio on and off and the like, without ever notifying you." ],
        "READ_PHONE_STATE" : [ "dangerous" , "read phone state and identity" , "Allows the application to access the phone features of the device. An application with this permission can determine the phone number and serial number of this phone, whether a call is active, the number that call is connected to and so on." ],
        "WRITE_EXTERNAL_STORAGE" : [ "dangerous" , "modify/delete SD card contents" , "Allows an application to write to the SD card." ],
        "WRITE_SETTINGS" : [ "dangerous" , "modify global system settings" , "Allows an application to modify the system\'s settings data. Malicious applications can corrupt your system\'s configuration." ],
        "WRITE_SECURE_SETTINGS" : [ "signatureOrSystem" , "modify secure system settings" , "Allows an application to modify the system\'s secure settings data. Not for use by normal applications." ],
        "WRITE_GSERVICES" : [ "signatureOrSystem" , "modify the Google services map" , "Allows an application to modify the Google services map. Not for use by normal applications." ],
        "EXPAND_STATUS_BAR" : [ "normal" , "expand/collapse status bar" , "Allows application to expand or collapse the status bar." ],
        "GET_TASKS" : [ "dangerous" , "retrieve running applications" , "Allows application to retrieve information about currently and recently running tasks. May allow malicious applications to discover private information about other applications." ],
        "REORDER_TASKS" : [ "dangerous" , "reorder applications running" , "Allows an application to move tasks to the foreground and background. Malicious applications can force themselves to the front without your control." ],
        "CHANGE_CONFIGURATION" : [ "dangerous" , "change your UI settings" , "Allows an application to change the current configuration, such as the locale or overall font size." ],
        "RESTART_PACKAGES" : [ "normal" , "kill background processes" , "Allows an application to kill background processes of other applications, even if memory is not low." ],
        "KILL_BACKGROUND_PROCESSES" : [ "normal" , "kill background processes" , "Allows an application to kill background processes of other applications, even if memory is not low." ],
        "FORCE_STOP_PACKAGES" : [ "signature" , "force-stop other applications" , "Allows an application to stop other applications forcibly." ],
        "DUMP" : [ "signatureOrSystem" , "retrieve system internal status" , "Allows application to retrieve internal status of the system. Malicious applications may retrieve a wide variety of private and secure information that they should never normally need." ],
        "SYSTEM_ALERT_WINDOW" : [ "dangerous" , "display system-level alerts" , "Allows an application to show system-alert windows. Malicious applications can take over the entire screen of the phone." ],
        "SET_ANIMATION_SCALE" : [ "dangerous" , "modify global animation speed" , "Allows an application to change the global animation speed (faster or slower animations) at any time." ],
        "PERSISTENT_ACTIVITY" : [ "dangerous" , "make application always run" , "Allows an application to make parts of itself persistent, so that the system can\'t use it for other applications." ],
        "GET_PACKAGE_SIZE" : [ "normal" , "measure application storage space" , "Allows an application to retrieve its code, data and cache sizes" ],
        "SET_PREFERRED_APPLICATIONS" : [ "signature" , "set preferred applications" , "Allows an application to modify your preferred applications. This can allow malicious applications to silently change the applications that are run, spoofing your existing applications to collect private data from you." ],
        "RECEIVE_BOOT_COMPLETED" : [ "normal" , "automatically start at boot" , "Allows an application to start itself as soon as the system has finished booting. This can make it take longer to start the phone and allow the application to slow down the overall phone by always running." ],
        "BROADCAST_STICKY" : [ "normal" , "send sticky broadcast" , "Allows an application to send sticky broadcasts, which remain after the broadcast ends. Malicious applications can make the phone slow or unstable by causing it to use too much memory." ],
        "WAKE_LOCK" : [ "dangerous" , "prevent phone from sleeping" , "Allows an application to prevent the phone from going to sleep." ],
        "SET_WALLPAPER" : [ "normal" , "set wallpaper" , "Allows the application to set the system wallpaper." ],
        "SET_WALLPAPER_HINTS" : [ "normal" , "set wallpaper size hints" , "Allows the application to set the system wallpaper size hints." ],
        "SET_TIME" : [ "signatureOrSystem" , "set time" , "Allows an application to change the phone\'s clock time." ],
        "SET_TIME_ZONE" : [ "dangerous" , "set time zone" , "Allows an application to change the phone\'s time zone." ],
        "MOUNT_UNMOUNT_FILESYSTEMS" : [ "dangerous" , "mount and unmount file systems" , "Allows the application to mount and unmount file systems for removable storage." ],
        "MOUNT_FORMAT_FILESYSTEMS" : [ "dangerous" , "format external storage" , "Allows the application to format removable storage." ],
        "ASEC_ACCESS" : [ "signature" , "get information on internal storage" , "Allows the application to get information on internal storage." ],
        "ASEC_CREATE" : [ "signature" , "create internal storage" , "Allows the application to create internal storage." ],
        "ASEC_DESTROY" : [ "signature" , "destroy internal storage" , "Allows the application to destroy internal storage." ],
        "ASEC_MOUNT_UNMOUNT" : [ "signature" , "mount/unmount internal storage" , "Allows the application to mount/unmount internal storage." ],
        "ASEC_RENAME" : [ "signature" , "rename internal storage" , "Allows the application to rename internal storage." ],
        "DISABLE_KEYGUARD" : [ "dangerous" , "disable key lock" , "Allows an application to disable the key lock and any associated password security. A legitimate example of this is the phone disabling the key lock when receiving an incoming phone call, then re-enabling the key lock when the call is finished." ],
        "READ_SYNC_SETTINGS" : [ "normal" , "read sync settings" , "Allows an application to read the sync settings, such as whether sync is enabled for Contacts." ],
        "WRITE_SYNC_SETTINGS" : [ "dangerous" , "write sync settings" , "Allows an application to modify the sync settings, such as whether sync is enabled for Contacts." ],
        "READ_SYNC_STATS" : [ "normal" , "read sync statistics" , "Allows an application to read the sync stats; e.g. the history of syncs that have occurred." ],
        "WRITE_APN_SETTINGS" : [ "dangerous" , "write Access Point Name settings" , "Allows an application to modify the APN settings, such as Proxy and Port of any APN." ],
        "SUBSCRIBED_FEEDS_READ" : [ "normal" , "read subscribed feeds" , "Allows an application to receive details about the currently synced feeds." ],
        "SUBSCRIBED_FEEDS_WRITE" : [ "dangerous" , "write subscribed feeds" , "Allows an application to modify your currently synced feeds. This could allow a malicious application to change your synced feeds." ],
        "CHANGE_NETWORK_STATE" : [ "dangerous" , "change network connectivity" , "Allows an application to change the state of network connectivity." ],
        "CHANGE_WIFI_STATE" : [ "dangerous" , "change Wi-Fi status" , "Allows an application to connect to and disconnect from Wi-Fi access points and to make changes to configured Wi-Fi networks." ],
        "CHANGE_WIFI_MULTICAST_STATE" : [ "dangerous" , "allow Wi-Fi Multicast reception" , "Allows an application to receive packets not directly addressed to your device. This can be useful when discovering services offered nearby. It uses more power than the non-multicast mode." ],
        "BLUETOOTH_ADMIN" : [ "dangerous" , "bluetooth administration" , "Allows an application to configure the local Bluetooth phone and to discover and pair with remote devices." ],
        "CLEAR_APP_CACHE" : [ "dangerous" , "delete all application cache data" , "Allows an application to free phone storage by deleting files in application cache directory. Access is usually very restricted to system process." ],
        "READ_LOGS" : [ "dangerous" , "read sensitive log data" , "Allows an application to read from the system\'s various log files. This allows it to discover general information about what you are doing with the phone, potentially including personal or private information." ],
        "SET_DEBUG_APP" : [ "dangerous" , "enable application debugging" , "Allows an application to turn on debugging for another application. Malicious applications can use this to kill other applications." ],
        "SET_PROCESS_LIMIT" : [ "dangerous" , "limit number of running processes" , "Allows an application to control the maximum number of processes that will run. Never needed for normal applications." ],
        "SET_ALWAYS_FINISH" : [ "dangerous" , "make all background applications close" , "Allows an application to control whether activities are always finished as soon as they go to the background. Never needed for normal applications." ],
        "SIGNAL_PERSISTENT_PROCESSES" : [ "dangerous" , "send Linux signals to applications" , "Allows application to request that the supplied signal be sent to all persistent processes." ],
        "DIAGNOSTIC" : [ "signature" , "read/write to resources owned by diag" , "Allows an application to read and write to any resource owned by the diag group; for example, files in /dev. This could potentially affect system stability and security. This should ONLY be used for hardware-specific diagnostics by the manufacturer or operator." ],
        "STATUS_BAR" : [ "signatureOrSystem" , "disable or modify status bar" , "Allows application to disable the status bar or add and remove system icons." ],
        "STATUS_BAR_SERVICE" : [ "signature" , "status bar" , "Allows the application to be the status bar." ],
        "FORCE_BACK" : [ "signature" , "force application to close" , "Allows an application to force any activity that is in the foreground to close and go back. Should never be needed for normal applications." ],
        "UPDATE_DEVICE_STATS" : [ "signatureOrSystem" , "modify battery statistics" , "Allows the modification of collected battery statistics. Not for use by normal applications." ],
        "INTERNAL_SYSTEM_WINDOW" : [ "signature" , "display unauthorised windows" , "Allows the creation of windows that are intended to be used by the internal system user interface. Not for use by normal applications." ],
        "MANAGE_APP_TOKENS" : [ "signature" , "manage application tokens" , "Allows applications to create and manage their own tokens, bypassing their normal Z-ordering. Should never be needed for normal applications." ],
        "INJECT_EVENTS" : [ "signature" , "press keys and control buttons" , "Allows an application to deliver its own input events (key presses, etc.) to other applications. Malicious applications can use this to take over the phone." ],
        "SET_ACTIVITY_WATCHER" : [ "signature" , "monitor and control all application launching" , "Allows an application to monitor and control how the system launches activities. Malicious applications may compromise the system completely. This permission is needed only for development, never for normal phone usage." ],
        "SHUTDOWN" : [ "signature" , "partial shutdown" , "Puts the activity manager into a shut-down state. Does not perform a complete shut down." ],
        "STOP_APP_SWITCHES" : [ "signature" , "prevent app switches" , "Prevents the user from switching to another application." ],
        "READ_INPUT_STATE" : [ "signature" , "record what you type and actions that you take" , "Allows applications to watch the keys that you press even when interacting with another application (such as entering a password). Should never be needed for normal applications." ],
        "BIND_INPUT_METHOD" : [ "signature" , "bind to an input method" , "Allows the holder to bind to the top-level interface of an input method. Should never be needed for normal applications." ],
        "BIND_WALLPAPER" : [ "signatureOrSystem" , "bind to wallpaper" , "Allows the holder to bind to the top-level interface of wallpaper. Should never be needed for normal applications." ],
        "BIND_DEVICE_ADMIN" : [ "signature" , "interact with device admin" , "Allows the holder to send intents to a device administrator. Should never be needed for normal applications." ],
        "SET_ORIENTATION" : [ "signature" , "change screen orientation" , "Allows an application to change the rotation of the screen at any time. Should never be needed for normal applications." ],
        "INSTALL_PACKAGES" : [ "signatureOrSystem" , "directly install applications" , "Allows an application to install new or updated Android packages. Malicious applications can use this to add new applications with arbitrarily powerful permissions." ],
        "CLEAR_APP_USER_DATA" : [ "signature" , "delete other applications\' data" , "Allows an application to clear user data." ],
        "DELETE_CACHE_FILES" : [ "signatureOrSystem" , "delete other applications\' caches" , "Allows an application to delete cache files." ],
        "DELETE_PACKAGES" : [ "signatureOrSystem" , "delete applications" , "Allows an application to delete Android packages. Malicious applications can use this to delete important applications." ],
        "MOVE_PACKAGE" : [ "signatureOrSystem" , "Move application resources" , "Allows an application to move application resources from internal to external media and vice versa." ],
        "CHANGE_COMPONENT_ENABLED_STATE" : [ "signatureOrSystem" , "enable or disable application components" , "Allows an application to change whether or not a component of another application is enabled. Malicious applications can use this to disable important phone capabilities. It is important to be careful with permission, as it is possible to bring application components into an unusable, inconsistent or unstable state." ],
        "ACCESS_SURFACE_FLINGER" : [ "signature" , "access SurfaceFlinger" , "Allows application to use SurfaceFlinger low-level features." ],
        "READ_FRAME_BUFFER" : [ "signature" , "read frame buffer" , "Allows application to read the content of the frame buffer." ],
        "BRICK" : [ "signature" , "permanently disable phone" , "Allows the application to disable the entire phone permanently. This is very dangerous." ],
        "REBOOT" : [ "signatureOrSystem" , "force phone reboot" , "Allows the application to force the phone to reboot." ],
        "DEVICE_POWER" : [ "signature" , "turn phone on or off" , "Allows the application to turn the phone on or off." ],
        "FACTORY_TEST" : [ "signature" , "run in factory test mode" , "Run as a low-level manufacturer test, allowing complete access to the phone hardware. Only available when a phone is running in manufacturer test mode." ],
        "BROADCAST_PACKAGE_REMOVED" : [ "signature" , "send package removed broadcast" , "Allows an application to broadcast a notification that an application package has been removed. Malicious applications may use this to kill any other application running." ],
        "BROADCAST_SMS" : [ "signature" , "send SMS-received broadcast" , "Allows an application to broadcast a notification that an SMS message has been received. Malicious applications may use this to forge incoming SMS messages." ],
        "BROADCAST_WAP_PUSH" : [ "signature" , "send WAP-PUSH-received broadcast" , "Allows an application to broadcast a notification that a WAP-PUSH message has been received. Malicious applications may use this to forge MMS message receipt or to replace the content of any web page silently with malicious variants." ],
        "MASTER_CLEAR" : [ "signatureOrSystem" , "reset system to factory defaults" , "Allows an application to completely reset the system to its factory settings, erasing all data, configuration and installed applications." ],
        "CALL_PRIVILEGED" : [ "signatureOrSystem" , "directly call any phone numbers" , "Allows the application to call any phone number, including emergency numbers, without your intervention. Malicious applications may place unnecessary and illegal calls to emergency services." ],
        "PERFORM_CDMA_PROVISIONING" : [ "signatureOrSystem" , "directly start CDMA phone setup" , "Allows the application to start CDMA provisioning. Malicious applications may start CDMA provisioning unnecessarily" ],
        "CONTROL_LOCATION_UPDATES" : [ "signatureOrSystem" , "control location update notifications" , "Allows enabling/disabling location update notifications from the radio. Not for use by normal applications." ],
        "ACCESS_CHECKIN_PROPERTIES" : [ "signatureOrSystem" , "access check-in properties" , "Allows read/write access to properties uploaded by the check-in service. Not for use by normal applications." ],
        "PACKAGE_USAGE_STATS" : [ "signature" , "update component usage statistics" , "Allows the modification of collected component usage statistics. Not for use by normal applications." ],
        "BATTERY_STATS" : [ "normal" , "modify battery statistics" , "Allows the modification of collected battery statistics. Not for use by normal applications." ],
        "BACKUP" : [ "signatureOrSystem" , "control system back up and restore" , "Allows the application to control the system\'s back-up and restore mechanism. Not for use by normal applications." ],
        "BIND_APPWIDGET" : [ "signatureOrSystem" , "choose widgets" , "Allows the application to tell the system which widgets can be used by which application. With this permission, applications can give access to personal data to other applications. Not for use by normal applications." ],
        "CHANGE_BACKGROUND_DATA_SETTING" : [ "signature" , "change background data usage setting" , "Allows an application to change the background data usage setting." ],
        "GLOBAL_SEARCH" : [ "signatureOrSystem" , "" , "" ],
        "GLOBAL_SEARCH_CONTROL" : [ "signature" , "" , "" ],
        "SET_WALLPAPER_COMPONENT" : [ "signatureOrSystem" , "" , "" ],
        "ACCESS_CACHE_FILESYSTEM" : [ "signatureOrSystem" , "access the cache file system" , "Allows an application to read and write the cache file system." ],
        "COPY_PROTECTED_DATA" : [ "signature" , "Allows to invoke default container service to copy content. Not for use by normal applications." , "Allows to invoke default container service to copy content. Not for use by normal applications." ],
        "C2D_MESSAGE" : [ "signature" , "" , "" ],
        "ADD_VOICEMAIL" : [ "dangerous", "add voicemails into the system", "Allows an application to add voicemails into the system." ],
    },
    "MANIFEST_PERMISSION_GROUP" :
    {
        "ACCOUNTS" : "Permissions for direct access to the accounts managed by the Account Manager.",
        "COST_MONEY" : "Used for permissions that can be used to make the user spend money without their direct involvement.",
        "DEVELOPMENT_TOOLS" : "Group of permissions that are related to development features.",
        "HARDWARE_CONTROLS" : "Used for permissions that provide direct access to the hardware on the device.",
        "LOCATION" : "Used for permissions that allow access to the user's current location.",
        "MESSAGES" : "Used for permissions that allow an application to send messages on behalf of the user or intercept messages being received by the user.",
        "NETWORK" : "Used for permissions that provide access to networking services.",
        "PERSONAL_INFO" : "Used for permissions that provide access to the user's private data, such as contacts, calendar events, e-mail messages, etc.",
        # Typo fix: "modifyign" -> "modifying".
        "PHONE_CALLS" : "Used for permissions that are associated with accessing and modifying telephony state: intercepting outgoing calls, reading and modifying the phone state.",
        "STORAGE" : "Group of permissions that are related to SD card access.",
        "SYSTEM_TOOLS" : "Group of permissions that are related to system APIs.",
    },
}
|
steventimberman/masterDebater
|
refs/heads/master
|
env/lib/python2.7/site-packages/django/conf/locale/km/__init__.py
|
12133432
| |
ppinard/matplotlib-scalebar
|
refs/heads/master
|
doc/example_angular.py
|
1
|
# Demo: one image rendered three times with angular scale bars at degree,
# arcminute and arcsecond pixel scales.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
from matplotlib_scalebar.scalebar import ScaleBar, ANGULAR
# NOTE(review): cbook is imported but never used in this example.
# Grid spacing — interpreted as degrees-per-pixel in the first panel.
delta = 0.025
x = y = np.arange(-3.0, 3.0, delta)
X, Y = np.meshgrid(x, y)
# Difference of two Gaussian bumps gives the image some visible structure.
Z1 = np.exp(-(X ** 2) - Y ** 2)
Z2 = np.exp(-((X - 1) ** 2) - (Y - 1) ** 2)
Z = (Z1 - Z2) * 2
fig, axes = plt.subplots(1, 3, figsize=(9, 3))
# Same data, with dx scaled by 60 (arcmin) and 3600 (arcsec) per panel.
for ax, dx in zip(axes, [delta, delta / 60, delta / 3600]):
    ax.imshow(Z)
    scalebar = ScaleBar(dx, "deg", ANGULAR)
    ax.add_artist(scalebar)
    ax.set_title("dx = {:.6f}deg".format(dx))
fig.savefig("example_angular.png")
|
JarbasAI/JarbasAI
|
refs/heads/patch-15
|
jarbas_utils/RBM_Sampling.py
|
1
|
from __future__ import division
import argparse
import pickle
import numpy as np
import enum
import random
from jarbas_utils import RBM_Utils as Utils
MAX_PROG_SAMPLE_INTERVAL = 10000
# If true, then subtract a constant between iterations when annealing, rather than dividing by a constant.
# Literature seems divided on the best way to do this? Anecdotally, seem to get better results with exp
# decay most of the time, but haven't looked very carefully.
LINEAR_ANNEAL = 0
BIG_NUMBER = 3.0
class shrink_model(object):
    """Context manager that temporarily pins sampled string lengths.

    While active, the visible bias of the padding character is pushed down
    by BIG_NUMBER for positions < min_length (padding discouraged there) and
    pushed up for positions >= max_length (padding encouraged), so sampled
    strings tend to fall in [min_length, max_length]. The original biases
    are restored exactly on exit.
    """
    def __init__(self, model, min_length, max_length):
        # Both bounds are in characters and limited by the codec's maxlen.
        assert 1 <= max_length <= model.codec.maxlen
        assert 0 <= min_length <= model.codec.maxlen
        self.model = model
        self.min_length = min_length
        self.max_length = max_length
    def __enter__(self):
        codec = self.model.codec
        model = self.model
        padidx = codec.char_lookup[codec.filler]
        # Save the padding bias at every position so __exit__ can restore the
        # exact values (the += nudges below are not safely reversible if the
        # manager were entered twice).
        self.prev_biases = [
            model.intercept_visible_[codec.nchars * posn + padidx] for posn in
            range(codec.maxlen)]
        # Force padding character off for all indices up to min length
        for posn in range(self.min_length):
            model.intercept_visible_[
                codec.nchars * posn + padidx] += -1 * BIG_NUMBER
        # Force padding character *on* for indices past max length
        for posn in range(self.max_length, codec.maxlen):
            model.intercept_visible_[codec.nchars * posn + padidx] += BIG_NUMBER
    def __exit__(self, *args):
        # Restore the saved biases; exceptions are not suppressed.
        padidx = self.model.codec.char_lookup[self.model.codec.filler]
        for posn, bias in enumerate(self.prev_biases):
            self.model.intercept_visible_[
                self.model.codec.nchars * posn + padidx] = bias
class VisInit(enum.Enum):
    """Strategies for seeding the visible units before Gibbs sampling."""
    zeros = 1          # everything off; basically defers to the *hidden* biases
    biases = 2         # sample one-hots from a softmax over the visible biases
    uniform = 3        # each unit (not each one-hot vector) on with p=.5
    spaces = 4         # every position set to the space character
    padding = 7        # filler char everywhere (old models use ' ' as filler,
                       # which makes this identical to `spaces`)
    train = 5          # copy actual training examples
    chunks = 6         # random chars up to a random length, rest filled with padding
    silhouettes = 8    # training examples with non-space/padding chars mutated,
                       # preserving only the "shape"
    uniform_chars = 9  # valid one-hot vectors, each chosen uniformly at random
class BadInitMethodException(Exception):
    """Raised when a visible-init method cannot be applied to this model."""
def starting_visible_configs(init_method, n, model,
                             training_examples_fname=None):
    """Return an ndarray of n visible configurations for the given model
    according to the specified init method (which should be a member of the VisInit enum)

    The result always has shape (n, n_visible_units); one-hot structure per
    character position is only guaranteed for the methods that build it
    (biases, spaces/padding, train/silhouettes, chunks/uniform_chars).

    Raises BadInitMethodException if the fill character for spaces/padding
    is not in the model's alphabet, and ValueError for an unknown method.
    """
    vis_shape = (n, model.intercept_visible_.shape[0])
    maxlen, nchars = model.codec.maxlen, model.codec.nchars
    if init_method == VisInit.biases:
        # Treat the visible biases as (unnormalized) softmax logits per
        # position and sample a one-hot from each.
        sm = np.tile(model.intercept_visible_, [n, 1]).reshape(
            (-1,) + model.codec.shape())
        return Utils.softmax_and_sample(sm).reshape(vis_shape)
    elif init_method == VisInit.zeros:
        return np.zeros(vis_shape)
    elif init_method == VisInit.uniform:
        # Bernoulli(.5) per unit — deliberately NOT valid one-hot vectors.
        return np.random.randint(0, 2, vis_shape)
    # This will fail if ' ' isn't in the alphabet of this model
    elif init_method == VisInit.spaces or init_method == VisInit.padding:
        fillchar = {VisInit.spaces: ' ', VisInit.padding: model.codec.filler}[
            init_method]
        vis = np.zeros((n,) + model.codec.shape())
        try:
            fill = model.codec.char_lookup[fillchar]
        except KeyError:
            raise BadInitMethodException(fillchar + " is not in model alphabet")
        vis[:, :, fill] = 1
        return vis.reshape(vis_shape)
    elif init_method == VisInit.train or init_method == VisInit.silhouettes:
        assert training_examples_fname is not None, "No training examples provided to initialize with"
        # silhouettes additionally mutates non-space/padding chars so only
        # the word "shape" of each example survives.
        mutagen = model.codec.mutagen_silhouettes if init_method == VisInit.silhouettes else None
        examples = Utils.vectors_from_txtfile(training_examples_fname,
                                              model.codec, limit=n,
                                              mutagen=mutagen)
        return examples
    elif init_method == VisInit.chunks or init_method == VisInit.uniform_chars:
        # This works, but probably isn't idiomatic numpy.
        # I don't think I'll ever write idiomatic numpy.
        # Start w uniform dist
        char_indices = np.random.randint(0, nchars, (n, maxlen))
        if init_method == VisInit.chunks:
            # Choose some random lengths
            # (normal around .66*maxlen, clipped to [1, maxlen]); positions
            # past each row's length are overwritten with the filler char.
            lengths = np.clip(
                maxlen * .25 * np.random.randn(n) + (maxlen * .66), 1, maxlen
            ).astype('int8').reshape(n, 1)
            _, i = np.indices((n, maxlen))
            char_indices[i >= lengths] = model.codec.char_lookup[
                model.codec.filler]
        # TODO: This is a useful little trick. Make it a helper function and reuse it elsewhere?
        # np.eye(nchars)[idx] turns an index array into stacked one-hot rows.
        return np.eye(nchars)[char_indices.ravel()].reshape(vis_shape)
    else:
        raise ValueError("Unrecognized init method: {}".format(init_method))
def print_sample_callback(sample_strings, i, energy=None, logger=None):
    """Format one batch of decoded samples, optionally log it, and return it.

    :param sample_strings: decoded sample strings for this iteration
    :param i: iteration index (unused here, kept for the callback contract)
    :param energy: optional per-sample free energies, parallel to
        sample_strings; when given, each sample is annotated "<s> \\t <e:.2f>"
    :param logger: optional logger; when given the text goes to logger.debug
    :return: the formatted text
    """
    # BUG FIX: the condition was inverted ("if energy is None"), which zipped
    # with a None energy (TypeError) and silently dropped real energies.
    if energy is not None:
        text = "".join('{} \t {:.2f}'.format(t[0], t[1]) for t in
                       zip(sample_strings, energy))
    else:
        text = "".join(sample_strings)
    if logger is None:
        pass
        #print "\n" + text
    else:
        logger.debug(text)
    return text
@Utils.timeit
def sample_model(model, n, iters, sample_iter_indices,
start_temp=1.0, final_temp=1.0,
callback=print_sample_callback, init_method=VisInit.biases,
training_examples=None,
sample_energy=False, starting_vis=None, min_length=0,
max_length=0,
):
if callback is None:
callback = lambda: None
if starting_vis is not None:
vis = starting_vis
else:
vis = starting_visible_configs(init_method, n, model, training_examples)
args = [model, vis, iters, sample_iter_indices, start_temp, final_temp,
callback, sample_energy]
if min_length or max_length:
if max_length == 0:
max_length = model.codec.maxlen
with shrink_model(model, min_length, max_length):
return _sample_model(*args)
else:
return _sample_model(*args)
def _sample_model(model, vis, iters, sample_iter_indices, start_temp,
                  final_temp, callback,
                  sample_energy):
    """Run up to `iters` Gibbs steps on `vis`, annealing temperature from
    start_temp toward final_temp, invoking `callback` at each iteration
    index listed in `sample_iter_indices` (assumed sorted ascending).

    Returns (final visible configuration, last batch of decoded strings).
    NOTE(review): if sample_iter_indices is empty, or none of its entries
    is reached within `iters`, samples[-1] raises IndexError — confirm
    callers always pass at least one reachable index.
    """
    temp = start_temp
    # 1 / iters is true division thanks to the module's
    # `from __future__ import division`; this is the per-step multiplicative
    # decay that reaches final_temp after `iters` steps.
    temp_decay = (final_temp / start_temp) ** (1 / iters)
    temp_delta = (final_temp - start_temp) / iters
    next_sample_metaindex = 0
    samples = []
    for i in range(iters):
        if i == sample_iter_indices[next_sample_metaindex]:
            # Time to take samples
            sample_strings = [model.codec.decode(v, pretty=True, strict=False)
                              for v in vis]
            if sample_energy:
                energy = model._free_energy(vis)
                sample = callback(sample_strings, i, energy)
            else:
                sample = callback(sample_strings, i)
            # NOTE(review): `sample` (the callback's return value) is unused.
            samples.append(sample_strings)
            next_sample_metaindex += 1
            if next_sample_metaindex == len(sample_iter_indices):
                # All requested sample points have been hit; stop early.
                break
        vis = model.gibbs(vis, temp)
        # Anneal: additive when LINEAR_ANNEAL is set, exponential otherwise.
        if LINEAR_ANNEAL:
            temp += temp_delta
        else:
            temp *= temp_decay
    return vis, samples[-1]
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Sample short texts from a pickled model',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('model_fname', metavar='model.pickle', nargs='+',
help='One or more pickled RBM models')
parser.add_argument('-n', '--n-samples', dest='n_samples', type=int,
default=30,
help='How many samples to draw')
parser.add_argument('-i', '--iters', dest='iters', type=int,
default=10 ** 4,
help='How many rounds of Gibbs sampling to perform before generating the outputs')
parser.add_argument('--prog', '--progressively-sample', dest='prog',
action='store_true',
help='Output n samples after 0 rounds of sampling, then 1, 10, 100, 1000... until we reach a power of 10 >=iters')
parser.add_argument('--init', '--init-method', dest='init_method',
default='silhouettes',
help="How to initialize vectors before sampling")
parser.add_argument('--energy', action='store_true',
help='Along with each sample generated, print its free energy')
parser.add_argument('--every', type=int, default=None,
help='Sample once every this many iters. Incompatible with --prog and --table.')
args = parser.parse_args()
args.init_method = VisInit[args.init_method]
for model_fname in args.model_fname:
print "Drawing samples from model defined at {}".format(model_fname)
f = open(model_fname)
model = pickle.load(f)
f.close()
# TODO: add as arg
if 'usgeo' in model_fname:
example_file = 'data/usgeo.txt'
elif 'reponames' in model_fname:
example_file = 'data/reponames.txt'
elif 'names' in model_fname:
example_file = 'data/names2.txt'
|
JAOSP/aosp_platform_external_chromium_org
|
refs/heads/master
|
tools/telemetry/telemetry/core/util.py
|
23
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import inspect
import os
import socket
import sys
import time
class TimeoutException(Exception):
  """Raised by WaitFor when its condition does not hold within the timeout."""
def GetBaseDir():
  """Return the directory of the entry-point script, or cwd if interactive."""
  entry = sys.modules['__main__']
  if not hasattr(entry, '__file__'):
    # Interactive interpreter / embedded use: fall back to the working dir.
    return os.getcwd()
  return os.path.dirname(os.path.abspath(entry.__file__))
def GetTelemetryDir():
  """Return the telemetry root: three levels up from this module's path."""
  root = os.path.join(__file__, os.pardir, os.pardir, os.pardir)
  return os.path.normpath(root)
def GetUnittestDataDir():
  """Return the path of telemetry's unittest_data directory."""
  data_dir = os.path.join(GetTelemetryDir(), 'unittest_data')
  return data_dir
def GetChromiumSrcDir():
  """Return the chromium src/ directory: two levels above the telemetry root."""
  telemetry_dir = GetTelemetryDir()
  return os.path.normpath(os.path.join(telemetry_dir, os.pardir, os.pardir))
def WaitFor(condition,
            timeout, poll_interval=0.1,
            pass_time_left_to_func=False):
  """Poll *condition* until it returns truthy or *timeout* seconds elapse.

  When pass_time_left_to_func is set, the condition receives the number of
  seconds remaining (clamped at 0). Raises TimeoutException on expiry, naming
  the condition (a lambda's source text when it can be recovered).
  """
  assert isinstance(condition, type(lambda: None))  # must be a plain function
  started = time.time()
  while True:
    if pass_time_left_to_func:
      remaining = max((started + timeout) - time.time(), 0.0)
      outcome = condition(remaining)
    else:
      outcome = condition()
    if outcome:
      return
    if time.time() - started > timeout:
      # Prefer the lambda's source text in the message when available.
      condition_string = condition.__name__
      if condition.__name__ == '<lambda>':
        try:
          condition_string = inspect.getsource(condition).strip()
        except IOError:
          pass
      raise TimeoutException('Timed out while waiting %ds for %s.' %
                             (timeout, condition_string))
    time.sleep(poll_interval)
def FindElementAndPerformAction(tab, text, callback_code):
  """JavaScript snippet for finding an element with a given text on a page.

  Walks the DOM depth-first for the first element whose innerHTML equals
  *text* and passes it (or null) to the caller-supplied JS function in
  *callback_code*; returns whatever tab.EvaluateJavaScript yields.
  NOTE(review): the bare `callback_function` expression inside _findElement
  is a no-op statement — it looks like a leftover/bug; the real invocation
  happens at the bottom with the found element.
  """
  code = """
      (function() {
        var callback_function = """ + callback_code + """;
        function _findElement(element, text) {
          if (element.innerHTML == text) {
            callback_function
            return element;
          }
          for (var i in element.childNodes) {
            var found = _findElement(element.childNodes[i], text);
            if (found)
              return found;
          }
          return null;
        }
        var _element = _findElement(document, \"""" + text + """\");
        return callback_function(_element);
      })();"""
  return tab.EvaluateJavaScript(code)
class PortPair(object):
  """Associates a local port with the remote port it maps to."""

  def __init__(self, local_port, remote_port):
    # Plain attributes; callers read them directly.
    self.local_port = local_port
    self.remote_port = remote_port
def GetAvailableLocalPort():
  """Return a TCP port number that is currently available on this host.

  Binds a throwaway socket to port 0 so the OS picks a free ephemeral port,
  reads the chosen port back, and releases the socket. Note the port is only
  *likely* free: another process may claim it before the caller binds it.
  """
  tmp = socket.socket()
  try:
    tmp.bind(('', 0))
    return tmp.getsockname()[1]
  finally:
    # Always release the socket — the original leaked it if bind() raised.
    tmp.close()
def CloseConnections(tab):
  """Closes all TCP sockets held open by the browser."""
  # Best effort: the JS itself guards on chrome.benchmarking existing, and
  # any failure (e.g. the tab is already gone) is deliberately swallowed.
  try:
    tab.ExecuteJavaScript("""window.chrome && chrome.benchmarking &&
                             chrome.benchmarking.closeConnections()""")
  except Exception:
    pass
def GetBuildDirectories():
  """Yields all combination of Chromium build output directories."""
  out_dirs = ('build', 'out', 'sconsbuild', 'xcodebuild')
  configs = ('Debug', 'Debug_x64', 'Release', 'Release_x64')
  for out_dir in out_dirs:
    for config in configs:
      yield out_dir, config
|
seann1/portfolio5
|
refs/heads/master
|
.meteor/dev_bundle/python/Lib/test/test_compare.py
|
195
|
import unittest
from test import test_support
class Empty:
    """Comparison fixture: has a repr but no comparison support of its own."""
    def __repr__(self):
        return '<Empty>'
class Coerce:
    """Wrapper exercising the (Python 2) __coerce__ numeric protocol."""
    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return '<Coerce %s>' % self.arg

    def __coerce__(self, other):
        # Unwrap a peer Coerce so both operands coerce to their raw values.
        if isinstance(other, Coerce):
            return self.arg, other.arg
        return self.arg, other
class Cmp:
    """Wrapper exercising the (Python 2) three-way __cmp__ protocol."""
    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return '<Cmp %s>' % self.arg

    def __cmp__(self, other):
        # Delegates to the Python 2 builtin cmp(); py2-only when invoked.
        return cmp(self.arg, other)
class ComparisonTest(unittest.TestCase):
    # set1: values all numerically equal to 2 under Python 2 coercion rules
    # (2L is a Python 2 long literal, so this file is Python 2 only).
    set1 = [2, 2.0, 2L, 2+0j, Coerce(2), Cmp(2.0)]
    # set2: assorted values of different types, mutually unequal.
    set2 = [[1], (3,), None, Empty()]
    candidates = set1 + set2
    def test_comparisons(self):
        # Any pair drawn from set1 must compare equal; every other distinct
        # pair must compare unequal (same object is trivially equal).
        for a in self.candidates:
            for b in self.candidates:
                if ((a in self.set1) and (b in self.set1)) or a is b:
                    self.assertEqual(a, b)
                else:
                    self.assertNotEqual(a, b)
    def test_id_comparisons(self):
        # Ensure default comparison compares id() of args
        L = []
        for i in range(10):
            L.insert(len(L)//2, Empty())
        for a in L:
            for b in L:
                self.assertEqual(cmp(a, b), cmp(id(a), id(b)),
                                 'a=%r, b=%r' % (a, b))
def test_main():
    # Entry point used by Python 2's regrtest-style test runner.
    test_support.run_unittest(ComparisonTest)
if __name__ == '__main__':
    test_main()
|
Fendoe/open-hackathon
|
refs/heads/master
|
open-hackathon-server/src/hackathon/health/__init__.py
|
6
|
# -*- coding: utf-8 -*-
#
# -----------------------------------------------------------------------------------
# Copyright (c) Microsoft Open Technologies (Shanghai) Co. Ltd. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# -----------------------------------------------------------------------------------
import sys
sys.path.append("..")
from hackathon.util import get_now
from hackathon import RequiredFeature
from hackathon.constants import HEALTH_STATUS
__all__ = ["report_health"]
# the time when application starts
app_start_time = get_now()
STATUS = "status"
# all available health check items
all_health_items = {
"docker": RequiredFeature("health_check_hosted_docker"),
"alauda": RequiredFeature("health_check_alauda_docker"),
"guacamole": RequiredFeature("health_check_guacamole"),
"azure": RequiredFeature("health_check_azure"),
"storage": RequiredFeature("storage"),
"mongodb": RequiredFeature("health_check_mongodb")
}
# basic health check items which are fundamental for OHP
basic_health_items = {
"mongodb": RequiredFeature("health_check_mongodb"),
"guacamole": RequiredFeature("health_check_guacamole"),
"storage": RequiredFeature("storage")
}
def __report_detail(health, items):
    """Report the details of health check item

    Mutates *health* in place: adds one sub-report per item, and downgrades
    the overall STATUS to the sub-report's status unless it is already ERROR
    (ERROR is sticky and never overwritten by a milder status).

    :type health: dict
    :param health: the overall health status

    :type items: dict
    :param items: a dict that contains all detail items to check

    :rtype dict
    :return health status including overall status and details of sub items
    """
    # NOTE(review): dict.iteritems() makes this module Python 2 only.
    for key, value in items.iteritems():
        sub_report = value.report_health()
        health[key] = sub_report
        if sub_report[STATUS] != HEALTH_STATUS.OK and health[STATUS] != HEALTH_STATUS.ERROR:
            health[STATUS] = sub_report[STATUS]
    return health
def report_health(q):
    """Report health status of open hackathon server

    :type q: str|unicode
    :param q: the report type. Can be 'all' or None or a key of health item

    :rtype dict
    :return health status including overall status and details of sub items
    """
    # Default to the fundamental checks; widen to all items for "all",
    # or narrow to a single named item when q matches one.
    items = basic_health_items
    if q == "all":
        items = all_health_items
    elif q in all_health_items:  # membership test directly on the dict
        items = {
            q: all_health_items[q]
        }
    # Uptime split into whole days / hours / minutes for display. Floor
    # division keeps the arithmetic integral under both Python 2 and 3
    # (the old "/ 3600" form drifted to floats under true division).
    up = get_now() - app_start_time
    days, hours, minutes = up.days, up.seconds // 3600, up.seconds % 3600 // 60
    health = {
        STATUS: HEALTH_STATUS.OK,
        "start_time": str(app_start_time),
        "report_time": str(get_now()),
        "up": "%d days %d hours %d minutes" % (days, hours, minutes)
    }
    return __report_detail(health, items)
|
nvoron23/arangodb
|
refs/heads/devel
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/socket.py
|
49
|
# Wrapper module for _socket, providing some additional facilities
# implemented in Python.
"""\
This module provides socket operations and some related functions.
On Unix, it supports IP (Internet Protocol) and Unix domain sockets.
On other systems, it only supports IP. Functions specific for a
socket are available as methods of the socket object.
Functions:
socket() -- create a new socket object
socketpair() -- create a pair of new socket objects [*]
fromfd() -- create a socket object from an open file descriptor [*]
gethostname() -- return the current hostname
gethostbyname() -- map a hostname to its IP number
gethostbyaddr() -- map an IP number or hostname to DNS info
getservbyname() -- map a service name and a protocol name to a port number
getprotobyname() -- map a protocol name (e.g. 'tcp') to a number
ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order
htons(), htonl() -- convert 16, 32 bit int from host to network byte order
inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format
inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)
ssl() -- secure socket layer support (only available if configured)
socket.getdefaulttimeout() -- get the default timeout value
socket.setdefaulttimeout() -- set the default timeout value
create_connection() -- connects to an address, with an optional timeout
[*] not available on all platforms!
Special objects:
SocketType -- type object for socket objects
error -- exception raised for I/O errors
has_ipv6 -- boolean value indicating if IPv6 is supported
Integer constants:
AF_INET, AF_UNIX -- socket domains (first argument to socket() call)
SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)
Many other constants may be defined; these may be used in calls to
the setsockopt() and getsockopt() methods.
"""
import _socket
from _socket import *
try:
import _ssl
except ImportError:
# no SSL support
pass
else:
def ssl(sock, keyfile=None, certfile=None):
# we do an internal import here because the ssl
# module imports the socket module
import ssl as _realssl
warnings.warn("socket.ssl() is deprecated. Use ssl.wrap_socket() instead.",
DeprecationWarning, stacklevel=2)
return _realssl.sslwrap_simple(sock, keyfile, certfile)
# we need to import the same constants we used to...
from _ssl import SSLError as sslerror
from _ssl import \
RAND_add, \
RAND_egd, \
RAND_status, \
SSL_ERROR_ZERO_RETURN, \
SSL_ERROR_WANT_READ, \
SSL_ERROR_WANT_WRITE, \
SSL_ERROR_WANT_X509_LOOKUP, \
SSL_ERROR_SYSCALL, \
SSL_ERROR_SSL, \
SSL_ERROR_WANT_CONNECT, \
SSL_ERROR_EOF, \
SSL_ERROR_INVALID_ERROR_CODE
import os, sys, warnings
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
from errno import EBADF
except ImportError:
EBADF = 9
__all__ = ["getfqdn", "create_connection"]
__all__.extend(os._get_exports_list(_socket))
_realsocket = socket
# WSA error codes
if sys.platform.lower().startswith("win"):
errorTab = {}
errorTab[10004] = "The operation was interrupted."
errorTab[10009] = "A bad file handle was passed."
errorTab[10013] = "Permission denied."
errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT
errorTab[10022] = "An invalid operation was attempted."
errorTab[10035] = "The socket operation would block"
errorTab[10036] = "A blocking operation is already in progress."
errorTab[10048] = "The network address is in use."
errorTab[10054] = "The connection has been reset."
errorTab[10058] = "The network has been shut down."
errorTab[10060] = "The operation timed out."
errorTab[10061] = "Connection refused."
errorTab[10063] = "The name is too long."
errorTab[10064] = "The host is down."
errorTab[10065] = "The host is unreachable."
__all__.append("errorTab")
def getfqdn(name=''):
    """Get fully qualified domain name from name.

    An empty argument is interpreted as meaning the local host.

    First the hostname returned by gethostbyaddr() is checked, then
    possibly existing aliases. In case no FQDN is available, hostname
    from gethostname() is returned.
    """
    name = name.strip()
    if not name or name == '0.0.0.0':
        # Empty / wildcard input means "this machine".
        name = gethostname()
    try:
        hostname, aliases, _ipaddrs = gethostbyaddr(name)
    except error:
        # Resolution failed — hand back whatever we were going to look up.
        return name
    # Prefer the first dotted name among the canonical name and its aliases;
    # otherwise fall back to the canonical hostname.
    for candidate in [hostname] + aliases:
        if '.' in candidate:
            return candidate
    return hostname
_socketmethods = (
'bind', 'connect', 'connect_ex', 'fileno', 'listen',
'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
'sendall', 'setblocking',
'settimeout', 'gettimeout', 'shutdown')
if os.name == "nt":
_socketmethods = _socketmethods + ('ioctl',)
if sys.platform == "riscos":
_socketmethods = _socketmethods + ('sleeptaskw',)
# All the method names that must be delegated to either the real socket
# object or the _closedsocket object.
_delegate_methods = ("recv", "recvfrom", "recv_into", "recvfrom_into",
"send", "sendto")
class _closedsocket(object):
    """Placeholder swapped in by _socketobject.close().

    Every operation — including unknown attributes, via __getattr__ —
    raises EBADF, mimicking use of a closed file descriptor.
    """
    __slots__ = []
    def _dummy(*args):
        # Takes *args so it can stand in for any method signature.
        raise error(EBADF, 'Bad file descriptor')
    # All _delegate_methods must also be initialized here.
    send = recv = recv_into = sendto = recvfrom = recvfrom_into = _dummy
    __getattr__ = _dummy
# Wrapper around platform socket objects. This implements
# a platform-independent dup() functionality. The
# implementation currently relies on reference counting
# to close the underlying socket object.
class _socketobject(object):
    # The class docstring is taken from the platform socket type via the
    # assignment below; don't add a literal docstring, it would be clobbered.
    __doc__ = _realsocket.__doc__
    __slots__ = ["_sock", "__weakref__"] + list(_delegate_methods)
    def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
        # _sock lets accept()/dup() wrap an already-existing platform socket.
        if _sock is None:
            _sock = _realsocket(family, type, proto)
        self._sock = _sock
        # Bind the hot recv/send methods straight onto the instance so each
        # delegated call costs a single attribute lookup.
        for method in _delegate_methods:
            setattr(self, method, getattr(_sock, method))
    def close(self):
        # Swap in _closedsocket so all later calls raise EBADF; the real
        # socket is closed by reference counting once nothing references it.
        self._sock = _closedsocket()
        dummy = self._sock._dummy
        for method in _delegate_methods:
            setattr(self, method, dummy)
    close.__doc__ = _realsocket.close.__doc__
    def accept(self):
        # Wrap the accepted platform socket in another _socketobject.
        sock, addr = self._sock.accept()
        return _socketobject(_sock=sock), addr
    accept.__doc__ = _realsocket.accept.__doc__
    def dup(self):
        """dup() -> socket object
        Return a new socket object connected to the same system resource."""
        return _socketobject(_sock=self._sock)
    def makefile(self, mode='r', bufsize=-1):
        """makefile([mode[, bufsize]]) -> file object
        Return a regular file object corresponding to the socket. The mode
        and bufsize arguments are as for the built-in open() function."""
        return _fileobject(self._sock, mode, bufsize)
    family = property(lambda self: self._sock.family, doc="the socket family")
    type = property(lambda self: self._sock.type, doc="the socket type")
    proto = property(lambda self: self._sock.proto, doc="the socket protocol")
_s = ("def %s(self, *args): return self._sock.%s(*args)\n\n"
"%s.__doc__ = _realsocket.%s.__doc__\n")
for _m in _socketmethods:
exec _s % (_m, _m, _m, _m)
del _m, _s
socket = SocketType = _socketobject
class _fileobject(object):
    """Faux file object attached to a socket object."""
    default_bufsize = 8192
    name = "<socket>"
    __slots__ = ["mode", "bufsize", "softspace",
                 # "closed" is a property, see below
                 "_sock", "_rbufsize", "_wbufsize", "_rbuf", "_wbuf",
                 "_close"]
    def __init__(self, sock, mode='rb', bufsize=-1, close=False):
        # close=True transfers ownership: closing this object closes sock too.
        self._sock = sock
        self.mode = mode # Not actually used in this version
        if bufsize < 0:
            bufsize = self.default_bufsize
        self.bufsize = bufsize
        self.softspace = False
        # _rbufsize is the suggested recv buffer size. It is *strictly*
        # obeyed within readline() for recv calls. If it is larger than
        # default_bufsize it will be used for recv calls within read().
        if bufsize == 0:
            self._rbufsize = 1
        elif bufsize == 1:
            self._rbufsize = self.default_bufsize
        else:
            self._rbufsize = bufsize
        self._wbufsize = bufsize
        # We use StringIO for the read buffer to avoid holding a list
        # of variously sized string objects which have been known to
        # fragment the heap due to how they are malloc()ed and often
        # realloc()ed down much smaller than their original allocation.
        self._rbuf = StringIO()
        self._wbuf = [] # A list of strings
        self._close = close
    def _getclosed(self):
        # Closed once close() has dropped the socket reference.
        return self._sock is None
    closed = property(_getclosed, doc="True if the file is closed")
    def close(self):
        """Flush pending writes and detach (optionally close) the socket."""
        try:
            if self._sock:
                self.flush()
        finally:
            if self._close:
                self._sock.close()
            self._sock = None
    def __del__(self):
        try:
            self.close()
        except:
            # close() may fail if __init__ didn't complete
            pass
    def flush(self):
        """Send all buffered output in a single sendall() call."""
        if self._wbuf:
            buffer = "".join(self._wbuf)
            self._wbuf = []
            self._sock.sendall(buffer)
    def fileno(self):
        """Return the underlying socket's file descriptor."""
        return self._sock.fileno()
    def write(self, data):
        """Buffer *data*, flushing when the write-buffer policy says so."""
        data = str(data) # XXX Should really reject non-string non-buffers
        if not data:
            return
        self._wbuf.append(data)
        # Flush when unbuffered (0), line-buffered (1) and a newline arrived,
        # or the buffer has reached its configured size.
        if (self._wbufsize == 0 or
            self._wbufsize == 1 and '\n' in data or
            self._get_wbuf_len() >= self._wbufsize):
            self.flush()
    def writelines(self, list):
        """Buffer every (stringified, non-empty) item, then maybe flush."""
        # XXX We could do better here for very long lists
        # XXX Should really reject non-string non-buffers
        self._wbuf.extend(filter(None, map(str, list)))
        if (self._wbufsize <= 1 or
            self._get_wbuf_len() >= self._wbufsize):
            self.flush()
    def _get_wbuf_len(self):
        # Total number of buffered output bytes.
        buf_len = 0
        for x in self._wbuf:
            buf_len += len(x)
        return buf_len
    def read(self, size=-1):
        """Read up to *size* bytes; read to EOF when size < 0."""
        # Use max, disallow tiny reads in a loop as they are very inefficient.
        # We never leave read() with any leftover data from a new recv() call
        # in our internal buffer.
        rbufsize = max(self._rbufsize, self.default_bufsize)
        # Our use of StringIO rather than lists of string objects returned by
        # recv() minimizes memory usage and fragmentation that occurs when
        # rbufsize is large compared to the typical return value of recv().
        buf = self._rbuf
        buf.seek(0, 2) # seek end
        if size < 0:
            # Read until EOF
            self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
            while True:
                data = self._sock.recv(rbufsize)
                if not data:
                    break
                buf.write(data)
            return buf.getvalue()
        else:
            # Read until size bytes or EOF seen, whichever comes first
            buf_len = buf.tell()
            if buf_len >= size:
                # Already have size bytes in our buffer? Extract and return.
                buf.seek(0)
                rv = buf.read(size)
                self._rbuf = StringIO()
                self._rbuf.write(buf.read())
                return rv
            self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
            while True:
                left = size - buf_len
                # recv() will malloc the amount of memory given as its
                # parameter even though it often returns much less data
                # than that. The returned data string is short lived
                # as we copy it into a StringIO and free it. This avoids
                # fragmentation issues on many platforms.
                data = self._sock.recv(left)
                if not data:
                    break
                n = len(data)
                if n == size and not buf_len:
                    # Shortcut. Avoid buffer data copies when:
                    # - We have no data in our buffer.
                    # AND
                    # - Our call to recv returned exactly the
                    # number of bytes we were asked to read.
                    return data
                if n == left:
                    buf.write(data)
                    del data # explicit free
                    break
                assert n <= left, "recv(%d) returned %d bytes" % (left, n)
                buf.write(data)
                buf_len += n
                del data # explicit free
                #assert buf_len == buf.tell()
            return buf.getvalue()
    def readline(self, size=-1):
        """Read one line, up to *size* bytes when size >= 0."""
        buf = self._rbuf
        buf.seek(0, 2) # seek end
        if buf.tell() > 0:
            # check if we already have it in our buffer
            buf.seek(0)
            bline = buf.readline(size)
            if bline.endswith('\n') or len(bline) == size:
                self._rbuf = StringIO()
                self._rbuf.write(buf.read())
                return bline
            del bline
        if size < 0:
            # Read until \n or EOF, whichever comes first
            if self._rbufsize <= 1:
                # Speed up unbuffered case
                buf.seek(0)
                buffers = [buf.read()]
                self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
                data = None
                recv = self._sock.recv
                while data != "\n":
                    data = recv(1)
                    if not data:
                        break
                    buffers.append(data)
                return "".join(buffers)
            buf.seek(0, 2) # seek end
            self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
            while True:
                data = self._sock.recv(self._rbufsize)
                if not data:
                    break
                nl = data.find('\n')
                if nl >= 0:
                    nl += 1
                    buf.write(data[:nl])
                    # Excess past the newline goes back to the read buffer.
                    self._rbuf.write(data[nl:])
                    del data
                    break
                buf.write(data)
            return buf.getvalue()
        else:
            # Read until size bytes or \n or EOF seen, whichever comes first
            buf.seek(0, 2) # seek end
            buf_len = buf.tell()
            if buf_len >= size:
                buf.seek(0)
                rv = buf.read(size)
                self._rbuf = StringIO()
                self._rbuf.write(buf.read())
                return rv
            self._rbuf = StringIO() # reset _rbuf. we consume it via buf.
            while True:
                data = self._sock.recv(self._rbufsize)
                if not data:
                    break
                left = size - buf_len
                # did we just receive a newline?
                nl = data.find('\n', 0, left)
                if nl >= 0:
                    nl += 1
                    # save the excess data to _rbuf
                    self._rbuf.write(data[nl:])
                    if buf_len:
                        buf.write(data[:nl])
                        break
                    else:
                        # Shortcut. Avoid data copy through buf when returning
                        # a substring of our first recv().
                        return data[:nl]
                n = len(data)
                if n == size and not buf_len:
                    # Shortcut. Avoid data copy through buf when
                    # returning exactly all of our first recv().
                    return data
                if n >= left:
                    buf.write(data[:left])
                    self._rbuf.write(data[left:])
                    break
                buf.write(data)
                buf_len += n
                #assert buf_len == buf.tell()
            return buf.getvalue()
    def readlines(self, sizehint=0):
        """Read lines until EOF, stopping early once *sizehint* bytes read."""
        total = 0
        list = []
        while True:
            line = self.readline()
            if not line:
                break
            list.append(line)
            total += len(line)
            if sizehint and total >= sizehint:
                break
        return list
    # Iterator protocols
    def __iter__(self):
        return self
    def next(self):
        # Python 2 iterator protocol: one line per step, StopIteration at EOF.
        line = self.readline()
        if not line:
            raise StopIteration
        return line
_GLOBAL_DEFAULT_TIMEOUT = object()
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT):
    """Connect to *address* and return the socket object.

    Convenience function. Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object. Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect. If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.

    Tries every address family/sockaddr that getaddrinfo() yields and
    returns the first successful connection; if all attempts fail the
    last error is re-raised (Python 2 ``except ... , msg`` syntax —
    this module is Python 2 only).
    """
    msg = "getaddrinfo returns an empty list"
    host, port = address
    for res in getaddrinfo(host, port, 0, SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket(af, socktype, proto)
            if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            sock.connect(sa)
            return sock
        except error, msg:
            # Close the failed socket and fall through to the next candidate.
            if sock is not None:
                sock.close()
    raise error, msg
|
glennrub/micropython
|
refs/heads/master
|
tests/basics/try_finally_return.py
|
82
|
# Language-conformance tests: `return` inside try/finally must still run the
# finally clause(s), including with (optimised) for loops in between. The
# printed output is the expected-result fixture — do not change behavior.
def func1():
    try:
        return "it worked"
    finally:
        print("finally 1")
print(func1())
def func2():
    try:
        return "it worked"
    finally:
        print("finally 2")
def func3():
    # Calls func2 (whose finally fires first), then returns through its own.
    try:
        s = func2()
        return s + ", did this work?"
    finally:
        print("finally 3")
print(func3())
# for loop within try-finally
def f():
    try:
        for i in [1, 2]:
            return i
    finally:
        print('finally')
print(f())
# multiple for loops within try-finally
def f():
    try:
        for i in [1, 2]:
            for j in [3, 4]:
                return (i, j)
    finally:
        print('finally')
print(f())
# multiple for loops and nested try-finally's
def f():
    try:
        for i in [1, 2]:
            for j in [3, 4]:
                try:
                    for k in [5, 6]:
                        for l in [7, 8]:
                            return (i, j, k, l)
                finally:
                    print('finally 2')
    finally:
        print('finally 1')
print(f())
# multiple for loops that are optimised, and nested try-finally's
def f():
    try:
        for i in range(1, 3):
            for j in range(3, 5):
                try:
                    for k in range(5, 7):
                        for l in range(7, 9):
                            return (i, j, k, l)
                finally:
                    print('finally 2')
    finally:
        print('finally 1')
print(f())
|
philipkershaw/ndg_security_server
|
refs/heads/master
|
ndg/security/server/test/integration/pylonsapp/pylonsapp/config/middleware.py
|
5
|
"""Pylons middleware initialization"""
from beaker.middleware import SessionMiddleware
from paste.cascade import Cascade
from paste.registry import RegistryManager
from paste.urlparser import StaticURLParser
from paste.deploy.converters import asbool
from pylons.middleware import ErrorHandler, StatusCodeRedirect
from pylons.wsgiapp import PylonsApp
from routes.middleware import RoutesMiddleware
from pylonsapp.config.environment import load_environment
def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
    """Assemble and return the Pylons WSGI application.

    ``global_conf``
        Inherited configuration, normally the [DEFAULT] section of the
        Paste ini file.
    ``full_stack``
        When true, install the exception-handling and error-document
        middleware; disable when an outer WSGI stack manages errors.
    ``static_files``
        When true, serve the application's own static files; disable when
        a front-end web server is responsible for them.
    ``app_conf``
        Application-local settings, normally the [app:<name>] ini section
        (<name> defaults to main).
    """
    # Configure the Pylons environment
    config = load_environment(global_conf, app_conf)

    # Core application wrapped inside-out: routing first, then sessions.
    app = PylonsApp(config=config)
    app = RoutesMiddleware(app, config['routes.map'])
    app = SessionMiddleware(app, config)

    # CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)

    if asbool(full_stack):
        # Turn uncaught exceptions into error reports (or the interactive
        # debugger, depending on the errorware configuration).
        app = ErrorHandler(app, global_conf, **config['pylons.errorware'])
        # Pretty error documents; in debug mode only 500s are intercepted.
        if asbool(config['debug']):
            app = StatusCodeRedirect(app)
        else:
            app = StatusCodeRedirect(app, [400, 401, 403, 404, 500])

    # Registry enables paste.registry-backed globals for each request.
    app = RegistryManager(app)

    if asbool(static_files):
        # Try static files first, fall back to the application.
        file_server = StaticURLParser(config['pylons.paths']['static_files'])
        app = Cascade([file_server, app])

    app.config = config
    return app
|
anandology/pyjamas
|
refs/heads/master
|
library/gwt/ui/PopupPanel.py
|
1
|
# Copyright 2006 James Tauber and contributors
# Copyright (C) 2009, 2010 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
# Copyright (C) 2010 Serge Tarkovski <serge.tarkovski@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas import Window
from pyjamas import Factory
from __pyjamas__ import JS, doc
from pyjamas.ui.SimplePanel import SimplePanel
from pyjamas.ui.RootPanel import RootPanel
from pyjamas.ui import MouseListener
from pyjamas.ui import KeyboardListener
class PopupPanel(SimplePanel):
    """A panel that overlays other widgets using absolute positioning.

    Features:
      * modal mode -- while showing, events that do not target the popup
        are swallowed by the event preview
      * autoHide -- a mousedown outside the popup closes it
      * optional "glass" element covering the page behind the popup
    """
    _props = [("modal", "Modal", "Modal", None),
             ]

    def __init__(self, autoHide=False, modal=True, **kwargs):
        """autoHide: hide the popup automatically on an outside mousedown.
        modal: while showing, block events that do not target the popup.
        """
        self.popupListeners = []
        self.showing = False
        self.autoHide = autoHide
        self.glass = None
        kwargs['Modal'] = kwargs.get('Modal', modal)
        if 'Element' in kwargs:
            element = kwargs.pop('Element')
        else:
            element = self.createElement()
        # Absolute positioning so setPopupPosition() works in page space.
        DOM.setStyleAttribute(element, "position", "absolute")
        SimplePanel.__init__(self, element, **kwargs)

    @classmethod
    def _getProps(self):
        return SimplePanel._getProps() + self._props

    def addPopupListener(self, listener):
        """Register a listener: either an object with onPopupClosed(popup,
        autoClosed) or a plain callable taking the same arguments."""
        self.popupListeners.append(listener)

    def getPopupLeft(self):
        return DOM.getIntAttribute(self.getElement(), "offsetLeft")

    def getPopupTop(self):
        return DOM.getIntAttribute(self.getElement(), "offsetTop")

    # PopupImpl.createElement
    def createElement(self):
        return DOM.createDiv()

    def hide(self, autoClosed=False):
        """Detach the popup (and glass) from the page and notify listeners.

        autoClosed tells listeners whether the popup closed itself
        (autoHide) or was closed programmatically.
        """
        if not self.showing:
            return
        self.showing = False
        if self.glass:
            self.hideGlass()
        DOM.removeEventPreview(self)
        self.rootpanel.remove(self)
        self.onHideImpl(self.getElement())
        for listener in self.popupListeners:
            if hasattr(listener, 'onPopupClosed'):
                listener.onPopupClosed(self, autoClosed)
            else:
                listener(self, autoClosed)

    def setModal(self, modal):
        self.modal = modal

    def getModal(self):
        return self.isModal()

    def isModal(self):
        """ deprecated - please use getModal
        """
        return self.modal

    def _event_targets_popup(self, event):
        # True when the event originated inside the popup's own DOM subtree.
        target = DOM.eventGetTarget(event)
        return target and DOM.isOrHasChild(self.getElement(), target)

    def onEventPreview(self, event):
        """Global event filter active while the popup is showing.

        Returning a falsy value cancels the event; modal popups cancel
        anything that does not target the popup itself.
        """
        etype = DOM.eventGetType(event)
        if etype == "keydown":
            return ( self.onKeyDownPreview(
                        DOM.eventGetKeyCode(event),
                        KeyboardListener.getKeyboardModifiers(event)
                     )
                    and (not self.modal or self._event_targets_popup(event))
                   )
        elif etype == "keyup":
            return ( self.onKeyUpPreview(
                        DOM.eventGetKeyCode(event),
                        KeyboardListener.getKeyboardModifiers(event)
                     )
                    and (not self.modal or self._event_targets_popup(event))
                   )
        elif etype == "keypress":
            return ( self.onKeyPressPreview(
                        DOM.eventGetKeyCode(event),
                        KeyboardListener.getKeyboardModifiers(event)
                     )
                    and (not self.modal or self._event_targets_popup(event))
                   )
        elif ( etype == "mousedown"
            or  etype == "blur"
             ):
            if DOM.getCaptureElement() is not None:
                return True
            if self.autoHide and not self._event_targets_popup(event):
                self.hide(True)
                return True
        elif ( etype == "mouseup"
            or  etype == "click"
            or  etype == "mousemove"
            # Bug fix: this previously compared the *builtin* ``type`` to
            # "dblclick" (always False), so double-clicks never matched this
            # branch and fell through to an implicit None.
            or  etype == "dblclick"
             ):
            if DOM.getCaptureElement() is not None:
                return True
            return not self.modal or self._event_targets_popup(event)

    def onKeyDownPreview(self, key, modifiers):
        """Hook: return False to cancel a previewed keydown."""
        return True

    def onKeyPressPreview(self, key, modifiers):
        return True

    def onKeyUpPreview(self, key, modifiers):
        return True

    # PopupImpl.onHide
    def onHideImpl(self, popup):
        pass

    # PopupImpl.onShow
    def onShowImpl(self, popup):
        pass

    def removePopupListener(self, listener):
        self.popupListeners.remove(listener)

    def setPopupPosition(self, left, top):
        """Position the popup in page coordinates.

        left/top may be ints (pixels) or strings: 'NNpx', or 'NN%' meaning
        a percentage of the client width/height.
        """
        if isinstance(left, basestring):
            # Consistency fix: suffix checks are now case-insensitive in
            # both branches (previously 'left' and 'top' differed).
            if left.lower().endswith('%'):
                left = int(left[:-1])
                left = int(left * Window.getClientWidth() / 100)
            elif left.lower().endswith('px'):
                left = int(left[:-2])
        if isinstance(top, basestring):
            if top.lower().endswith('%'):
                top = int(top[:-1])
                top = int(top * Window.getClientHeight() / 100)
            elif top.lower().endswith('px'):
                top = int(top[:-2])
        # Never position past the top/left page edge.
        left = max(left, 0)
        top = max(top, 0)
        # Account for the difference between absolute position and the
        # body's positioning context.
        left -= DOM.getBodyOffsetLeft()
        top -= DOM.getBodyOffsetTop()
        element = self.getElement()
        DOM.setStyleAttribute(element, "left", "%dpx" % left)
        DOM.setStyleAttribute(element, "top", "%dpx" % top)

    def isGlassEnabled(self):
        return self.glass is not None

    def setGlassEnabled(self, enabled):
        if enabled:
            if self.glass is None:
                self.glass = DOM.createDiv()
                self.setGlassStyleName()
        elif self.glass is not None:
            self.hideGlass()

    def getGlassElement(self):
        return self.glass

    def setGlassStyleName(self, style="gwt-PopupPanelGlass"):
        if self.glass:
            DOM.setAttribute(self.glass, "className", style)

    def getGlassStyleName(self):
        # Bug fix: previously called DOM.setAttribute (a write, and with the
        # wrong number of arguments); reading requires DOM.getAttribute.
        if self.glass:
            return DOM.getAttribute(self.glass, "className")

    def setGlassPosition(self):
        # Stretch the glass from the page origin to the bottom/right of the
        # current viewport, accounting for scrolling.
        top = Window.getScrollTop()
        left = Window.getScrollLeft()
        height = Window.getClientHeight()
        width = Window.getClientWidth()
        DOM.setStyleAttribute(self.glass, "position", "absolute")
        DOM.setStyleAttribute(self.glass, "left", "%s" % \
                              left if left == 0 else "%spx" % left)
        DOM.setStyleAttribute(self.glass, "top", "%s" % \
                              top if top == 0 else "%spx" % top)
        DOM.setStyleAttribute(self.glass, "height", "%spx" % (top + height))
        DOM.setStyleAttribute(self.glass, "width", "%spx" % (left + width))

    def showGlass(self):
        Window.enableScrolling(False)
        self.setGlassPosition()
        doc().body.appendChild(self.glass)
        # Track window resizes so the glass keeps covering the viewport.
        Window.addWindowResizeListener(self)

    def hideGlass(self):
        Window.removeWindowResizeListener(self)
        doc().body.removeChild(self.glass)
        Window.enableScrolling(True)

    def onWindowResized(self, width, height):
        self.setGlassPosition()

    def centerBox(self):
        """Position the popup at the center of the client area."""
        self_width = self.getOffsetWidth()
        self_height = self.getOffsetHeight()
        height = Window.getClientHeight()
        width = Window.getClientWidth()
        center_x = int(width) / 2
        center_y = int(height) / 2
        self_top = center_y - (int(self_height) / 2)
        self_left = center_x - (int(self_width) / 2)
        self.setPopupPosition(self_left, self_top)

    def center(self):
        self.centerBox()
        self.show()

    def add(self, widget):
        self.setWidget(widget)

    def show(self):
        """Attach the popup (and glass, if enabled) to the page."""
        if self.showing:
            return
        self.showing = True
        if self.glass:
            self.showGlass()
        DOM.addEventPreview(self)
        # NOTE(review): self.rootpanel is presumably provided by a base
        # class (it is never assigned here) -- confirm against SimplePanel.
        self.rootpanel.add(self)
        self.onShowImpl(self.getElement())
# Register PopupPanel with the pyjamas widget factory so it can be
# instantiated by its qualified name.
Factory.registerClass('gwt.ui.PopupPanel', 'PopupPanel', PopupPanel)
|
archatas/imageuploads
|
refs/heads/master
|
images/views.py
|
1
|
# -*- coding: UTF-8 -*-
import os
import shutil
from django.shortcuts import render, redirect
from django.template.defaultfilters import slugify
from django.conf import settings
from django.core.files import File
from models import Image
from forms import ImageForm
def image_list(request):
    """Render the page listing every uploaded image."""
    return render(
        request,
        "images/image_list.html",
        {'images': Image.objects.all()},
    )
def image_upload(request, image_id=None):
    """Create a new Image or edit an existing one (when image_id is given).

    On a valid POST: optionally deletes the current image file, then, if a
    temporary upload path was provided, moves that file into the model's
    image field under a slugified name and redirects to the image list.
    Otherwise renders the upload form.
    """
    instance = None
    if image_id:
        instance = Image.objects.get(pk=image_id)
    if request.method == "POST":
        form = ImageForm(request.POST, request.FILES, instance=instance)
        if form.is_valid():
            new_instance = form.save(commit=True)  # save first so the instance gets its primary key
            if form.cleaned_data['delete_image'] and new_instance.image:
                new_instance.image.delete()
            if form.cleaned_data['image_path']:
                tmp_path = form.cleaned_data['image_path']
                abs_tmp_path = os.path.join(settings.MEDIA_ROOT, tmp_path)
                fname, fext = os.path.splitext(os.path.basename(tmp_path))
                filename = slugify(fname) + fext
                # Bug fix: close the temp file before removing it. The
                # original leaked the handle, which also makes os.remove()
                # fail on Windows while the file is still open.
                with open(abs_tmp_path, "rb") as tmp_file:
                    new_instance.image.save(filename, File(tmp_file), False)
                os.remove(abs_tmp_path)
            new_instance.save()
            return redirect("image_list")
    else:
        form = ImageForm(instance=instance)
    return render(request, "images/image_upload.html", {'instance': instance, 'form': form})
|
jayoshih/content-curation
|
refs/heads/master
|
contentcuration/kolibri_content/migrations/__init__.py
|
12133432
| |
fintech-circle/edx-platform
|
refs/heads/master
|
lms/envs/__init__.py
|
12133432
| |
ahmedaljazzar/edx-platform
|
refs/heads/master
|
openedx/core/djangoapps/schedules/migrations/0006_scheduleexperience.py
|
13
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: introduce the ScheduleExperience model."""

    # Must run after 0005 so the schedules.Schedule model state exists.
    dependencies = [
        ('schedules', '0005_auto_20171010_1722'),
    ]
    operations = [
        migrations.CreateModel(
            name='ScheduleExperience',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # 0 = "Recurring Nudge and Upgrade Reminder", 1 = "Course Updates"
                ('experience_type', models.PositiveSmallIntegerField(default=0, choices=[(0, b'Recurring Nudge and Upgrade Reminder'), (1, b'Course Updates')])),
                # One experience per Schedule; removed when its schedule is deleted.
                ('schedule', models.OneToOneField(related_name='experience', to='schedules.Schedule', on_delete=models.CASCADE)),
            ],
        ),
    ]
|
bestvibes/neo4j-social-network
|
refs/heads/master
|
mac_env/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py
|
1730
|
"""A collection of modules for building different kinds of tree from
HTML documents.
To create a treebuilder for a new type of tree, you need to do
implement several things:
1) A set of classes for various types of elements: Document, Doctype,
Comment, Element. These must implement the interface of
_base.treebuilders.Node (although comment nodes have a different
signature for their constructor, see treebuilders.etree.Comment)
Textual content may also be implemented as another node type, or not, as
your tree implementation requires.
2) A treebuilder object (called TreeBuilder by convention) that
inherits from treebuilders._base.TreeBuilder. This has 4 required attributes:
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
It also has one required method:
getDocument - Returns the root node of the complete document tree
3) If you wish to run the unit tests, you must also create a
testSerializer method on your treebuilder which accepts a node and
returns a string containing Node and its children serialized according
to the format used in the unittests
"""
from __future__ import absolute_import, division, unicode_literals
from ..utils import default_etree
# Cache of tree-builder classes keyed by lowercased tree type. Only "lxml"
# is ever stored here; the dom/etree submodules cache per implementation.
treeBuilderCache = {}


def getTreeBuilder(treeType, implementation=None, **kwargs):
    """Return a TreeBuilder class for the requested kind of tree.

    treeType -- name of the tree type, case-insensitive. Supported values:

      "dom"   -- generic builder for DOM implementations; defaults to
                 xml.dom.minidom.
      "etree" -- generic builder for ElementTree-like implementations;
                 defaults to xml.etree.cElementTree when available,
                 xml.etree.ElementTree otherwise.
      "lxml"  -- etree-based builder for lxml.etree, working around
                 limitations of lxml's implementation.

    implementation -- (for "etree" and "dom") the module implementing the
    tree type, e.g. xml.etree.ElementTree or xml.etree.cElementTree.

    Raises ValueError for an unrecognised treeType.
    """
    treeType = treeType.lower()
    cached = treeBuilderCache.get(treeType)
    if cached is not None:
        return cached
    if treeType == "dom":
        from . import dom
        if implementation is None:
            # Sane stdlib default.
            from xml.dom import minidom
            implementation = minidom
        # Never cache here: caching is done inside the dom submodule.
        return dom.getDomModule(implementation, **kwargs).TreeBuilder
    if treeType == "etree":
        from . import etree
        if implementation is None:
            implementation = default_etree
        # Never cache here: caching is done inside the etree submodule.
        return etree.getETreeModule(implementation, **kwargs).TreeBuilder
    if treeType == "lxml":
        from . import etree_lxml
        builder = etree_lxml.TreeBuilder
        treeBuilderCache[treeType] = builder
        return builder
    raise ValueError("""Unrecognised treebuilder "%s" """ % treeType)
|
lokirius/python-for-android
|
refs/heads/master
|
python3-alpha/python3-src/Lib/py_compile.py
|
46
|
"""Routine to "compile" a .py file to a .pyc (or .pyo) file.
This module has intimate knowledge of the format of .pyc files.
"""
import builtins
import errno
import imp
import marshal
import os
import sys
import tokenize
import traceback
# Magic number identifying this interpreter's bytecode format; written as
# the first four bytes of every generated .pyc/.pyo file.
MAGIC = imp.get_magic()
# Public API of this module.
__all__ = ["compile", "main", "PyCompileError"]
class PyCompileError(Exception):
    """Raised when an error occurs while attempting to compile a file.

    Constructed as ``PyCompileError(exc_type, exc_value, file[, msg])``.

    Instance attributes:
      exc_type_name -- name of the original exception type
      exc_value     -- the original exception instance
      file          -- name of the file being compiled
      msg           -- the explicit *msg*, or a default message consistent
                       with traditional py_compile output

    For SyntaxError the default message reuses the interpreter's own
    formatting with "<string>" replaced by the real file name.
    """

    def __init__(self, exc_type, exc_value, file, msg=''):
        type_name = exc_type.__name__
        if exc_type is SyntaxError:
            # Let the interpreter format the syntax error, then point it at
            # the real source file instead of "<string>".
            formatted = ''.join(
                traceback.format_exception_only(exc_type, exc_value))
            default_msg = formatted.replace('File "<string>"',
                                            'File "%s"' % file)
        else:
            default_msg = "Sorry: %s: %s" % (type_name, exc_value)
        Exception.__init__(self, msg or default_msg,
                           type_name, exc_value, file)
        self.exc_type_name = type_name
        self.exc_value = exc_value
        self.file = file
        self.msg = msg or default_msg

    def __str__(self):
        return self.msg
def wr_long(f, x):
"""Internal; write a 32-bit int to a file in little-endian order."""
f.write(bytes([x & 0xff,
(x >> 8) & 0xff,
(x >> 16) & 0xff,
(x >> 24) & 0xff]))
def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1):
    """Byte-compile one Python source file to Python bytecode.

    :param file: The source file name.
    :param cfile: The target byte compiled file name.  When not given, this
        defaults to the PEP 3147 location.
    :param dfile: Purported file name, i.e. the file name that shows up in
        error messages.  Defaults to the source file name.
    :param doraise: Flag indicating whether or not an exception should be
        raised when a compile error is found.  If an exception occurs and
        this flag is set to False, a string indicating the nature of the
        exception will be printed, and the function will return to the
        caller.  If an exception occurs and this flag is set to True, a
        PyCompileError exception will be raised.
    :param optimize: The optimization level for the compiler.  Valid values
        are -1, 0, 1 and 2.  A value of -1 means to use the optimization
        level of the current interpreter, as given by -O command line
        options.
    :return: Path to the resulting byte compiled file.

    Note that it isn't necessary to byte-compile Python modules for
    execution efficiency -- Python itself byte-compiles a module when
    it is loaded, and if it can, writes out the bytecode to the
    corresponding .pyc (or .pyo) file.

    However, if a Python installation is shared between users, it is a
    good idea to byte-compile all modules upon installation, since
    other users may not be able to write in the source directories,
    and thus they won't be able to write the .pyc/.pyo file, and then
    they would be byte-compiling every module each time it is loaded.
    This can slow down program start-up considerably.

    See compileall.py for a script/module that uses this module to
    byte-compile all installed files (or all files in selected
    directories).
    """
    # tokenize.open() honours the encoding declared in the file (PEP 263).
    with tokenize.open(file) as f:
        try:
            timestamp = int(os.fstat(f.fileno()).st_mtime)
        except AttributeError:
            # No usable fileno(); fall back to stat'ing by name.
            timestamp = int(os.stat(file).st_mtime)
        codestring = f.read()
    try:
        codeobject = builtins.compile(codestring, dfile or file, 'exec',
                                      optimize=optimize)
    except Exception as err:
        py_exc = PyCompileError(err.__class__, err, dfile or file)
        if doraise:
            raise py_exc
        else:
            sys.stderr.write(py_exc.msg + '\n')
            return
    if cfile is None:
        # Derive the PEP 3147 __pycache__ path; .pyc vs .pyo depends on the
        # optimization level.
        if optimize >= 0:
            cfile = imp.cache_from_source(file, debug_override=not optimize)
        else:
            cfile = imp.cache_from_source(file)
    try:
        os.makedirs(os.path.dirname(cfile))
    except OSError as error:
        # The __pycache__ directory may already exist; anything else is a
        # real failure.
        if error.errno != errno.EEXIST:
            raise
    with open(cfile, 'wb') as fc:
        # Write a zero placeholder where the magic number belongs and patch
        # in the real MAGIC last, so a partially written file can never be
        # mistaken for valid bytecode.
        fc.write(b'\0\0\0\0')
        wr_long(fc, timestamp)
        marshal.dump(codeobject, fc)
        fc.flush()
        fc.seek(0, 0)
        fc.write(MAGIC)
    return cfile
def main(args=None):
    """Compile the source files named in *args*.

    When *args* is None the command line (sys.argv[1:]) is used.  If '-' is
    the only argument, file names are read from standard input, one per
    line.  No directory traversal is performed; only the named files are
    compiled.  Returns 0 on success, 1 if any file failed to compile.
    """
    if args is None:
        args = sys.argv[1:]
    status = 0
    if args == ['-']:
        # Read names line by line (not via iteration) so the function stays
        # usable with an interactive stdin.
        while True:
            raw = sys.stdin.readline()
            if not raw:
                break
            try:
                compile(raw.rstrip('\n'), doraise=True)
            except PyCompileError as error:
                status = 1
                sys.stderr.write("%s\n" % error.msg)
            except IOError as error:
                status = 1
                sys.stderr.write("%s\n" % error)
    else:
        for filename in args:
            try:
                compile(filename, doraise=True)
            except PyCompileError as error:
                # Keep going; report the failure via the exit status.
                status = 1
                sys.stderr.write(error.msg)
    return status
# Command-line entry point; the process exit status mirrors main()'s
# return value (1 if any file failed to compile).
if __name__ == "__main__":
    sys.exit(main())
|
tlatzko/spmcluster
|
refs/heads/master
|
.tox/clean/lib/python2.7/site-packages/wheel/test/test_signatures.py
|
565
|
from wheel import signatures
from wheel.signatures import djbec, ed25519py
from wheel.util import binary
def test_getlib():
    # Smoke test: the ed25519 backend can be located and loaded.
    signatures.get_ed25519ll()
def test_djbec():
    # Run the self-tests bundled with the pure-Python djbec implementation.
    djbec.dsa_test()
    djbec.dh_test()
def test_ed25519py():
    """Exercise the ed25519 sign/verify round-trip and its input validation."""
    def expect_value_error(fn, *args):
        # Helper (replaces 4x duplicated try/except/else boilerplate):
        # fail unless fn(*args) raises ValueError.
        try:
            fn(*args)
        except ValueError:
            pass
        else:
            raise Exception("Expected ValueError")

    kp0 = ed25519py.crypto_sign_keypair(binary(' '*32))
    kp = ed25519py.crypto_sign_keypair()
    signed = ed25519py.crypto_sign(binary('test'), kp.sk)
    # Verifies with the matching key...
    ed25519py.crypto_sign_open(signed, kp.vk)
    # ...and must fail with a different key.
    expect_value_error(ed25519py.crypto_sign_open, signed, kp0.vk)
    # Wrong seed length.
    expect_value_error(ed25519py.crypto_sign_keypair, binary(' '*33))
    # Wrong key lengths.
    expect_value_error(ed25519py.crypto_sign, binary(''), binary(' ')*31)
    expect_value_error(ed25519py.crypto_sign_open, binary(''), binary(' ')*31)
|
enyx-opensource/yassh
|
refs/heads/master
|
yassh/reactor.py
|
2
|
import logging
import errno
import select
import weakref
LOGGER = logging.getLogger(__name__)
class Reactor(object):
    '''
    Polls registered execution(s) and dispatches their output to the
    matching monitor(s).
    '''
    def __init__(self):
        '''
        Create a reactor with an empty watch set.
        '''
        self.poller = select.poll()
        self.fd_to_cmd = {}

    def register_execution(self, cmd):
        '''
        Register a new `cmd` on the reactor so that its output is
        monitored and processed.

        :param Execution cmd: The cmd to register
        '''
        fd = cmd.fileno()
        self.poller.register(fd, select.POLLIN | select.POLLPRI)
        # Weak reference: the reactor must not keep finished executions alive.
        self.fd_to_cmd[fd] = weakref.ref(cmd)
        LOGGER.debug('registered %s', cmd)

    def unregister_execution(self, cmd):
        '''
        Unregister a `cmd` from the reactor.

        :param Execution cmd: The cmd to unregister
        '''
        del self.fd_to_cmd[cmd.fileno()]
        self.poller.unregister(cmd)
        LOGGER.debug('unregistered %s', cmd)

    def _process_cmd_output(self, handle):
        # Resolve the weak reference; the execution may already be gone.
        ref = self.fd_to_cmd.get(handle, None)
        cmd = ref() if ref else None
        if cmd:
            LOGGER.debug('%s has new output', cmd)
            cmd.process_output()

    def run(self, ms_timeout):
        '''
        Wait up to `ms_timeout` for registered execution(s) to produce
        output and run the associated monitor(s).

        :param int ms_timeout: Duration waited for an event to occur
        :rtype: int
        :return: The event count (0 with nothing registered,
                 -errno.ETIMEDOUT when the wait timed out)
        '''
        if not self.fd_to_cmd:
            return 0
        events = self.poller.poll(ms_timeout)
        for handle, _unused in events:
            self._process_cmd_output(handle)
        return len(events) or -errno.ETIMEDOUT
|
466152112/scikit-learn
|
refs/heads/master
|
examples/cluster/plot_segmentation_toy.py
|
258
|
"""
===========================================
Spectral clustering for image segmentation
===========================================
In this example, an image with connected circles is generated and
spectral clustering is used to separate the circles.
In these settings, the :ref:`spectral_clustering` approach solves the problem
know as 'normalized graph cuts': the image is seen as a graph of
connected voxels, and the spectral clustering algorithm amounts to
choosing graph cuts defining regions while minimizing the ratio of the
gradient along the cut, and the volume of the region.
As the algorithm tries to balance the volume (ie balance the region
sizes), if we take circles with different sizes, the segmentation fails.
In addition, as there is no useful information in the intensity of the image,
or its gradient, we choose to perform the spectral clustering on a graph
that is only weakly informed by the gradient. This is close to performing
a Voronoi partition of the graph.
In addition, we use the mask of the objects to restrict the graph to the
outline of the objects. In this example, we are interested in
separating the objects one from the other, and not from the background.
"""
print(__doc__)
# Authors:  Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>
#           Gael Varoquaux <gael.varoquaux@normalesup.org>
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.feature_extraction import image
from sklearn.cluster import spectral_clustering
###############################################################################
# Build a synthetic l x l image containing four circles.
l = 100
x, y = np.indices((l, l))
center1 = (28, 24)
center2 = (40, 50)
center3 = (67, 58)
center4 = (24, 70)
radius1, radius2, radius3, radius4 = 16, 14, 15, 14
# Boolean masks: True inside each circle.
circle1 = (x - center1[0]) ** 2 + (y - center1[1]) ** 2 < radius1 ** 2
circle2 = (x - center2[0]) ** 2 + (y - center2[1]) ** 2 < radius2 ** 2
circle3 = (x - center3[0]) ** 2 + (y - center3[1]) ** 2 < radius3 ** 2
circle4 = (x - center4[0]) ** 2 + (y - center4[1]) ** 2 < radius4 ** 2
###############################################################################
# 4 circles
img = circle1 + circle2 + circle3 + circle4
# Restrict the graph to the circles themselves (ignore the background).
mask = img.astype(bool)
img = img.astype(float)
# Add noise so the intensity carries no useful information.
img += 1 + 0.2 * np.random.randn(*img.shape)
# Convert the image into a graph with the value of the gradient on the
# edges.
graph = image.img_to_graph(img, mask=mask)
# Take a decreasing function of the gradient: we take it weakly
# dependent from the gradient the segmentation is close to a voronoi
graph.data = np.exp(-graph.data / graph.data.std())
# Force the solver to be arpack, since amg is numerically
# unstable on this example
labels = spectral_clustering(graph, n_clusters=4, eigen_solver='arpack')
# Paint the cluster labels back into image space (-1 = background).
label_im = -np.ones(mask.shape)
label_im[mask] = labels
plt.matshow(img)
plt.matshow(label_im)
###############################################################################
# 2 circles
# Same pipeline with only two (overlapping) circles, two clusters.
img = circle1 + circle2
mask = img.astype(bool)
img = img.astype(float)
img += 1 + 0.2 * np.random.randn(*img.shape)
graph = image.img_to_graph(img, mask=mask)
graph.data = np.exp(-graph.data / graph.data.std())
labels = spectral_clustering(graph, n_clusters=2, eigen_solver='arpack')
label_im = -np.ones(mask.shape)
label_im[mask] = labels
plt.matshow(img)
plt.matshow(label_im)
plt.show()
|
digdoritos/gimp
|
refs/heads/master
|
plug-ins/pygimp/plug-ins/python-console.py
|
17
|
#!/usr/bin/env python
# Gimp-Python - allows the writing of Gimp plugins in Python.
# Copyright (C) 1997 James Henstridge <james@daa.com.au>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gimpfu import *
# i18n setup for the plug-in's UI strings.
# NOTE(review): `gettext` is not imported explicitly here -- presumably it
# reaches this namespace via `from gimpfu import *`; verify before refactoring.
t = gettext.translation('gimp20-python', gimp.locale_directory, fallback=True)
_ = t.ugettext
# PDB name this plug-in registers under.
PROC_NAME = 'python-fu-console'
# Custom dialog response ids for the extra buttons.
RESPONSE_BROWSE, RESPONSE_CLEAR, RESPONSE_SAVE = range(3)
def do_console():
    """Build and run the interactive GIMP Python console dialog.

    All GTK-related imports happen lazily inside this function because it
    only runs when the plug-in is invoked, not at registration time.
    """
    import pygtk
    pygtk.require('2.0')
    import sys, gobject, gtk, gimpenums, gimpshelf, gimpui, pyconsole
    # Namespace the interactive interpreter evaluates code in: expose gimp,
    # the procedure database, the persistent shelf, and every public
    # gimpenums constant.
    namespace = {'__builtins__': __builtins__,
                 '__name__': '__main__', '__doc__': None,
                 'gimp': gimp, 'pdb': gimp.pdb,
                 'shelf': gimpshelf.shelf}
    for s in gimpenums.__dict__.keys():
        if s[0] != '_':
            namespace[s] = getattr(gimpenums, s)
    class GimpConsole(pyconsole.Console):
        # pyconsole.Console specialised for GIMP: version banner plus a
        # display flush after every committed command so image changes
        # become visible immediately.
        def __init__(self, quit_func=None):
            banner = ('GIMP %s Python Console\nPython %s\n' %
                      (gimp.pdb.gimp_version(), sys.version))
            pyconsole.Console.__init__(self,
                                       locals=namespace, banner=banner,
                                       quit_func=quit_func)
        def _commit(self):
            pyconsole.Console._commit(self)
            gimp.displays_flush()
    class ConsoleDialog(gimpui.Dialog):
        # Main window: the console widget plus Save / Clear / Browse / Close.
        def __init__(self):
            gimpui.Dialog.__init__(self, title=_("Python Console"),
                                   role=PROC_NAME, help_id=PROC_NAME,
                                   buttons=(gtk.STOCK_SAVE, RESPONSE_SAVE,
                                            gtk.STOCK_CLEAR, RESPONSE_CLEAR,
                                            _("_Browse..."), RESPONSE_BROWSE,
                                            gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE))
            self.set_alternative_button_order((gtk.RESPONSE_CLOSE,
                                               RESPONSE_BROWSE,
                                               RESPONSE_CLEAR,
                                               RESPONSE_SAVE))
            self.cons = GimpConsole(quit_func=lambda: gtk.main_quit())
            self.connect('response', self.response)
            # Child dialogs are created lazily on first use.
            self.browse_dlg = None
            self.save_dlg = None
            vbox = gtk.VBox(False, 12)
            vbox.set_border_width(12)
            self.vbox.pack_start(vbox)
            scrl_win = gtk.ScrolledWindow()
            scrl_win.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_ALWAYS)
            vbox.pack_start(scrl_win)
            scrl_win.add(self.cons)
            self.set_default_size(500, 500)
        def response(self, dialog, response_id):
            # Dispatch on which dialog button was pressed; anything not
            # handled explicitly (e.g. Close) quits the main loop.
            if response_id == RESPONSE_BROWSE:
                self.browse()
            elif response_id == RESPONSE_CLEAR:
                self.cons.banner = None
                self.cons.clear()
            elif response_id == RESPONSE_SAVE:
                self.save_dialog()
            else:
                gtk.main_quit()
            self.cons.grab_focus()
        def browse_response(self, dlg, response_id):
            # "Apply" from the procedure browser: build a pdb.<proc>(...)
            # call template and paste it over the current input line.
            if response_id != gtk.RESPONSE_APPLY:
                dlg.hide()
                return
            proc_name = dlg.get_selected()
            if not proc_name:
                return
            proc = pdb[proc_name]
            cmd = ''
            if len(proc.return_vals) > 0:
                cmd = ', '.join([x[1].replace('-', '_')
                                 for x in proc.return_vals]) + ' = '
            cmd = cmd + 'pdb.%s' % proc.proc_name.replace('-', '_')
            # The implicit run-mode parameter is omitted from the template.
            if len(proc.params) > 0 and proc.params[0][1] == 'run-mode':
                params = proc.params[1:]
            else:
                params = proc.params
            cmd = cmd + '(%s)' % ', '.join([x[1].replace('-', '_')
                                            for x in params])
            buffer = self.cons.buffer
            # Replace everything after the 4-character prompt on the last
            # line with the generated call.
            lines = buffer.get_line_count()
            iter = buffer.get_iter_at_line_offset(lines - 1, 4)
            buffer.delete(iter, buffer.get_end_iter())
            buffer.place_cursor(buffer.get_end_iter())
            buffer.insert_at_cursor(cmd)
        def browse(self):
            # Lazily create the procedure browser, then (re)present it.
            if not self.browse_dlg:
                dlg = gimpui.ProcBrowserDialog(_("Python Procedure Browser"),
                                               role=PROC_NAME,
                                               buttons=(gtk.STOCK_APPLY,
                                                        gtk.RESPONSE_APPLY,
                                                        gtk.STOCK_CLOSE,
                                                        gtk.RESPONSE_CLOSE))
                dlg.set_default_response(gtk.RESPONSE_APPLY)
                dlg.set_alternative_button_order((gtk.RESPONSE_CLOSE,
                                                  gtk.RESPONSE_APPLY))
                dlg.connect('response', self.browse_response)
                dlg.connect('row-activated',
                            lambda dlg: dlg.response(gtk.RESPONSE_APPLY))
                self.browse_dlg = dlg
            self.browse_dlg.present()
        def save_response(self, dlg, response_id):
            if response_id == gtk.RESPONSE_DELETE_EVENT:
                # Dialog destroyed: forget it so it is rebuilt next time.
                self.save_dlg = None
                return
            elif response_id == gtk.RESPONSE_OK:
                filename = dlg.get_filename()
                try:
                    logfile = open(filename, 'w')
                except IOError, e:
                    gimp.message(_("Could not open '%s' for writing: %s") %
                                 (filename, e.strerror))
                    return
                buffer = self.cons.buffer
                start = buffer.get_start_iter()
                end = buffer.get_end_iter()
                log = buffer.get_text(start, end, False)
                try:
                    logfile.write(log)
                    logfile.close()
                except IOError, e:
                    gimp.message(_("Could not write to '%s': %s") %
                                 (filename, e.strerror))
                    return
            dlg.hide()
        def save_dialog(self):
            # Lazily create the file chooser used to save console output.
            if not self.save_dlg:
                dlg = gtk.FileChooserDialog(_("Save Python-Fu Console Output"),
                                            parent=self,
                                            action=gtk.FILE_CHOOSER_ACTION_SAVE,
                                            buttons=(gtk.STOCK_CANCEL,
                                                     gtk.RESPONSE_CANCEL,
                                                     gtk.STOCK_SAVE,
                                                     gtk.RESPONSE_OK))
                dlg.set_default_response(gtk.RESPONSE_OK)
                dlg.set_alternative_button_order((gtk.RESPONSE_OK,
                                                  gtk.RESPONSE_CANCEL))
                dlg.connect('response', self.save_response)
                self.save_dlg = dlg
            self.save_dlg.present()
        def run(self):
            self.show_all()
            gtk.main()
    ConsoleDialog().run()
# Register the console with GIMP's procedure database and place it in the
# Filters/Languages/Python-Fu menu.
register(
    PROC_NAME,
    N_("Interactive GIMP Python interpreter"),
    "Type in commands and see results",
    "James Henstridge",
    "James Henstridge",
    "1997-1999",
    N_("_Console"),
    "",
    [],
    [],
    do_console,
    menu="<Image>/Filters/Languages/Python-Fu",
    domain=("gimp20-python", gimp.locale_directory))
# Hand control to gimpfu's plug-in main loop.
main()
|
dusenberrymw/systemml
|
refs/heads/master
|
src/main/python/systemml/random/sampling.py
|
13
|
#-------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#-------------------------------------------------------------
__all__ = ['normal', 'uniform', 'poisson']
from ..defmatrix import *
# Special object used internally to specify the placeholder which will be replaced by output ID
# This helps to provide dml containing output ID in constructSamplingNode
OUTPUT_ID = '$$OutputID$$'
def constructSamplingNode(inputs, dml):
    """
    Convenient utility to create an intermediate node of the AST.

    Parameters
    ----------
    inputs: list of input matrix objects and/or DMLOp
    dml: list of DML strings (eventually joined before execution).  Use the
         OUTPUT_ID placeholder wherever the output identifier belongs; it is
         substituted with the new node's ID here.
    """
    op_node = DMLOp(inputs)
    out_node = matrix(None, op=op_node)
    # Replace every occurrence of the placeholder with the output's real ID.
    op_node.dml = [out_node.ID if token == OUTPUT_ID else token
                   for token in dml]
    return out_node
# Module-level accumulator: asStr() appends matrix arguments here so they can
# be wired in as inputs of the sampling node being constructed.
INPUTS = []
def asStr(arg):
    """
    Internal use only: convert *arg* to the string spliced into generated DML,
    registering matrix arguments in the module-level INPUTS list.

    Returns
    -------
    str: ``arg.ID`` for matrix arguments, ``str(arg)`` otherwise.
    """
    # Bug fix: the original assigned to INPUTS without declaring it global,
    # which made INPUTS a local and raised UnboundLocalError for any matrix
    # argument (and the input was never recorded).
    global INPUTS
    if isinstance(arg, matrix):
        INPUTS = INPUTS + [arg]
        return arg.ID
    else:
        return str(arg)
def normal(loc=0.0, scale=1.0, size=(1,1), sparsity=1.0):
    """
    Draw random samples from a normal (Gaussian) distribution.

    Parameters
    ----------
    loc: Mean ("centre") of the distribution.
    scale: Standard deviation (spread or "width") of the distribution.
    size: Output shape (only tuple of length 2, i.e. (m, n), supported).
    sparsity: Sparsity (between 0.0 and 1.0).

    Raises
    ------
    TypeError: if *size* is not a tuple of length 2.

    Examples
    --------
    >>> import systemml as sml
    >>> import numpy as np
    >>> sml.setSparkContext(sc)
    >>> from systemml import random
    >>> m1 = sml.random.normal(loc=3, scale=2, size=(3,3))
    >>> m1.toNumPy()
    array([[ 3.48857226,  6.17261819,  2.51167259],
           [ 3.60506708, -1.90266305,  3.97601633],
           [ 3.62245706,  5.9430881 ,  2.53070413]])
    """
    if len(size) != 2:
        raise TypeError('Incorrect type for size. Expected tuple of length 2')
    # Bug fix: reset the *module-level* INPUTS accumulator.  The original
    # created an unrelated local named INPUTS, so matrix-valued arguments
    # collected by asStr() never reached constructSamplingNode.
    global INPUTS
    INPUTS = []
    rows = asStr(size[0])
    cols = asStr(size[1])
    loc = asStr(loc)
    scale = asStr(scale)
    sparsity = asStr(sparsity)
    # loc + scale*standard normal
    return constructSamplingNode(INPUTS, [OUTPUT_ID, ' = ', loc,' + ', scale,' * random.normal(', rows, ',', cols, ',', sparsity, ')\n'])
def uniform(low=0.0, high=1.0, size=(1,1), sparsity=1.0):
    """
    Draw samples from a uniform distribution.

    Parameters
    ----------
    low: Lower boundary of the output interval.
    high: Upper boundary of the output interval.
    size: Output shape (only tuple of length 2, i.e. (m, n), supported).
    sparsity: Sparsity (between 0.0 and 1.0).

    Raises
    ------
    TypeError: if *size* is not a tuple of length 2.

    Examples
    --------
    >>> import systemml as sml
    >>> import numpy as np
    >>> sml.setSparkContext(sc)
    >>> from systemml import random
    >>> m1 = sml.random.uniform(size=(3,3))
    >>> m1.toNumPy()
    array([[ 0.54511396,  0.11937437,  0.72975775],
           [ 0.14135946,  0.01944448,  0.52544478],
           [ 0.67582422,  0.87068849,  0.02766852]])
    """
    if len(size) != 2:
        raise TypeError('Incorrect type for size. Expected tuple of length 2')
    # Bug fix: reset the *module-level* INPUTS accumulator (the original
    # bound a new local named INPUTS, so matrix-valued arguments registered
    # by asStr() never reached constructSamplingNode).
    global INPUTS
    INPUTS = []
    rows = asStr(size[0])
    cols = asStr(size[1])
    low = asStr(low)
    high = asStr(high)
    sparsity = asStr(sparsity)
    return constructSamplingNode(INPUTS, [OUTPUT_ID, ' = random.uniform(', rows, ',', cols, ',', sparsity, ',', low, ',', high, ')\n'])
def poisson(lam=1.0, size=(1,1), sparsity=1.0):
    """
    Draw samples from a Poisson distribution.

    Parameters
    ----------
    lam: Expectation of interval, should be > 0.
    size: Output shape (only tuple of length 2, i.e. (m, n), supported).
    sparsity: Sparsity (between 0.0 and 1.0).

    Raises
    ------
    TypeError: if *size* is not a tuple of length 2.

    Examples
    --------
    >>> import systemml as sml
    >>> import numpy as np
    >>> sml.setSparkContext(sc)
    >>> from systemml import random
    >>> m1 = sml.random.poisson(lam=1, size=(3,3))
    >>> m1.toNumPy()
    array([[ 1.,  0.,  2.],
           [ 1.,  0.,  0.],
           [ 0.,  0.,  0.]])
    """
    if len(size) != 2:
        raise TypeError('Incorrect type for size. Expected tuple of length 2')
    # Bug fix: reset the *module-level* INPUTS accumulator (the original
    # bound a new local named INPUTS, so matrix-valued arguments registered
    # by asStr() never reached constructSamplingNode).
    global INPUTS
    INPUTS = []
    rows = asStr(size[0])
    cols = asStr(size[1])
    lam = asStr(lam)
    sparsity = asStr(sparsity)
    return constructSamplingNode(INPUTS, [OUTPUT_ID, ' = random.poisson(', rows, ',', cols, ',', sparsity, ',', lam, ')\n'])
|
indictranstech/erpnext
|
refs/heads/develop
|
erpnext/hr/doctype/salary_structure/salary_structure.py
|
15
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, cint, getdate
from frappe import _
from frappe.model.mapper import get_mapped_doc
from frappe.model.document import Document
from erpnext.hr.utils import set_employee_name
class SalaryStructure(Document):
    """Controller for the Salary Structure doctype.

    Validates net pay, the employees assigned to this structure and their
    date ranges, and normalises condition/formula fields on the earning
    and deduction component rows.
    """

    def validate(self):
        # Frappe document-level validation hook, called before save.
        self.validate_amount()
        for e in self.get('employees'):
            set_employee_name(e)
        self.validate_date()
        self.strip_condition_and_formula_fields()

    def get_ss_values(self,employee):
        # Return the employee's bank details as a dict with keys
        # 'bank_name' and 'bank_ac_no' ('' when the employee is not found).
        basic_info = frappe.db.sql("""select bank_name, bank_ac_no
			from `tabEmployee` where name =%s""", employee)
        ret = {'bank_name': basic_info and basic_info[0][0] or '',
            'bank_ac_no': basic_info and basic_info[0][1] or ''}
        return ret

    def validate_amount(self):
        # A negative net pay is only rejected for timesheet-based structures.
        if flt(self.net_pay) < 0 and self.salary_slip_based_on_timesheet:
            frappe.throw(_("Net pay cannot be negative"))

    def validate_date(self):
        # For each assigned employee: from_date must not precede the
        # employee's joining date, and no other *active* salary structure
        # may overlap the requested [from_date, to_date] window.
        for employee in self.get('employees'):
            joining_date, relieving_date = frappe.db.get_value("Employee", employee.employee,
                ["date_of_joining", "relieving_date"])
            if employee.from_date and joining_date and getdate(employee.from_date) < joining_date:
                frappe.throw(_("From Date {0} for Employee {1} cannot be before employee's joining Date {2}")
                    .format(employee.from_date, employee.employee, joining_date))
            # '2199-12-31' acts as the open-ended to_date sentinel.
            st_name = frappe.db.sql("""select parent from `tabSalary Structure Employee`
				where
				employee=%(employee)s
				and (
				(%(from_date)s between from_date and ifnull(to_date, '2199-12-31'))
				or (%(to_date)s between from_date and ifnull(to_date, '2199-12-31'))
				or (from_date between %(from_date)s and %(to_date)s)
				)
				and (
				exists (select name from `tabSalary Structure`
				where name = `tabSalary Structure Employee`.parent and is_active = 'Yes')
				)
				and parent != %(salary_struct)s""",
                {
                    'employee': employee.employee,
                    'from_date': employee.from_date,
                    'to_date': (employee.to_date or '2199-12-31'),
                    'salary_struct': self.name
                })
            if st_name:
                frappe.throw(_("Active Salary Structure {0} found for employee {1} for the given dates")
                    .format(st_name[0][0], employee.employee))

    def strip_condition_and_formula_fields(self):
        # remove whitespaces from condition and formula fields
        for row in self.earnings:
            row.condition = row.condition.strip() if row.condition else ""
            row.formula = row.formula.strip() if row.formula else ""
        for row in self.deductions:
            row.condition = row.condition.strip() if row.condition else ""
            row.formula = row.formula.strip() if row.formula else ""
@frappe.whitelist()
def make_salary_slip(source_name, target_doc = None, employee = None, as_print = False, print_format = None):
    """Create a Salary Slip mapped from the given Salary Structure.

    When *employee* is supplied, the slip is pre-filled with that employee's
    details.  When *as_print* is truthy, a rendered print preview (HTML) is
    returned instead of the mapped document.
    """
    def set_employee_details(source, target):
        # Copy the selected employee's details onto the new slip, then let
        # the slip compute its salary components.
        if employee:
            details = frappe.db.get_value("Employee", employee,
                ["employee_name", "branch", "designation", "department"], as_dict=1)
            target.employee = employee
            target.employee_name = details.employee_name
            target.branch = details.branch
            target.designation = details.designation
            target.department = details.department
        target.run_method('process_salary_structure')

    mapping = {
        "Salary Structure": {
            "doctype": "Salary Slip",
            "field_map": {
                "total_earning": "gross_pay",
                "name": "salary_structure"
            }
        }
    }
    doc = get_mapped_doc("Salary Structure", source_name, mapping,
        target_doc, set_employee_details, ignore_child_tables=True)

    if not cint(as_print):
        return doc
    doc.name = 'Preview for {0}'.format(employee)
    return frappe.get_print(doc.doctype, doc.name, doc = doc, print_format = print_format)
@frappe.whitelist()
def get_employees(**args):
    """Return name and employee_name of Employees matching args['filters']."""
    filters = args['filters']
    return frappe.get_list('Employee', filters=filters,
        fields=['name', 'employee_name'])
|
tcmitchell/geni-tools
|
refs/heads/develop
|
src/gcf/geni/gch.py
|
3
|
#----------------------------------------------------------------------
# Copyright (c) 2011-2016 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
"""
Reference GENI GCH Clearinghouse, for talking to the GENI Clearinghouse
via xmlrpc instead of smime (its native interface)
Run from gcf-gch.py
Will produce signed user credentials from a GID, return a
list of aggregates read from a config file, and create a new Slice Credential.
"""
from __future__ import absolute_import
import dateutil.parser
import datetime
import logging
import os
import socket
import traceback
import uuid
from .ch import SampleClearinghouseServer
from .SecureXMLRPCServer import SecureXMLRPCServer
from .util import cred_util
from .util.ch_interface import *
# FIXME: GENI CH APIs have evolved since this was last run
# Clearinghouse interface that communicates with the
# new clearinghouse services (SA, PA, MA, CS, AUTHZ, LOG, etc.)
class GENIClearinghouse(object):
    """XML-RPC front end to the GENI Clearinghouse services.

    Discovers the Slice (SA), Project (PA) and Member (MA) authorities via
    the local service registry, then forwards clearinghouse calls to them
    through invokeCH.
    """

    def __init__(self):
        self.logger = logging.getLogger('gcf-gch')

    def runserver(self, addr, keyfile=None, certfile=None,
                  ca_certs=None, config=None):
        """Run the clearinghouse server."""
        # ca_certs is a dir of several certificates for peering
        # If not supplied just use the certfile as the only trusted root
        self.keyfile = keyfile
        self.certfile = certfile
        self.config = config

        # Error check the keyfile, certfile all exist
        if keyfile is None or not os.path.isfile(os.path.expanduser(keyfile)) or os.path.getsize(os.path.expanduser(keyfile)) < 1:
            raise Exception("Missing CH key file %s" % keyfile)
        if certfile is None or not os.path.isfile(os.path.expanduser(certfile)) or os.path.getsize(os.path.expanduser(certfile)) < 1:
            raise Exception("Missing CH cert file %s" % certfile)

        if ca_certs is None:
            ca_certs = certfile
            self.logger.info("Using only my CH cert as a trusted root cert")

        self.trusted_root_files = cred_util.CredentialVerifier(ca_certs).root_cert_files

        if not os.path.exists(os.path.expanduser(ca_certs)):
            raise Exception("Missing CA cert(s): %s" % ca_certs)

        # This is the arg to _make_server
        ca_certs_onefname = cred_util.CredentialVerifier.getCAsFileFromDir(ca_certs)

        # Set up the URL's to the CH services
        self.establish_ch_interface();

        # Create the xmlrpc server, load the rootkeys and do the ssl thing.
        self._server = self._make_server(addr, keyfile, certfile,
                                         ca_certs_onefname)
        self._server.register_instance(SampleGENIClearinghouseServer(self))
        self.logger.info('GENI CH Listening on port %d...' % (addr[1]))
        self._server.serve_forever()

    def _make_server(self, addr, keyfile=None, certfile=None,
                     ca_certs=None):
        """Creates the XML RPC server."""
        # ca_certs is a file of concatenated certs
        return SecureXMLRPCServer(addr, keyfile=keyfile, certfile=certfile,
                                  ca_certs=ca_certs)

    def GetVersion(self):
        # Report the supported clearinghouse API version.
        self.logger.info("Called GetVersion")
        version = dict()
        version['gcf-ch_api'] = 2
        return version

    def CreateProject(self, project_name, lead_id, project_purpose):
        """Forward project creation to the Project Authority (PA)."""
        self.logger.info("Called CreateProject");
        argsdict = dict(project_name=project_name,
                        lead_id=lead_id,
                        project_purpose=project_purpose);
        result = invokeCH(self.pa_url, 'create_project', self.logger,
                          argsdict, self.certfile, self.keyfile);
        # print("CP.RESULT = " + str(result))
        return result

    def CreateSlice(self, slice_name, project_id, owner_id):
        """Create a slice at the Slice Authority (SA).

        Authenticates with a short-lived "inside" cert/key pair derived
        from the caller's peer certificate via the Member Authority.
        """
        self.logger.info("Called CreateSlice SN " + slice_name +
                         " PID " + str(project_id));
        project_name = 'Dummy';
        argsdict = dict(slice_name=slice_name,
                        project_id=project_id,
                        project_name=project_name,
                        owner_id=owner_id)
        key_and_cert_files = get_inside_cert_and_key(self._server.peercert, \
                                                     self.ma_url, \
                                                     self.logger);
        inside_keyfile = key_and_cert_files['key'];
        inside_certfile = key_and_cert_files['cert'];
        # print("KF = " + inside_keyfile + " CF = " + inside_certfile);
        # print("SA_URL = " + self.sa_url);
        # Bug fix: the original passed inside_certfile for BOTH the cert and
        # key arguments of invokeCH; every other call site (CreateProject,
        # GetSliceCredential) passes (cert, key), so pass the key here too.
        result = invokeCH(self.sa_url, 'create_slice', self.logger,
                          argsdict, inside_certfile, inside_keyfile);
        # print("RES = " + str(result));
        os.unlink(inside_certfile);
        os.unlink(inside_keyfile);
        # Don't understand why, but this returns a 'None' output so I need
        # to fill it in with a ''
        if(result['output'] == None): result['output'] = '';
        # print("CreateSlice RET = " + str(result));
        return result;

    def GetSliceCredential(self, slice_id, cert, slice_urn=None):
        """Fetch a slice credential from the SA, resolving the URN first if given.

        Returns False when a URN lookup fails; otherwise the invokeCH result.
        """
        self.logger.info("Called GetSliceCredential (ID=%s URN=%s)", \
                         slice_id, slice_urn)
        key_and_cert_files = get_inside_cert_and_key(self._server.peercert, \
                                                     self.ma_url, \
                                                     self.logger);
        inside_keyfile = key_and_cert_files['key'];
        inside_certfile = key_and_cert_files['cert'];
        # print("KF = " + inside_keyfile + " CF = " + inside_certfile);
        if (slice_urn != None):
            argsdict = dict(slice_urn=slice_urn);
            row = invokeCH(self.sa_url, 'lookup_slice_by_urn',
                           self.logger, argsdict,
                           inside_certfile, inside_keyfile);
            # print("Row = " + str(row));
            if (row['code'] != 0):
                return False;
            slice_id = row['value']['slice_id']
        # print "SLICE_ID = " + str(slice_id);
        argsdict = dict(slice_id=slice_id, experimenter_certificate=cert)
        result = invokeCH(self.sa_url, 'get_slice_credential',
                          self.logger, argsdict, inside_certfile, inside_keyfile);
        # print("SC return = " + str(result))
        os.unlink(inside_certfile);
        os.unlink(inside_keyfile);
        return result

    def RenewSlice(self, slice_urn, expire_str):
        # Not implemented against the CH yet; reports success unconditionally.
        self.logger.info("Called RenewSlice(%s, %s)", slice_urn, expire_str)
        return True

    def DeleteSlice(self, urn_req):
        # Not implemented; reports failure unconditionally.
        self.logger.info("Called DeleteSlice %r" % urn_req)
        return False

    def ListAggregates(self):
        # Not implemented in this CH variant.
        self.logger.info("Called ListAggregates")
        return None

    def CreateUserCredential(self, user_gid):
        """Ask the SA to issue a user credential for the given GID."""
        # print "GID = " + str(user_gid)
        argsdict=dict(experimenter_certificate=user_gid);
        result = invokeCH(self.sa_url, 'get_user_credential',
                          self.logger, argsdict, self.certfile, self.keyfile);
        if(result['code'] == 0):
            result = result['value']['user_credential'];
        # print "RES = " + str(result)
        return result;

    def establish_ch_interface(self):
        """Resolve the SA/PA/MA service URLs from the local service registry."""
        self.sr_url = "https://" + socket.gethostname() + "/sr/sr_controller.php";
        # print("SR_URL = " + self.sr_url);
        self.sa_url = self.get_first_service_of_type(1); # SERVICE_AUTHORITY
        self.pa_url = self.get_first_service_of_type(2); # PROJECT_AUTHORITY
        self.ma_url = self.get_first_service_of_type(3); # MEMBER_AUTHORITY

    def get_first_service_of_type(self, service_type):
        """Return the URL of the first registered service of the given type, or None."""
        result = invokeCH(self.sr_url, 'get_services_of_type',
                          self.logger,
                          dict(service_type=service_type),
                          self.certfile, self.keyfile);
        # print("GSOT.RESULT = " + str(result))
        if(result['code'] != 0):
            return None
        services = result['value'];
        service = services[0];
        service_url = service['service_url'];
        print("Service of type " + str(service_type) + " = " + service_url);
        return service_url;
class SampleGENIClearinghouseServer(object):
    """A sample clearinghouse with barebones functionality.

    Thin XML-RPC facade: every public method forwards unchanged to the
    wrapped delegate object.
    """

    def __init__(self, delegate):
        self._delegate = delegate

    def GetVersion(self):
        return self._delegate.GetVersion()

    def CreateProject(self, project_name, lead_id, project_purpose):
        return self._delegate.CreateProject(project_name, lead_id,
                                            project_purpose)

    def CreateSlice(self, slice_name, project_id, owner_id):
        return self._delegate.CreateSlice(slice_name, project_id, owner_id)

    def GetSliceCredential(self, slice_id, cert, slice_urn=None):
        return self._delegate.GetSliceCredential(slice_id, cert, slice_urn)

    def RenewSlice(self, urn, expire_str):
        # Log the delegate's traceback before re-raising so XML-RPC faults
        # are not silently swallowed.
        try:
            return self._delegate.RenewSlice(urn, expire_str)
        except:
            self._delegate.logger.error(traceback.format_exc())
            raise

    def DeleteSlice(self, urn):
        return self._delegate.DeleteSlice(urn)

    def ListAggregates(self):
        return self._delegate.ListAggregates()

    def CreateUserCredential(self, cert):
        return self._delegate.CreateUserCredential(cert)
|
Sendinel/Sendinel
|
refs/heads/master
|
configs/sendinel/local_settings_test.py
|
1
|
# Test-environment settings for Sendinel.
#
# Bug fix: the original file contained several truncated
# "AUTHENTICATION_ENABLED =" lines with no right-hand side, which is a
# SyntaxError and made the settings module unimportable.  The first and
# last complete assignments both set the flag to True, so the effective
# value is preserved here with a single assignment.
AUTHENTICATION_ENABLED = True
|
jerli/sympy
|
refs/heads/master
|
sympy/functions/special/tests/test_bsplines.py
|
83
|
from sympy.functions import bspline_basis_set
from sympy.core.compatibility import range
from sympy import Piecewise, Interval
from sympy import symbols, Rational
# Symbols shared by all B-spline tests below.
x, y = symbols('x,y')
def test_basic_degree_0():
    """Degree-0 B-splines are indicator functions of the knot intervals."""
    d = 0
    knots = range(5)
    splines = bspline_basis_set(d, knots, x)
    # Idiom fix: iterate with enumerate() rather than indexing through
    # range(len(splines)).
    for i, spline in enumerate(splines):
        assert spline == Piecewise((1, Interval(i, i + 1)
                                    .contains(x)), (0, True))
def test_basic_degree_1():
    """Spot-check the first three linear (hat-function) B-splines on 0..4."""
    degree = 1
    knots = range(5)
    basis = bspline_basis_set(degree, knots, x)
    expected = [
        Piecewise((x, Interval(0, 1, False, True).contains(x)),
                  (2 - x, Interval(1, 2).contains(x)), (0, True)),
        Piecewise((-1 + x, Interval(1, 2, False, True).contains(x)),
                  (3 - x, Interval(2, 3).contains(x)), (0, True)),
        Piecewise((-2 + x, Interval(2, 3, False, True).contains(x)),
                  (4 - x, Interval(3, 4).contains(x)), (0, True)),
    ]
    for got, want in zip(basis, expected):
        assert got == want
def test_basic_degree_2():
    """Check the first two quadratic B-splines on knots 0..4."""
    degree = 2
    knots = range(5)
    basis = bspline_basis_set(degree, knots, x)
    expected_first = Piecewise(
        (x**2/2, Interval(0, 1, False, True).contains(x)),
        (Rational(-3, 2) + 3*x - x**2,
         Interval(1, 2, False, True).contains(x)),
        (Rational(9, 2) - 3*x + x**2/2, Interval(2, 3).contains(x)),
        (0, True))
    expected_second = Piecewise(
        (Rational(1, 2) - x + x**2/2,
         Interval(1, 2, False, True).contains(x)),
        (Rational(-11, 2) + 5*x - x**2,
         Interval(2, 3, False, True).contains(x)),
        (8 - 4*x + x**2/2, Interval(3, 4).contains(x)),
        (0, True))
    assert basis[0] == expected_first
    assert basis[1] == expected_second
def test_basic_degree_3():
    """Check the first cubic B-spline on knots 0..4."""
    degree = 3
    knots = range(5)
    basis = bspline_basis_set(degree, knots, x)
    expected = Piecewise(
        (x**3/6, Interval(0, 1, False, True).contains(x)),
        (Rational(2, 3) - 2*x + 2*x**2 - x**3/2,
         Interval(1, 2, False, True).contains(x)),
        (Rational(-22, 3) + 10*x - 4*x**2 + x**3/2,
         Interval(2, 3, False, True).contains(x)),
        (Rational(32, 3) - 8*x + 2*x**2 - x**3/6,
         Interval(3, 4).contains(x)),
        (0, True)
    )
    assert basis[0] == expected
def test_repeated_degree_1():
    """Degree-1 basis with repeated knots at 0, 2 and 4."""
    degree = 1
    knots = [0, 0, 1, 2, 2, 3, 4, 4]
    basis = bspline_basis_set(degree, knots, x)
    expected = [
        Piecewise((1 - x, Interval(0, 1).contains(x)), (0, True)),
        Piecewise((x, Interval(0, 1, False, True).contains(x)),
                  (2 - x, Interval(1, 2).contains(x)), (0, True)),
        Piecewise((-1 + x, Interval(1, 2).contains(x)), (0, True)),
        Piecewise((3 - x, Interval(2, 3).contains(x)), (0, True)),
        Piecewise((-2 + x, Interval(2, 3, False, True).contains(x)),
                  (4 - x, Interval(3, 4).contains(x)), (0, True)),
        Piecewise((-3 + x, Interval(3, 4).contains(x)), (0, True)),
    ]
    for got, want in zip(basis, expected):
        assert got == want
|
theguardian/headphones
|
refs/heads/master
|
lib/yaml/serializer.py
|
561
|
# Names exported by this module.
__all__ = ['Serializer', 'SerializerError']
from error import YAMLError
from events import *
from nodes import *
class SerializerError(YAMLError):
    """Raised when the Serializer is used while in an invalid state."""
class Serializer(object):
    """Turns a node graph into a stream of YAML events.

    Drives an emitter (mixed in via self.emit) through the
    StreamStart/DocumentStart ... DocumentEnd/StreamEnd event sequence.
    Nodes referenced more than once are assigned anchors on a first pass
    (anchor_node) so they can be emitted as aliases on the second pass
    (serialize_node).
    """

    # Template for generated anchor names: id001, id002, ...
    ANCHOR_TEMPLATE = u'id%03d'

    def __init__(self, encoding=None,
            explicit_start=None, explicit_end=None, version=None, tags=None):
        self.use_encoding = encoding
        self.use_explicit_start = explicit_start
        self.use_explicit_end = explicit_end
        self.use_version = version
        self.use_tags = tags
        # serialized_nodes: node -> True once its events were emitted.
        # anchors: node -> anchor string, or None if seen only once so far.
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0
        # closed: None = never opened, False = open, True = closed.
        self.closed = None

    def open(self):
        # Begin the stream; may only be called once per serializer.
        if self.closed is None:
            self.emit(StreamStartEvent(encoding=self.use_encoding))
            self.closed = False
        elif self.closed:
            raise SerializerError("serializer is closed")
        else:
            raise SerializerError("serializer is already opened")

    def close(self):
        # End the stream; calling close() again after closing is a no-op.
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif not self.closed:
            self.emit(StreamEndEvent())
            self.closed = True

    #def __del__(self):
    #    self.close()

    def serialize(self, node):
        # Emit one complete document containing the given node graph.
        if self.closed is None:
            raise SerializerError("serializer is not opened")
        elif self.closed:
            raise SerializerError("serializer is closed")
        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
            version=self.use_version, tags=self.use_tags))
        # Pass 1: assign anchors to shared nodes.  Pass 2: emit events.
        self.anchor_node(node)
        self.serialize_node(node, None, None)
        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
        # Reset per-document state.
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0

    def anchor_node(self, node):
        # First visit records the node with a None anchor and recurses into
        # its children; a second visit upgrades it to a real anchor.
        if node in self.anchors:
            if self.anchors[node] is None:
                self.anchors[node] = self.generate_anchor(node)
        else:
            self.anchors[node] = None
            if isinstance(node, SequenceNode):
                for item in node.value:
                    self.anchor_node(item)
            elif isinstance(node, MappingNode):
                for key, value in node.value:
                    self.anchor_node(key)
                    self.anchor_node(value)

    def generate_anchor(self, node):
        # Anchors are numbered in visit order (id001, id002, ...).
        self.last_anchor_id += 1
        return self.ANCHOR_TEMPLATE % self.last_anchor_id

    def serialize_node(self, node, parent, index):
        # Emit the event(s) for `node`; nodes already serialized in this
        # document are emitted as aliases to their anchor.
        alias = self.anchors[node]
        if node in self.serialized_nodes:
            self.emit(AliasEvent(alias))
        else:
            self.serialized_nodes[node] = True
            self.descend_resolver(parent, index)
            if isinstance(node, ScalarNode):
                # The tag is implicit if the resolver would infer it from the
                # value in plain / quoted style respectively.
                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
                default_tag = self.resolve(ScalarNode, node.value, (False, True))
                implicit = (node.tag == detected_tag), (node.tag == default_tag)
                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
                    style=node.style))
            elif isinstance(node, SequenceNode):
                implicit = (node.tag
                            == self.resolve(SequenceNode, node.value, True))
                self.emit(SequenceStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                index = 0
                for item in node.value:
                    self.serialize_node(item, node, index)
                    index += 1
                self.emit(SequenceEndEvent())
            elif isinstance(node, MappingNode):
                implicit = (node.tag
                            == self.resolve(MappingNode, node.value, True))
                self.emit(MappingStartEvent(alias, node.tag, implicit,
                    flow_style=node.flow_style))
                for key, value in node.value:
                    self.serialize_node(key, node, None)
                    self.serialize_node(value, node, key)
                self.emit(MappingEndEvent())
            self.ascend_resolver()
|
voutilad/courtlistener
|
refs/heads/master
|
cl/opinion_page/sitemap.py
|
1
|
from django.conf import settings
from cl.sitemap import make_sitemap_solr_params, make_solr_sitemap
def opinion_sitemap_maker(request):
    """Sitemap view for opinions, backed by the Solr opinion index."""
    solr_params = make_sitemap_solr_params('dateFiled asc', 'o_sitemap')
    return make_solr_sitemap(
        request,
        settings.SOLR_OPINION_URL,
        solr_params,
        'yearly',
        ['pdf', 'doc', 'wpd'],
        'absolute_url',
    )
def recap_sitemap_maker(request):
    """Sitemap view for RECAP dockets, backed by the Solr RECAP index."""
    solr_params = make_sitemap_solr_params('dateFiled asc', 'r_sitemap')
    return make_solr_sitemap(
        request,
        settings.SOLR_RECAP_URL,
        solr_params,
        'weekly',
        [],
        'docket_absolute_url',
    )
|
EDUlib/edx-platform
|
refs/heads/master
|
lms/djangoapps/discussion/views.py
|
1
|
"""
Views handling read (GET) requests for the Discussion tab and inline discussions.
"""
import logging
from functools import wraps
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django.contrib.staticfiles.storage import staticfiles_storage
from django.http import Http404, HttpResponseForbidden, HttpResponseServerError
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.translation import get_language_bidi
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_GET, require_http_methods
from edx_django_utils.monitoring import function_trace
from opaque_keys.edx.keys import CourseKey
from rest_framework import status
from web_fragments.fragment import Fragment
import lms.djangoapps.discussion.django_comment_client.utils as utils
import openedx.core.djangoapps.django_comment_common.comment_client as cc
from common.djangoapps.student.models import CourseEnrollment
from common.djangoapps.util.json_request import JsonResponse, expect_json
from lms.djangoapps.courseware.access import has_access
from lms.djangoapps.courseware.courses import get_course_with_access
from lms.djangoapps.courseware.views.views import CourseTabView
from lms.djangoapps.discussion.config.settings import is_forum_daily_digest_enabled
from lms.djangoapps.discussion.django_comment_client.base.views import track_thread_viewed_event
from lms.djangoapps.discussion.django_comment_client.constants import TYPE_ENTRY
from lms.djangoapps.discussion.django_comment_client.permissions import has_permission
from lms.djangoapps.discussion.django_comment_client.utils import (
add_courseware_context,
available_division_schemes,
course_discussion_division_enabled,
extract,
get_group_id_for_comments_service,
get_group_id_for_user,
get_group_names_by_id,
is_commentable_divided,
strip_none
)
from lms.djangoapps.discussion.exceptions import TeamDiscussionHiddenFromUserException
from lms.djangoapps.experiments.utils import get_experiment_user_metadata_context
from lms.djangoapps.teams import api as team_api
from openedx.core.djangoapps.django_comment_common.models import CourseDiscussionSettings
from openedx.core.djangoapps.django_comment_common.utils import (
ThreadContext,
get_course_discussion_settings,
set_course_discussion_settings
)
from openedx.core.djangoapps.plugin_api.views import EdxFragmentView
from openedx.features.course_duration_limits.access import generate_course_expired_fragment
from xmodule.modulestore.django import modulestore
log = logging.getLogger("edx.discussions")

# Pagination defaults for the main and inline discussion views.
THREADS_PER_PAGE = 20
INLINE_THREADS_PER_PAGE = 20
# How many page links to show on either side of the current page.
PAGES_NEARBY_DELTA = 2
BOOTSTRAP_DISCUSSION_CSS_PATH = 'css/discussion/lms-discussion-bootstrap.css'
TEAM_PERMISSION_MESSAGE = _("Access to this discussion is restricted to team members and staff.")
def make_course_settings(course, user, include_category_map=True):
    """
    Generate a JSON-serializable model for course settings, which will be used
    to initialize a DiscussionCourseSettings object on the client.
    """
    discussion_settings = get_course_discussion_settings(course.id)
    group_names = get_group_names_by_id(discussion_settings)
    groups = [
        {"id": str(group_id), "name": name}
        for group_id, name in group_names.items()
    ]
    settings_model = {
        'is_discussion_division_enabled': course_discussion_division_enabled(discussion_settings),
        'allow_anonymous': course.allow_anonymous,
        'allow_anonymous_to_peers': course.allow_anonymous_to_peers,
        'groups': groups,
    }
    if include_category_map:
        settings_model['category_map'] = utils.get_discussion_category_map(course, user)
    return settings_model
def get_threads(request, course, user_info, discussion_id=None, per_page=THREADS_PER_PAGE):
    """
    This may raise an appropriate subclass of cc.utils.CommentClientError
    if something goes wrong, or ValueError if the group_id is invalid.

    Arguments:
        request (WSGIRequest): The user request.
        course (CourseBlockWithMixins): The course object.
        user_info (dict): The comment client User object as a dict.
        discussion_id (unicode): Optional discussion id/commentable id for context.
        per_page (int): Optional number of threads per page.

    Returns:
        (tuple of list, dict): A tuple of the list of threads and a dict of the
            query parameters used for the search.
    """
    default_query_params = {
        'page': 1,
        'per_page': per_page,
        'sort_key': 'activity',
        'text': '',
        'course_id': str(course.id),
        'user_id': request.user.id,
        'context': ThreadContext.COURSE,
        'group_id': get_group_id_for_comments_service(request, course.id, discussion_id),  # may raise ValueError
    }

    # If provided with a discussion id, filter by discussion id in the
    # comments_service.
    if discussion_id is not None:
        default_query_params['commentable_id'] = discussion_id
        # Use the discussion id/commentable id to determine the context we are going to pass through to the backend.
        if team_api.get_team_by_discussion(discussion_id) is not None:
            default_query_params['context'] = ThreadContext.STANDALONE
            # May raise TeamDiscussionHiddenFromUserException.
            _check_team_discussion_access(request, course, discussion_id)

    if not request.GET.get('sort_key'):
        # If the user did not select a sort key, use their last used sort key
        default_query_params['sort_key'] = user_info.get('default_sort_key') or default_query_params['sort_key']
    elif request.GET.get('sort_key') != user_info.get('default_sort_key'):
        # If the user clicked a sort key, update their default sort key
        cc_user = cc.User.from_django_user(request.user)
        cc_user.default_sort_key = request.GET.get('sort_key')
        cc_user.save()

    #there are 2 dimensions to consider when executing a search with respect to group id
    #is user a moderator
    #did the user request a group

    # Merge the recognized GET parameters over the defaults, dropping Nones.
    query_params = default_query_params.copy()
    query_params.update(
        strip_none(
            extract(
                request.GET,
                [
                    'page',
                    'sort_key',
                    'text',
                    'commentable_ids',
                    'flagged',
                    'unread',
                    'unanswered',
                ]
            )
        )
    )

    paginated_results = cc.Thread.search(query_params)
    threads = paginated_results.collection

    # If not provided with a discussion id, filter threads by commentable ids
    # which are accessible to the current user.
    if discussion_id is None:
        discussion_category_ids = set(utils.get_discussion_categories_ids(course, request.user))
        threads = [
            thread for thread in threads
            if thread.get('commentable_id') in discussion_category_ids
        ]

    for thread in threads:
        # patch for backward compatibility to comments service
        if 'pinned' not in thread:
            thread['pinned'] = False

    # Echo pagination metadata back to the caller alongside the thread list.
    query_params['page'] = paginated_results.page
    query_params['num_pages'] = paginated_results.num_pages
    query_params['corrected_text'] = paginated_results.corrected_text

    return threads, query_params
def use_bulk_ops(view_func):
    """
    Wraps internal request handling inside a modulestore bulk op, significantly
    reducing redundant database calls.  Also converts the course_id parsed
    from the request uri to a CourseKey before passing to the view.
    """
    @wraps(view_func)
    def _bulk_op_view(request, course_id, *args, **kwargs):
        key = CourseKey.from_string(course_id)
        with modulestore().bulk_operations(key):
            return view_func(request, key, *args, **kwargs)
    return _bulk_op_view
@login_required
@use_bulk_ops
def inline_discussion(request, course_key, discussion_id):
    """
    Renders JSON for DiscussionModules

    Returns the threads of one inline discussion, together with per-thread
    metadata, pagination info, and the (category-map-free) course settings.
    """
    with function_trace('get_course_and_user_info'):
        course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=True)
        cc_user = cc.User.from_django_user(request.user)
        user_info = cc_user.to_dict()

    try:
        with function_trace('get_threads'):
            threads, query_params = get_threads(
                request, course, user_info, discussion_id, per_page=INLINE_THREADS_PER_PAGE
            )
    except ValueError:
        # Raised by get_threads when the requested group_id is invalid.
        return HttpResponseServerError('Invalid group_id')
    except TeamDiscussionHiddenFromUserException:
        return HttpResponseForbidden(TEAM_PERMISSION_MESSAGE)

    with function_trace('get_metadata_for_threads'):
        annotated_content_info = utils.get_metadata_for_threads(course_key, threads, request.user, user_info)

    with function_trace('determine_group_permissions'):
        is_staff = has_permission(request.user, 'openclose_thread', course.id)
        course_discussion_settings = get_course_discussion_settings(course.id)
        group_names_by_id = get_group_names_by_id(course_discussion_settings)
        course_is_divided = course_discussion_settings.division_scheme is not CourseDiscussionSettings.NONE

    with function_trace('prepare_content'):
        # Strip content the requesting user is not allowed to see.
        threads = [
            utils.prepare_content(
                thread,
                course_key,
                is_staff,
                course_is_divided,
                group_names_by_id
            ) for thread in threads
        ]

    return utils.JsonResponse({
        'is_commentable_divided': is_commentable_divided(course_key, discussion_id),
        'discussion_data': threads,
        'user_info': user_info,
        'user_group_id': get_group_id_for_user(request.user, course_discussion_settings),
        'annotated_content_info': annotated_content_info,
        'page': query_params['page'],
        'num_pages': query_params['num_pages'],
        'roles': utils.get_role_ids(course_key),
        'course_settings': make_course_settings(course, request.user, False)
    })
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@login_required
@use_bulk_ops
def forum_form_discussion(request, course_key):
    """
    Renders the main Discussion page, potentially filtered by a search query.

    AJAX requests get a JSON payload of prepared threads plus pagination and
    search-correction metadata; non-AJAX requests render the discussion tab
    through CourseTabView.
    """
    course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=True)
    request.user.is_community_ta = utils.is_user_community_ta(request.user, course.id)
    if request.is_ajax():
        user = cc.User.from_django_user(request.user)
        user_info = user.to_dict()
        try:
            unsafethreads, query_params = get_threads(request, course, user_info)  # This might process a search query
            is_staff = has_permission(request.user, 'openclose_thread', course.id)
            # prepare_content sanitizes each raw thread for the requesting user.
            threads = [utils.prepare_content(thread, course_key, is_staff) for thread in unsafethreads]
        except cc.utils.CommentClientMaintenanceError:
            return HttpResponseServerError('Forum is in maintenance mode', status=status.HTTP_503_SERVICE_UNAVAILABLE)
        except ValueError:
            return HttpResponseServerError("Invalid group_id")
        with function_trace("get_metadata_for_threads"):
            annotated_content_info = utils.get_metadata_for_threads(course_key, threads, request.user, user_info)
        with function_trace("add_courseware_context"):
            add_courseware_context(threads, course, request.user)
        return utils.JsonResponse({
            'discussion_data': threads,   # TODO: Standardize on 'discussion_data' vs 'threads'
            'annotated_content_info': annotated_content_info,
            'num_pages': query_params['num_pages'],
            'page': query_params['page'],
            'corrected_text': query_params['corrected_text'],
        })
    else:
        # Full page load: delegate rendering of the discussion tab.
        course_id = str(course.id)
        tab_view = CourseTabView()
        return tab_view.get(request, course_id, 'discussion')
@require_GET
@login_required
@use_bulk_ops
def single_thread(request, course_key, discussion_id, thread_id):
    """
    Renders a response to display a single discussion thread. This could either be a page refresh
    after navigating to a single thread, a direct link to a single thread, or an AJAX call from the
    discussions UI loading the responses/comments for a single thread.
    Depending on the HTTP headers, we'll adjust our response accordingly.

    Raises Http404 (via _load_thread_for_viewing) if the thread does not exist
    or is not visible to the requester; returns 403 for team-private discussions.
    """
    course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=True)
    request.user.is_community_ta = utils.is_user_community_ta(request.user, course.id)
    if request.is_ajax():
        cc_user = cc.User.from_django_user(request.user)
        user_info = cc_user.to_dict()
        is_staff = has_permission(request.user, 'openclose_thread', course.id)
        try:
            # Team discussions may be private to team members; check before loading.
            _check_team_discussion_access(request, course, discussion_id)
        except TeamDiscussionHiddenFromUserException:
            return HttpResponseForbidden(TEAM_PERMISSION_MESSAGE)
        # raise_event=True: this AJAX load is the canonical "thread viewed" event.
        thread = _load_thread_for_viewing(
            request,
            course,
            discussion_id=discussion_id,
            thread_id=thread_id,
            raise_event=True,
        )
        with function_trace("get_annotated_content_infos"):
            annotated_content_info = utils.get_annotated_content_infos(
                course_key,
                thread,
                request.user,
                user_info=user_info
            )
        content = utils.prepare_content(thread.to_dict(), course_key, is_staff)
        with function_trace("add_courseware_context"):
            add_courseware_context([content], course, request.user)
        return utils.JsonResponse({
            'content': content,
            'annotated_content_info': annotated_content_info,
        })
    else:
        # Full page load: the fragment view re-fetches the thread itself.
        course_id = str(course.id)
        tab_view = CourseTabView()
        return tab_view.get(request, course_id, 'discussion', discussion_id=discussion_id, thread_id=thread_id)
def _find_thread(request, course, discussion_id, thread_id):
    """
    Look up a discussion thread and verify the requester may view it.

    Args:
        request: The Django request.
        course: The course the discussion belongs to.
        discussion_id: The ID of the owning discussion.
        thread_id: The ID of the thread.

    Returns:
        The thread in question if the user can see it, else None.
    """
    wants_responses = request.is_ajax()
    try:
        found_thread = cc.Thread.find(thread_id).retrieve(
            with_responses=wants_responses,
            recursive=wants_responses,
            user_id=request.user.id,
            response_skip=request.GET.get("resp_skip"),
            response_limit=request.GET.get("resp_limit")
        )
    except cc.utils.CommentClientRequestError:
        return None

    # Threads in "course" context must belong to a discussion category the
    # requester can access (other contexts are access-checked elsewhere).
    if getattr(found_thread, "context", "course") == "course":
        if not utils.discussion_category_id_access(course, request.user, discussion_id):
            return None

    # In divided discussions, non-moderators may only view threads posted to
    # their own group (threads without a group_id are visible to everyone).
    is_moderator = has_permission(request.user, "see_all_cohorts", course.id)
    division_settings = get_course_discussion_settings(course.id)
    if is_commentable_divided(course.id, discussion_id, division_settings) and not is_moderator:
        requester_group_id = get_group_id_for_user(request.user, division_settings)
        thread_group_id = getattr(found_thread, "group_id", None)
        if thread_group_id is not None and requester_group_id != thread_group_id:
            return None
    return found_thread
def _load_thread_for_viewing(request, course, discussion_id, thread_id, raise_event):
    """
    Load a discussion thread for display, optionally emitting an
    edx.forum.thread.viewed tracking event.

    Args:
        request: The Django request.
        course: The owning course.
        discussion_id: The ID of the owning discussion.
        thread_id: The ID of the thread.
        raise_event: Whether an edx.forum.thread.viewed tracking event should
            be raised.

    Returns:
        The thread in question if the user can see it.

    Raises:
        Http404: if the thread does not exist or the user cannot see it.
    """
    viewed_thread = _find_thread(request, course, discussion_id=discussion_id, thread_id=thread_id)
    if not viewed_thread:
        raise Http404
    if raise_event:
        track_thread_viewed_event(request, course, viewed_thread)
    return viewed_thread
def _create_base_discussion_view_context(request, course_key):
    """
    Build the default template context shared by every discussion view.

    Includes the course, the requesting user plus their comments-service
    profile, permission flags, role ids, and course-level discussion settings.
    """
    requester = request.user
    comments_profile = cc.User.from_django_user(requester).to_dict()
    course = get_course_with_access(requester, 'load', course_key, check_if_enrolled=True)
    course_settings = make_course_settings(course, requester)
    context = {
        'csrf': csrf(request)['csrf_token'],
        'course': course,
        'user': requester,
        'user_info': comments_profile,
        'staff_access': bool(has_access(requester, 'staff', course)),
        'roles': utils.get_role_ids(course_key),
        'can_create_comment': has_permission(requester, "create_comment", course.id),
        'can_create_subcomment': has_permission(requester, "create_sub_comment", course.id),
        'can_create_thread': has_permission(requester, "create_thread", course.id),
        # Flag moderators are users who can open/close threads, or course staff.
        'flag_moderator': bool(
            has_permission(requester, 'openclose_thread', course.id) or
            has_access(requester, 'staff', course)
        ),
        'course_settings': course_settings,
        'disable_courseware_js': True,
        'uses_bootstrap': True,
    }
    return context
def _get_discussion_default_topic_id(course):
    """Return the id of the first topic flagged ``default: True``, or None."""
    for entry in course.discussion_topics.values():
        if entry.get('default') is True:
            return entry['id']
    return None
def _create_discussion_board_context(request, base_context, thread=None):
    """
    Returns the template context for rendering the discussion board.

    When ``thread`` is given, only that single thread is returned in the
    context (the discussions UI fetches the full thread list itself);
    otherwise the first page of threads is loaded, possibly filtered by a
    search query from the request.
    """
    context = base_context.copy()
    course = context['course']
    course_key = course.id
    thread_id = thread.id if thread else None
    discussion_id = thread.commentable_id if thread else None
    course_settings = context['course_settings']
    user = context['user']
    cc_user = cc.User.from_django_user(user)
    user_info = context['user_info']
    if thread:
        # May raise TeamDiscussionHiddenFromUserException for team-private discussions.
        _check_team_discussion_access(request, course, discussion_id)
        # Since we're in page render mode, and the discussions UI will request the thread list itself,
        # we need only return the thread information for this one.
        threads = [thread.to_dict()]
        for thread in threads:  # lint-amnesty, pylint: disable=redefined-argument-from-local
            # patch for backward compatibility with comments service
            if "pinned" not in thread:
                thread["pinned"] = False
        thread_pages = 1
        root_url = reverse('forum_form_discussion', args=[str(course.id)])
    else:
        threads, query_params = get_threads(request, course, user_info)  # This might process a search query
        thread_pages = query_params['num_pages']
        root_url = request.path
    is_staff = has_permission(user, 'openclose_thread', course.id)
    # Sanitize each thread dict for the requesting user before handing to templates.
    threads = [utils.prepare_content(thread, course_key, is_staff) for thread in threads]
    with function_trace("get_metadata_for_threads"):
        annotated_content_info = utils.get_metadata_for_threads(course_key, threads, user, user_info)
    with function_trace("add_courseware_context"):
        add_courseware_context(threads, course, user)
    with function_trace("get_cohort_info"):
        course_discussion_settings = get_course_discussion_settings(course_key)
        user_group_id = get_group_id_for_user(user, course_discussion_settings)
    context.update({
        'root_url': root_url,
        'discussion_id': discussion_id,
        'thread_id': thread_id,
        'threads': threads,
        'thread_pages': thread_pages,
        'annotated_content_info': annotated_content_info,
        'is_moderator': has_permission(user, "see_all_cohorts", course_key),
        'groups': course_settings["groups"],  # still needed to render _thread_list_template
        'user_group_id': user_group_id,  # read from container in NewPostView
        'sort_preference': cc_user.default_sort_key,
        'category_map': course_settings["category_map"],
        'course_settings': course_settings,
        'is_commentable_divided': is_commentable_divided(course_key, discussion_id, course_discussion_settings),
        # If the default topic id is None the front-end code will look for a topic that contains "General"
        'discussion_default_topic_id': _get_discussion_default_topic_id(course),
        'enable_daily_digest': is_forum_daily_digest_enabled()
    })
    context.update(
        get_experiment_user_metadata_context(
            course,
            user,
        )
    )
    return context
def create_user_profile_context(request, course_key, user_id):
    """
    Generate a context dictionary for the user profile.

    Loads the profiled user's active threads (respecting the requester's
    group restrictions), their forum roles, and the base discussion context.

    Raises:
        Http404: if the profiled user is not enrolled in the course.
        User.DoesNotExist: if no Django user with ``user_id`` exists.
    """
    user = cc.User.from_django_user(request.user)
    course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=True)
    # If user is not enrolled in the course, do not proceed.
    django_user = User.objects.get(id=user_id)
    if not CourseEnrollment.is_enrolled(django_user, course.id):
        raise Http404
    query_params = {
        'page': request.GET.get('page', 1),
        'per_page': THREADS_PER_PAGE,   # more than threads_per_page to show more activities
    }
    # Restrict the comments-service query to the requester's group where applicable.
    group_id = get_group_id_for_comments_service(request, course_key)
    if group_id is not None:
        query_params['group_id'] = group_id
        profiled_user = cc.User(id=user_id, course_id=course_key, group_id=group_id)
    else:
        profiled_user = cc.User(id=user_id, course_id=course_key)
    threads, page, num_pages = profiled_user.active_threads(query_params)
    query_params['page'] = page
    query_params['num_pages'] = num_pages
    with function_trace("get_metadata_for_threads"):
        user_info = cc.User.from_django_user(request.user).to_dict()
        annotated_content_info = utils.get_metadata_for_threads(course_key, threads, request.user, user_info)
    is_staff = has_permission(request.user, 'openclose_thread', course.id)
    threads = [utils.prepare_content(thread, course_key, is_staff) for thread in threads]
    with function_trace("add_courseware_context"):
        add_courseware_context(threads, course, request.user)
    # TODO: LEARNER-3854: If we actually implement Learner Analytics code, this
    # code was original protected to not run in user_profile() if is_ajax().
    # Someone should determine if that is still necessary (i.e. was that ever
    # called as is_ajax()) and clean this up as necessary.
    user_roles = django_user.roles.filter(
        course_id=course.id
    ).order_by("name").values_list("name", flat=True).distinct()
    with function_trace("get_cohort_info"):
        course_discussion_settings = get_course_discussion_settings(course_key)
        user_group_id = get_group_id_for_user(request.user, course_discussion_settings)
    context = _create_base_discussion_view_context(request, course_key)
    context.update({
        'django_user': django_user,
        'django_user_roles': user_roles,
        'profiled_user': profiled_user.to_dict(),
        'threads': threads,
        'user_group_id': user_group_id,
        'annotated_content_info': annotated_content_info,
        'page': query_params['page'],
        'num_pages': query_params['num_pages'],
        'sort_preference': user.default_sort_key,
        'learner_profile_page_url': reverse('learner_profile', kwargs={'username': django_user.username}),
    })
    return context
@require_GET
@login_required
@use_bulk_ops
def user_profile(request, course_key, user_id):
    """
    Renders a response to display the user profile page (shown after clicking
    on a post author's username).
    """
    try:
        profile_context = create_user_profile_context(request, course_key, user_id)
        if request.is_ajax():
            # AJAX callers only need the thread data and paging metadata.
            return utils.JsonResponse({
                'discussion_data': profile_context['threads'],
                'page': profile_context['page'],
                'num_pages': profile_context['num_pages'],
                'annotated_content_info': profile_context['annotated_content_info'],
            })
        # Full-page render. MathJax is disabled because 'mathjax_include.html'
        # would otherwise load MathJax multiple times on the profile page.
        profile_context['load_mathjax'] = False
        return CourseTabView().get(request, str(course_key), 'discussion', profile_page_context=profile_context)
    except User.DoesNotExist:
        raise Http404  # lint-amnesty, pylint: disable=raise-missing-from
    except ValueError:
        return HttpResponseServerError("Invalid group_id")
@login_required
@use_bulk_ops
def followed_threads(request, course_key, user_id):
    """
    Ajax-only endpoint retrieving the threads followed by a specific user.

    Returns JSON containing the followed threads (prepared for the requesting
    user), annotation metadata, and pagination info.

    Raises:
        Http404: if the target user does not exist.
    """
    course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=True)
    try:
        profiled_user = cc.User(id=user_id, course_id=course_key)
        query_params = {
            'page': 1,
            'per_page': THREADS_PER_PAGE,   # more than threads_per_page to show more activities
            'sort_key': 'date',
        }
        # Allow the client to override paging/filtering defaults via querystring.
        query_params.update(
            strip_none(
                extract(
                    request.GET,
                    [
                        'page',
                        'sort_key',
                        'flagged',
                        'unread',
                        'unanswered',
                    ]
                )
            )
        )
        try:
            group_id = get_group_id_for_comments_service(request, course_key)
        except ValueError:
            return HttpResponseServerError("Invalid group_id")
        if group_id is not None:
            query_params['group_id'] = group_id
        paginated_results = profiled_user.subscribed_threads(query_params)
        query_params['page'] = paginated_results.page
        query_params['num_pages'] = paginated_results.num_pages
        user_info = cc.User.from_django_user(request.user).to_dict()
        with function_trace("get_metadata_for_threads"):
            annotated_content_info = utils.get_metadata_for_threads(
                course_key,
                paginated_results.collection,
                request.user, user_info
            )
        if request.is_ajax():
            is_staff = has_permission(request.user, 'openclose_thread', course.id)
            return utils.JsonResponse({
                'annotated_content_info': annotated_content_info,
                'discussion_data': [
                    utils.prepare_content(thread, course_key, is_staff) for thread in paginated_results.collection
                ],
                'page': query_params['page'],
                'num_pages': query_params['num_pages'],
            })
        # TODO remove non-AJAX support, it does not appear to be used and does not appear to work.
        else:
            context = {
                'course': course,
                'user': request.user,
                'django_user': User.objects.get(id=user_id),
                'profiled_user': profiled_user.to_dict(),
                'threads': paginated_results.collection,
                'user_info': user_info,
                'annotated_content_info': annotated_content_info,
            }
            return render_to_response('discussion/user_profile.html', context)
    except User.DoesNotExist:
        raise Http404  # lint-amnesty, pylint: disable=raise-missing-from
class DiscussionBoardFragmentView(EdxFragmentView):
    """
    Component implementation of the discussion board.

    Renders the board (or a single focused thread, or a user profile page)
    as an EdxFragmentView fragment, with dedicated fallbacks for forum
    maintenance mode and team-private discussions.
    """
    def render_to_fragment(  # lint-amnesty, pylint: disable=arguments-differ
        self,
        request,
        course_id=None,
        discussion_id=None,
        thread_id=None,
        profile_page_context=None,
        **kwargs
    ):
        """
        Render the discussion board to a fragment.
        Args:
            request: The Django request.
            course_id: The id of the course in question.
            discussion_id: An optional discussion ID to be focused upon.
            thread_id: An optional ID of the thread to be shown.
            profile_page_context: Optional pre-built context; when given, the
                user-profile template is rendered instead of the board.
        Returns:
            Fragment: The fragment representing the discussion board
        """
        try:
            course_key = CourseKey.from_string(course_id)
            base_context = _create_base_discussion_view_context(request, course_key)
            # Note:
            #   After the thread is rendered in this fragment, an AJAX
            # request is made and the thread is completely loaded again
            # (yes, this is something to fix). Because of this, we pass in
            # raise_event=False to _load_thread_for_viewing avoid duplicate
            # tracking events.
            thread = (
                _load_thread_for_viewing(
                    request,
                    base_context['course'],
                    discussion_id=discussion_id,
                    thread_id=thread_id,
                    raise_event=False,
                )
                if thread_id
                else None
            )
            context = _create_discussion_board_context(request, base_context, thread=thread)
            course_expiration_fragment = generate_course_expired_fragment(request.user, context['course'])
            context.update({
                'course_expiration_fragment': course_expiration_fragment,
            })
            if profile_page_context:
                # EDUCATOR-2119: styles are hard to reconcile if the profile page isn't also a fragment
                html = render_to_string('discussion/discussion_profile_page.html', profile_page_context)
            else:
                html = render_to_string('discussion/discussion_board_fragment.html', context)
            fragment = Fragment(html)
            self.add_fragment_resource_urls(fragment)
            inline_js = render_to_string('discussion/discussion_board_js.template', context)
            fragment.add_javascript(inline_js)
            if not settings.REQUIRE_DEBUG:
                fragment.add_javascript_url(staticfiles_storage.url('discussion/js/discussion_board_factory.js'))
            return fragment
        except cc.utils.CommentClientMaintenanceError:
            # Comments service is down for maintenance: render a static notice.
            log.warning('Forum is in maintenance mode')
            html = render_to_string('discussion/maintenance_fragment.html', {
                'disable_courseware_js': True,
                'uses_bootstrap': True,
            })
            fragment = Fragment(html)
            self.add_fragment_resource_urls(fragment)
            return fragment
        except TeamDiscussionHiddenFromUserException:
            # The user tried to view a discussion private to a team they are not on.
            log.warning(
                'User with id={user_id} tried to view private discussion with id={discussion_id}'.format(
                    user_id=request.user.id,
                    discussion_id=discussion_id
                )
            )
            html = render_to_string('discussion/discussion_private_fragment.html', {
                'disable_courseware_js': True,
                'uses_bootstrap': True,
            })
            fragment = Fragment(html)
            self.add_fragment_resource_urls(fragment)
            return fragment
    def vendor_js_dependencies(self):
        """
        Returns list of vendor JS files that this view depends on.
        The helper function that it uses to obtain the list of vendor JS files
        works in conjunction with the Django pipeline to ensure that in development mode
        the files are loaded individually, but in production just the single bundle is loaded.
        """
        # dict.fromkeys de-duplicates while preserving the original ordering.
        return list(dict.fromkeys(self.get_js_dependencies('discussion_vendor')))
    def js_dependencies(self):
        """
        Returns list of JS files that this view depends on.
        The helper function that it uses to obtain the list of JS files
        works in conjunction with the Django pipeline to ensure that in development mode
        the files are loaded individually, but in production just the single bundle is loaded.
        """
        return self.get_js_dependencies('discussion')
    def css_dependencies(self):
        """
        Returns list of CSS files that this view depends on.
        The helper function that it uses to obtain the list of CSS files
        works in conjunction with the Django pipeline to ensure that in development mode
        the files are loaded individually, but in production just the single bundle is loaded.
        """
        is_right_to_left = get_language_bidi()
        css_file = BOOTSTRAP_DISCUSSION_CSS_PATH
        if is_right_to_left:
            # RTL languages get the mirrored stylesheet variant.
            css_file = css_file.replace('.css', '-rtl.css')
        return [css_file]
@expect_json
@login_required
def discussion_topics(request, course_key_string):
    """
    The handler for divided discussion categories requests.
    This will raise 404 if user is not staff.
    Returns the JSON representation of discussion topics w.r.t categories for the course.
    Example:
        >>> example = {
        >>>     "course_wide_discussions": {
        >>>         "entries": {
        >>>             "General": {
        >>>                 "sort_key": "General",
        >>>                 "is_divided": True,
        >>>                 "id": "i4x-edx-eiorguegnru-course-foobarbaz"
        >>>             }
        >>>         }
        >>>         "children": ["General", "entry"]
        >>>     },
        >>>     "inline_discussions" : {
        >>>         "subcategories": {
        >>>             "Getting Started": {
        >>>                 "subcategories": {},
        >>>                 "children": [
        >>>                     ["Working with Videos", "entry"],
        >>>                     ["Videos on edX", "entry"]
        >>>                 ],
        >>>                 "entries": {
        >>>                     "Working with Videos": {
        >>>                         "sort_key": None,
        >>>                         "is_divided": False,
        >>>                         "id": "d9f970a42067413cbb633f81cfb12604"
        >>>                     },
        >>>                     "Videos on edX": {
        >>>                         "sort_key": None,
        >>>                         "is_divided": False,
        >>>                         "id": "98d8feb5971041a085512ae22b398613"
        >>>                     }
        >>>                 }
        >>>             },
        >>>             "children": ["Getting Started", "subcategory"]
        >>>         },
        >>>     }
        >>> }
    """
    course_key = CourseKey.from_string(course_key_string)
    # get_course_with_access raises 404 for non-staff users.
    course = get_course_with_access(request.user, 'staff', course_key)
    discussion_topics = {}  # lint-amnesty, pylint: disable=redefined-outer-name
    discussion_category_map = utils.get_discussion_category_map(
        course, request.user, divided_only_if_explicit=True, exclude_unstarted=False
    )
    # We extract the data for the course wide discussions from the category map.
    course_wide_entries = discussion_category_map.pop('entries')
    course_wide_children = []
    inline_children = []
    # Partition top-level children: entries that match a course-wide entry stay
    # course-wide; everything else belongs to the inline (courseware) tree.
    for name, c_type in discussion_category_map['children']:
        if name in course_wide_entries and c_type == TYPE_ENTRY:
            course_wide_children.append([name, c_type])
        else:
            inline_children.append([name, c_type])
    discussion_topics['course_wide_discussions'] = {
        'entries': course_wide_entries,
        'children': course_wide_children
    }
    discussion_category_map['children'] = inline_children
    discussion_topics['inline_discussions'] = discussion_category_map
    return JsonResponse(discussion_topics)
@require_http_methods(("GET", "PATCH"))
@ensure_csrf_cookie
@expect_json
@login_required
def course_discussions_settings_handler(request, course_key_string):
    """
    The restful handler for divided discussion setting requests. Requires JSON.
    This will raise 404 if user is not staff.
    GET
        Returns the JSON representation of divided discussion settings for the course.
    PATCH
        Updates the divided discussion settings for the course. Returns the JSON representation of updated settings.
        Responds 400 if the request body contains none of the recognized keys,
        or if the new settings are rejected as invalid.
    """
    course_key = CourseKey.from_string(course_key_string)
    course = get_course_with_access(request.user, 'staff', course_key)
    discussion_settings = get_course_discussion_settings(course_key)
    if request.method == 'PATCH':
        divided_course_wide_discussions, divided_inline_discussions = get_divided_discussions(
            course, discussion_settings
        )
        settings_to_change = {}
        if 'divided_course_wide_discussions' in request.json or 'divided_inline_discussions' in request.json:
            divided_course_wide_discussions = request.json.get(
                'divided_course_wide_discussions', divided_course_wide_discussions
            )
            divided_inline_discussions = request.json.get(
                'divided_inline_discussions', divided_inline_discussions
            )
            settings_to_change['divided_discussions'] = divided_course_wide_discussions + divided_inline_discussions
        if 'always_divide_inline_discussions' in request.json:
            settings_to_change['always_divide_inline_discussions'] = request.json.get(
                'always_divide_inline_discussions'
            )
        if 'division_scheme' in request.json:
            settings_to_change['division_scheme'] = request.json.get(
                'division_scheme'
            )
        if not settings_to_change:
            return JsonResponse({"error": "Bad Request"}, 400)
        try:
            # The early return above guarantees settings_to_change is non-empty here.
            discussion_settings = set_course_discussion_settings(course_key, **settings_to_change)
        except ValueError as err:
            # Note: error message not translated because it is not exposed to the user (UI prevents this state).
            return JsonResponse({"error": str(err)}, 400)
    divided_course_wide_discussions, divided_inline_discussions = get_divided_discussions(
        course, discussion_settings
    )
    return JsonResponse({
        'id': discussion_settings.id,
        'divided_inline_discussions': divided_inline_discussions,
        'divided_course_wide_discussions': divided_course_wide_discussions,
        'always_divide_inline_discussions': discussion_settings.always_divide_inline_discussions,
        'division_scheme': discussion_settings.division_scheme,
        'available_division_schemes': available_division_schemes(course_key)
    })
def get_divided_discussions(course, discussion_settings):
    """
    Returns the course-wide and inline divided discussion ids separately.
    """
    course_wide_ids = [topic['id'] for topic in course.discussion_topics.values()]
    all_discussion_ids = utils.get_discussion_categories_ids(course, None, include_all=True)
    course_wide_divided = []
    inline_divided = []
    # Classify each divided discussion; ids unknown to the course are dropped.
    for divided_id in discussion_settings.divided_discussions:
        if divided_id in course_wide_ids:
            course_wide_divided.append(divided_id)
        elif divided_id in all_discussion_ids:
            inline_divided.append(divided_id)
    return course_wide_divided, inline_divided
def _check_team_discussion_access(request, course, discussion_id):
    """
    Raise TeamDiscussionHiddenFromUserException unless the requester may view
    the discussion: course staff always may; otherwise the team API decides
    (e.g. the discussion belongs to a team with private discussions).
    """
    if has_access(request.user, "staff", course):
        return
    if team_api.discussion_visible_by_user(discussion_id, request.user):
        return
    raise TeamDiscussionHiddenFromUserException()
|
gwAdvNet2015/adv-net-samples
|
refs/heads/master
|
sdn/pox/tests/unit/__init__.py
|
92
|
# Copyright 2011-2012 Andreas Wundsam
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Intentionally empty: this file only marks the directory as a Python package.
pass
|
packethost/packet-python
|
refs/heads/master
|
packet/Project.py
|
1
|
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: LGPL-3.0-only
class Project:
def __init__(self, data, manager):
self.manager = manager
self.id = data.get("id")
self.name = data.get("name")
self.payment_method = data.get("payment_method", [])
self.max_projects = data.get("max_devices")
self.created_at = data.get("created_at")
self.updated_at = data.get("updated_at")
self.devices = data.get("devices")
self.invitations = data.get("invitations")
self.memberships = data.get("memberships")
self.members = data.get("members")
self.ssh_keys = data.get("ssh_keys")
def update(self):
params = {"name": self.name}
return self.manager.call_api(
"projects/%s" % self.id, type="PATCH", params=params
)
def delete(self):
return self.manager.call_api("projects/%s" % self.id, type="DELETE")
def __str__(self):
return "%s" % self.name
def __repr__(self):
return "{}: {}".format(self.__class__.__name__, self.id)
|
leonardowolf/bookfree
|
refs/heads/master
|
flask/lib/python2.7/site-packages/mako/__init__.py
|
22
|
# mako/__init__.py
# Copyright (C) 2006-2016 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
# Mako release version (PEP 396 style module attribute).
__version__ = '1.0.4'
|
mavit/ansible
|
refs/heads/devel
|
test/sanity/compile/compile.py
|
81
|
#!/usr/bin/env python
"""Python syntax checker with lint friendly output."""
import parser
import sys
def main():
    """Parse each file named on argv (or stdin, one path per line) and report
    SyntaxErrors in a lint-friendly ``path:line:col: SyntaxError: msg`` form,
    exiting non-zero if any file failed to parse."""
    status = 0
    paths = sys.argv[1:] or sys.stdin.read().splitlines()
    for path in paths:
        with open(path, 'r') as source_fd:
            source = source_fd.read()
        try:
            parser.suite(source)
        except SyntaxError:
            _, ex, _ = sys.exc_info()
            status = 1
            message = ex.text.splitlines()[0].strip()
            sys.stdout.write("%s:%d:%d: SyntaxError: %s\n" % (path, ex.lineno, ex.offset, message))
            sys.stdout.flush()
    sys.exit(status)
if __name__ == '__main__':
main()
|
adedayo/intellij-community
|
refs/heads/master
|
python/testData/refactoring/move/moveNamespacePackage3/after/src/b.py
|
79
|
# Test fixture: after the move-refactoring, module `a` must be importable
# under the `nspkg` namespace package and expose VAR.
import nspkg.a
print(nspkg.a.VAR)
|
tomsilver/nupic
|
refs/heads/master
|
nupic/frameworks/opf/model.py
|
1
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Module defining the OPF Model base class."""
import cPickle as pickle
import os
import shutil
from abc import ABCMeta, abstractmethod
import nupic.frameworks.opf.opfutils as opfutils
###############################################################
class Model(object):
""" This is the base class that all OPF Model implementations should
subclass.
It includes a number of virtual methods, to be overridden by subclasses,
as well as some shared functionality for saving/loading models
"""
__metaclass__ = ABCMeta
def __init__(self, inferenceType):
""" Model constructor.
@param inferenceType (nupic.frameworks.opf.opfutils.InferenceType)
A value that specifies the type of inference (i.e. TemporalNextStep,
Classification, etc.).
"""
self._numPredictions = 0
self.__inferenceType = inferenceType
self.__learningEnabled = True
self.__inferenceEnabled = True
self.__inferenceArgs = {}
def run(self, inputRecord):
""" Run one iteration of this model.
@param inputRecord (object)
A record object formatted according to
nupic.data.record_stream.RecordStreamIface.getNextRecord() or
nupic.data.record_stream.RecordStreamIface.getNextRecordDict()
result format.
@returns (nupic.frameworks.opf.opfutils.ModelResult)
An ModelResult namedtuple. The contents of ModelResult.inferences
depends on the the specific inference type of this model, which
can be queried by getInferenceType()
"""
if hasattr(self, '_numPredictions'):
predictionNumber = self._numPredictions
self._numPredictions += 1
else:
predictionNumber = None
result = opfutils.ModelResult(predictionNumber=predictionNumber,
rawInput=inputRecord)
return result
@abstractmethod
def finishLearning(self):
""" Place the model in a permanent "finished learning" mode.
In such a mode the model will not be able to learn from subsequent input
records.
**NOTE:** Upon completion of this command, learning may not be resumed on
the given instance of the model (e.g., the implementation may optimize
itself by pruning data structures that are necessary for learning).
"""
@abstractmethod
def resetSequenceStates(self):
""" Signal that the input record is the start of a new sequence. """
@abstractmethod
def getFieldInfo(self, includeClassifierOnlyField=False):
""" Return the sequence of FieldMetaInfo objects specifying the format of
Model's output.
This may be different than the list of FieldMetaInfo objects supplied at
initialization (e.g., due to the transcoding of some input fields into
meta-fields, such as datetime -> dayOfWeek, timeOfDay, etc.).
@param includeClassifierOnlyField (bool)
If True, any field which is only sent to the classifier (i.e. not
sent in to the bottom of the network) is also included
@returns (list<nupic.data.fieldmeta.FieldMetaInfo>)
List of FieldMetaInfo objects.
"""
@abstractmethod
def setFieldStatistics(self,fieldStats):
""" Propagate field statistics to the model in case some of its machinery
needs it.
@param fieldStats (dict)
A dict of dicts with first key being the fieldname and the second
key is min,max or other supported statistics
"""
@abstractmethod
def getRuntimeStats(self):
""" Get runtime statistics specific to this model,
i.e. activeCellOverlapAvg.
@returns (dict) A {statistic names: stats} dictionary
"""
@abstractmethod
def _getLogger(self):
""" Get the logger for this object.
This is a protected method that is used by the ModelBase to access the
logger created by the subclass.
@returns (Logger) A Logger object, it should not be None.
"""
###############################################################################
# Common learning/inference methods
###############################################################################
def getInferenceType(self):
    """Return the InferenceType this model was constructed with.

    The value is immutable for the lifetime of the model.

    @returns (nupic.frameworks.opf.opfutils.InferenceType) An inference type
    """
    inferenceType = self.__inferenceType
    return inferenceType
def enableLearning(self):
    """Switch the model into learning mode.

    After this call ``isLearningEnabled`` reports True.
    """
    self.__learningEnabled = True
def disableLearning(self):
    """Switch the model out of learning mode.

    After this call ``isLearningEnabled`` reports False.
    """
    self.__learningEnabled = False
def isLearningEnabled(self):
    """Report the current learning state.

    @returns (bool) True when learning is enabled.
    """
    state = self.__learningEnabled
    return state
def enableInference(self, inferenceArgs=None):
    """Turn inference on, remembering mode-specific arguments.

    @param inferenceArgs (dict or None)
        A dictionary of arguments required for inference; its content
        depends on the InferenceType of the current model.
    """
    # The two assignments are independent; store the args first.
    self.__inferenceArgs = inferenceArgs
    self.__inferenceEnabled = True
def getInferenceArgs(self):
    """Return the arguments stored for the current inference mode.

    @returns (dict) The arguments of the inference mode
    """
    args = self.__inferenceArgs
    return args
def disableInference(self):
    """Turn inference off for the current model."""
    self.__inferenceEnabled = False
def isInferenceEnabled(self):
    """Report the current inference state.

    @returns (bool) True when inference is enabled.
    """
    state = self.__inferenceEnabled
    return state
###############################################################################
# Implementation of common save/load functionality
###############################################################################
def save(self, saveModelDir):
    """ Save the model in the given directory.

    @param saveModelDir (string)
        Absolute directory path for saving the model. This directory should
        only be used to store a saved model. If the directory does not exist,
        it will be created automatically and populated with model data. A
        pre-existing directory will only be accepted if it contains previously
        saved model data. If such a directory is given, the full contents of
        the directory will be deleted and replaced with current model data.
    """
    logger = self._getLogger()
    logger.debug("(%s) Creating local checkpoint in %r...",
                 self, saveModelDir)

    modelPickleFilePath = self._getModelPickleFilePath(saveModelDir)

    # Clean up old saved state, if any.  Both checks below are safety guards
    # so we never rmtree a directory that was not written by a prior save().
    if os.path.exists(saveModelDir):
        if not os.path.isdir(saveModelDir):
            raise Exception(("Existing filesystem entry <%s> is not a model"
                             " checkpoint -- refusing to delete (not a directory)") \
                            % saveModelDir)
        if not os.path.isfile(modelPickleFilePath):
            raise Exception(("Existing filesystem entry <%s> is not a model"
                             " checkpoint -- refusing to delete"\
                             " (%s missing or not a file)") % \
                            (saveModelDir, modelPickleFilePath))
        shutil.rmtree(saveModelDir)

    # Create a new directory for saving state
    self.__makeDirectoryFromAbsolutePath(saveModelDir)

    # Pickle the model itself; subclasses stash anything unpicklable via
    # _serializeExtraData below.
    with open(modelPickleFilePath, 'wb') as modelPickleFile:
        logger.debug("(%s) Pickling Model instance...", self)
        pickle.dump(self, modelPickleFile)
        logger.debug("(%s) Finished pickling Model instance", self)

    # Tell the model to save extra data, if any, that's too big for pickling
    self._serializeExtraData(extraDataDir=self._getModelExtraDataDir(saveModelDir))

    logger.debug("(%s) Finished creating local checkpoint", self)

    return
def _serializeExtraData(self, extraDataDir):
    """ Protected method that is called during serialization with an external
    directory path. It can be overridden by subclasses to bypass pickle for
    saving large binary states.

    This is called by ModelBase only.

    @param extraDataDir (string) Model's extra data directory path
    """
    # Default implementation: no extra data to save.
    pass
@classmethod
def load(cls, savedModelDir):
    """ Load saved model.

    Counterpart of save(): unpickles the model, then gives the instance a
    chance to restore any non-pickled state via _deSerializeExtraData.

    @param savedModelDir (string)
        Directory of where the experiment is to be or was saved
    @returns (Model) The loaded model instance
    """
    logger = opfutils.initLogger(cls)
    logger.debug("Loading model from local checkpoint at %r...", savedModelDir)

    # Load the model
    modelPickleFilePath = Model._getModelPickleFilePath(savedModelDir)

    with open(modelPickleFilePath, 'rb') as modelPickleFile:
        logger.debug("Unpickling Model instance...")
        model = pickle.load(modelPickleFile)
        logger.debug("Finished unpickling Model instance")

    # Tell the model to load extra data, if any, that was too big for pickling
    model._deSerializeExtraData(
        extraDataDir=Model._getModelExtraDataDir(savedModelDir))

    logger.debug("Finished Loading model from local checkpoint")

    return model
def _deSerializeExtraData(self, extraDataDir):
    """ Protected method that is called during deserialization
    (after __setstate__) with an external directory path.

    It can be overridden by subclasses to bypass pickle for loading large
    binary states.

    This is called by ModelBase only

    @param extraDataDir (string) Model's extra data directory path
    """
    # Default implementation: no extra data to restore.
    pass
@staticmethod
def _getModelPickleFilePath(saveModelDir):
    """ Return the absolute path of the model's pickle file.

    @param saveModelDir (string)
        Directory of where the experiment is to be or was saved
    @returns (string) An absolute path.
    """
    return os.path.abspath(os.path.join(saveModelDir, "model.pkl"))
@staticmethod
def _getModelExtraDataDir(saveModelDir):
    """ Return the absolute path to the directory where the model's own
    "extra data" are stored (i.e., data that's too big for pickling).

    @param saveModelDir (string)
        Directory of where the experiment is to be or was saved
    @returns (string) An absolute path.
    """
    return os.path.abspath(os.path.join(saveModelDir, "modelextradata"))
@staticmethod
def __makeDirectoryFromAbsolutePath(absDirPath):
    """ Make directory for the given directory path if it doesn't already
    exist in the filesystem.

    @param absDirPath (string) Absolute path of the directory to create
    @exception (Exception) OSError if directory creation fails for any
        reason other than the directory already existing
    """
    import errno  # stdlib; local import keeps the fix self-contained

    assert os.path.isabs(absDirPath)

    # Create the experiment directory
    # TODO Is default mode (0777) appropriate?
    try:
        os.makedirs(absDirPath)
    except OSError as e:
        # BUGFIX: the previous code compared against os.errno.EEXIST.
        # ``os.errno`` was only an accidental re-export and was removed in
        # Python 3.7+; use the errno module directly.
        if e.errno != errno.EEXIST:
            raise

    return
|
projectweekend/Cards-API
|
refs/heads/master
|
app/middleware/json_response.py
|
1
|
import json
class JSONResponse(object):
    """Middleware that JSON-encodes ``req.context['result']`` into the body.

    If a handler stored a ``'result'`` entry in the request context, the
    response body becomes its JSON encoding; otherwise the body is set to
    None.
    """

    def process_response(self, req, res, resource):
        """Populate ``res.body`` from the request context after routing."""
        has_result = 'result' in req.context
        res.body = json.dumps(req.context['result']) if has_result else None
|
taojinjing/saogd-1329-gcloud
|
refs/heads/master
|
lib/werkzeug/contrib/jsrouting.py
|
318
|
# -*- coding: utf-8 -*-
"""
werkzeug.contrib.jsrouting
~~~~~~~~~~~~~~~~~~~~~~~~~~
Addon module that allows to create a JavaScript function from a map
that generates rules.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# Resolve a JSON serializer: try simplejson first, then the stdlib json
# module; if neither is importable, install a stub that fails loudly the
# first time JavaScript generation is attempted.
try:
    from simplejson import dumps
except ImportError:
    try:
        from json import dumps
    except ImportError:
        def dumps(*args):
            raise RuntimeError('simplejson required for jsrouting')
from inspect import getmro
from werkzeug.routing import NumberConverter
from werkzeug._compat import iteritems
def render_template(name_parts, rules, converters):
    """Render the JavaScript url-building function.

    :param name_parts: the dotted assignment target split into parts; when
        empty the rendered code is a bare expression returning the function.
    :param rules: JSON-serialized list of rule descriptions (a string).
    :param converters: list of JavaScript converter function source strings.

    Fixes over the previous revision:
    - the ``rules`` argument is now actually substituted into the template
      (the old code left a literal ``$rules`` placeholder in the output);
    - the converter sources are emitted as a JavaScript array literal, since
      the template indexes ``converters[...]``.
    """
    result = u''
    if name_parts:
        # Emit namespace objects for every prefix of a dotted name so that
        # e.g. 'obj.url_for' can be assigned without a ReferenceError.
        for idx in range(0, len(name_parts) - 1):
            name = u'.'.join(name_parts[:idx + 1])
            result += u"if (typeof %s === 'undefined') %s = {}\n" % (name, name)
        result += '%s = ' % '.'.join(name_parts)
    result += """(function (server_name, script_name, subdomain, url_scheme) {
    var converters = [%(converters)s];
    var rules = %(rules)s;
    function in_array(array, value) {
        if (array.indexOf != undefined) {
            return array.indexOf(value) != -1;
        }
        for (var i = 0; i < array.length; i++) {
            if (array[i] == value) {
                return true;
            }
        }
        return false;
    }
    function array_diff(array1, array2) {
        array1 = array1.slice();
        for (var i = array1.length-1; i >= 0; i--) {
            if (in_array(array2, array1[i])) {
                array1.splice(i, 1);
            }
        }
        return array1;
    }
    function split_obj(obj) {
        var names = [];
        var values = [];
        for (var name in obj) {
            if (typeof(obj[name]) != 'function') {
                names.push(name);
                values.push(obj[name]);
            }
        }
        return {names: names, values: values, original: obj};
    }
    function suitable(rule, args) {
        var default_args = split_obj(rule.defaults || {});
        var diff_arg_names = array_diff(rule.arguments, default_args.names);
        for (var i = 0; i < diff_arg_names.length; i++) {
            if (!in_array(args.names, diff_arg_names[i])) {
                return false;
            }
        }
        if (array_diff(rule.arguments, args.names).length == 0) {
            if (rule.defaults == null) {
                return true;
            }
            for (var i = 0; i < default_args.names.length; i++) {
                var key = default_args.names[i];
                var value = default_args.values[i];
                if (value != args.original[key]) {
                    return false;
                }
            }
        }
        return true;
    }
    function build(rule, args) {
        var tmp = [];
        var processed = rule.arguments.slice();
        for (var i = 0; i < rule.trace.length; i++) {
            var part = rule.trace[i];
            if (part.is_dynamic) {
                var converter = converters[rule.converters[part.data]];
                var data = converter(args.original[part.data]);
                if (data == null) {
                    return null;
                }
                tmp.push(data);
                processed.push(part.name);
            } else {
                tmp.push(part.data);
            }
        }
        tmp = tmp.join('');
        var pipe = tmp.indexOf('|');
        var subdomain = tmp.substring(0, pipe);
        var url = tmp.substring(pipe+1);
        var unprocessed = array_diff(args.names, processed);
        var first_query_var = true;
        for (var i = 0; i < unprocessed.length; i++) {
            if (first_query_var) {
                url += '?';
            } else {
                url += '&';
            }
            first_query_var = false;
            url += encodeURIComponent(unprocessed[i]);
            url += '=';
            url += encodeURIComponent(args.original[unprocessed[i]]);
        }
        return {subdomain: subdomain, path: url};
    }
    function lstrip(s, c) {
        while (s && s.substring(0, 1) == c) {
            s = s.substring(1);
        }
        return s;
    }
    function rstrip(s, c) {
        while (s && s.substring(s.length-1, s.length) == c) {
            s = s.substring(0, s.length-1);
        }
        return s;
    }
    return function(endpoint, args, force_external) {
        args = split_obj(args);
        var rv = null;
        for (var i = 0; i < rules.length; i++) {
            var rule = rules[i];
            if (rule.endpoint != endpoint) continue;
            if (suitable(rule, args)) {
                rv = build(rule, args);
                if (rv != null) {
                    break;
                }
            }
        }
        if (rv == null) {
            return null;
        }
        if (!force_external && rv.subdomain == subdomain) {
            return rstrip(script_name, '/') + '/' + lstrip(rv.path, '/');
        } else {
            return url_scheme + '://'
                + (rv.subdomain ? rv.subdomain + '.' : '')
                + server_name + rstrip(script_name, '/')
                + '/' + lstrip(rv.path, '/');
        }
    };
})""" % {'converters': u', '.join(converters), 'rules': rules}
    return result
def generate_map(map, name='url_map'):
    """
    Generates a JavaScript function containing the rules defined in
    this map, to be used with a MapAdapter's generate_javascript
    method.  If you don't pass a name the returned JavaScript code is
    an expression that returns a function.  Otherwise it's a standalone
    script that assigns the function with that name.  Dotted names are
    resolved (so you can use a name like 'obj.url_for')

    In order to use JavaScript generation, simplejson must be installed.

    Note that using this feature will expose the rules
    defined in your map to users.  If your rules contain sensitive
    information, don't use JavaScript generation!
    """
    from warnings import warn
    warn(DeprecationWarning('This module is deprecated'))
    # Ensure the map's rules are compiled/up to date before reading them.
    map.update()
    rules = []
    converters = []
    for rule in map.iter_rules():
        # rule._trace holds the compiled (is_dynamic, data) segments.
        trace = [{
            'is_dynamic': is_dynamic,
            'data': data
        } for is_dynamic, data in rule._trace]
        rule_converters = {}
        for key, converter in iteritems(rule._converters):
            js_func = js_to_url_function(converter)
            # Deduplicate converter sources; rules reference them by index.
            try:
                index = converters.index(js_func)
            except ValueError:
                converters.append(js_func)
                index = len(converters) - 1
            rule_converters[key] = index
        rules.append({
            u'endpoint': rule.endpoint,
            u'arguments': list(rule.arguments),
            u'converters': rule_converters,
            u'trace': trace,
            u'defaults': rule.defaults
        })
    return render_template(name_parts=name and name.split('.') or [],
                           rules=dumps(rules),
                           converters=converters)
def generate_adapter(adapter, name='url_for', map_name='url_map'):
    """Generate a JavaScript snippet binding *name* to a url builder.

    The emitted code calls the (already generated) map function *map_name*
    with the adapter's server name, script name, subdomain and URL scheme,
    each JSON-encoded.
    """
    template = u'''\
var %(name)s = %(map_name)s(
    %(server_name)s,
    %(script_name)s,
    %(subdomain)s,
    %(url_scheme)s
);'''
    return template % {
        u'server_name': dumps(adapter.server_name),
        u'script_name': dumps(adapter.script_name),
        u'subdomain': dumps(adapter.subdomain),
        u'url_scheme': dumps(adapter.url_scheme),
        u'name': name,
        u'map_name': map_name,
    }
def js_to_url_function(converter):
    """Return the JavaScript url-conversion source for *converter*.

    Preference order: the converter's own ``js_to_url_function`` method,
    then a handler registered in ``js_to_url_functions`` for the closest
    class in its MRO, and finally the generic ``encodeURIComponent``.
    """
    if hasattr(converter, 'js_to_url_function'):
        body = converter.js_to_url_function()
        return '(function(value) { %s })' % body
    for cls in getmro(type(converter)):
        handler = js_to_url_functions.get(cls)
        if handler is not None:
            return '(function(value) { %s })' % handler(converter)
    return 'encodeURIComponent'
def NumberConverter_js_to_url(conv):
    """JavaScript conversion body for numeric converters.

    When the converter declares ``fixed_digits``, the emitted code
    zero-pads the stringified value up to that width.
    """
    if not conv.fixed_digits:
        return u'return value.toString();'
    return (u'var result = value.toString();\n'
            u'while (result.length < %s)\n'
            u"    result = '0' + result;\n"
            u'return result;') % conv.fixed_digits
# Registry mapping converter classes to functions that emit their JavaScript
# url-conversion body; consulted by js_to_url_function() via the MRO.
js_to_url_functions = {
    NumberConverter: NumberConverter_js_to_url
}
|
feilaoda/CloudChat
|
refs/heads/master
|
wecoders/news/tests.py
|
24123
|
from django.test import TestCase
# Create your tests here.
|
saurabh6790/medlib
|
refs/heads/master
|
webnotes/model/sync.py
|
34
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Sync's doctype and docfields from txt files to database
perms will get synced only if none exist
"""
import webnotes
import os
from webnotes.modules.import_file import import_file
from webnotes.utils import get_path, cstr
def sync_all(force=0):
    """Sync txt-defined documents for both the lib and app folders,
    then clear the webnotes cache."""
    for folder in ("lib", "app"):
        sync_for(folder, force)
    webnotes.clear_cache()
def sync_for(folder, force=0, sync_everything = False, verbose=False):
    # Thin wrapper: resolve the folder to a filesystem path and walk it.
    return walk_and_sync(get_path(folder), force, sync_everything, verbose=verbose)
def walk_and_sync(start_path, force=0, sync_everything = False, verbose=False):
    """walk and sync all doctypes and pages"""
    modules = []  # NOTE(review): never appended to; the function always returns []

    document_type = ['doctype', 'page', 'report']
    for path, folders, files in os.walk(start_path):
        # sort folders so that doctypes are synced before pages or reports
        if 'locale' in folders: folders.remove('locale')
        folders.sort()
        # Only sync directories whose parent folder is a known document type,
        # unless sync_everything was requested.
        if sync_everything or (os.path.basename(os.path.dirname(path)) in document_type):
            for f in files:
                f = cstr(f)
                if f.endswith(".txt"):
                    doc_name = f.split(".txt")[0]
                    # A document lives in <module>/<doctype>/<name>/<name>.txt,
                    # so only act when the file name matches its directory.
                    if doc_name == os.path.basename(path):
                        module_name = path.split(os.sep)[-3]
                        doctype = path.split(os.sep)[-2]
                        name = path.split(os.sep)[-1]
                        if import_file(module_name, doctype, name, force=force) and verbose:
                            print module_name + ' | ' + doctype + ' | ' + name
                        # Commit after each document so partial syncs persist.
                        webnotes.conn.commit()
    return modules
|
ekivemark/bofhirdev
|
refs/heads/master
|
appmgmt/views/application.py
|
1
|
# -*- coding: utf-8 -*-
"""
bofhirdev
FILE: application
Created: 11/5/15 10:50 PM
"""
from django.conf import settings
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.views.generic import (DetailView,
UpdateView)
from django.views.generic.edit import CreateView
from django.views.generic.list import ListView
from oauth2_provider.generators import (generate_client_id,
generate_client_secret)
from appmgmt.forms.application import (ApplicationForm,
Application_Secret_Form,
Application_Secret)
from appmgmt.models import BBApplication
__author__ = 'Mark Scrimshire:@ekivemark'
class MyApplicationListView(ListView):
    """
    View for Applications

    Lists only the BBApplications belonging to the requesting user.
    """
    model = BBApplication
    template_name = 'appmgmt/application_list.html'

    def get_queryset(self):
        """Restrict the listing to the current user's applications."""
        if settings.DEBUG:
            print("Queryset User:", self.request.user)
        qs = super(MyApplicationListView, self).get_queryset()
        # NOTE(review): .values() yields dicts instead of model instances --
        # confirm the template iterates dict keys rather than attributes.
        return qs.filter(user=self.request.user).values()
# Application Create
# Application Update
# Application Delete
class MyApplicationUpdateView(UpdateView):
    """
    Edit view for Application
    """
    model = BBApplication
    # Editable fields for the auto-generated model form.
    fields = ['name', 'about',
              'privacy_url', 'support_url',
              'redirect_uris' , 'client_type',
              'logo',]
    context_object_name = "application"

    def get_context_data(self, **kwargs):
        """Return the template context (only adds DEBUG logging)."""
        # call the base implementation first to get a context
        context = super(MyApplicationUpdateView, self).get_context_data(**kwargs)
        # add in a QuerySet of all Applications
        if settings.DEBUG:
            print("Context:", context)
        return context
class MyApplicationCreate(CreateView):
    """
    Create view for BBApplication.

    Uses ApplicationForm to define the editable fields.
    """
    model = BBApplication
    form_class = ApplicationForm
    # BUGFIX: the previous revision also declared ``fields`` alongside
    # ``form_class``; Django's ModelFormMixin raises ImproperlyConfigured
    # when both are set, so field selection must live on ApplicationForm.

    def get_initial(self):
        """Seed the form's initial data with the requesting user as owner."""
        # Return a merged copy instead of calling self.initial.update():
        # FormMixin.initial is a class-level dict, so in-place mutation
        # would leak state across instances/requests.
        initial = dict(self.initial)
        initial['owner'] = self.request.user
        return initial
# Organization Update
def Application_Update_Secret(request, pk):
    """
    Replace client_id and client_secret

    GET renders a confirmation form; POST with confirm == '1' regenerates
    the application's OAuth2 credentials and redirects to the list view.

    :param request:
    :param pk:
    :return:
    """
    if request.method == 'POST':
        a=BBApplication.objects.get(pk=pk)
        form = Application_Secret(request.POST)
        if form.is_valid():
            if form.cleaned_data['confirm'] == '1':
                # Regenerate both credentials; existing clients using the old
                # secret are invalidated by the save below.
                a.client_id = generate_client_id()
                a.client_secret = generate_client_secret()
                a.save()
                messages.success(request,"Client Id and Secret updated")
                if settings.DEBUG:
                    print("Confirm:", form.cleaned_data['confirm'])
                    print("Id:", a.client_id)
                    print("Secret:", a.client_secret)
            # Redirect regardless of whether confirm was '1'.
            return HttpResponseRedirect(reverse_lazy('appmgmt:application_view'))
        else:
            if settings.DEBUG:
                print("form has a problem")
    else:
        # GET: show the confirmation form, defaulting to "not confirmed".
        a=BBApplication.objects.get(pk=pk)
        if settings.DEBUG:
            print("BBApplication:", a)
        form = Application_Secret(initial={'confirm': '0'})

    return render_to_response('appmgmt/application_secret_form.html',
                              RequestContext(request,{'form': form, 'application': a,}))
|
harisibrahimkv/django
|
refs/heads/master
|
django/views/generic/detail.py
|
39
|
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.http import Http404
from django.utils.translation import gettext as _
from django.views.generic.base import ContextMixin, TemplateResponseMixin, View
class SingleObjectMixin(ContextMixin):
    """
    Provide the ability to retrieve a single object for further manipulation.
    """
    model = None                     # model whose default manager supplies the queryset
    queryset = None                  # explicit queryset; takes precedence over model
    slug_field = 'slug'              # model field used for slug lookups
    context_object_name = None       # extra context key for the object, if set
    slug_url_kwarg = 'slug'          # URLconf kwarg carrying the slug
    pk_url_kwarg = 'pk'              # URLconf kwarg carrying the primary key
    query_pk_and_slug = False        # if True, filter by both pk and slug

    def get_object(self, queryset=None):
        """
        Return the object the view is displaying.

        Require `self.queryset` and a `pk` or `slug` argument in the URLconf.
        Subclasses can override this to return any object.
        """
        # Use a custom queryset if provided; this is required for subclasses
        # like DateDetailView
        if queryset is None:
            queryset = self.get_queryset()

        # Next, try looking up by primary key.
        pk = self.kwargs.get(self.pk_url_kwarg)
        slug = self.kwargs.get(self.slug_url_kwarg)
        if pk is not None:
            queryset = queryset.filter(pk=pk)

        # Next, try looking up by slug.
        if slug is not None and (pk is None or self.query_pk_and_slug):
            slug_field = self.get_slug_field()
            queryset = queryset.filter(**{slug_field: slug})

        # If none of those are defined, it's an error.
        if pk is None and slug is None:
            raise AttributeError("Generic detail view %s must be called with "
                                 "either an object pk or a slug."
                                 % self.__class__.__name__)

        try:
            # Get the single item from the filtered queryset
            obj = queryset.get()
        except queryset.model.DoesNotExist:
            raise Http404(_("No %(verbose_name)s found matching the query") %
                          {'verbose_name': queryset.model._meta.verbose_name})
        return obj

    def get_queryset(self):
        """
        Return the `QuerySet` that will be used to look up the object.

        This method is called by the default implementation of get_object() and
        may not be called if get_object() is overridden.
        """
        if self.queryset is None:
            if self.model:
                return self.model._default_manager.all()
            else:
                raise ImproperlyConfigured(
                    "%(cls)s is missing a QuerySet. Define "
                    "%(cls)s.model, %(cls)s.queryset, or override "
                    "%(cls)s.get_queryset()." % {
                        'cls': self.__class__.__name__
                    }
                )
        # .all() returns a fresh queryset so callers can filter it safely.
        return self.queryset.all()

    def get_slug_field(self):
        """Get the name of a slug field to be used to look up by slug."""
        return self.slug_field

    def get_context_object_name(self, obj):
        """Get the name to use for the object."""
        if self.context_object_name:
            return self.context_object_name
        elif isinstance(obj, models.Model):
            return obj._meta.model_name
        else:
            return None

    def get_context_data(self, **kwargs):
        """Insert the single object into the context dict."""
        context = {}
        if self.object:
            context['object'] = self.object
            context_object_name = self.get_context_object_name(self.object)
            if context_object_name:
                # The object is exposed under both 'object' and its own name.
                context[context_object_name] = self.object
        context.update(kwargs)
        return super().get_context_data(**context)
class BaseDetailView(SingleObjectMixin, View):
    """A base view for displaying a single object."""

    def get(self, request, *args, **kwargs):
        # Resolve the object once and stash it on the view so that
        # get_context_data() (and template name resolution) can use it.
        self.object = self.get_object()
        context = self.get_context_data(object=self.object)
        return self.render_to_response(context)
class SingleObjectTemplateResponseMixin(TemplateResponseMixin):
    template_name_field = None       # object attribute naming a per-object template
    template_name_suffix = '_detail' # appended to "<app>/<model>" for the default name

    def get_template_names(self):
        """
        Return a list of template names to be used for the request. May not be
        called if render_to_response() is overridden. Return the following list:

        * the value of ``template_name`` on the view (if provided)
        * the contents of the ``template_name_field`` field on the
          object instance that the view is operating upon (if available)
        * ``<app_label>/<model_name><template_name_suffix>.html``
        """
        try:
            names = super().get_template_names()
        except ImproperlyConfigured:
            # If template_name isn't specified, it's not a problem --
            # we just start with an empty list.
            names = []

            # If self.template_name_field is set, grab the value of the field
            # of that name from the object; this is the most specific template
            # name, if given.
            if self.object and self.template_name_field:
                name = getattr(self.object, self.template_name_field, None)
                if name:
                    names.insert(0, name)

            # The least-specific option is the default <app>/<model>_detail.html;
            # only use this if the object in question is a model.
            if isinstance(self.object, models.Model):
                object_meta = self.object._meta
                names.append("%s/%s%s.html" % (
                    object_meta.app_label,
                    object_meta.model_name,
                    self.template_name_suffix
                ))
            elif hasattr(self, 'model') and self.model is not None and issubclass(self.model, models.Model):
                names.append("%s/%s%s.html" % (
                    self.model._meta.app_label,
                    self.model._meta.model_name,
                    self.template_name_suffix
                ))

            # If we still haven't managed to find any template names, we should
            # re-raise the ImproperlyConfigured to alert the user.
            if not names:
                raise

        return names
class DetailView(SingleObjectTemplateResponseMixin, BaseDetailView):
    """
    Render a "detail" view of an object.

    By default this is a model instance looked up from `self.queryset`, but the
    view will support display of *any* object by overriding `self.get_object()`.
    """
    # Concrete view: all behavior comes from the two mixins above.
|
wolfram74/numerical_methods_iserles_notes
|
refs/heads/master
|
venv/lib/python2.7/site-packages/numpy/matrixlib/defmatrix.py
|
38
|
from __future__ import division, absolute_import, print_function
__all__ = ['matrix', 'bmat', 'mat', 'asmatrix']
import sys
import numpy.core.numeric as N
from numpy.core.numeric import concatenate, isscalar, binary_repr, identity, asanyarray
from numpy.core.numerictypes import issubdtype
# make translation table
# Characters that may legitimately appear in a numeric literal (digits,
# sign, decimal point, exponent markers, complex 'j', long 'L').
_numchars = '0123456789.-+jeEL'

if sys.version_info[0] >= 3:
    class _NumCharTable:
        # str.translate mapping for Python 3: keep numeric characters,
        # return None (= delete) for everything else.
        def __getitem__(self, i):
            if chr(i) in _numchars:
                return chr(i)
            else:
                return None
    _table = _NumCharTable()

    def _eval(astr):
        # Strip every character that cannot be part of a numeric literal,
        # then evaluate the remainder; only filtered text reaches eval.
        str_ = astr.translate(_table)
        if not str_:
            raise TypeError("Invalid data string supplied: " + astr)
        else:
            return eval(str_)

else:
    # Python 2: the two-argument str.translate form needs an identity
    # table plus an explicit deletechars string.
    _table = [None]*256
    for k in range(256):
        _table[k] = chr(k)
    _table = ''.join(_table)

    _todelete = []
    for k in _table:
        if k not in _numchars:
            _todelete.append(k)
    _todelete = ''.join(_todelete)
    del k

    def _eval(astr):
        str_ = astr.translate(_table, _todelete)
        if not str_:
            raise TypeError("Invalid data string supplied: " + astr)
        else:
            return eval(str_)
def _convert_from_string(data):
    """Parse a MATLAB-style matrix string (e.g. '1 2; 3 4') into nested lists.

    Rows are separated by ';'; within a row, entries are separated by ','
    and/or whitespace.  Raises ValueError when the rows differ in length.
    """
    newdata = []
    expected_cols = None
    for row in data.split(';'):
        newrow = []
        for col in row.split(','):
            # Each comma-separated chunk may still hold space-separated
            # values; _eval turns each token into a number.
            newrow.extend(map(_eval, col.split()))
        if expected_cols is None:
            expected_cols = len(newrow)
        elif len(newrow) != expected_cols:
            raise ValueError("Rows not the same size.")
        newdata.append(newrow)
    return newdata
def asmatrix(data, dtype=None):
    """
    Interpret the input as a matrix.

    Unlike `matrix`, `asmatrix` does not make a copy if the input is already
    a matrix or an ndarray.  Equivalent to ``matrix(data, copy=False)``.

    Parameters
    ----------
    data : array_like
        Input data.

    Returns
    -------
    mat : matrix
        `data` interpreted as a matrix.

    Examples
    --------
    >>> x = np.array([[1, 2], [3, 4]])

    >>> m = np.asmatrix(x)

    >>> x[0,0] = 5

    >>> m
    matrix([[5, 2],
            [3, 4]])

    """
    # copy=False lets the result share memory with an existing array input.
    return matrix(data, dtype=dtype, copy=False)
def matrix_power(M, n):
    """
    Raise a square matrix to the (integer) power `n`.

    For positive integers `n`, the power is computed by repeated matrix
    squarings and matrix multiplications. If ``n == 0``, the identity matrix
    of the same shape as M is returned. If ``n < 0``, the inverse
    is computed and then raised to the ``abs(n)``.

    Parameters
    ----------
    M : ndarray or matrix object
        Matrix to be "powered."  Must be square, i.e. ``M.shape == (m, m)``,
        with `m` a positive integer.
    n : int
        The exponent can be any integer or long integer, positive,
        negative, or zero.

    Returns
    -------
    M**n : ndarray or matrix object
        The return value is the same shape and type as `M`;
        if the exponent is positive or zero then the type of the
        elements is the same as those of `M`. If the exponent is
        negative the elements are floating-point.

    Raises
    ------
    LinAlgError
        If the matrix is not numerically invertible.

    See Also
    --------
    matrix
        Provides an equivalent function as the exponentiation operator
        (``**``, not ``^``).

    Examples
    --------
    >>> from numpy import linalg as LA
    >>> i = np.array([[0, 1], [-1, 0]]) # matrix equiv. of the imaginary unit
    >>> LA.matrix_power(i, 3) # should = -i
    array([[ 0, -1],
           [ 1,  0]])
    >>> LA.matrix_power(np.matrix(i), 3) # matrix arg returns matrix
    matrix([[ 0, -1],
            [ 1,  0]])
    >>> LA.matrix_power(i, 0)
    array([[1, 0],
           [0, 1]])
    >>> LA.matrix_power(i, -3) # should = 1/(-i) = i, but w/ f.p. elements
    array([[ 0.,  1.],
           [-1.,  0.]])

    Somewhat more sophisticated example

    >>> q = np.zeros((4, 4))
    >>> q[0:2, 0:2] = -i
    >>> q[2:4, 2:4] = i
    >>> q # one of the three quarternion units not equal to 1
    array([[ 0., -1.,  0.,  0.],
           [ 1.,  0.,  0.,  0.],
           [ 0.,  0.,  0.,  1.],
           [ 0.,  0., -1.,  0.]])
    >>> LA.matrix_power(q, 2) # = -np.eye(4)
    array([[-1.,  0.,  0.,  0.],
           [ 0., -1.,  0.,  0.],
           [ 0.,  0., -1.,  0.],
           [ 0.,  0.,  0., -1.]])

    """
    M = asanyarray(M)
    if len(M.shape) != 2 or M.shape[0] != M.shape[1]:
        raise ValueError("input must be a square array")
    if not issubdtype(type(n), int):
        raise TypeError("exponent must be an integer")

    from numpy.linalg import inv

    if n==0:
        # n == 0: return the identity of the same shape (and type) as M.
        M = M.copy()
        M[:] = identity(M.shape[0])
        return M
    elif n<0:
        # Negative exponent: invert first, then raise to abs(n).
        M = inv(M)
        n *= -1

    result = M
    if n <= 3:
        # Small exponents: plain repeated multiplication is cheapest.
        for _ in range(n-1):
            result=N.dot(result, M)
        return result

    # binary decomposition to reduce the number of Matrix
    # multiplications for n > 3.
    beta = binary_repr(n)
    Z, q, t = M, 0, len(beta)
    # Square Z past the trailing zero bits of the exponent.
    while beta[t-q-1] == '0':
        Z = N.dot(Z, Z)
        q += 1
    result = Z
    # Multiply in one squared factor per remaining '1' bit.
    for k in range(q+1, t):
        Z = N.dot(Z, Z)
        if beta[t-k-1] == '1':
            result = N.dot(result, Z)
    return result
class matrix(N.ndarray):
"""
matrix(data, dtype=None, copy=True)
Returns a matrix from an array-like object, or from a string of data.
A matrix is a specialized 2-D array that retains its 2-D nature
through operations. It has certain special operators, such as ``*``
(matrix multiplication) and ``**`` (matrix power).
Parameters
----------
data : array_like or string
If `data` is a string, it is interpreted as a matrix with commas
or spaces separating columns, and semicolons separating rows.
dtype : data-type
Data-type of the output matrix.
copy : bool
If `data` is already an `ndarray`, then this flag determines
whether the data is copied (the default), or whether a view is
constructed.
See Also
--------
array
Examples
--------
>>> a = np.matrix('1 2; 3 4')
>>> print a
[[1 2]
[3 4]]
>>> np.matrix([[1, 2], [3, 4]])
matrix([[1, 2],
[3, 4]])
"""
__array_priority__ = 10.0
def __new__(subtype, data, dtype=None, copy=True):
if isinstance(data, matrix):
dtype2 = data.dtype
if (dtype is None):
dtype = dtype2
if (dtype2 == dtype) and (not copy):
return data
return data.astype(dtype)
if isinstance(data, N.ndarray):
if dtype is None:
intype = data.dtype
else:
intype = N.dtype(dtype)
new = data.view(subtype)
if intype != data.dtype:
return new.astype(intype)
if copy: return new.copy()
else: return new
if isinstance(data, str):
data = _convert_from_string(data)
# now convert data to an array
arr = N.array(data, dtype=dtype, copy=copy)
ndim = arr.ndim
shape = arr.shape
if (ndim > 2):
raise ValueError("matrix must be 2-dimensional")
elif ndim == 0:
shape = (1, 1)
elif ndim == 1:
shape = (1, shape[0])
order = False
if (ndim == 2) and arr.flags.fortran:
order = True
if not (order or arr.flags.contiguous):
arr = arr.copy()
ret = N.ndarray.__new__(subtype, shape, arr.dtype,
buffer=arr,
order=order)
return ret
def __array_finalize__(self, obj):
self._getitem = False
if (isinstance(obj, matrix) and obj._getitem): return
ndim = self.ndim
if (ndim == 2):
return
if (ndim > 2):
newshape = tuple([x for x in self.shape if x > 1])
ndim = len(newshape)
if ndim == 2:
self.shape = newshape
return
elif (ndim > 2):
raise ValueError("shape too large to be a matrix.")
else:
newshape = self.shape
if ndim == 0:
self.shape = (1, 1)
elif ndim == 1:
self.shape = (1, newshape[0])
return
def __getitem__(self, index):
self._getitem = True
try:
out = N.ndarray.__getitem__(self, index)
finally:
self._getitem = False
if not isinstance(out, N.ndarray):
return out
if out.ndim == 0:
return out[()]
if out.ndim == 1:
sh = out.shape[0]
# Determine when we should have a column array
try:
n = len(index)
except:
n = 0
if n > 1 and isscalar(index[1]):
out.shape = (sh, 1)
else:
out.shape = (1, sh)
return out
def __mul__(self, other):
if isinstance(other, (N.ndarray, list, tuple)) :
# This promotes 1-D vectors to row vectors
return N.dot(self, asmatrix(other))
if isscalar(other) or not hasattr(other, '__rmul__') :
return N.dot(self, other)
return NotImplemented
def __rmul__(self, other):
return N.dot(other, self)
def __imul__(self, other):
self[:] = self * other
return self
def __pow__(self, other):
return matrix_power(self, other)
def __ipow__(self, other):
self[:] = self ** other
return self
def __rpow__(self, other):
return NotImplemented
def __repr__(self):
s = repr(self.__array__()).replace('array', 'matrix')
# now, 'matrix' has 6 letters, and 'array' 5, so the columns don't
# line up anymore. We need to add a space.
l = s.splitlines()
for i in range(1, len(l)):
if l[i]:
l[i] = ' ' + l[i]
return '\n'.join(l)
def __str__(self):
return str(self.__array__())
def _align(self, axis):
"""A convenience function for operations that need to preserve axis
orientation.
"""
if axis is None:
return self[0, 0]
elif axis==0:
return self
elif axis==1:
return self.transpose()
else:
raise ValueError("unsupported axis")
def _collapse(self, axis):
"""A convenience function for operations that want to collapse
to a scalar like _align, but are using keepdims=True
"""
if axis is None:
return self[0, 0]
else:
return self
# Necessary because base-class tolist expects dimension
# reduction by x[0]
def tolist(self):
"""
Return the matrix as a (possibly nested) list.
See `ndarray.tolist` for full documentation.
See Also
--------
ndarray.tolist
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.tolist()
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11]]
"""
return self.__array__().tolist()
# To preserve orientation of result...
def sum(self, axis=None, dtype=None, out=None):
"""
Returns the sum of the matrix elements, along the given axis.
Refer to `numpy.sum` for full documentation.
See Also
--------
numpy.sum
Notes
-----
This is the same as `ndarray.sum`, except that where an `ndarray` would
be returned, a `matrix` object is returned instead.
Examples
--------
>>> x = np.matrix([[1, 2], [4, 3]])
>>> x.sum()
10
>>> x.sum(axis=1)
matrix([[3],
[7]])
>>> x.sum(axis=1, dtype='float')
matrix([[ 3.],
[ 7.]])
>>> out = np.zeros((1, 2), dtype='float')
>>> x.sum(axis=1, dtype='float', out=out)
matrix([[ 3.],
[ 7.]])
"""
return N.ndarray.sum(self, axis, dtype, out, keepdims=True)._collapse(axis)
def mean(self, axis=None, dtype=None, out=None):
"""
Returns the average of the matrix elements along the given axis.
Refer to `numpy.mean` for full documentation.
See Also
--------
numpy.mean
Notes
-----
Same as `ndarray.mean` except that, where that returns an `ndarray`,
this returns a `matrix` object.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3, 4)))
>>> x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.mean()
5.5
>>> x.mean(0)
matrix([[ 4., 5., 6., 7.]])
>>> x.mean(1)
matrix([[ 1.5],
[ 5.5],
[ 9.5]])
"""
return N.ndarray.mean(self, axis, dtype, out, keepdims=True)._collapse(axis)
def std(self, axis=None, dtype=None, out=None, ddof=0):
"""
Return the standard deviation of the array elements along the given axis.
Refer to `numpy.std` for full documentation.
See Also
--------
numpy.std
Notes
-----
This is the same as `ndarray.std`, except that where an `ndarray` would
be returned, a `matrix` object is returned instead.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3, 4)))
>>> x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.std()
3.4520525295346629
>>> x.std(0)
matrix([[ 3.26598632, 3.26598632, 3.26598632, 3.26598632]])
>>> x.std(1)
matrix([[ 1.11803399],
[ 1.11803399],
[ 1.11803399]])
"""
return N.ndarray.std(self, axis, dtype, out, ddof, keepdims=True)._collapse(axis)
def var(self, axis=None, dtype=None, out=None, ddof=0):
"""
Returns the variance of the matrix elements, along the given axis.
Refer to `numpy.var` for full documentation.
See Also
--------
numpy.var
Notes
-----
This is the same as `ndarray.var`, except that where an `ndarray` would
be returned, a `matrix` object is returned instead.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3, 4)))
>>> x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.var()
11.916666666666666
>>> x.var(0)
matrix([[ 10.66666667, 10.66666667, 10.66666667, 10.66666667]])
>>> x.var(1)
matrix([[ 1.25],
[ 1.25],
[ 1.25]])
"""
return N.ndarray.var(self, axis, dtype, out, ddof, keepdims=True)._collapse(axis)
def prod(self, axis=None, dtype=None, out=None):
"""
Return the product of the array elements over the given axis.
Refer to `prod` for full documentation.
See Also
--------
prod, ndarray.prod
Notes
-----
Same as `ndarray.prod`, except, where that returns an `ndarray`, this
returns a `matrix` object instead.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.prod()
0
>>> x.prod(0)
matrix([[ 0, 45, 120, 231]])
>>> x.prod(1)
matrix([[ 0],
[ 840],
[7920]])
"""
return N.ndarray.prod(self, axis, dtype, out, keepdims=True)._collapse(axis)
def any(self, axis=None, out=None):
"""
Test whether any array element along a given axis evaluates to True.
Refer to `numpy.any` for full documentation.
Parameters
----------
axis : int, optional
Axis along which logical OR is performed
out : ndarray, optional
Output to existing array instead of creating new one, must have
same shape as expected output
Returns
-------
any : bool, ndarray
Returns a single bool if `axis` is ``None``; otherwise,
returns `ndarray`
"""
return N.ndarray.any(self, axis, out, keepdims=True)._collapse(axis)
def all(self, axis=None, out=None):
"""
Test whether all matrix elements along a given axis evaluate to True.
Parameters
----------
See `numpy.all` for complete descriptions
See Also
--------
numpy.all
Notes
-----
This is the same as `ndarray.all`, but it returns a `matrix` object.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> y = x[0]; y
matrix([[0, 1, 2, 3]])
>>> (x == y)
matrix([[ True, True, True, True],
[False, False, False, False],
[False, False, False, False]], dtype=bool)
>>> (x == y).all()
False
>>> (x == y).all(0)
matrix([[False, False, False, False]], dtype=bool)
>>> (x == y).all(1)
matrix([[ True],
[False],
[False]], dtype=bool)
"""
return N.ndarray.all(self, axis, out, keepdims=True)._collapse(axis)
def max(self, axis=None, out=None):
"""
Return the maximum value along an axis.
Parameters
----------
See `amax` for complete descriptions
See Also
--------
amax, ndarray.max
Notes
-----
This is the same as `ndarray.max`, but returns a `matrix` object
where `ndarray.max` would return an ndarray.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.max()
11
>>> x.max(0)
matrix([[ 8, 9, 10, 11]])
>>> x.max(1)
matrix([[ 3],
[ 7],
[11]])
"""
return N.ndarray.max(self, axis, out, keepdims=True)._collapse(axis)
def argmax(self, axis=None, out=None):
"""
Indices of the maximum values along an axis.
Parameters
----------
See `numpy.argmax` for complete descriptions
See Also
--------
numpy.argmax
Notes
-----
This is the same as `ndarray.argmax`, but returns a `matrix` object
where `ndarray.argmax` would return an `ndarray`.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.argmax()
11
>>> x.argmax(0)
matrix([[2, 2, 2, 2]])
>>> x.argmax(1)
matrix([[3],
[3],
[3]])
"""
return N.ndarray.argmax(self, axis, out)._align(axis)
def min(self, axis=None, out=None):
"""
Return the minimum value along an axis.
Parameters
----------
See `amin` for complete descriptions.
See Also
--------
amin, ndarray.min
Notes
-----
This is the same as `ndarray.min`, but returns a `matrix` object
where `ndarray.min` would return an ndarray.
Examples
--------
>>> x = -np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, -1, -2, -3],
[ -4, -5, -6, -7],
[ -8, -9, -10, -11]])
>>> x.min()
-11
>>> x.min(0)
matrix([[ -8, -9, -10, -11]])
>>> x.min(1)
matrix([[ -3],
[ -7],
[-11]])
"""
return N.ndarray.min(self, axis, out, keepdims=True)._collapse(axis)
def argmin(self, axis=None, out=None):
"""
Return the indices of the minimum values along an axis.
Parameters
----------
See `numpy.argmin` for complete descriptions.
See Also
--------
numpy.argmin
Notes
-----
This is the same as `ndarray.argmin`, but returns a `matrix` object
where `ndarray.argmin` would return an `ndarray`.
Examples
--------
>>> x = -np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, -1, -2, -3],
[ -4, -5, -6, -7],
[ -8, -9, -10, -11]])
>>> x.argmin()
11
>>> x.argmin(0)
matrix([[2, 2, 2, 2]])
>>> x.argmin(1)
matrix([[3],
[3],
[3]])
"""
return N.ndarray.argmin(self, axis, out)._align(axis)
def ptp(self, axis=None, out=None):
"""
Peak-to-peak (maximum - minimum) value along the given axis.
Refer to `numpy.ptp` for full documentation.
See Also
--------
numpy.ptp
Notes
-----
Same as `ndarray.ptp`, except, where that would return an `ndarray` object,
this returns a `matrix` object.
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.ptp()
11
>>> x.ptp(0)
matrix([[8, 8, 8, 8]])
>>> x.ptp(1)
matrix([[3],
[3],
[3]])
"""
return N.ndarray.ptp(self, axis, out)._align(axis)
def getI(self):
"""
Returns the (multiplicative) inverse of invertible `self`.
Parameters
----------
None
Returns
-------
ret : matrix object
If `self` is non-singular, `ret` is such that ``ret * self`` ==
``self * ret`` == ``np.matrix(np.eye(self[0,:].size)`` all return
``True``.
Raises
------
numpy.linalg.LinAlgError: Singular matrix
If `self` is singular.
See Also
--------
linalg.inv
Examples
--------
>>> m = np.matrix('[1, 2; 3, 4]'); m
matrix([[1, 2],
[3, 4]])
>>> m.getI()
matrix([[-2. , 1. ],
[ 1.5, -0.5]])
>>> m.getI() * m
matrix([[ 1., 0.],
[ 0., 1.]])
"""
M, N = self.shape
if M == N:
from numpy.dual import inv as func
else:
from numpy.dual import pinv as func
return asmatrix(func(self))
def getA(self):
"""
Return `self` as an `ndarray` object.
Equivalent to ``np.asarray(self)``.
Parameters
----------
None
Returns
-------
ret : ndarray
`self` as an `ndarray`
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.getA()
array([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
"""
return self.__array__()
def getA1(self):
"""
Return `self` as a flattened `ndarray`.
Equivalent to ``np.asarray(x).ravel()``
Parameters
----------
None
Returns
-------
ret : ndarray
`self`, 1-D, as an `ndarray`
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4))); x
matrix([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11]])
>>> x.getA1()
array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
"""
return self.__array__().ravel()
def getT(self):
"""
Returns the transpose of the matrix.
Does *not* conjugate! For the complex conjugate transpose, use ``.H``.
Parameters
----------
None
Returns
-------
ret : matrix object
The (non-conjugated) transpose of the matrix.
See Also
--------
transpose, getH
Examples
--------
>>> m = np.matrix('[1, 2; 3, 4]')
>>> m
matrix([[1, 2],
[3, 4]])
>>> m.getT()
matrix([[1, 3],
[2, 4]])
"""
return self.transpose()
def getH(self):
"""
Returns the (complex) conjugate transpose of `self`.
Equivalent to ``np.transpose(self)`` if `self` is real-valued.
Parameters
----------
None
Returns
-------
ret : matrix object
complex conjugate transpose of `self`
Examples
--------
>>> x = np.matrix(np.arange(12).reshape((3,4)))
>>> z = x - 1j*x; z
matrix([[ 0. +0.j, 1. -1.j, 2. -2.j, 3. -3.j],
[ 4. -4.j, 5. -5.j, 6. -6.j, 7. -7.j],
[ 8. -8.j, 9. -9.j, 10.-10.j, 11.-11.j]])
>>> z.getH()
matrix([[ 0. +0.j, 4. +4.j, 8. +8.j],
[ 1. +1.j, 5. +5.j, 9. +9.j],
[ 2. +2.j, 6. +6.j, 10.+10.j],
[ 3. +3.j, 7. +7.j, 11.+11.j]])
"""
if issubclass(self.dtype.type, N.complexfloating):
return self.transpose().conjugate()
else:
return self.transpose()
T = property(getT, None)
A = property(getA, None)
A1 = property(getA1, None)
H = property(getH, None)
I = property(getI, None)
def _from_string(str_, gdict, ldict):
    """Build a block matrix from a string of variable names.

    *str_* is a MATLAB-style specification such as ``'A, B; C, D'``:
    rows are separated by ';', operands by ',' or whitespace.  Each
    operand name is looked up first in *ldict*, then in *gdict*.

    Raises KeyError if a name is found in neither mapping.

    (Fix: the parameter was previously named ``str``, shadowing the
    builtin; callers pass positionally, so the rename is safe.)
    """
    rows = str_.split(';')
    rowtup = []
    for row in rows:
        # Split on commas first, then on whitespace inside each piece.
        names = []
        for piece in row.split(','):
            names.extend(piece.split())
        coltup = []
        for col in names:
            col = col.strip()
            try:
                thismat = ldict[col]
            except KeyError:
                try:
                    thismat = gdict[col]
                except KeyError:
                    raise KeyError("%s not found" % (col,))
            coltup.append(thismat)
        # Join operands of one row horizontally...
        rowtup.append(concatenate(coltup, axis=-1))
    # ...and stack the rows vertically.
    return concatenate(rowtup, axis=0)
def bmat(obj, ldict=None, gdict=None):
    """
    Build a matrix object from a string, nested sequence, or array.

    Parameters
    ----------
    obj : str or array_like
        Input data. If a string, names of variables in the current
        scope may be referenced.
    ldict : dict, optional
        Replaces local operands in the string for `obj`; only consulted
        when `gdict` is also given.
    gdict : dict, optional
        Replaces global operands in the string for `obj`.

    Returns
    -------
    out : matrix
        Returns a matrix object, which is a specialized 2-D array.

    See Also
    --------
    matrix

    Examples
    --------
    >>> A = np.mat('1 1; 1 1')
    >>> B = np.mat('2 2; 2 2')
    >>> C = np.mat('3 4; 5 6')
    >>> D = np.mat('7 8; 9 0')
    >>> np.bmat([[A, B], [C, D]])
    matrix([[1, 1, 2, 2],
            [1, 1, 2, 2],
            [3, 4, 7, 8],
            [5, 6, 9, 0]])
    >>> np.bmat('A,B; C,D')
    matrix([[1, 1, 2, 2],
            [1, 1, 2, 2],
            [3, 4, 7, 8],
            [5, 6, 9, 0]])
    """
    if isinstance(obj, str):
        if gdict is None:
            # Resolve operand names in the caller's namespace.
            frame = sys._getframe().f_back
            glob_dict = frame.f_globals
            loc_dict = frame.f_locals
        else:
            glob_dict = gdict
            # Bug fix: a missing ldict used to be passed through as None,
            # making _from_string fail with TypeError on the first lookup
            # instead of falling back to gdict.
            loc_dict = ldict if ldict is not None else {}
        return matrix(_from_string(obj, glob_dict, loc_dict))
    if isinstance(obj, (tuple, list)):
        # Nested sequence like [[A, B], [C, D]]: join each row
        # horizontally, then stack the rows vertically.
        arr_rows = []
        for row in obj:
            if isinstance(row, N.ndarray):  # not 2-d: flat sequence [A, B]
                return matrix(concatenate(obj, axis=-1))
            else:
                arr_rows.append(concatenate(row, axis=-1))
        return matrix(concatenate(arr_rows, axis=0))
    if isinstance(obj, N.ndarray):
        return matrix(obj)
mat = asmatrix
|
Maccimo/intellij-community
|
refs/heads/master
|
python/testData/codeInsight/smartEnter/multilineTupleLiteralLastElement.py
|
10
|
xs = (
1,
2 <caret>
)
|
MalloyPower/parsing-python
|
refs/heads/master
|
front-end/testsuite-python-lib/Python-3.6.0/Lib/asyncio/sslproto.py
|
1
|
import collections
import warnings
try:
import ssl
except ImportError: # pragma: no cover
ssl = None
from . import base_events
from . import compat
from . import protocols
from . import transports
from .log import logger
def _create_transport_context(server_side, server_hostname):
    """Build a default client-side SSLContext.

    Server-side connections must always supply their own context (there
    is no safe default certificate/key configuration), so *server_side*
    True is rejected.
    """
    if server_side:
        raise ValueError('Server side SSL needs a valid SSLContext')
    # Client side may pass ssl=True to use a default
    # context; in that case the sslcontext passed is None.
    # The default is secure for client connections.
    if hasattr(ssl, 'create_default_context'):
        # Python 3.4+: use up-to-date strong settings.
        context = ssl.create_default_context()
        if not server_hostname:
            context.check_hostname = False
        return context
    # Fallback for Python 3.3: approximate the same settings by hand.
    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    context.options |= ssl.OP_NO_SSLv2
    context.options |= ssl.OP_NO_SSLv3
    context.set_default_verify_paths()
    context.verify_mode = ssl.CERT_REQUIRED
    return context
def _is_sslproto_available():
    # MemoryBIO (Python 3.5+) is required to drive SSL over in-memory buffers.
    return hasattr(ssl, "MemoryBIO")
# States of an _SSLPipe.
_UNWRAPPED = "UNWRAPPED"        # plaintext pass-through, no TLS active
_DO_HANDSHAKE = "DO_HANDSHAKE"  # TLS handshake in progress
_WRAPPED = "WRAPPED"            # handshake done, data is encrypted
_SHUTDOWN = "SHUTDOWN"          # close_notify sent, waiting for unwrap
class _SSLPipe(object):
    """An SSL "Pipe".
    An SSL pipe allows you to communicate with an SSL/TLS protocol instance
    through memory buffers. It can be used to implement a security layer for an
    existing connection where you don't have access to the connection's file
    descriptor, or for some reason you don't want to use it.
    An SSL pipe can be in "wrapped" and "unwrapped" mode. In unwrapped mode,
    data is passed through untransformed. In wrapped mode, application level
    data is encrypted to SSL record level data and vice versa. The SSL record
    level is the lowest level in the SSL protocol suite and is what travels
    as-is over the wire.
    An SslPipe initially is in "unwrapped" mode. To start SSL, call
    do_handshake(). To shutdown SSL again, call unwrap().
    """
    max_size = 256 * 1024  # Buffer size passed to read()
    def __init__(self, context, server_side, server_hostname=None):
        """
        The *context* argument specifies the ssl.SSLContext to use.
        The *server_side* argument indicates whether this is a server side or
        client side transport.
        The optional *server_hostname* argument can be used to specify the
        hostname you are connecting to. You may only specify this parameter if
        the _ssl module supports Server Name Indication (SNI).
        """
        self._context = context
        self._server_side = server_side
        self._server_hostname = server_hostname
        # One of _UNWRAPPED / _DO_HANDSHAKE / _WRAPPED / _SHUTDOWN.
        self._state = _UNWRAPPED
        # In-memory BIOs carrying encrypted bytes to/from the SSL engine.
        self._incoming = ssl.MemoryBIO()
        self._outgoing = ssl.MemoryBIO()
        self._sslobj = None
        # True when SSL needs more record-level input to make progress.
        self._need_ssldata = False
        self._handshake_cb = None
        self._shutdown_cb = None
    @property
    def context(self):
        """The SSL context passed to the constructor."""
        return self._context
    @property
    def ssl_object(self):
        """The internal ssl.SSLObject instance.
        Return None if the pipe is not wrapped.
        """
        return self._sslobj
    @property
    def need_ssldata(self):
        """Whether more record level data is needed to complete a handshake
        that is currently in progress."""
        return self._need_ssldata
    @property
    def wrapped(self):
        """
        Whether a security layer is currently in effect.
        Return False during handshake.
        """
        return self._state == _WRAPPED
    def do_handshake(self, callback=None):
        """Start the SSL handshake.
        Return a list of ssldata. A ssldata element is a list of buffers
        The optional *callback* argument can be used to install a callback that
        will be called when the handshake is complete. The callback will be
        called with None if successful, else an exception instance.
        """
        if self._state != _UNWRAPPED:
            raise RuntimeError('handshake in progress or completed')
        self._sslobj = self._context.wrap_bio(
            self._incoming, self._outgoing,
            server_side=self._server_side,
            server_hostname=self._server_hostname)
        self._state = _DO_HANDSHAKE
        self._handshake_cb = callback
        # Kick off the handshake; b'' feeds no data but drives the state
        # machine so the first handshake records land in ssldata.
        ssldata, appdata = self.feed_ssldata(b'', only_handshake=True)
        assert len(appdata) == 0
        return ssldata
    def shutdown(self, callback=None):
        """Start the SSL shutdown sequence.
        Return a list of ssldata. A ssldata element is a list of buffers
        The optional *callback* argument can be used to install a callback that
        will be called when the shutdown is complete. The callback will be
        called without arguments.
        """
        if self._state == _UNWRAPPED:
            raise RuntimeError('no security layer present')
        if self._state == _SHUTDOWN:
            raise RuntimeError('shutdown in progress')
        assert self._state in (_WRAPPED, _DO_HANDSHAKE)
        self._state = _SHUTDOWN
        self._shutdown_cb = callback
        ssldata, appdata = self.feed_ssldata(b'')
        assert appdata == [] or appdata == [b'']
        return ssldata
    def feed_eof(self):
        """Send a potentially "ragged" EOF.
        This method will raise an SSL_ERROR_EOF exception if the EOF is
        unexpected.
        """
        self._incoming.write_eof()
        ssldata, appdata = self.feed_ssldata(b'')
        assert appdata == [] or appdata == [b'']
    def feed_ssldata(self, data, only_handshake=False):
        """Feed SSL record level data into the pipe.
        The data must be a bytes instance. It is OK to send an empty bytes
        instance. This can be used to get ssldata for a handshake initiated by
        this endpoint.
        Return a (ssldata, appdata) tuple. The ssldata element is a list of
        buffers containing SSL data that needs to be sent to the remote SSL.
        The appdata element is a list of buffers containing plaintext data that
        needs to be forwarded to the application. The appdata list may contain
        an empty buffer indicating an SSL "close_notify" alert. This alert must
        be acknowledged by calling shutdown().
        """
        if self._state == _UNWRAPPED:
            # If unwrapped, pass plaintext data straight through.
            if data:
                appdata = [data]
            else:
                appdata = []
            return ([], appdata)
        # Reset; set again below if the SSL engine signals WANT_READ.
        self._need_ssldata = False
        if data:
            self._incoming.write(data)
        ssldata = []
        appdata = []
        try:
            if self._state == _DO_HANDSHAKE:
                # Call do_handshake() until it doesn't raise anymore.
                self._sslobj.do_handshake()
                self._state = _WRAPPED
                if self._handshake_cb:
                    self._handshake_cb(None)
                if only_handshake:
                    return (ssldata, appdata)
                # Handshake done: execute the wrapped block
            if self._state == _WRAPPED:
                # Main state: read data from SSL until close_notify
                while True:
                    chunk = self._sslobj.read(self.max_size)
                    appdata.append(chunk)
                    if not chunk:  # close_notify
                        break
            elif self._state == _SHUTDOWN:
                # Call shutdown() until it doesn't raise anymore.
                self._sslobj.unwrap()
                self._sslobj = None
                self._state = _UNWRAPPED
                if self._shutdown_cb:
                    self._shutdown_cb()
            elif self._state == _UNWRAPPED:
                # Drain possible plaintext data after close_notify.
                appdata.append(self._incoming.read())
        except (ssl.SSLError, ssl.CertificateError) as exc:
            # WANT_READ/WANT_WRITE/SYSCALL just mean "try again later";
            # anything else is a real error and is propagated.
            if getattr(exc, 'errno', None) not in (
                    ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE,
                    ssl.SSL_ERROR_SYSCALL):
                if self._state == _DO_HANDSHAKE and self._handshake_cb:
                    self._handshake_cb(exc)
                raise
            self._need_ssldata = (exc.errno == ssl.SSL_ERROR_WANT_READ)
        # Check for record level data that needs to be sent back.
        # Happens for the initial handshake and renegotiations.
        if self._outgoing.pending:
            ssldata.append(self._outgoing.read())
        return (ssldata, appdata)
    def feed_appdata(self, data, offset=0):
        """Feed plaintext data into the pipe.
        Return an (ssldata, offset) tuple. The ssldata element is a list of
        buffers containing record level data that needs to be sent to the
        remote SSL instance. The offset is the number of plaintext bytes that
        were processed, which may be less than the length of data.
        NOTE: In case of short writes, this call MUST be retried with the SAME
        buffer passed into the *data* argument (i.e. the id() must be the
        same). This is an OpenSSL requirement. A further particularity is that
        a short write will always have offset == 0, because the _ssl module
        does not enable partial writes. And even though the offset is zero,
        there will still be encrypted data in ssldata.
        """
        assert 0 <= offset <= len(data)
        if self._state == _UNWRAPPED:
            # pass through data in unwrapped mode
            if offset < len(data):
                ssldata = [data[offset:]]
            else:
                ssldata = []
            return (ssldata, len(data))
        ssldata = []
        # memoryview avoids copying on each retried slice below.
        view = memoryview(data)
        while True:
            self._need_ssldata = False
            try:
                if offset < len(view):
                    offset += self._sslobj.write(view[offset:])
            except ssl.SSLError as exc:
                # It is not allowed to call write() after unwrap() until the
                # close_notify is acknowledged. We return the condition to the
                # caller as a short write.
                if exc.reason == 'PROTOCOL_IS_SHUTDOWN':
                    exc.errno = ssl.SSL_ERROR_WANT_READ
                if exc.errno not in (ssl.SSL_ERROR_WANT_READ,
                                     ssl.SSL_ERROR_WANT_WRITE,
                                     ssl.SSL_ERROR_SYSCALL):
                    raise
                self._need_ssldata = (exc.errno == ssl.SSL_ERROR_WANT_READ)
            # See if there's any record level data back for us.
            if self._outgoing.pending:
                ssldata.append(self._outgoing.read())
            if offset == len(view) or self._need_ssldata:
                break
        return (ssldata, offset)
class _SSLProtocolTransport(transports._FlowControlMixin,
                            transports.Transport):
    # Transport handed to the application protocol; every operation is
    # delegated to the SSLProtocol (and through it to the real transport).
    def __init__(self, loop, ssl_protocol, app_protocol):
        self._loop = loop
        # SSLProtocol instance
        self._ssl_protocol = ssl_protocol
        self._app_protocol = app_protocol
        self._closed = False
    def get_extra_info(self, name, default=None):
        """Get optional transport information."""
        return self._ssl_protocol._get_extra_info(name, default)
    def set_protocol(self, protocol):
        self._app_protocol = protocol
    def get_protocol(self):
        return self._app_protocol
    def is_closing(self):
        return self._closed
    def close(self):
        """Close the transport.
        Buffered data will be flushed asynchronously. No more data
        will be received. After all buffered data is flushed, the
        protocol's connection_lost() method will (eventually) called
        with None as its argument.
        """
        self._closed = True
        # Triggers the SSL close_notify sequence rather than a hard close.
        self._ssl_protocol._start_shutdown()
    # On Python 3.3 and older, objects with a destructor part of a reference
    # cycle are never destroyed. It's not more the case on Python 3.4 thanks
    # to the PEP 442.
    if compat.PY34:
        def __del__(self):
            if not self._closed:
                warnings.warn("unclosed transport %r" % self, ResourceWarning,
                              source=self)
                self.close()
    def pause_reading(self):
        """Pause the receiving end.
        No data will be passed to the protocol's data_received()
        method until resume_reading() is called.
        """
        self._ssl_protocol._transport.pause_reading()
    def resume_reading(self):
        """Resume the receiving end.
        Data received will once again be passed to the protocol's
        data_received() method.
        """
        self._ssl_protocol._transport.resume_reading()
    def set_write_buffer_limits(self, high=None, low=None):
        """Set the high- and low-water limits for write flow control.
        These two values control when to call the protocol's
        pause_writing() and resume_writing() methods. If specified,
        the low-water limit must be less than or equal to the
        high-water limit. Neither value can be negative.
        The defaults are implementation-specific. If only the
        high-water limit is given, the low-water limit defaults to an
        implementation-specific value less than or equal to the
        high-water limit. Setting high to zero forces low to zero as
        well, and causes pause_writing() to be called whenever the
        buffer becomes non-empty. Setting low to zero causes
        resume_writing() to be called only once the buffer is empty.
        Use of zero for either limit is generally sub-optimal as it
        reduces opportunities for doing I/O and computation
        concurrently.
        """
        self._ssl_protocol._transport.set_write_buffer_limits(high, low)
    def get_write_buffer_size(self):
        """Return the current size of the write buffer."""
        return self._ssl_protocol._transport.get_write_buffer_size()
    def write(self, data):
        """Write some data bytes to the transport.
        This does not block; it buffers the data and arranges for it
        to be sent out asynchronously.
        """
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data: expecting a bytes-like instance, got {!r}"
                            .format(type(data).__name__))
        if not data:
            return
        # Plaintext goes into the SSL protocol's write backlog; it is
        # encrypted before reaching the underlying transport.
        self._ssl_protocol._write_appdata(data)
    def can_write_eof(self):
        """Return True if this transport supports write_eof(), False if not."""
        # TLS has no concept of a plaintext half-close.
        return False
    def abort(self):
        """Close the transport immediately.
        Buffered data will be lost. No more data will be received.
        The protocol's connection_lost() method will (eventually) be
        called with None as its argument.
        """
        self._ssl_protocol._abort()
class SSLProtocol(protocols.Protocol):
    """SSL protocol.
    Implementation of SSL on top of a socket using incoming and outgoing
    buffers which are ssl.MemoryBIO objects.
    """
    def __init__(self, loop, app_protocol, sslcontext, waiter,
                 server_side=False, server_hostname=None):
        if ssl is None:
            raise RuntimeError('stdlib ssl module not available')
        if not sslcontext:
            # No context given: build a secure client-side default.
            sslcontext = _create_transport_context(server_side, server_hostname)
        self._server_side = server_side
        if server_hostname and not server_side:
            self._server_hostname = server_hostname
        else:
            self._server_hostname = None
        self._sslcontext = sslcontext
        # SSL-specific extra info. More info are set when the handshake
        # completes.
        self._extra = dict(sslcontext=sslcontext)
        # App data write buffering
        self._write_backlog = collections.deque()
        self._write_buffer_size = 0
        self._waiter = waiter
        self._loop = loop
        self._app_protocol = app_protocol
        self._app_transport = _SSLProtocolTransport(self._loop,
                                                    self, self._app_protocol)
        # _SSLPipe instance (None until the connection is made)
        self._sslpipe = None
        self._session_established = False
        self._in_handshake = False
        self._in_shutdown = False
        # transport, ex: SelectorSocketTransport
        self._transport = None
    def _wakeup_waiter(self, exc=None):
        # Resolve the connection waiter exactly once (it is set to None
        # afterwards so later calls are no-ops).
        if self._waiter is None:
            return
        if not self._waiter.cancelled():
            if exc is not None:
                self._waiter.set_exception(exc)
            else:
                self._waiter.set_result(None)
        self._waiter = None
    def connection_made(self, transport):
        """Called when the low-level connection is made.
        Start the SSL handshake.
        """
        self._transport = transport
        self._sslpipe = _SSLPipe(self._sslcontext,
                                 self._server_side,
                                 self._server_hostname)
        self._start_handshake()
    def connection_lost(self, exc):
        """Called when the low-level connection is lost or closed.
        The argument is an exception object or None (the latter
        meaning a regular EOF is received or the connection was
        aborted or closed).
        """
        if self._session_established:
            self._session_established = False
            # call_soon so the app protocol is notified outside this frame.
            self._loop.call_soon(self._app_protocol.connection_lost, exc)
        self._transport = None
        self._app_transport = None
    def pause_writing(self):
        """Called when the low-level transport's buffer goes over
        the high-water mark.
        """
        self._app_protocol.pause_writing()
    def resume_writing(self):
        """Called when the low-level transport's buffer drains below
        the low-water mark.
        """
        self._app_protocol.resume_writing()
    def data_received(self, data):
        """Called when some SSL data is received.
        The argument is a bytes object.
        """
        try:
            ssldata, appdata = self._sslpipe.feed_ssldata(data)
        except ssl.SSLError as e:
            if self._loop.get_debug():
                logger.warning('%r: SSL error %s (reason %s)',
                               self, e.errno, e.reason)
            self._abort()
            return
        # Record-level responses (handshake, renegotiation) go back out.
        for chunk in ssldata:
            self._transport.write(chunk)
        # Decrypted plaintext goes up; an empty chunk is close_notify.
        for chunk in appdata:
            if chunk:
                self._app_protocol.data_received(chunk)
            else:
                self._start_shutdown()
                break
    def eof_received(self):
        """Called when the other end of the low-level stream
        is half-closed.
        If this returns a false value (including None), the transport
        will close itself. If it returns a true value, closing the
        transport is up to the protocol.
        """
        try:
            if self._loop.get_debug():
                logger.debug("%r received EOF", self)
            self._wakeup_waiter(ConnectionResetError)
            if not self._in_handshake:
                keep_open = self._app_protocol.eof_received()
                if keep_open:
                    logger.warning('returning true from eof_received() '
                                   'has no effect when using ssl')
        finally:
            self._transport.close()
    def _get_extra_info(self, name, default=None):
        # SSL-level info first, then fall back to the real transport.
        if name in self._extra:
            return self._extra[name]
        else:
            return self._transport.get_extra_info(name, default)
    def _start_shutdown(self):
        if self._in_shutdown:
            return
        self._in_shutdown = True
        # An empty appdata write is the sentinel that triggers shutdown()
        # in _process_write_backlog().
        self._write_appdata(b'')
    def _write_appdata(self, data):
        self._write_backlog.append((data, 0))
        self._write_buffer_size += len(data)
        self._process_write_backlog()
    def _start_handshake(self):
        if self._loop.get_debug():
            logger.debug("%r starts SSL handshake", self)
            self._handshake_start_time = self._loop.time()
        else:
            self._handshake_start_time = None
        self._in_handshake = True
        # (b'', 1) is a special value in _process_write_backlog() to do
        # the SSL handshake
        self._write_backlog.append((b'', 1))
        self._loop.call_soon(self._process_write_backlog)
    def _on_handshake_complete(self, handshake_exc):
        self._in_handshake = False
        sslobj = self._sslpipe.ssl_object
        try:
            if handshake_exc is not None:
                raise handshake_exc
            peercert = sslobj.getpeercert()
            if not hasattr(self._sslcontext, 'check_hostname'):
                # Verify hostname if requested, Python 3.4+ uses check_hostname
                # and checks the hostname in do_handshake()
                if (self._server_hostname
                        and self._sslcontext.verify_mode != ssl.CERT_NONE):
                    ssl.match_hostname(peercert, self._server_hostname)
        except BaseException as exc:
            if self._loop.get_debug():
                if isinstance(exc, ssl.CertificateError):
                    logger.warning("%r: SSL handshake failed "
                                   "on verifying the certificate",
                                   self, exc_info=True)
                else:
                    logger.warning("%r: SSL handshake failed",
                                   self, exc_info=True)
            self._transport.close()
            if isinstance(exc, Exception):
                self._wakeup_waiter(exc)
                return
            else:
                # BaseException (e.g. KeyboardInterrupt) propagates.
                raise
        if self._loop.get_debug():
            dt = self._loop.time() - self._handshake_start_time
            logger.debug("%r: SSL handshake took %.1f ms", self, dt * 1e3)
        # Add extra info that becomes available after handshake.
        self._extra.update(peercert=peercert,
                           cipher=sslobj.cipher(),
                           compression=sslobj.compression(),
                           ssl_object=sslobj,
                           )
        self._app_protocol.connection_made(self._app_transport)
        self._wakeup_waiter()
        self._session_established = True
        # In case transport.write() was already called. Don't call
        # immediately _process_write_backlog(), but schedule it:
        # _on_handshake_complete() can be called indirectly from
        # _process_write_backlog(), and _process_write_backlog() is not
        # reentrant.
        self._loop.call_soon(self._process_write_backlog)
    def _process_write_backlog(self):
        # Try to make progress on the write backlog.
        if self._transport is None:
            return
        try:
            for i in range(len(self._write_backlog)):
                data, offset = self._write_backlog[0]
                if data:
                    # Regular plaintext chunk: encrypt as much as possible.
                    ssldata, offset = self._sslpipe.feed_appdata(data, offset)
                elif offset:
                    # (b'', 1) sentinel: start the handshake.
                    ssldata = self._sslpipe.do_handshake(
                        self._on_handshake_complete)
                    offset = 1
                else:
                    # (b'', 0) sentinel: start the shutdown sequence.
                    ssldata = self._sslpipe.shutdown(self._finalize)
                    offset = 1
                for chunk in ssldata:
                    self._transport.write(chunk)
                if offset < len(data):
                    self._write_backlog[0] = (data, offset)
                    # A short write means that a write is blocked on a read
                    # We need to enable reading if it is paused!
                    assert self._sslpipe.need_ssldata
                    if self._transport._paused:
                        self._transport.resume_reading()
                    break
                # An entire chunk from the backlog was processed. We can
                # delete it and reduce the outstanding buffer size.
                del self._write_backlog[0]
                self._write_buffer_size -= len(data)
        except BaseException as exc:
            if self._in_handshake:
                # BaseExceptions will be re-raised in _on_handshake_complete.
                self._on_handshake_complete(exc)
            else:
                self._fatal_error(exc, 'Fatal error on SSL transport')
            if not isinstance(exc, Exception):
                # BaseException
                raise
    def _fatal_error(self, exc, message='Fatal error on transport'):
        # Should be called from exception handler only.
        if isinstance(exc, base_events._FATAL_ERROR_IGNORE):
            if self._loop.get_debug():
                logger.debug("%r: %s", self, message, exc_info=True)
        else:
            self._loop.call_exception_handler({
                'message': message,
                'exception': exc,
                'transport': self._transport,
                'protocol': self,
            })
        if self._transport:
            self._transport._force_close(exc)
    def _finalize(self):
        # Called once the SSL shutdown handshake has completed.
        if self._transport is not None:
            self._transport.close()
    def _abort(self):
        if self._transport is not None:
            try:
                self._transport.abort()
            finally:
                self._finalize()
|
gregn610/workalendar
|
refs/heads/master
|
workalendar/europe/portugal.py
|
1
|
# -*- coding: utf-8 -*-
from datetime import timedelta, date
from workalendar.core import WesternCalendar, ChristianMixin
class Portugal(WesternCalendar, ChristianMixin):
    "Portugal"
    include_good_friday = True
    include_easter_sunday = True
    FIXED_HOLIDAYS = WesternCalendar.FIXED_HOLIDAYS + (
        (4, 25, "Dia da Liberdade"),
        (5, 1, "Dia do Trabalhador"),
        (6, 10, "Dia de Portugal"),
        (8, 15, "Assunção de Nossa Senhora"),
        (12, 8, "Imaculada Conceição"),
    )
    def get_variable_entrudo(self, year):
        # Entrudo (Carnival) falls 47 days before Easter Sunday.
        return self.get_easter_sunday(year) - timedelta(days=47)
    def get_variable_days(self, year):
        days = super(Portugal, self).get_variable_days(year)
        days.append((self.get_variable_entrudo(year), "Entrudo"))
        # Republic day was suspended as a public holiday for 2013-2015.
        if not 2013 <= year <= 2015:
            days.append((date(year, 10, 5), "Republic day"))
        days.extend([
            (self.get_corpus_christi(year), "Corpus Christi"),
            (date(year, 11, 1), "All Saints Day"),
            (date(year, 12, 1), "Restoration of Independence"),
        ])
        return days
|
ikalnytskyi/sphinxcontrib-redoc
|
refs/heads/master
|
docs/conf.py
|
1
|
import os
import pkg_resources
# Sphinx configuration for the sphinxcontrib-redoc documentation.
# The version is read from the installed distribution so the docs always
# match the released package rather than a hard-coded string.
project = 'sphinxcontrib-redoc'
copyright = '2017, Ihor Kalnytskyi'
release = pkg_resources.get_distribution('sphinxcontrib-redoc').version
version = '.'.join(release.split('.')[:2])
extensions = ['sphinx.ext.extlinks', 'sphinxcontrib.redoc']
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
# Shorthand roles (:issue:`N`, :pr:`N`) linking to the project's GitHub.
extlinks = {
    'issue': ('https://github.com/ikalnytskyi/sphinxcontrib-redoc/issues/%s', '#'),
    'pr': ('https://github.com/ikalnytskyi/sphinxcontrib-redoc/pull/%s', 'PR #'),
}
# Self-hosted demo page rendered by the redoc extension being documented.
redoc = [
    {
        'name': 'Github API (v3)',
        'page': 'api/github/index',
        'spec': '_specs/github.yml',
        'opts': {
            'lazy-rendering': True
        },
    },
]
# Read the Docs injects its own theme; only configure sphinx_rtd_theme for
# local builds.
if not os.environ.get('READTHEDOCS') == 'True':
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Unfortunately, Sphinx doesn't support code highlighting for standard
# reStructuredText `code` directive. So let's register 'code' directive
# as alias for Sphinx's own implementation.
#
# https://github.com/sphinx-doc/sphinx/issues/2155
from docutils.parsers.rst import directives
from sphinx.directives.code import CodeBlock
directives.register_directive('code', CodeBlock)
# flake8: noqa
|
Sodki/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/panos/panos_admin.py
|
78
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage PaloAltoNetworks Firewall
# (c) 2016, techbizdev <techbizdev@paloaltonetworks.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: panos_admin
short_description: Add or modify PAN-OS user accounts password.
description:
- PanOS module that allows changes to the user account passwords by doing
API calls to the Firewall using pan-api as the protocol.
author: "Luigi Mori (@jtschichold), Ivan Bojer (@ivanbojer)"
version_added: "2.3"
requirements:
- pan-python
options:
ip_address:
description:
- IP address (or hostname) of PAN-OS device
required: true
password:
description:
- password for authentication
required: true
username:
description:
- username for authentication
required: false
default: "admin"
admin_username:
description:
- username for admin user
required: false
default: "admin"
admin_password:
description:
- password for admin user
required: true
role:
description:
- role for admin user
required: false
default: null
commit:
description:
- commit if changed
required: false
default: true
'''
EXAMPLES = '''
# Set the password of user admin to "badpassword"
# Doesn't commit the candidate config
- name: set admin password
panos_admin:
ip_address: "192.168.1.1"
password: "admin"
admin_username: admin
admin_password: "badpassword"
commit: False
'''
RETURN = '''
status:
description: success status
returned: success
type: string
sample: "okey dokey"
'''
from ansible.module_utils.basic import AnsibleModule
# pan-python is an optional dependency: record whether it imported so that
# main() can fail with a clear message instead of an ImportError at load time.
try:
    import pan.xapi
    HAS_LIB = True
except ImportError:
    HAS_LIB = False
# XPath template locating an administrator entry in the PAN-OS config.
_ADMIN_XPATH = "/config/mgt-config/users/entry[@name='%s']"


def admin_exists(xapi, admin_username):
    """Look up *admin_username* in the firewall's management config.

    Issues a GET for the user's config entry and returns the matching
    ``entry`` element, or ``None`` when no such administrator exists.
    """
    xapi.get(_ADMIN_XPATH % admin_username)
    return xapi.element_root.find('.//entry')
def admin_set(xapi, module, admin_username, admin_password, role):
    """Create or update a PAN-OS administrator account.

    Hashes *admin_password* (when given) via the firewall, then either
    updates the existing user's role/password or creates a new user entry.
    *module* is unused but kept for interface compatibility with callers.

    Returns True if the candidate configuration was changed.

    Bug fix: the original referenced ``phash`` / ``rbval`` in the
    new-user branch even when ``admin_password`` / ``role`` were None,
    raising NameError; both are now initialized and the new-user XML is
    built only from the pieces that were actually supplied.
    """
    phash = None
    if admin_password is not None:
        # Ask the firewall to hash the password so we never store plaintext.
        xapi.op(cmd='request password-hash password "%s"' % admin_password,
                cmd_xml=True)
        r = xapi.element_root
        phash = r.find('.//phash').text

    rbval = None
    if role is not None:
        # Only the built-in superuser/superreader roles take a "yes" value;
        # custom role profiles use an empty element body.
        rbval = "yes"
        if role != "superuser" and role != 'superreader':
            rbval = ""

    ea = admin_exists(xapi, admin_username)
    if ea is not None:
        # User exists: update role and/or password in place.
        changed = False
        if role is not None:
            rb = ea.find('.//role-based')
            if rb is not None:
                if rb[0].tag != role:
                    changed = True
                    # Remove the old role element before setting the new one.
                    xpath = _ADMIN_XPATH % admin_username
                    xpath += '/permissions/role-based/%s' % rb[0].tag
                    xapi.delete(xpath=xpath)
                    xpath = _ADMIN_XPATH % admin_username
                    xpath += '/permissions/role-based'
                    xapi.set(xpath=xpath,
                             element='<%s>%s</%s>' % (role, rbval, role))
        if phash is not None:
            xapi.edit(xpath=_ADMIN_XPATH % admin_username + '/phash',
                      element='<phash>%s</phash>' % phash)
            changed = True
        return changed

    # User does not exist: create a new entry from the supplied pieces.
    exml = []
    if phash is not None:
        exml.append('<phash>%s</phash>' % phash)
    if role is not None:
        exml.append('<permissions><role-based><%s>%s</%s>'
                    '</role-based></permissions>' % (role, rbval, role))
    xapi.set(xpath=_ADMIN_XPATH % admin_username, element=''.join(exml))
    return True
def main():
    """Ansible entry point: connect to the firewall and set the admin user."""
    # NOTE(review): admin_password and role are documented as required/optional
    # respectively, but required-ness is enforced manually below to produce
    # clearer failure messages than argument_spec validation would.
    argument_spec = dict(
        ip_address=dict(),
        password=dict(no_log=True),
        username=dict(default='admin'),
        admin_username=dict(default='admin'),
        admin_password=dict(no_log=True),
        role=dict(),
        commit=dict(type='bool', default=True)
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
    if not HAS_LIB:
        module.fail_json(msg='pan-python required for this module')
    ip_address = module.params["ip_address"]
    if not ip_address:
        module.fail_json(msg="ip_address should be specified")
    password = module.params["password"]
    if not password:
        module.fail_json(msg="password is required")
    username = module.params['username']
    # API session against the firewall's management interface.
    xapi = pan.xapi.PanXapi(
        hostname=ip_address,
        api_username=username,
        api_password=password
    )
    admin_username = module.params['admin_username']
    if admin_username is None:
        module.fail_json(msg="admin_username is required")
    admin_password = module.params['admin_password']
    role = module.params['role']
    commit = module.params['commit']
    changed = admin_set(xapi, module, admin_username, admin_password, role)
    if changed and commit:
        # Synchronously commit the candidate config so the change takes effect.
        xapi.commit(cmd="<commit></commit>", sync=True, interval=1)
    module.exit_json(changed=changed, msg="okey dokey")


if __name__ == '__main__':
    main()
|
chengdh/openerp-ktv
|
refs/heads/master
|
openerp/addons/l10n_be/__init__.py
|
8
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import company
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
patilsangram/erpnext
|
refs/heads/develop
|
erpnext/hr/doctype/leave_block_list/leave_block_list_dashboard.py
|
5
|
def get_data():
    """Dashboard configuration for the Leave Block List doctype.

    Links documents via the ``leave_block_list`` field and groups the
    connected doctypes shown on the dashboard.
    """
    linked_doctypes = [
        {
            'items': ['Department']
        }
    ]
    return {
        'fieldname': 'leave_block_list',
        'transactions': linked_doctypes,
    }
|
siosio/intellij-community
|
refs/heads/master
|
python/testData/intentions/convertVariadicParamNoUsages.py
|
21
|
def foo(**kwargs<caret>):
print("ok")
|
patilsangram/erpnext
|
refs/heads/develop
|
erpnext/accounts/report/tds_computation_summary/__init__.py
|
12133432
| |
maximinus/mpquiz
|
refs/heads/master
|
mpquiz/singleton/migrations/__init__.py
|
12133432
| |
madkinder/distcc
|
refs/heads/master
|
include_server/__init__.py
|
12133432
| |
joequery/django
|
refs/heads/master
|
django/conf/locale/hu/__init__.py
|
12133432
| |
MiltosD/CEF-ELRC
|
refs/heads/master
|
lib/python2.7/site-packages/unidecode/x07c.py
|
252
|
data = (
'Ze ', # 0x00
'Xi ', # 0x01
'Guo ', # 0x02
'Yi ', # 0x03
'Hu ', # 0x04
'Chan ', # 0x05
'Kou ', # 0x06
'Cu ', # 0x07
'Ping ', # 0x08
'Chou ', # 0x09
'Ji ', # 0x0a
'Gui ', # 0x0b
'Su ', # 0x0c
'Lou ', # 0x0d
'Zha ', # 0x0e
'Lu ', # 0x0f
'Nian ', # 0x10
'Suo ', # 0x11
'Cuan ', # 0x12
'Sasara ', # 0x13
'Suo ', # 0x14
'Le ', # 0x15
'Duan ', # 0x16
'Yana ', # 0x17
'Xiao ', # 0x18
'Bo ', # 0x19
'Mi ', # 0x1a
'Si ', # 0x1b
'Dang ', # 0x1c
'Liao ', # 0x1d
'Dan ', # 0x1e
'Dian ', # 0x1f
'Fu ', # 0x20
'Jian ', # 0x21
'Min ', # 0x22
'Kui ', # 0x23
'Dai ', # 0x24
'Qiao ', # 0x25
'Deng ', # 0x26
'Huang ', # 0x27
'Sun ', # 0x28
'Lao ', # 0x29
'Zan ', # 0x2a
'Xiao ', # 0x2b
'Du ', # 0x2c
'Shi ', # 0x2d
'Zan ', # 0x2e
'[?] ', # 0x2f
'Pai ', # 0x30
'Hata ', # 0x31
'Pai ', # 0x32
'Gan ', # 0x33
'Ju ', # 0x34
'Du ', # 0x35
'Lu ', # 0x36
'Yan ', # 0x37
'Bo ', # 0x38
'Dang ', # 0x39
'Sai ', # 0x3a
'Ke ', # 0x3b
'Long ', # 0x3c
'Qian ', # 0x3d
'Lian ', # 0x3e
'Bo ', # 0x3f
'Zhou ', # 0x40
'Lai ', # 0x41
'[?] ', # 0x42
'Lan ', # 0x43
'Kui ', # 0x44
'Yu ', # 0x45
'Yue ', # 0x46
'Hao ', # 0x47
'Zhen ', # 0x48
'Tai ', # 0x49
'Ti ', # 0x4a
'Mi ', # 0x4b
'Chou ', # 0x4c
'Ji ', # 0x4d
'[?] ', # 0x4e
'Hata ', # 0x4f
'Teng ', # 0x50
'Zhuan ', # 0x51
'Zhou ', # 0x52
'Fan ', # 0x53
'Sou ', # 0x54
'Zhou ', # 0x55
'Kuji ', # 0x56
'Zhuo ', # 0x57
'Teng ', # 0x58
'Lu ', # 0x59
'Lu ', # 0x5a
'Jian ', # 0x5b
'Tuo ', # 0x5c
'Ying ', # 0x5d
'Yu ', # 0x5e
'Lai ', # 0x5f
'Long ', # 0x60
'Shinshi ', # 0x61
'Lian ', # 0x62
'Lan ', # 0x63
'Qian ', # 0x64
'Yue ', # 0x65
'Zhong ', # 0x66
'Qu ', # 0x67
'Lian ', # 0x68
'Bian ', # 0x69
'Duan ', # 0x6a
'Zuan ', # 0x6b
'Li ', # 0x6c
'Si ', # 0x6d
'Luo ', # 0x6e
'Ying ', # 0x6f
'Yue ', # 0x70
'Zhuo ', # 0x71
'Xu ', # 0x72
'Mi ', # 0x73
'Di ', # 0x74
'Fan ', # 0x75
'Shen ', # 0x76
'Zhe ', # 0x77
'Shen ', # 0x78
'Nu ', # 0x79
'Xie ', # 0x7a
'Lei ', # 0x7b
'Xian ', # 0x7c
'Zi ', # 0x7d
'Ni ', # 0x7e
'Cun ', # 0x7f
'[?] ', # 0x80
'Qian ', # 0x81
'Kume ', # 0x82
'Bi ', # 0x83
'Ban ', # 0x84
'Wu ', # 0x85
'Sha ', # 0x86
'Kang ', # 0x87
'Rou ', # 0x88
'Fen ', # 0x89
'Bi ', # 0x8a
'Cui ', # 0x8b
'[?] ', # 0x8c
'Li ', # 0x8d
'Chi ', # 0x8e
'Nukamiso ', # 0x8f
'Ro ', # 0x90
'Ba ', # 0x91
'Li ', # 0x92
'Gan ', # 0x93
'Ju ', # 0x94
'Po ', # 0x95
'Mo ', # 0x96
'Cu ', # 0x97
'Nian ', # 0x98
'Zhou ', # 0x99
'Li ', # 0x9a
'Su ', # 0x9b
'Tiao ', # 0x9c
'Li ', # 0x9d
'Qi ', # 0x9e
'Su ', # 0x9f
'Hong ', # 0xa0
'Tong ', # 0xa1
'Zi ', # 0xa2
'Ce ', # 0xa3
'Yue ', # 0xa4
'Zhou ', # 0xa5
'Lin ', # 0xa6
'Zhuang ', # 0xa7
'Bai ', # 0xa8
'[?] ', # 0xa9
'Fen ', # 0xaa
'Ji ', # 0xab
'[?] ', # 0xac
'Sukumo ', # 0xad
'Liang ', # 0xae
'Xian ', # 0xaf
'Fu ', # 0xb0
'Liang ', # 0xb1
'Can ', # 0xb2
'Geng ', # 0xb3
'Li ', # 0xb4
'Yue ', # 0xb5
'Lu ', # 0xb6
'Ju ', # 0xb7
'Qi ', # 0xb8
'Cui ', # 0xb9
'Bai ', # 0xba
'Zhang ', # 0xbb
'Lin ', # 0xbc
'Zong ', # 0xbd
'Jing ', # 0xbe
'Guo ', # 0xbf
'Kouji ', # 0xc0
'San ', # 0xc1
'San ', # 0xc2
'Tang ', # 0xc3
'Bian ', # 0xc4
'Rou ', # 0xc5
'Mian ', # 0xc6
'Hou ', # 0xc7
'Xu ', # 0xc8
'Zong ', # 0xc9
'Hu ', # 0xca
'Jian ', # 0xcb
'Zan ', # 0xcc
'Ci ', # 0xcd
'Li ', # 0xce
'Xie ', # 0xcf
'Fu ', # 0xd0
'Ni ', # 0xd1
'Bei ', # 0xd2
'Gu ', # 0xd3
'Xiu ', # 0xd4
'Gao ', # 0xd5
'Tang ', # 0xd6
'Qiu ', # 0xd7
'Sukumo ', # 0xd8
'Cao ', # 0xd9
'Zhuang ', # 0xda
'Tang ', # 0xdb
'Mi ', # 0xdc
'San ', # 0xdd
'Fen ', # 0xde
'Zao ', # 0xdf
'Kang ', # 0xe0
'Jiang ', # 0xe1
'Mo ', # 0xe2
'San ', # 0xe3
'San ', # 0xe4
'Nuo ', # 0xe5
'Xi ', # 0xe6
'Liang ', # 0xe7
'Jiang ', # 0xe8
'Kuai ', # 0xe9
'Bo ', # 0xea
'Huan ', # 0xeb
'[?] ', # 0xec
'Zong ', # 0xed
'Xian ', # 0xee
'Nuo ', # 0xef
'Tuan ', # 0xf0
'Nie ', # 0xf1
'Li ', # 0xf2
'Zuo ', # 0xf3
'Di ', # 0xf4
'Nie ', # 0xf5
'Tiao ', # 0xf6
'Lan ', # 0xf7
'Mi ', # 0xf8
'Jiao ', # 0xf9
'Jiu ', # 0xfa
'Xi ', # 0xfb
'Gong ', # 0xfc
'Zheng ', # 0xfd
'Jiu ', # 0xfe
'You ', # 0xff
)
|
ujvl/ray-ng
|
refs/heads/master
|
rllib/examples/serving/cartpole_server.py
|
2
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
"""Example of running a policy server. Copy this file for your use case.
To try this out, in two separate shells run:
$ python cartpole_server.py
$ python cartpole_client.py
"""
import os
from gym import spaces
import numpy as np
import ray
from ray.rllib.agents.dqn import DQNTrainer
from ray.rllib.env.external_env import ExternalEnv
from ray.rllib.utils.policy_server import PolicyServer
from ray.tune.logger import pretty_print
from ray.tune.registry import register_env
SERVER_ADDRESS = "localhost"
SERVER_PORT = 9900
CHECKPOINT_FILE = "last_checkpoint.out"
class CartpoleServing(ExternalEnv):
    """ExternalEnv exposing CartPole over a policy server.

    Action space: 2 discrete actions; observation space: 4-dim float box
    (bounds widened to [-10, 10] to accommodate CartPole observations).
    """

    def __init__(self):
        ExternalEnv.__init__(
            self, spaces.Discrete(2),
            spaces.Box(low=-10, high=10, shape=(4, ), dtype=np.float32))

    def run(self):
        # Blocks forever serving policy queries; RLlib runs this in a thread.
        print("Starting policy server at {}:{}".format(SERVER_ADDRESS,
                                                       SERVER_PORT))
        server = PolicyServer(self, SERVER_ADDRESS, SERVER_PORT)
        server.serve_forever()
if __name__ == "__main__":
    ray.init()
    register_env("srv", lambda _: CartpoleServing())
    # We use DQN since it supports off-policy actions, but you can choose and
    # configure any agent.
    dqn = DQNTrainer(
        env="srv",
        config={
            # Use a single process to avoid needing to set up a load balancer
            "num_workers": 0,
            # Configure the agent to run short iterations for debugging
            "exploration_fraction": 0.01,
            "learning_starts": 100,
            "timesteps_per_iteration": 200,
        })
    # Attempt to restore from checkpoint if possible.
    if os.path.exists(CHECKPOINT_FILE):
        checkpoint_path = open(CHECKPOINT_FILE).read()
        print("Restoring from checkpoint path", checkpoint_path)
        dqn.restore(checkpoint_path)
    # Serving and training loop: train one iteration, then persist the
    # checkpoint path so a later restart can resume from it.
    while True:
        print(pretty_print(dqn.train()))
        checkpoint_path = dqn.save()
        print("Last checkpoint", checkpoint_path)
        with open(CHECKPOINT_FILE, "w") as f:
            f.write(checkpoint_path)
|
notnola/pyalienfx
|
refs/heads/master
|
usb/__init__.py
|
10
|
# Copyright (C) 2009-2011 Wander Lairson Costa
#
# The following terms apply to all files associated
# with the software unless explicitly disclaimed in individual files.
#
# The authors hereby grant permission to use, copy, modify, distribute,
# and license this software and its documentation for any purpose, provided
# that existing copyright notices are retained in all copies and that this
# notice is included verbatim in any distributions. No written agreement,
# license, or royalty fee is required for any of the authorized uses.
# Modifications to this software may be copyrighted by their authors
# and need not follow the licensing terms described here, provided that
# the new terms are clearly indicated on the first page of each file where
# they apply.
#
# IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY
# FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
# ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY
# DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE
# IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE
# NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
# MODIFICATIONS.
r"""PyUSB - Easy USB access in Python
This package exports the following modules and subpackages:
core - the main USB implementation
legacy - the compatibility layer with 0.x version
backend - the support for backend implementations.
Since version 1.0, main PyUSB implementation lives in the 'usb.core'
module. New applications are encouraged to use it.
"""
import logging
import os
__author__ = 'Wander Lairson Costa'
__all__ = ['legacy', 'core', 'backend', 'util']
def _setup_log():
logger = logging.getLogger('usb')
debug_level = os.getenv('PYUSB_DEBUG_LEVEL')
if debug_level is not None:
filename = os.getenv('PYUSB_LOG_FILENAME')
LEVELS = {'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL}
level = LEVELS.get(debug_level, logging.CRITICAL + 10)
logger.setLevel(level = level)
try:
handler = logging.FileHandler(filename)
except:
handler = logging.StreamHandler()
fmt = logging.Formatter('%(asctime)s %(levelname)s:%(name)s:%(message)s')
handler.setFormatter(fmt)
logger.addHandler(handler)
else:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logger.addHandler(NullHandler())
_setup_log()
# We import all 'legacy' module symbols to provide compatility
# with applications that use 0.x versions.
from usb.legacy import *
|
maxim-kht/django-rest-auth
|
refs/heads/master
|
rest_auth/tests/urls.py
|
4
|
from django.conf.urls import url, include
from django.views.generic import TemplateView
from . import django_urls
from allauth.socialaccount.providers.facebook.views import FacebookOAuth2Adapter
from allauth.socialaccount.providers.twitter.views import TwitterOAuthAdapter
from rest_framework.decorators import api_view
from rest_auth.urls import urlpatterns
from rest_auth.registration.views import SocialLoginView
from rest_auth.social_serializers import TwitterLoginSerializer
class FacebookLogin(SocialLoginView):
    """Test social-login endpoint using the allauth Facebook OAuth2 adapter."""
    adapter_class = FacebookOAuth2Adapter
class TwitterLogin(SocialLoginView):
    """Test social-login endpoint pairing the Twitter OAuth(1a) adapter
    with its dedicated serializer (access token + token secret)."""
    adapter_class = TwitterOAuthAdapter
    serializer_class = TwitterLoginSerializer
class TwitterLoginSerializerFoo(TwitterLoginSerializer):
    """Trivial subclass used by twitter_login_view to exercise the
    serializer outside of a SocialLoginView."""
    pass
@api_view(['POST'])
def twitter_login_view(request):
    # Validates the Twitter serializer directly with hard-coded dummy
    # credentials; used by tests for the "no view" login path.  Raises a
    # DRF ValidationError on failure; returns nothing on success.
    serializer = TwitterLoginSerializerFoo(
        data={'access_token': '11223344', 'token_secret': '55667788'},
        context={'request': request}
    )
    serializer.is_valid(raise_exception=True)
class TwitterLoginNoAdapter(SocialLoginView):
    """Misconfigured-on-purpose view (no adapter_class) for error-path tests."""
    serializer_class = TwitterLoginSerializer
urlpatterns += [
url(r'^rest-registration/', include('rest_auth.registration.urls')),
url(r'^test-admin/', include(django_urls)),
url(r'^account-email-verification-sent/$', TemplateView.as_view(),
name='account_email_verification_sent'),
url(r'^account-confirm-email/(?P<key>[-:\w]+)/$', TemplateView.as_view(),
name='account_confirm_email'),
url(r'^social-login/facebook/$', FacebookLogin.as_view(), name='fb_login'),
url(r'^social-login/twitter/$', TwitterLogin.as_view(), name='tw_login'),
url(r'^social-login/twitter-no-view/$', twitter_login_view, name='tw_login_no_view'),
url(r'^social-login/twitter-no-adapter/$', TwitterLoginNoAdapter.as_view(), name='tw_login_no_adapter'),
url(r'^accounts/', include('allauth.socialaccount.urls'))
]
|
dannyperry571/theapprentice
|
refs/heads/master
|
script.module.youtube.dl/lib/youtube_dl/extractor/playtvak.py
|
36
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_urlparse,
compat_urllib_parse_urlencode,
)
from ..utils import (
ExtractorError,
int_or_none,
parse_iso8601,
qualities,
)
class PlaytvakIE(InfoExtractor):
IE_DESC = 'Playtvak.cz, iDNES.cz and Lidovky.cz'
_VALID_URL = r'https?://(?:.+?\.)?(?:playtvak|idnes|lidovky|metro)\.cz/.*\?(?:c|idvideo)=(?P<id>[^&]+)'
_TESTS = [{
'url': 'http://www.playtvak.cz/vyzente-vosy-a-srsne-ze-zahrady-dn5-/hodinovy-manzel.aspx?c=A150730_150323_hodinovy-manzel_kuko',
'md5': '4525ae312c324b4be2f4603cc78ceb4a',
'info_dict': {
'id': 'A150730_150323_hodinovy-manzel_kuko',
'ext': 'mp4',
'title': 'Vyžeňte vosy a sršně ze zahrady',
'description': 'md5:f93d398691044d303bc4a3de62f3e976',
'thumbnail': r're:(?i)^https?://.*\.(?:jpg|png)$',
'duration': 279,
'timestamp': 1438732860,
'upload_date': '20150805',
'is_live': False,
}
}, { # live video test
'url': 'http://slowtv.playtvak.cz/planespotting-0pr-/planespotting.aspx?c=A150624_164934_planespotting_cat',
'info_dict': {
'id': 'A150624_164934_planespotting_cat',
'ext': 'flv',
'title': 're:^Přímý přenos iDNES.cz [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'description': 'Sledujte provoz na ranveji Letiště Václava Havla v Praze',
'thumbnail': r're:(?i)^https?://.*\.(?:jpg|png)$',
'is_live': True,
},
'params': {
'skip_download': True, # requires rtmpdump
},
}, { # idnes.cz
'url': 'http://zpravy.idnes.cz/pes-zavreny-v-aute-rozbijeni-okynek-v-aute-fj5-/domaci.aspx?c=A150809_104116_domaci_pku',
'md5': '819832ba33cd7016e58a6658577fe289',
'info_dict': {
'id': 'A150809_104116_domaci_pku',
'ext': 'mp4',
'title': 'Zavřeli jsme mraženou pizzu do auta. Upekla se',
'description': 'md5:01e73f02329e2e5760bd5eed4d42e3c2',
'thumbnail': r're:(?i)^https?://.*\.(?:jpg|png)$',
'duration': 39,
'timestamp': 1438969140,
'upload_date': '20150807',
'is_live': False,
}
}, { # lidovky.cz
'url': 'http://www.lidovky.cz/dalsi-demonstrace-v-praze-o-migraci-duq-/video.aspx?c=A150808_214044_ln-video_ELE',
'md5': 'c7209ac4ba9d234d4ad5bab7485bcee8',
'info_dict': {
'id': 'A150808_214044_ln-video_ELE',
'ext': 'mp4',
'title': 'Táhni! Demonstrace proti imigrantům budila emoce',
'description': 'md5:97c81d589a9491fbfa323c9fa3cca72c',
'thumbnail': r're:(?i)^https?://.*\.(?:jpg|png)$',
'timestamp': 1439052180,
'upload_date': '20150808',
'is_live': False,
}
}, { # metro.cz
'url': 'http://www.metro.cz/video-pod-billboardem-se-na-vltavske-roztocil-kolotoc-deti-vozil-jen-par-hodin-1hx-/metro-extra.aspx?c=A141111_173251_metro-extra_row',
'md5': '84fc1deedcac37b7d4a6ccae7c716668',
'info_dict': {
'id': 'A141111_173251_metro-extra_row',
'ext': 'mp4',
'title': 'Recesisté udělali z billboardu kolotoč',
'description': 'md5:7369926049588c3989a66c9c1a043c4c',
'thumbnail': r're:(?i)^https?://.*\.(?:jpg|png)$',
'timestamp': 1415725500,
'upload_date': '20141111',
'is_live': False,
}
}, {
'url': 'http://www.playtvak.cz/embed.aspx?idvideo=V150729_141549_play-porad_kuko',
'only_matching': True,
}]
    def _real_extract(self, url):
        """Extract metadata and formats for a Playtvak-family video page."""
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The page embeds the player-info URL in a Misc.videoFLV(...) call.
        info_url = self._html_search_regex(
            r'Misc\.videoFLV\(\s*{\s*data\s*:\s*"([^"]+)"', webpage, 'info url')
        parsed_url = compat_urlparse.urlparse(info_url)
        qs = compat_urlparse.parse_qs(parsed_url.query)
        # reklama=0 disables ads; type=js requests a JS/JSON payload.
        qs.update({
            'reklama': ['0'],
            'type': ['js'],
        })
        info_url = compat_urlparse.urlunparse(
            parsed_url._replace(query=compat_urllib_parse_urlencode(qs, True)))
        # The endpoint wraps JSON in a JS assignment; slice to the outer braces.
        json_info = self._download_json(
            info_url, video_id,
            transform_source=lambda s: s[s.index('{'):s.rindex('}') + 1])
        # Pick the first on-demand video or live stream item.
        item = None
        for i in json_info['items']:
            if i.get('type') == 'video' or i.get('type') == 'stream':
                item = i
                break
        if not item:
            raise ExtractorError('No suitable stream found')
        quality = qualities(('low', 'middle', 'high'))
        formats = []
        for fmt in item['video']:
            video_url = fmt.get('file')
            if not video_url:
                continue
            format_ = fmt['format']
            format_id = '%s_%s' % (format_, fmt['quality'])
            preference = None
            if format_ in ('mp4', 'webm'):
                ext = format_
            elif format_ == 'rtmp':
                ext = 'flv'
            elif format_ == 'apple':
                ext = 'mp4'
                # Some streams have mp3 audio which does not play
                # well with ffmpeg filter aac_adtstoasc
                preference = -1
            elif format_ == 'adobe':  # f4m manifest fails with 404 in 80% of requests
                continue
            else:  # Other formats not supported yet
                continue
            formats.append({
                'url': video_url,
                'ext': ext,
                'format_id': format_id,
                'quality': quality(fmt.get('quality')),
                'preference': preference,
            })
        self._sort_formats(formats)
        title = item['title']
        is_live = item['type'] == 'stream'
        if is_live:
            title = self._live_title(title)
        description = self._og_search_description(webpage, default=None) or self._html_search_meta(
            'description', webpage, 'description')
        timestamp = None
        duration = None
        if not is_live:
            duration = int_or_none(item.get('length'))
            timestamp = item.get('published')
            if timestamp:
                # Drops the last 5 characters before ISO-8601 parsing —
                # presumably a '+HH:MM' timezone suffix; TODO confirm.
                timestamp = parse_iso8601(timestamp[:-5])
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': item.get('image'),
            'duration': duration,
            'timestamp': timestamp,
            'is_live': is_live,
            'formats': formats,
        }
|
jscott413/maidsinharlem
|
refs/heads/master
|
flask/lib/python2.7/site-packages/pip/models/index.py
|
917
|
from pip._vendor.six.moves.urllib import parse as urllib_parse
class Index(object):
    """A package index rooted at *url*, with its well-known endpoint URLs."""

    def __init__(self, url):
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        # Pre-computed endpoints: simple index, XML-RPC/web root, pip's JSON page.
        self.simple_url = self.url_to_path('simple')
        self.pypi_url = self.url_to_path('pypi')
        self.pip_json_url = self.url_to_path('pypi/pip/json')

    def url_to_path(self, path):
        # NOTE(review): despite the name, this resolves a URL path segment
        # against the index base URL — it does not produce a filesystem path.
        return urllib_parse.urljoin(self.url, path)


# Default index instance pointing at PyPI.
PyPI = Index('https://pypi.python.org/')
|
proximo256/kernel_samsung_exynos7420
|
refs/heads/cm-12.1
|
scripts/rt-tester/rt-tester.py
|
11005
|
#!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
# Print progress when not in quiet mode
def progress(str):
if not quiet:
print str
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
|
sgarrity/bedrock
|
refs/heads/master
|
tests/redirects/map_301.py
|
3
|
from .base import flatten, url_test
URLS = flatten((
# from org-urls-301.txt
url_test('/projects/firefox/build.html',
'http://developer.mozilla.org/en/Build_Documentation'),
url_test('/projects/firefox/extensions/index.html',
'http://developer.mozilla.org/en/Extensions'),
url_test('/projects/firefox/extensions/web-api.html',
'http://developer.mozilla.org/en/Installing_Extensions_and_Themes_From_Web_Pages'),
url_test('/projects/firefox/extensions/packaging/extensions.html',
'http://developer.mozilla.org/en/Extension_Packaging'),
url_test('/projects/firefox/extensions/packaging/themes.html',
'http://developer.mozilla.org/en/Theme_Packaging'),
url_test('/projects/firefox/review.html', 'https://wiki.mozilla.org/Firefox/Code_Review'),
url_test('/projects/toolkit/review.html', 'https://wiki.mozilla.org/Toolkit/Code_Review'),
url_test('/about.html', '/about/roles'),
url_test('/about/etiquette.html', '/about/forums/etiquette.html'),
url_test('/about/free.html', '/causes/free.html'),
url_test('/about/manifesto', '/about/manifesto.html'),
url_test('/about/owners.html', 'https://wiki.mozilla.org/Modules'),
url_test('/access/donate.html', 'https://donate.mozilla.org/'),
url_test('/airmozilla/?', 'http://air.mozilla.org/'),
url_test('/binaries.html', '/projects/'),
url_test('/blue-sky.html', '/blue-sky/'),
url_test('/bonsai.html', 'http://developer.mozilla.org/en/Bonsai'),
url_test('/bugs.html', '/bugs/'),
url_test('/bugs/bug-reporting.html',
'/quality/bug-writing-guidelines.html'),
url_test('/bugs/changes.html', 'http://www.bugzilla.org/status/changes.html'),
url_test('/bugs/query.html', '/quality/bug-writing-guidelines.html'),
url_test('/bugs/report.html', 'http://developer.mozilla.org/en/Bug_writing_guidelines'),
url_test('/bugs/source.html', 'http://www.bugzilla.org/'),
url_test('/bugs/text-searching.html',
'/quality/bug-writing-guidelines.html'),
url_test('/build/build-system.html',
'http://developer.mozilla.org/en/How_mozilla%27s_build_system_works'),
url_test('/build/configure-build.html',
'http://developer.mozilla.org/en/Configuring_Build_Options'),
url_test('/build/cross-compiling.html',
'http://developer.mozilla.org/en/Cross-Compiling_Mozilla'),
url_test('/build/cvs-tag.html', 'http://developer.mozilla.org/en/Creating_a_Release_Tag'),
url_test('/build/distribution.html',
'http://developer.mozilla.org/en/Building_a_Mozilla_Distribution'),
url_test('/build/faq.html', 'http://developer.mozilla.org/en/Mozilla_Build_FAQ'),
url_test('/build/', 'http://developer.mozilla.org/en/Build_Documentation'),
url_test('/build/jar-packaging.html', 'http://developer.mozilla.org/en/JAR_Packaging'),
url_test('/build/mac-build-system.html',
'http://developer.mozilla.org/en/Mac_OS_X_Build_Prerequisites'),
url_test('/build/mac.html', 'http://developer.mozilla.org/en/Mac_OS_X_Build_Prerequisites'),
url_test('/build/make-build.html', 'http://developer.mozilla.org/en/Build_and_Install'),
url_test('/build/making-additions.html',
'http://developer.mozilla.org/en/Adding_Files_to_the_Build'),
url_test('/build/release-build-notes.html',
'http://developer.mozilla.org/en/Mozilla_Release_Build_Notes'),
url_test('/build/release-checklist.html',
'http://developer.mozilla.org/en/Mozilla_Release_Checklist'),
url_test('/build/revised-user-agent-strings.html',
'http://developer.mozilla.org/en/User_Agent_Strings_Reference'),
url_test('/build/sheriff-schedule.html', 'http://wiki.mozilla.org/Sheriff_Schedule'),
url_test('/build/sheriff.html', 'http://wiki.mozilla.org/Sheriff_Duty'),
url_test('/build/sheriff/sheriff-schedule.html', 'http://wiki.mozilla.org/Sheriff_Schedule'),
url_test('/build/unix-cheatsheet.html',
'http://developer.mozilla.org/en/Linux_Cheat_Sheet_for_Mac_and_Windows_Programmers'),
url_test('/build/unix-details.html',
'http://developer.mozilla.org/En/Unix_Detailed_Build_Instructions'),
url_test('/build/unix.html', 'http://developer.mozilla.org/en/Linux_Build_Prerequisites'),
url_test('/build/win32-debugging-faq.html',
'http://developer.mozilla.org/en/Debugging_Mozilla_on_Windows_FAQ'),
url_test('/build/win32.html', 'http://developer.mozilla.org/en/Windows_Build_Prerequisites'),
url_test('/build/windbgdlg.html',
'http://developer.mozilla.org/en/'
'Automatically_Handle_Failed_Asserts_in_Debug_Builds'),
url_test('/camino', 'http://caminobrowser.org/'),
url_test('/catalog/development/compiling/',
'http://developer.mozilla.org/en/Build_Documentation'),
url_test('/catalog/development/compiling/cvs-sourcecode.html',
'/cvs.html'),
url_test('/catalog/development/tools/cvs-tarball.html',
'http://developer.mozilla.org/en/Mozilla_Source_Code_(CVS)'),
url_test('/catalog/development/website/cvs-website.html',
'/contribute/writing/cvs'),
url_test('/catalog/end-user/customizing/briefprefs.html',
'http://developer.mozilla.org/En/A_Brief_Guide_to_Mozilla_Preferences'),
url_test('/causes/access.html', '/about/mission.html'),
url_test('/causes/accessibility.html', '/causes/access.html'),
url_test('/causes/better.html', '/about/mission.html'),
url_test('/causes/education.html', '/about/mission.html'),
url_test('/causes/free.html', '/about/mission.html'),
url_test('/causes/openweb.html', '/about/mission.html'),
url_test('/causes/security.html', '/about/mission.html'),
url_test('/classic/nsprdesc.html', 'http://developer.mozilla.org/en/About_NSPR'),
url_test('/contribute/buttons/', 'https://affiliates.mozilla.org/'),
url_test('/contribute/buttons/index.html', 'https://affiliates.mozilla.org/'),
url_test('/contribute/buttons/index.{es,ko,mk,nl}.html', 'https://affiliates.mozilla.org/'),
url_test('/contribute/get-involved.html', '/contribute/'),
url_test('/contribute/writing/cvs.html',
'https://wiki.mozilla.org/Mozilla.org:How_to_Work_with_Site'),
url_test('/contribute/writing/how-to.html',
'https://developer.mozilla.org/Project:en/How_to_document_Mozilla'),
url_test('/contribute/writing/process.html',
'https://developer.mozilla.org/Project:en/Getting_started'),
url_test('/crypto-faq.html', 'http://developer.mozilla.org/en/Mozilla_Crypto_FAQ'),
url_test('/cvs-ssh-faq.html', 'http://developer.mozilla.org/en/Using_SSH_to_connect_to_CVS'),
url_test('/cvs.html', 'http://developer.mozilla.org/en/Mozilla_Source_Code_Via_CVS'),
url_test('/dejanews.gif', '/images/dejanews.gif'),
url_test('/docs.html', '/docs/'),
url_test('/docs/command-line-args.html',
'http://developer.mozilla.org/en/Command_Line_Options'),
url_test('/docs/contribute.html', '/contribute/writing/process'),
url_test('/docs/docshell/mozilla_downloads_path2.png',
'https://developer.mozilla.org/@api/deki/files/279/=Mozilla_downloads_path2.png'),
url_test('/docs/docshell/mozilla_downloads.html',
'http://developer.mozilla.org/en/Overview_of_how_downloads_work'),
url_test('/docs/docshell/mozilla_downloads.png',
'https://developer.mozilla.org/@api/deki/files/278/=Mozilla_downloads.png'),
url_test('/docs/docshell/uri-load-start.html',
'http://developer.mozilla.org/en/'
'Document_Loading_-_From_Load_Start_to_Finding_a_Handler'),
url_test('/docs/dom/about/',
'http://developer.mozilla.org/en/About_the_Document_Object_Model'),
url_test('/docs/dom/dom-talk/',
'http://developer.mozilla.org/en/DOM_Implementation_and_Scriptability'),
url_test('/docs/dom/domref/clientHeight.html',
'http://developer.mozilla.org/en/DOM:element.clientHeight'),
url_test('/docs/dom/domref/clientWidth.html',
'http://developer.mozilla.org/en/DOM:element.clientWidth'),
url_test('/docs/dom/domref/dom_doc_ref.html',
'http://developer.mozilla.org/en/DOM:document'),
url_test('/docs/dom/domref/dom_doc_ref2.html',
'http://developer.mozilla.org/en/DOM:element.attributes'),
url_test('/docs/dom/domref/dom_doc_ref3.html',
'http://developer.mozilla.org/en/DOM:document.alinkColor'),
url_test('/docs/dom/domref/dom_doc_ref4.html',
'http://developer.mozilla.org/en/DOM:document.anchors'),
url_test('/docs/dom/domref/dom_doc_ref5.html',
'http://developer.mozilla.org/en/DOM:document.applets'),
url_test('/docs/dom/domref/dom_doc_ref6.html',
'http://developer.mozilla.org/en/DOM:document.bgColor'),
url_test('/docs/dom/domref/dom_doc_ref7.html',
'http://developer.mozilla.org/en/DOM:document.body'),
url_test('/docs/dom/domref/dom_doc_ref8.html',
'http://developer.mozilla.org/en/DOM:document.characterSet'),
url_test('/docs/dom/domref/dom_doc_ref9.html',
'http://developer.mozilla.org/en/DOM:element.childNodes'),
url_test('/docs/dom/domref/dom_doc_ref10.html',
'http://developer.mozilla.org/en/DOM:document.compatMode'),
url_test('/docs/dom/domref/dom_doc_ref11.html',
'http://developer.mozilla.org/en/DOM:document.cookie'),
url_test('/docs/dom/domref/dom_doc_ref12.html',
'http://developer.mozilla.org/en/DOM:document.contentWindow'),
url_test('/docs/dom/domref/dom_doc_ref13.html',
'http://developer.mozilla.org/en/DOM:document.doctype'),
url_test('/docs/dom/domref/dom_doc_ref14.html',
'http://developer.mozilla.org/en/DOM:document.documentElement'),
url_test('/docs/dom/domref/dom_doc_ref15.html',
'http://developer.mozilla.org/en/DOM:document.domain'),
url_test('/docs/dom/domref/dom_doc_ref16.html',
'http://developer.mozilla.org/en/DOM:document.embeds'),
url_test('/docs/dom/domref/dom_doc_ref17.html',
'http://developer.mozilla.org/en/DOM:document.fgColor'),
url_test('/docs/dom/domref/dom_doc_ref18.html',
'http://developer.mozilla.org/en/DOM:element.firstChild'),
url_test('/docs/dom/domref/dom_doc_ref19.html',
'http://developer.mozilla.org/en/DOM:document.forms'),
url_test('/docs/dom/domref/dom_doc_ref20.html',
'http://developer.mozilla.org/en/DOM:document.height'),
url_test('/docs/dom/domref/dom_doc_ref21.html',
'http://developer.mozilla.org/en/DOM:document.images'),
url_test('/docs/dom/domref/dom_doc_ref22.html',
'http://developer.mozilla.org/en/DOM:document.implementation'),
url_test('/docs/dom/domref/dom_doc_ref23.html',
'http://developer.mozilla.org/en/DOM:document.lastModified'),
url_test('/docs/dom/domref/dom_doc_ref24.html',
'http://developer.mozilla.org/en/DOM:document.linkColor'),
url_test('/docs/dom/domref/dom_doc_ref25.html',
'http://developer.mozilla.org/en/DOM:document.links'),
url_test('/docs/dom/domref/dom_doc_ref26.html',
'http://developer.mozilla.org/en/DOM:document.location'),
url_test('/docs/dom/domref/dom_doc_ref27.html',
'http://developer.mozilla.org/en/DOM:element.namespaceURI'),
url_test('/docs/dom/domref/dom_doc_ref28.html',
'http://developer.mozilla.org/en/DOM:element.nextSibling'),
url_test('/docs/dom/domref/dom_doc_ref29.html',
'http://developer.mozilla.org/en/DOM:element.nodeName'),
url_test('/docs/dom/domref/dom_doc_ref30.html',
'http://developer.mozilla.org/en/DOM:element.nodeType'),
url_test('/docs/dom/domref/dom_doc_ref31.html',
'http://developer.mozilla.org/en/DOM:element.nodeValue'),
url_test('/docs/dom/domref/dom_doc_ref32.html',
'http://developer.mozilla.org/en/DOM:element.ownerDocument'),
url_test('/docs/dom/domref/dom_doc_ref33.html',
'http://developer.mozilla.org/en/DOM:element.parentNode'),
url_test('/docs/dom/domref/dom_doc_ref34.html',
'http://developer.mozilla.org/en/DOM:document.plugins'),
url_test('/docs/dom/domref/dom_doc_ref35.html',
'http://developer.mozilla.org/en/DOM:element.previousSibling'),
url_test('/docs/dom/domref/dom_doc_ref36.html',
'http://developer.mozilla.org/en/DOM:document.referrer'),
url_test('/docs/dom/domref/dom_doc_ref37.html',
'http://developer.mozilla.org/en/DOM:document.styleSheets'),
url_test('/docs/dom/domref/dom_doc_ref38.html',
'http://developer.mozilla.org/en/DOM:document.title'),
url_test('/docs/dom/domref/dom_doc_ref39.html',
'http://developer.mozilla.org/en/DOM:document.URL'),
url_test('/docs/dom/domref/dom_doc_ref40.html',
'http://developer.mozilla.org/en/DOM:document.vlinkColor'),
url_test('/docs/dom/domref/dom_doc_ref41.html',
'http://developer.mozilla.org/en/DOM:document.width'),
url_test('/docs/dom/domref/dom_doc_ref42.html',
'http://developer.mozilla.org/en/DOM:document.clear'),
url_test('/docs/dom/domref/dom_doc_ref43.html',
'http://developer.mozilla.org/en/DOM:document.close'),
url_test('/docs/dom/domref/dom_doc_ref44.html',
'http://developer.mozilla.org/en/DOM:document.createAttribute'),
url_test('/docs/dom/domref/dom_doc_ref45.html',
'http://developer.mozilla.org/en/DOM:document.createDocumentFragment'),
url_test('/docs/dom/domref/dom_doc_ref46.html',
'http://developer.mozilla.org/en/DOM:document.createElement'),
url_test('/docs/dom/domref/dom_doc_ref47.html',
'http://developer.mozilla.org/en/DOM:document.createTextNode'),
url_test('/docs/dom/domref/dom_doc_ref48.html',
'http://developer.mozilla.org/en/DOM:document.getElementById'),
url_test('/docs/dom/domref/dom_doc_ref49.html',
'http://developer.mozilla.org/en/DOM:document.getElementsByName'),
url_test('/docs/dom/domref/dom_doc_ref50.html',
'http://developer.mozilla.org/en/DOM:element.getElementsByTagName'),
url_test('/docs/dom/domref/dom_doc_ref51.html',
'http://developer.mozilla.org/en/DOM:document.open'),
url_test('/docs/dom/domref/dom_doc_ref52.html',
'http://developer.mozilla.org/en/DOM:document.write'),
url_test('/docs/dom/domref/dom_doc_ref53.html',
'http://developer.mozilla.org/en/DOM:document.writeln'),
url_test('/docs/dom/domref/dom_doc_ref54.html',
'http://developer.mozilla.org/en/DOM:element.onblur'),
url_test('/docs/dom/domref/dom_doc_ref55.html',
'http://developer.mozilla.org/en/DOM:element.onclick'),
url_test('/docs/dom/domref/dom_doc_ref56.html',
'http://developer.mozilla.org/en/DOM:element.ondblclick'),
url_test('/docs/dom/domref/dom_doc_ref57.html',
'http://developer.mozilla.org/en/DOM:element.onfocus'),
url_test('/docs/dom/domref/dom_doc_ref58.html',
'http://developer.mozilla.org/en/DOM:element.onkeydown'),
url_test('/docs/dom/domref/dom_doc_ref59.html',
'http://developer.mozilla.org/en/DOM:element.onkeypress'),
url_test('/docs/dom/domref/dom_doc_ref60.html',
'http://developer.mozilla.org/en/DOM:element.onkeyup'),
url_test('/docs/dom/domref/dom_doc_ref61.html',
'http://developer.mozilla.org/en/DOM:element.onmousedown'),
url_test('/docs/dom/domref/dom_doc_ref62.html',
'http://developer.mozilla.org/en/DOM:element.onmousemove'),
url_test('/docs/dom/domref/dom_doc_ref63.html',
'http://developer.mozilla.org/en/DOM:element.onmouseout'),
url_test('/docs/dom/domref/dom_doc_ref64.html',
'http://developer.mozilla.org/en/DOM:element.onmouseover'),
url_test('/docs/dom/domref/dom_doc_ref65.html',
'http://developer.mozilla.org/en/DOM:element.onmouseup'),
url_test('/docs/dom/domref/dom_doc_ref66.html',
'http://developer.mozilla.org/en/DOM:element.onresize'),
url_test('/docs/dom/domref/dom_doc_ref67.html',
'http://developer.mozilla.org/en/DOM:element.onresize'),
url_test('/docs/dom/domref/dom_el_ref.html', 'http://developer.mozilla.org/en/DOM:element'),
url_test('/docs/dom/domref/dom_el_ref2.html',
'http://developer.mozilla.org/en/DOM:element.attributes'),
url_test('/docs/dom/domref/dom_el_ref3.html',
'http://developer.mozilla.org/en/DOM:element.childNodes'),
url_test('/docs/dom/domref/dom_el_ref4.html',
'http://developer.mozilla.org/en/DOM:element.className'),
url_test('/docs/dom/domref/dom_el_ref5.html',
'http://developer.mozilla.org/en/DOM:element.dir'),
url_test('/docs/dom/domref/dom_el_ref6.html',
'http://developer.mozilla.org/en/DOM:element.firstChild'),
url_test('/docs/dom/domref/dom_el_ref7.html',
'http://developer.mozilla.org/en/DOM:element.id'),
url_test('/docs/dom/domref/dom_el_ref8.html',
'http://developer.mozilla.org/en/DOM:element.innerHTML'),
url_test('/docs/dom/domref/dom_el_ref9.html',
'http://developer.mozilla.org/en/DOM:element.lang'),
url_test('/docs/dom/domref/dom_el_ref10.html',
'http://developer.mozilla.org/en/DOM:element.lastChild'),
url_test('/docs/dom/domref/dom_el_ref11.html',
'http://developer.mozilla.org/en/DOM:element.length'),
url_test('/docs/dom/domref/dom_el_ref12.html',
'http://developer.mozilla.org/en/DOM:element.localName'),
url_test('/docs/dom/domref/dom_el_ref13.html',
'http://developer.mozilla.org/en/DOM:element.namespaceURI'),
url_test('/docs/dom/domref/dom_el_ref14.html',
'http://developer.mozilla.org/en/DOM:element.nextSibling'),
url_test('/docs/dom/domref/dom_el_ref15.html',
'http://developer.mozilla.org/en/DOM:element.nodeName'),
url_test('/docs/dom/domref/dom_el_ref16.html',
'http://developer.mozilla.org/en/DOM:element.nodeType'),
url_test('/docs/dom/domref/dom_el_ref17.html',
'http://developer.mozilla.org/en/DOM:element.nodeValue'),
url_test('/docs/dom/domref/dom_el_ref18.html',
'http://developer.mozilla.org/en/DOM:element.offsetHeight'),
url_test('/docs/dom/domref/dom_el_ref19.html',
'http://developer.mozilla.org/en/DOM:element.offsetLeft'),
url_test('/docs/dom/domref/dom_el_ref20.html',
'http://developer.mozilla.org/en/DOM:element.offsetParent'),
url_test('/docs/dom/domref/dom_el_ref21.html',
'http://developer.mozilla.org/en/DOM:element.offsetTop'),
url_test('/docs/dom/domref/dom_el_ref22.html',
'http://developer.mozilla.org/en/DOM:element.offsetWidth'),
url_test('/docs/dom/domref/dom_el_ref23.html',
'http://developer.mozilla.org/en/DOM:element.ownerDocument'),
url_test('/docs/dom/domref/dom_el_ref24.html',
'http://developer.mozilla.org/en/DOM:element.parentNode'),
url_test('/docs/dom/domref/dom_el_ref25.html',
'http://developer.mozilla.org/en/DOM:element.prefix'),
url_test('/docs/dom/domref/dom_el_ref26.html',
'http://developer.mozilla.org/en/DOM:element.previousSibling'),
url_test('/docs/dom/domref/dom_el_ref27.html',
'http://developer.mozilla.org/en/DOM:element.style'),
url_test('/docs/dom/domref/dom_el_ref28.html',
'http://developer.mozilla.org/en/DOM:element.tabIndex'),
url_test('/docs/dom/domref/dom_el_ref29.html',
'http://developer.mozilla.org/en/DOM:element.tagName'),
url_test('/docs/dom/domref/dom_el_ref30.html',
'http://developer.mozilla.org/en/DOM:document.title'),
url_test('/docs/dom/domref/dom_el_ref31.html',
'http://developer.mozilla.org/en/DOM:element.addEventListener'),
url_test('/docs/dom/domref/dom_el_ref32.html',
'http://developer.mozilla.org/en/DOM:element.appendChild'),
url_test('/docs/dom/domref/dom_el_ref33.html',
'http://developer.mozilla.org/en/DOM:element.blur'),
url_test('/docs/dom/domref/dom_el_ref34.html',
'http://developer.mozilla.org/en/DOM:element.click'),
url_test('/docs/dom/domref/dom_el_ref35.html',
'http://developer.mozilla.org/en/DOM:element.cloneNode'),
url_test('/docs/dom/domref/dom_el_ref36.html',
'http://developer.mozilla.org/en/DOM:element.dispatchEvent'),
url_test('/docs/dom/domref/dom_el_ref37.html',
'http://developer.mozilla.org/en/DOM:element.focus'),
url_test('/docs/dom/domref/dom_el_ref38.html',
'http://developer.mozilla.org/en/DOM:element.getAttribute'),
url_test('/docs/dom/domref/dom_el_ref39.html',
'http://developer.mozilla.org/en/DOM:element.getAttributeNS'),
url_test('/docs/dom/domref/dom_el_ref40.html',
'http://developer.mozilla.org/en/DOM:element.getAttributeNode'),
url_test('/docs/dom/domref/dom_el_ref41.html',
'http://developer.mozilla.org/en/DOM:element.getAttributeNodeNS'),
url_test('/docs/dom/domref/dom_el_ref42.html',
'http://developer.mozilla.org/en/DOM:element.getElementsByTagName'),
url_test('/docs/dom/domref/dom_el_ref43.html',
'http://developer.mozilla.org/en/DOM:element.hasAttribute'),
url_test('/docs/dom/domref/dom_el_ref44.html',
'http://developer.mozilla.org/en/DOM:element.hasAttributeNS'),
url_test('/docs/dom/domref/dom_el_ref45.html',
'http://developer.mozilla.org/en/DOM:element.hasAttributes'),
url_test('/docs/dom/domref/dom_el_ref46.html',
'http://developer.mozilla.org/en/DOM:element.hasChildNodes'),
url_test('/docs/dom/domref/dom_el_ref47.html',
'http://developer.mozilla.org/en/DOM:element.insertBefore'),
url_test('/docs/dom/domref/dom_el_ref48.html',
'http://developer.mozilla.org/en/DOM:element.item'),
url_test('/docs/dom/domref/dom_el_ref49.html',
'http://developer.mozilla.org/en/DOM:element.nextSibling'),
url_test('/docs/dom/domref/dom_el_ref50.html',
'http://developer.mozilla.org/en/DOM:element.normalize'),
url_test('/docs/dom/domref/dom_el_ref51.html',
'http://developer.mozilla.org/en/DOM:element.removeAttribute'),
url_test('/docs/dom/domref/dom_el_ref52.html',
'http://developer.mozilla.org/en/DOM:element.removeAttributeNS'),
url_test('/docs/dom/domref/dom_el_ref53.html',
'http://developer.mozilla.org/en/DOM:element.removeAttributeNode'),
url_test('/docs/dom/domref/dom_el_ref54.html',
'http://developer.mozilla.org/en/DOM:element.removeChild'),
url_test('/docs/dom/domref/dom_el_ref55.html',
'http://developer.mozilla.org/en/DOM:element.removeEventListener'),
url_test('/docs/dom/domref/dom_el_ref56.html',
'http://developer.mozilla.org/en/DOM:element.replaceChild'),
url_test('/docs/dom/domref/dom_el_ref57.html',
'http://developer.mozilla.org/en/DOM:element.setAttribute'),
url_test('/docs/dom/domref/dom_el_ref58.html',
'http://developer.mozilla.org/en/DOM:element.setAttributeNS'),
url_test('/docs/dom/domref/dom_el_ref59.html',
'http://developer.mozilla.org/en/DOM:element.setAttributeNode'),
url_test('/docs/dom/domref/dom_el_ref60.html',
'http://developer.mozilla.org/en/DOM:element.setAttributeNodeNS'),
url_test('/docs/dom/domref/dom_el_ref61.html',
'http://developer.mozilla.org/en/DOM:element.supports'),
url_test('/docs/dom/domref/dom_el_ref62.html',
'http://developer.mozilla.org/en/DOM:element.onblur'),
url_test('/docs/dom/domref/dom_el_ref63.html',
'http://developer.mozilla.org/en/DOM:element.onclick'),
url_test('/docs/dom/domref/dom_el_ref64.html',
'http://developer.mozilla.org/en/DOM:element.ondblclick'),
url_test('/docs/dom/domref/dom_el_ref65.html',
'http://developer.mozilla.org/en/DOM:element.onfocus'),
url_test('/docs/dom/domref/dom_el_ref66.html',
'http://developer.mozilla.org/en/DOM:element.onkeydown'),
url_test('/docs/dom/domref/dom_el_ref67.html',
'http://developer.mozilla.org/en/DOM:element.onkeypress'),
url_test('/docs/dom/domref/dom_el_ref68.html',
'http://developer.mozilla.org/en/DOM:element.onkeyup'),
url_test('/docs/dom/domref/dom_el_ref69.html',
'http://developer.mozilla.org/en/DOM:element.onmousedown'),
url_test('/docs/dom/domref/dom_el_ref70.html',
'http://developer.mozilla.org/en/DOM:element.onmousemove'),
url_test('/docs/dom/domref/dom_el_ref71.html',
'http://developer.mozilla.org/en/DOM:element.onmouseout'),
url_test('/docs/dom/domref/dom_el_ref72.html',
'http://developer.mozilla.org/en/DOM:element.onmouseover'),
url_test('/docs/dom/domref/dom_el_ref73.html',
'http://developer.mozilla.org/en/DOM:element.onmouseup'),
url_test('/docs/dom/domref/dom_el_ref74.html',
'http://developer.mozilla.org/en/DOM:element.onresize'),
url_test('/docs/dom/domref/dom_event_ref.html', 'http://developer.mozilla.org/en/DOM:event'),
url_test('/docs/dom/domref/dom_event_ref2.html',
'http://developer.mozilla.org/en/DOM:event.altKey'),
url_test('/docs/dom/domref/dom_event_ref3.html',
'http://developer.mozilla.org/en/DOM:event.bubbles'),
url_test('/docs/dom/domref/dom_event_ref4.html',
'http://developer.mozilla.org/en/DOM:event.cancelBubble'),
url_test('/docs/dom/domref/dom_event_ref5.html',
'http://developer.mozilla.org/en/DOM:event.cancelable'),
url_test('/docs/dom/domref/dom_event_ref6.html',
'http://developer.mozilla.org/en/DOM:event.charCode'),
url_test('/docs/dom/domref/dom_event_ref7.html',
'http://developer.mozilla.org/en/DOM:event.clientX'),
url_test('/docs/dom/domref/dom_event_ref8.html',
'http://developer.mozilla.org/en/DOM:event.clientY'),
url_test('/docs/dom/domref/dom_event_ref9.html',
'http://developer.mozilla.org/en/DOM:event.ctrlKey'),
url_test('/docs/dom/domref/dom_event_ref10.html',
'http://developer.mozilla.org/en/DOM:event.currentTarget'),
url_test('/docs/dom/domref/dom_event_ref11.html',
'http://developer.mozilla.org/en/DOM:event.detail'),
url_test('/docs/dom/domref/dom_event_ref12.html',
'http://developer.mozilla.org/en/DOM:event.eventPhase'),
url_test('/docs/dom/domref/dom_event_ref13.html',
'http://developer.mozilla.org/en/DOM:event.isChar'),
url_test('/docs/dom/domref/dom_event_ref14.html',
'http://developer.mozilla.org/en/DOM:event.keyCode'),
url_test('/docs/dom/domref/dom_event_ref15.html',
'http://developer.mozilla.org/en/DOM:event.layerX'),
url_test('/docs/dom/domref/dom_event_ref16.html',
'http://developer.mozilla.org/en/DOM:event.layerY'),
url_test('/docs/dom/domref/dom_event_ref17.html',
'http://developer.mozilla.org/en/DOM:event.metaKey'),
url_test('/docs/dom/domref/dom_event_ref18.html',
'http://developer.mozilla.org/en/DOM:event.pageX'),
url_test('/docs/dom/domref/dom_event_ref19.html',
'http://developer.mozilla.org/en/DOM:event.pageY'),
url_test('/docs/dom/domref/dom_event_ref20.html',
'http://developer.mozilla.org/en/DOM:event.relatedTarget'),
url_test('/docs/dom/domref/dom_event_ref21.html',
'http://developer.mozilla.org/en/DOM:event.screenX'),
url_test('/docs/dom/domref/dom_event_ref22.html',
'http://developer.mozilla.org/en/DOM:event.screenY'),
url_test('/docs/dom/domref/dom_event_ref23.html',
'http://developer.mozilla.org/en/DOM:event.shiftKey'),
url_test('/docs/dom/domref/dom_event_ref24.html',
'http://developer.mozilla.org/en/DOM:event.target'),
url_test('/docs/dom/domref/dom_event_ref25.html',
'http://developer.mozilla.org/en/DOM:event.timeStamp'),
url_test('/docs/dom/domref/dom_event_ref26.html',
'http://developer.mozilla.org/en/DOM:event.type'),
url_test('/docs/dom/domref/dom_event_ref27.html',
'http://developer.mozilla.org/en/DOM:event.view'),
url_test('/docs/dom/domref/dom_event_ref28.html',
'http://developer.mozilla.org/en/DOM:event.initEvent'),
url_test('/docs/dom/domref/dom_event_ref29.html',
'http://developer.mozilla.org/en/DOM:event.initMouseEvent'),
url_test('/docs/dom/domref/dom_event_ref30.html',
'http://developer.mozilla.org/en/DOM:event.initUIEvent'),
url_test('/docs/dom/domref/dom_event_ref31.html',
'http://developer.mozilla.org/en/DOM:event.preventDefault'),
url_test('/docs/dom/domref/dom_event_ref32.html',
'http://developer.mozilla.org/en/DOM:event.stopPropagation'),
url_test('/docs/dom/domref/dom_html_ref2.html',
'http://developer.mozilla.org/en/DOM:form.elements'),
url_test('/docs/dom/domref/dom_html_ref3.html',
'http://developer.mozilla.org/en/DOM:form.length'),
url_test('/docs/dom/domref/dom_html_ref4.html',
'http://developer.mozilla.org/en/DOM:form.name'),
url_test('/docs/dom/domref/dom_html_ref5.html',
'http://developer.mozilla.org/en/DOM:form.acceptCharset'),
url_test('/docs/dom/domref/dom_html_ref6.html',
'http://developer.mozilla.org/en/DOM:form.action'),
url_test('/docs/dom/domref/dom_html_ref7.html',
'http://developer.mozilla.org/en/DOM:form.enctype'),
url_test('/docs/dom/domref/dom_html_ref8.html',
'http://developer.mozilla.org/en/DOM:form.encoding'),
url_test('/docs/dom/domref/dom_html_ref9.html',
'http://developer.mozilla.org/en/DOM:form.method'),
url_test('/docs/dom/domref/dom_html_ref10.html',
'http://developer.mozilla.org/en/DOM:form.target'),
url_test('/docs/dom/domref/dom_html_ref11.html',
'http://developer.mozilla.org/en/DOM:form.submit'),
url_test('/docs/dom/domref/dom_html_ref12.html',
'http://developer.mozilla.org/en/DOM:table'),
url_test('/docs/dom/domref/dom_html_ref13.html',
'http://developer.mozilla.org/en/DOM:table.caption'),
url_test('/docs/dom/domref/dom_html_ref14.html',
'http://developer.mozilla.org/en/DOM:table.tHead'),
url_test('/docs/dom/domref/dom_html_ref15.html',
'http://developer.mozilla.org/en/DOM:table.tFoot'),
url_test('/docs/dom/domref/dom_html_ref16.html',
'http://developer.mozilla.org/en/DOM:table.rows'),
url_test('/docs/dom/domref/dom_html_ref17.html',
'http://developer.mozilla.org/en/DOM:table.tBodies'),
url_test('/docs/dom/domref/dom_html_ref18.html',
'http://developer.mozilla.org/en/DOM:table.align'),
url_test('/docs/dom/domref/dom_html_ref19.html',
'http://developer.mozilla.org/en/DOM:table.bgColor'),
url_test('/docs/dom/domref/dom_html_ref20.html',
'http://developer.mozilla.org/en/DOM:table.border'),
url_test('/docs/dom/domref/dom_html_ref21.html',
'http://developer.mozilla.org/en/DOM:table.cellPadding'),
url_test('/docs/dom/domref/dom_html_ref22.html',
'http://developer.mozilla.org/en/DOM:table.frame'),
url_test('/docs/dom/domref/dom_html_ref23.html',
'http://developer.mozilla.org/en/DOM:table.rules'),
url_test('/docs/dom/domref/dom_html_ref24.html',
'http://developer.mozilla.org/en/DOM:table.summary'),
url_test('/docs/dom/domref/dom_html_ref25.html',
'http://developer.mozilla.org/en/DOM:table.width'),
url_test('/docs/dom/domref/dom_html_ref26.html',
'http://developer.mozilla.org/en/DOM:table.deleteTHead'),
url_test('/docs/dom/domref/dom_html_ref27.html',
'http://developer.mozilla.org/en/DOM:table.createTFoot'),
url_test('/docs/dom/domref/dom_html_ref28.html',
'http://developer.mozilla.org/en/DOM:table.deleteTFoot'),
url_test('/docs/dom/domref/dom_html_ref29.html',
'http://developer.mozilla.org/en/DOM:table.createCaption'),
url_test('/docs/dom/domref/dom_html_ref30.html',
'http://developer.mozilla.org/en/DOM:table.deleteCaption'),
url_test('/docs/dom/domref/dom_html_ref31.html',
'http://developer.mozilla.org/en/DOM:table.insertRow'),
url_test('/docs/dom/domref/dom_html_ref32.html',
'http://developer.mozilla.org/en/DOM:table.deleteRow'),
url_test('/docs/dom/domref/dom_html_ref33.html',
'http://developer.mozilla.org/en/DOM:table.insertRow'),
url_test('/docs/dom/domref/dom_html_ref34.html',
'http://developer.mozilla.org/en/DOM:table.deleteRow'),
url_test('/docs/dom/domref/dom_intro.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference/Introduction'),
url_test('/docs/dom/domref/dom_range_ref.html', 'http://developer.mozilla.org/en/DOM:range'),
url_test('/docs/dom/domref/dom_range_ref2.html',
'http://developer.mozilla.org/en/DOM:range.collapsed'),
url_test('/docs/dom/domref/dom_range_ref3.html',
'http://developer.mozilla.org/en/DOM:range.commonAncestorContainer'),
url_test('/docs/dom/domref/dom_range_ref4.html',
'http://developer.mozilla.org/en/DOM:range.endContainer'),
url_test('/docs/dom/domref/dom_range_ref5.html',
'http://developer.mozilla.org/en/DOM:range.endOffset'),
url_test('/docs/dom/domref/dom_range_ref6.html',
'http://developer.mozilla.org/en/DOM:range.startContainer'),
url_test('/docs/dom/domref/dom_range_ref7.html',
'http://developer.mozilla.org/en/DOM:range.startOffset'),
url_test('/docs/dom/domref/dom_range_ref8.html',
'http://developer.mozilla.org/en/DOM:document.createRange'),
url_test('/docs/dom/domref/dom_range_ref9.html',
'http://developer.mozilla.org/en/DOM:range.setStart'),
url_test('/docs/dom/domref/dom_range_ref10.html',
'http://developer.mozilla.org/en/DOM:range.setEnd'),
url_test('/docs/dom/domref/dom_range_ref11.html',
'http://developer.mozilla.org/en/DOM:range.setStartBefore'),
url_test('/docs/dom/domref/dom_range_ref12.html',
'http://developer.mozilla.org/en/DOM:range.setStartAfter'),
url_test('/docs/dom/domref/dom_range_ref13.html',
'http://developer.mozilla.org/en/DOM:range.setEndBefore'),
url_test('/docs/dom/domref/dom_range_ref14.html',
'http://developer.mozilla.org/en/DOM:range.setEndAfter'),
url_test('/docs/dom/domref/dom_range_ref15.html',
'http://developer.mozilla.org/en/DOM:range.selectNode'),
url_test('/docs/dom/domref/dom_range_ref16.html',
'http://developer.mozilla.org/en/DOM:range.selectNodeContents'),
url_test('/docs/dom/domref/dom_range_ref17.html',
'http://developer.mozilla.org/en/DOM:range.collapse'),
url_test('/docs/dom/domref/dom_range_ref18.html',
'http://developer.mozilla.org/en/DOM:range.cloneContents'),
url_test('/docs/dom/domref/dom_range_ref19.html',
'http://developer.mozilla.org/en/DOM:range.deleteContents'),
url_test('/docs/dom/domref/dom_range_ref20.html',
'http://developer.mozilla.org/en/DOM:range.extractContents'),
url_test('/docs/dom/domref/dom_range_ref21.html',
'http://developer.mozilla.org/en/DOM:range.insertNode'),
url_test('/docs/dom/domref/dom_range_ref22.html',
'http://developer.mozilla.org/en/DOM:range.surroundContents'),
url_test('/docs/dom/domref/dom_range_ref23.html',
'http://developer.mozilla.org/en/DOM:range.compareBoundaryPoints'),
url_test('/docs/dom/domref/dom_range_ref24.html',
'http://developer.mozilla.org/en/DOM:range.cloneRange'),
url_test('/docs/dom/domref/dom_range_ref25.html',
'http://developer.mozilla.org/en/DOM:range.detach'),
url_test('/docs/dom/domref/dom_range_ref26.html',
'http://developer.mozilla.org/en/DOM:range.toString'),
url_test('/docs/dom/domref/dom_shortTOC.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference'),
url_test('/docs/dom/domref/dom_style_ref.html', 'http://developer.mozilla.org/en/DOM:style'),
url_test('/docs/dom/domref/dom_style_ref2.html',
'http://developer.mozilla.org/en/DOM:style.media'),
url_test('/docs/dom/domref/dom_style_ref3.html',
'http://developer.mozilla.org/en/DOM:stylesheet'),
url_test('/docs/dom/domref/dom_style_ref4.html',
'http://developer.mozilla.org/en/DOM:stylesheet.cssRules'),
url_test('/docs/dom/domref/dom_style_ref5.html',
'http://developer.mozilla.org/en/DOM:stylesheet.disabled'),
url_test('/docs/dom/domref/dom_style_ref6.html',
'http://developer.mozilla.org/en/DOM:stylesheet.href'),
url_test('/docs/dom/domref/dom_style_ref7.html',
'http://developer.mozilla.org/en/DOM:stylesheet.media'),
url_test('/docs/dom/domref/dom_style_ref8.html',
'http://developer.mozilla.org/en/DOM:stylesheet.ownerNode'),
url_test('/docs/dom/domref/dom_style_ref9.html',
'http://developer.mozilla.org/en/DOM:stylesheet.ownerRule'),
url_test('/docs/dom/domref/dom_style_ref10.html',
'http://developer.mozilla.org/en/DOM:stylesheet.parentStyleSheet'),
url_test('/docs/dom/domref/dom_style_ref11.html',
'http://developer.mozilla.org/en/DOM:stylesheet.title'),
url_test('/docs/dom/domref/dom_style_ref12.html',
'http://developer.mozilla.org/en/DOM:stylesheet.type'),
url_test('/docs/dom/domref/dom_style_ref13.html',
'http://developer.mozilla.org/en/DOM:stylesheet.deleteRule'),
url_test('/docs/dom/domref/dom_style_ref14.html',
'http://developer.mozilla.org/en/DOM:stylesheet.insertRule'),
url_test('/docs/dom/domref/dom_style_ref15.html',
'http://developer.mozilla.org/en/DOM:cssRule.cssText'),
url_test('/docs/dom/domref/dom_style_ref16.html',
'http://developer.mozilla.org/en/DOM:cssRule.parentStyleSheet'),
url_test('/docs/dom/domref/dom_style_ref17.html',
'http://developer.mozilla.org/en/DOM:cssRule.selectorText'),
url_test('/docs/dom/domref/dom_style_ref18.html',
'http://developer.mozilla.org/en/DOM:cssRule.style'),
url_test('/docs/dom/domref/dom_style_ref19.html',
'http://developer.mozilla.org/en/DOM:cssRule.parentStyleSheet'),
url_test('/docs/dom/domref/dom_style_ref20.html',
'http://developer.mozilla.org/en/DOM:cssRule.selectorText'),
url_test('/docs/dom/domref/dom_style_ref21.html',
'http://developer.mozilla.org/en/DOM:cssRule.style'),
url_test('/docs/dom/domref/dom_style_ref22.html', 'http://developer.mozilla.org/en/DOM:CSS'),
url_test('/docs/dom/domref/dom_window_ref.html',
'http://developer.mozilla.org/en/DOM:window'),
url_test('/docs/dom/domref/dom_window_ref2.html',
'http://developer.mozilla.org/en/DOM:window.alert'),
url_test('/docs/dom/domref/dom_window_ref3.html',
'http://developer.mozilla.org/en/DOM:window.content'),
url_test('/docs/dom/domref/dom_window_ref4.html',
'http://developer.mozilla.org/en/DOM:window.back'),
url_test('/docs/dom/domref/dom_window_ref5.html',
'http://developer.mozilla.org/en/DOM:window.blur'),
url_test('/docs/dom/domref/dom_window_ref6.html',
'http://developer.mozilla.org/en/DOM:window.captureEvents'),
url_test('/docs/dom/domref/dom_window_ref7.html',
'http://developer.mozilla.org/en/DOM:window.clearInterval'),
url_test('/docs/dom/domref/dom_window_ref8.html',
'http://developer.mozilla.org/en/DOM:window.clearTimeout'),
url_test('/docs/dom/domref/dom_window_ref9.html',
'http://developer.mozilla.org/en/DOM:window.close'),
url_test('/docs/dom/domref/dom_window_ref10.html',
'http://developer.mozilla.org/en/DOM:window.closed'),
url_test('/docs/dom/domref/dom_window_ref11.html',
'http://developer.mozilla.org/en/DOM:window.Components'),
url_test('/docs/dom/domref/dom_window_ref12.html',
'http://developer.mozilla.org/en/DOM:window.confirm'),
url_test('/docs/dom/domref/dom_window_ref13.html',
'http://developer.mozilla.org/en/DOM:window.controllers'),
url_test('/docs/dom/domref/dom_window_ref14.html',
'http://developer.mozilla.org/en/DOM:window.crypto'),
url_test('/docs/dom/domref/dom_window_ref15.html',
'http://developer.mozilla.org/en/DOM:window.defaultStatus'),
url_test('/docs/dom/domref/dom_window_ref16.html',
'http://developer.mozilla.org/en/DOM:window.directories'),
url_test('/docs/dom/domref/dom_window_ref17.html',
'http://developer.mozilla.org/en/DOM:window.document'),
url_test('/docs/dom/domref/dom_window_ref18.html',
'http://developer.mozilla.org/en/DOM:window.dump'),
url_test('/docs/dom/domref/dom_window_ref19.html',
'http://developer.mozilla.org/en/DOM:window.escape'),
url_test('/docs/dom/domref/dom_window_ref20.html',
'http://developer.mozilla.org/en/DOM:window.focus'),
url_test('/docs/dom/domref/dom_window_ref21.html',
'http://developer.mozilla.org/en/DOM:window.forward'),
url_test('/docs/dom/domref/dom_window_ref22.html',
'http://developer.mozilla.org/en/DOM:window.frames'),
url_test('/docs/dom/domref/dom_window_ref23.html',
'http://developer.mozilla.org/en/DOM:window.getAttention'),
url_test('/docs/dom/domref/dom_window_ref24.html',
'http://developer.mozilla.org/en/DOM:window.getSelection'),
url_test('/docs/dom/domref/dom_window_ref25.html',
'http://developer.mozilla.org/en/DOM:window.history'),
url_test('/docs/dom/domref/dom_window_ref26.html',
'http://developer.mozilla.org/en/DOM:window.home'),
url_test('/docs/dom/domref/dom_window_ref27.html',
'http://developer.mozilla.org/en/DOM:window.innerHeight'),
url_test('/docs/dom/domref/dom_window_ref28.html',
'http://developer.mozilla.org/en/DOM:window.innerWidth'),
url_test('/docs/dom/domref/dom_window_ref29.html',
'http://developer.mozilla.org/en/DOM:window.length'),
url_test('/docs/dom/domref/dom_window_ref30.html',
'http://developer.mozilla.org/en/DOM:window.location'),
url_test('/docs/dom/domref/dom_window_ref31.html',
'http://developer.mozilla.org/en/DOM:window.locationbar'),
url_test('/docs/dom/domref/dom_window_ref32.html',
'http://developer.mozilla.org/en/DOM:window.menubar'),
url_test('/docs/dom/domref/dom_window_ref33.html',
'http://developer.mozilla.org/en/DOM:window.moveBy'),
url_test('/docs/dom/domref/dom_window_ref34.html',
'http://developer.mozilla.org/en/DOM:window.moveTo'),
url_test('/docs/dom/domref/dom_window_ref35.html',
'http://developer.mozilla.org/en/DOM:window.name'),
url_test('/docs/dom/domref/dom_window_ref36.html',
'http://developer.mozilla.org/en/DOM:window.navigator'),
url_test('/docs/dom/domref/dom_window_ref37.html',
'http://developer.mozilla.org/en/DOM:window.navigator.appCodeName'),
url_test('/docs/dom/domref/dom_window_ref38.html',
'http://developer.mozilla.org/en/DOM:window.navigator.appName'),
url_test('/docs/dom/domref/dom_window_ref39.html',
'http://developer.mozilla.org/en/DOM:window.navigator.appVersion'),
url_test('/docs/dom/domref/dom_window_ref40.html',
'http://developer.mozilla.org/en/DOM:window.navigator.cookieEnabled'),
url_test('/docs/dom/domref/dom_window_ref41.html',
'http://developer.mozilla.org/en/DOM:window.navigator.javaEnabled'),
url_test('/docs/dom/domref/dom_window_ref42.html',
'http://developer.mozilla.org/en/DOM:window.navigator.language'),
url_test('/docs/dom/domref/dom_window_ref43.html',
'http://developer.mozilla.org/en/DOM:window.navigator.mimeTypes'),
url_test('/docs/dom/domref/dom_window_ref44.html',
'http://developer.mozilla.org/en/DOM:window.navigator.oscpu'),
url_test('/docs/dom/domref/dom_window_ref45.html',
'http://developer.mozilla.org/en/DOM:window.navigator.platform'),
url_test('/docs/dom/domref/dom_window_ref46.html',
'http://developer.mozilla.org/en/DOM:window.navigator.plugins'),
url_test('/docs/dom/domref/dom_window_ref47.html',
'http://developer.mozilla.org/en/DOM:window.navigator.product'),
url_test('/docs/dom/domref/dom_window_ref48.html',
'http://developer.mozilla.org/en/DOM:window.navigator.productSub'),
url_test('/docs/dom/domref/dom_window_ref49.html',
'http://developer.mozilla.org/en/DOM:window.navigator.userAgent'),
url_test('/docs/dom/domref/dom_window_ref50.html',
'http://developer.mozilla.org/en/DOM:window.navigator.vendor'),
url_test('/docs/dom/domref/dom_window_ref51.html',
'http://developer.mozilla.org/en/DOM:window.navigator.vendorSub'),
url_test('/docs/dom/domref/dom_window_ref52.html',
'http://developer.mozilla.org/en/DOM:window.onabort'),
url_test('/docs/dom/domref/dom_window_ref53.html',
'http://developer.mozilla.org/en/DOM:window.onblur'),
url_test('/docs/dom/domref/dom_window_ref54.html',
'http://developer.mozilla.org/en/DOM:window.onchange'),
url_test('/docs/dom/domref/dom_window_ref55.html',
'http://developer.mozilla.org/en/DOM:window.onclick'),
url_test('/docs/dom/domref/dom_window_ref56.html',
'http://developer.mozilla.org/en/DOM:window.onclose'),
url_test('/docs/dom/domref/dom_window_ref57.html',
'http://developer.mozilla.org/en/DOM:window.ondragdrop'),
url_test('/docs/dom/domref/dom_window_ref58.html',
'http://developer.mozilla.org/en/DOM:window.onerror'),
url_test('/docs/dom/domref/dom_window_ref59.html',
'http://developer.mozilla.org/en/DOM:window.onfocus'),
url_test('/docs/dom/domref/dom_window_ref60.html',
'http://developer.mozilla.org/en/DOM:window.onkeydown'),
url_test('/docs/dom/domref/dom_window_ref61.html',
'http://developer.mozilla.org/en/DOM:window.onkeypress'),
url_test('/docs/dom/domref/dom_window_ref62.html',
'http://developer.mozilla.org/en/DOM:window.onkeyup'),
url_test('/docs/dom/domref/dom_window_ref63.html',
'http://developer.mozilla.org/en/DOM:window.onload'),
url_test('/docs/dom/domref/dom_window_ref64.html',
'http://developer.mozilla.org/en/DOM:window.onmousedown'),
url_test('/docs/dom/domref/dom_window_ref65.html',
'http://developer.mozilla.org/en/DOM:window.onmousemove'),
url_test('/docs/dom/domref/dom_window_ref66.html',
'http://developer.mozilla.org/en/DOM:window.onmouseout'),
url_test('/docs/dom/domref/dom_window_ref67.html',
'http://developer.mozilla.org/en/DOM:window.onmouseover'),
url_test('/docs/dom/domref/dom_window_ref68.html',
'http://developer.mozilla.org/en/DOM:window.onmouseup'),
url_test('/docs/dom/domref/dom_window_ref69.html',
'http://developer.mozilla.org/en/DOM:window.onpaint'),
url_test('/docs/dom/domref/dom_window_ref70.html',
'http://developer.mozilla.org/en/DOM:window.onreset'),
url_test('/docs/dom/domref/dom_window_ref71.html',
'http://developer.mozilla.org/en/DOM:window.onresize'),
url_test('/docs/dom/domref/dom_window_ref72.html',
'http://developer.mozilla.org/en/DOM:window.onscroll'),
url_test('/docs/dom/domref/dom_window_ref73.html',
'http://developer.mozilla.org/en/DOM:window.onselect'),
url_test('/docs/dom/domref/dom_window_ref74.html',
'http://developer.mozilla.org/en/DOM:window.onsubmit'),
url_test('/docs/dom/domref/dom_window_ref75.html',
'http://developer.mozilla.org/en/DOM:window.onunload'),
url_test('/docs/dom/domref/dom_window_ref76.html',
'http://developer.mozilla.org/en/DOM:window.open'),
url_test('/docs/dom/domref/dom_window_ref77.html',
'http://developer.mozilla.org/en/DOM:window.opener'),
url_test('/docs/dom/domref/dom_window_ref78.html',
'http://developer.mozilla.org/en/DOM:window.outerHeight'),
url_test('/docs/dom/domref/dom_window_ref79.html',
'http://developer.mozilla.org/en/DOM:window.outerWidth'),
url_test('/docs/dom/domref/dom_window_ref80.html',
'http://developer.mozilla.org/en/DOM:window.pageXOffset'),
url_test('/docs/dom/domref/dom_window_ref81.html',
'http://developer.mozilla.org/en/DOM:window.pageYOffset'),
url_test('/docs/dom/domref/dom_window_ref82.html',
'http://developer.mozilla.org/en/DOM:window.parent'),
url_test('/docs/dom/domref/dom_window_ref83.html',
'http://developer.mozilla.org/en/DOM:window.personalbar'),
url_test('/docs/dom/domref/dom_window_ref84.html',
'http://developer.mozilla.org/en/DOM:window.pkcs11'),
url_test('/docs/dom/domref/dom_window_ref85.html',
'http://developer.mozilla.org/en/DOM:window.print'),
url_test('/docs/dom/domref/dom_window_ref86.html',
'http://developer.mozilla.org/en/DOM:window.prompt'),
url_test('/docs/dom/domref/dom_window_ref87.html',
'http://developer.mozilla.org/en/DOM:window.prompter'),
url_test('/docs/dom/domref/dom_window_ref88.html',
'http://developer.mozilla.org/en/DOM:window.releaseEvents'),
url_test('/docs/dom/domref/dom_window_ref89.html',
'http://developer.mozilla.org/en/DOM:window.resizeBy'),
url_test('/docs/dom/domref/dom_window_ref90.html',
'http://developer.mozilla.org/en/DOM:window.resizeTo'),
url_test('/docs/dom/domref/dom_window_ref91.html',
'http://developer.mozilla.org/en/DOM:window.screen'),
url_test('/docs/dom/domref/dom_window_ref92.html',
'http://developer.mozilla.org/en/DOM:window.screen.availHeight'),
url_test('/docs/dom/domref/dom_window_ref93.html',
'http://developer.mozilla.org/en/DOM:window.screen.availLeft'),
url_test('/docs/dom/domref/dom_window_ref94.html',
'http://developer.mozilla.org/en/DOM:window.screen.availTop'),
url_test('/docs/dom/domref/dom_window_ref95.html',
'http://developer.mozilla.org/en/DOM:window.screen.availWidth'),
url_test('/docs/dom/domref/dom_window_ref96.html',
'http://developer.mozilla.org/en/DOM:window.screen.colorDepth'),
url_test('/docs/dom/domref/dom_window_ref97.html',
'http://developer.mozilla.org/en/DOM:window.screen.height'),
url_test('/docs/dom/domref/dom_window_ref98.html',
'http://developer.mozilla.org/en/DOM:window.screen.left'),
url_test('/docs/dom/domref/dom_window_ref99.html',
'http://developer.mozilla.org/en/DOM:window.screen.pixelDepth'),
url_test('/docs/dom/domref/dom_window_ref100.html',
'http://developer.mozilla.org/en/DOM:window.screen.top'),
url_test('/docs/dom/domref/dom_window_ref101.html',
'http://developer.mozilla.org/en/DOM:window.screen.width'),
url_test('/docs/dom/domref/dom_window_ref102.html',
'http://developer.mozilla.org/en/DOM:window.screenX'),
url_test('/docs/dom/domref/dom_window_ref103.html',
'http://developer.mozilla.org/en/DOM:window.screenY'),
url_test('/docs/dom/domref/dom_window_ref104.html',
'http://developer.mozilla.org/en/DOM:window.scrollbars'),
url_test('/docs/dom/domref/dom_window_ref105.html',
'http://developer.mozilla.org/en/DOM:window.scroll'),
url_test('/docs/dom/domref/dom_window_ref106.html',
'http://developer.mozilla.org/en/DOM:window.scrollBy'),
url_test('/docs/dom/domref/dom_window_ref107.html',
'http://developer.mozilla.org/en/DOM:window.scrollByLines'),
url_test('/docs/dom/domref/dom_window_ref108.html',
'http://developer.mozilla.org/en/DOM:window.scrollByPages'),
url_test('/docs/dom/domref/dom_window_ref109.html',
'http://developer.mozilla.org/en/DOM:window.scrollTo'),
url_test('/docs/dom/domref/dom_window_ref110.html',
'http://developer.mozilla.org/en/DOM:window.scrollX'),
url_test('/docs/dom/domref/dom_window_ref111.html',
'http://developer.mozilla.org/en/DOM:window.scrollY'),
url_test('/docs/dom/domref/dom_window_ref112.html',
'http://developer.mozilla.org/en/DOM:window.self'),
url_test('/docs/dom/domref/dom_window_ref113.html',
'http://developer.mozilla.org/en/DOM:window.setCursor'),
url_test('/docs/dom/domref/dom_window_ref114.html',
'http://developer.mozilla.org/en/DOM:window.setInterval'),
url_test('/docs/dom/domref/dom_window_ref115.html',
'http://developer.mozilla.org/en/DOM:window.setTimeout'),
url_test('/docs/dom/domref/dom_window_ref116.html',
'http://developer.mozilla.org/en/DOM:window.sidebar'),
url_test('/docs/dom/domref/dom_window_ref117.html',
'http://developer.mozilla.org/en/DOM:window.sizeToContent'),
url_test('/docs/dom/domref/dom_window_ref118.html',
'http://developer.mozilla.org/en/DOM:window.status'),
url_test('/docs/dom/domref/dom_window_ref119.html',
'http://developer.mozilla.org/en/DOM:window.statusbar'),
url_test('/docs/dom/domref/dom_window_ref120.html',
'http://developer.mozilla.org/en/DOM:window.stop'),
url_test('/docs/dom/domref/dom_window_ref121.html',
'http://developer.mozilla.org/en/DOM:window.toolbar'),
url_test('/docs/dom/domref/dom_window_ref122.html',
'http://developer.mozilla.org/en/DOM:window.top'),
url_test('/docs/dom/domref/dom_window_ref123.html',
'http://developer.mozilla.org/en/DOM:window.unescape'),
url_test('/docs/dom/domref/dom_window_ref124.html',
'http://developer.mozilla.org/en/DOM:window.updateCommands'),
url_test('/docs/dom/domref/dom_window_ref125.html',
'http://developer.mozilla.org/en/DOM:window.window'),
url_test('/docs/dom/domref/dom_window_ref126.html',
'http://developer.mozilla.org/en/DOM:window.window'),
url_test('/docs/dom/domref/dom_window_ref127.html',
'http://developer.mozilla.org/en/DOM:window.window'),
url_test('/docs/dom/domref/dom_window_refa2.html',
'http://developer.mozilla.org/en/DOM:window.alert'),
url_test('/docs/dom/domref/dom_window_refa3.html',
'http://developer.mozilla.org/en/DOM:window.content'),
url_test('/docs/dom/domref/examples.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/examples2.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/examples3.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/examples4.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/examples5.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/examples6.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/examples7_res.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/examples7.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/examples8.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Examples'),
url_test('/docs/dom/domref/images/alert.gif',
'http://developer.mozilla.org/wiki-images/en/6/6c/domref-alert.gif'),
url_test('/docs/dom/domref/images/backgrnd.gif',
'http://developer.mozilla.org/wiki-images/en/f/fe/domref-backgrnd.gif'),
url_test('/docs/dom/domref/images/cat.jpg',
'http://developer.mozilla.org/wiki-images/en/1/17/domref-cat.jpg'),
url_test('/docs/dom/domref/images/clientHeight.png',
'http://developer.mozilla.org/wiki-images/en/0/09/domref-clientHeight.png'),
url_test('/docs/dom/domref/images/clientWidth.png',
'http://developer.mozilla.org/wiki-images/en/a/a3/domref-clientWidth.png'),
url_test('/docs/dom/domref/images/confirm.gif',
'http://developer.mozilla.org/wiki-images/en/6/64/domref-confirm.gif'),
url_test('/docs/dom/domref/images/dom_window_ref2.gif',
'http://developer.mozilla.org/wiki-images/en/2/20/domref-dom_window_ref2.gif'),
url_test('/docs/dom/domref/images/dom_window_ref3.gif',
'http://developer.mozilla.org/wiki-images/en/6/65/domref-dom_window_ref3.gif'),
url_test('/docs/dom/domref/images/dom_window_refa.gif',
'http://developer.mozilla.org/wiki-images/en/0/0f/domref-dom_window_refa.gif'),
url_test('/docs/dom/domref/images/dom_window_refa2.gif',
'http://developer.mozilla.org/wiki-images/en/2/2b/domref-dom_window_refa2.gif'),
url_test('/docs/dom/domref/images/dom_window_refa3.gif',
'http://developer.mozilla.org/wiki-images/en/5/53/domref-dom_window_refa3.gif'),
url_test('/docs/dom/domref/images/domref.gif',
'http://developer.mozilla.org/wiki-images/en/e/ed/domref.gif'),
url_test('/docs/dom/domref/images/navidx.gif',
'http://developer.mozilla.org/wiki-images/en/f/f7/domref-navidx.gif'),
url_test('/docs/dom/domref/images/navidxx.gif',
'http://developer.mozilla.org/wiki-images/en/6/66/domref-navidxx.gif'),
url_test('/docs/dom/domref/images/navnext.gif',
'http://developer.mozilla.org/wiki-images/en/5/5a/domref-navnext.gif'),
url_test('/docs/dom/domref/images/navnextx.gif',
'http://developer.mozilla.org/wiki-images/en/e/e2/domref-navnextx.gif'),
url_test('/docs/dom/domref/images/navprev.gif',
'http://developer.mozilla.org/wiki-images/en/2/20/domref-navprev.gif'),
url_test('/docs/dom/domref/images/navprevx.gif',
'http://developer.mozilla.org/wiki-images/en/4/47/domref-navprevx.gif'),
url_test('/docs/dom/domref/images/navtoc.gif',
'http://developer.mozilla.org/wiki-images/en/0/0f/domref-navtoc.gif'),
url_test('/docs/dom/domref/images/navtocx.gif',
'http://developer.mozilla.org/wiki-images/en/6/63/domref-navtocx.gif'),
url_test('/docs/dom/domref/images/offsetHeight.png',
'http://developer.mozilla.org/wiki-images/en/3/35/domref-offsetHeight.png'),
url_test('/docs/dom/domref/images/offsetWidth.png',
'http://developer.mozilla.org/wiki-images/en/0/08/domref-offsetWidth.png'),
url_test('/docs/dom/domref/images/pdf.gif',
'http://developer.mozilla.org/wiki-images/en/3/36/domref-pdf.gif'),
url_test('/docs/dom/domref/images/preface2.gif',
'http://developer.mozilla.org/wiki-images/en/3/3d/domref-preface2.gif'),
url_test('/docs/dom/domref/images/prefacea.gif',
'http://developer.mozilla.org/wiki-images/en/4/4e/domref-prefacea.gif'),
url_test('/docs/dom/domref/images/prompt.gif',
'http://developer.mozilla.org/wiki-images/en/8/84/domref-prompt.gif'),
url_test('/docs/dom/domref/images/scrollHeight.png',
'http://developer.mozilla.org/wiki-images/en/8/8c/domref-scrollHeight.png'),
url_test('/docs/dom/domref/images/scrollTop.png',
'http://developer.mozilla.org/wiki-images/en/6/68/domref-scrollTop.png'),
url_test('/docs/dom/domref/images/test_page.gif',
'http://developer.mozilla.org/wiki-images/en/5/53/domref-test_page.gif'),
url_test('/docs/dom/domref/images/webworks.gif',
'http://developer.mozilla.org/wiki-images/en/f/fe/domref-webworks.gif'),
url_test('/docs/dom/domref/images/window-chrome.gif',
'http://developer.mozilla.org/wiki-images/en/0/08/domref-window-chrome.gif'),
url_test('/docs/dom/domref/preface.html',
'http://developer.mozilla.org/en/Gecko_DOM_Reference:Preface'),
url_test('/docs/dom/domref/scrollHeight.html',
'http://developer.mozilla.org/en/DOM:element.scrollHeight'),
url_test('/docs/dom/domref/scrollTop.html',
'http://developer.mozilla.org/en/DOM:element.scrollTop'),
url_test('/docs/dom/', 'http://developer.mozilla.org/en/DOM'),
url_test('/docs/dom/mozilla/hacking.html',
'http://developer.mozilla.org/en/Mozilla_DOM_Hacking_Guide'),
url_test('/docs/dom/mozilla/protodoc.html',
'http://developer.mozilla.org/en/JavaScript-DOM_Prototypes_in_Mozilla'),
url_test('/docs/dom/mozilla/xpcomintro.html',
'http://developer.mozilla.org/en/Introduction_to_XPCOM_for_the_DOM'),
url_test('/docs/dom/reference/javascript.html',
'http://developer.mozilla.org/en/The_DOM_and_JavaScript'),
url_test('/docs/dom/reference/levels.html', 'http://developer.mozilla.org/en/DOM_Levels'),
url_test('/docs/dom/technote/intro/example.html',
'http://developer.mozilla.org/@api/deki/files/2866/=example.html'),
url_test('/docs/dom/technote/intro/',
'http://developer.mozilla.org/en/Using_the_W3C_DOM_Level_1_Core'),
url_test('/docs/dom/technote/tn-dom-table/',
'http://developer.mozilla.org/en/'
'Traversing_an_HTML_table_with_JavaScript_and_DOM_Interfaces'),
url_test('/docs/dom/technote/whitespace/',
'http://developer.mozilla.org/en/Whitespace_in_the_DOM'),
url_test('/docs/extendmoz.html', 'https://developer.mozilla.org/En/Plugins'),
url_test('/docs/how-to-document.html', '/contribute/writing/how-to'),
url_test('/docs/hybrid-cd.html', 'http://developer.mozilla.org/en/Creating_a_hybrid_CD'),
url_test('/docs/jargon.html', 'http://developer.mozilla.org/en/Glossary'),
url_test('/docs/mdp/', '/contribute/writing/'),
url_test('/docs/modunote.htm', 'http://developer.mozilla.org/en/Modularization_Techniques'),
url_test('/docs/mozilla-faq.html', 'http://developer.mozilla.org/en/Mozilla_Release_FAQ'),
url_test('/docs/netlib/{necko,new-handler}.html',
'https://developer.mozilla.org/en-US/docs/Mozilla/Projects/Necko'),
url_test('/docs/plugin.html', 'https://developer.mozilla.org/En/Plugins'),
url_test('/docs/refList/refNSPR/', '/projects/nspr/reference/html/'),
url_test('/docs/scripting-plugins.html',
'http://developer.mozilla.org/en/Scripting_Plugins_in_Mozilla'),
url_test('/docs/source-directories-overview.html',
'http://developer.mozilla.org/en/Source_code_directories_overview'),
url_test('/docs/tplist/catBuild/portable-cpp.html',
'/hacking/portable-cpp.html'),
url_test('/docs/tplist/catFAQ', '/classic'),
url_test('/docs/tplist/tplist.html', 'https://developer.mozilla.org/'),
url_test('/docs/tutorials/sitenav/', 'http://developer.mozilla.org/en/Using_Remote_XUL'),
url_test('/docs/tutorials/tinderstatus/tinderstatus.xpi',
'https://addons.mozilla.org/en-US/seamonkey/addon/832'),
url_test('/docs/url_load.dia',
'http://developer.mozilla.org/@api/deki/files/2893/=url_load.dia'),
url_test('/docs/url_load.gif',
'http://developer.mozilla.org/@api/deki/files/920/=Url_load.gif'),
url_test('/docs/url_load.html',
'http://developer.mozilla.org/en/The_life_of_an_HTML_HTTP_request'),
url_test('/docs/web-developer/faq.html',
'http://developer.mozilla.org/en/Mozilla_Web_Developer_FAQ'),
url_test('/docs/web-developer/mimetypes.html',
'http://developer.mozilla.org/en/How_Mozilla_determines_MIME_Types'),
url_test('/docs/web-developer/quirks/quirklist.html',
'http://developer.mozilla.org/en/Mozilla_Quirks_Mode_Behavior'),
url_test('/docs/web-developer/quirks/',
'http://developer.mozilla.org/en/Mozilla%27s_Quirks_Mode'),
url_test('/docs/web-developer/quirks/doctypes.html',
'http://developer.mozilla.org/en/Mozilla%27s_DOCTYPE_sniffing'),
url_test('/docs/web-developer/sniffer/browser_type.html',
'https://developer.mozilla.org/En/Browser_Detection_and_Cross_Browser_Support'),
url_test('/docs/web-developer/upgrade_2.html',
'http://developer.mozilla.org/en/Using_Web_Standards_in_your_Web_Pages'),
url_test('/docs/xul/xulnotes/bubble.xul',
'http://developer.mozilla.org/@api/deki/files/2865/=bubble.xul'),
url_test('/docs/xul/xulnotes/template-bindings.html',
'http://developer.mozilla.org/en/XUL_Template_Primer_-_Bindings'),
url_test('/docs/xul/xulnotes/xulnote_beasts.html',
'http://developer.mozilla.org/en/A_XUL_Bestiary'),
url_test('/docs/xul/xulnotes/xulnote_diagnostic.html',
'http://developer.mozilla.org/en/XUL_Parser_in_Python'),
url_test('/docs/xul/xulnotes/xulnote_events.html',
'http://developer.mozilla.org/en/XUL_Event_Propagation'),
url_test('/docs/xul/xulnotes/xulnote_oven.html',
'http://developer.mozilla.org/en/My_Chrome_Oven:_Generating_XUL_with_Python'),
url_test('/docs/xul/xulnotes/xulnote_packages.html',
'http://developer.mozilla.org/en/Creating_XPI_Installer_Modules'),
url_test('/docs/xul/xulnotes/xulnote_skins.html',
'http://developer.mozilla.org/en/Skinning_XUL_Files_by_Hand'),
url_test('/docs/xul/xulnotes/xulnote_xml.html',
'http://developer.mozilla.org/en/XUL_Genealogy:_XML'),
url_test('/docs/xul/xulnotes/xulnote_xpconnect.html',
'http://developer.mozilla.org/en/Fun_With_XBL_and_XPConnect'),
url_test('/donate_faq.html', 'https://wiki.mozilla.org/Donate'),
url_test('/donate_form.pdf', 'https://donate.mozilla.org/'),
url_test('/donate.html', 'https://donate.mozilla.org/'),
url_test('/download-mozilla.html',
'http://developer.mozilla.org/en/Download_Mozilla_Source_Code'),
url_test('/feedback.html', '/contact/'),
url_test('/firebird', 'http://www.firefox.com'),
url_test('/foundation/privacy-policy.html', '/privacy/websites/'),
url_test('/get-involved.html', '/contribute/'),
url_test('/glimpsesearch.html', 'https://dxr.mozilla.org/'),
url_test('/hacking/bonsai.html', 'http://developer.mozilla.org/en/Hacking_with_Bonsai'),
url_test('/hacking/code-review-faq.html', 'http://developer.mozilla.org/en/Code_Review_FAQ'),
url_test('/hacking/coding-introduction.html',
'http://developer.mozilla.org/en/Mozilla_Hacker%27s_Getting_Started_Guide'),
url_test('/hacking/cvs_over_ssh_plan.html',
'http://developer.mozilla.org/En/Using_SSH_to_connect_to_CVS'),
url_test('/hacking/development-strategies.html',
'http://developer.mozilla.org/en/Mozilla_Development_Strategies'),
url_test('/hacking/life-cycle.html', 'http://developer.mozilla.org/en/Hacking_Mozilla'),
url_test('/hacking/mozilla-style-guide.html',
'http://developer.mozilla.org/En/Mozilla_Coding_Style_Guide'),
url_test('/hacking/new-features.html',
'http://developer.mozilla.org/en/Developing_New_Mozilla_Features'),
url_test('/index2.html', '/'),
url_test('/js/spidermonkey/apidoc/complete-frameset.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/guide.html',
'http://developer.mozilla.org/en/Embedding_SpiderMonkey'),
url_test('/js/spidermonkey/apidoc/jsguide.html',
'http://developer.mozilla.org/en/JavaScript_C_Engine_Embedder%27s_Guide'),
url_test('/js/spidermonkey/apidoc/jsref.htm',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/sparse-frameset.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/gen/api-BOOLEAN_TO_JSVAL.html',
'http://developer.mozilla.org/en/BOOLEAN_TO_JSVAL'),
url_test('/js/spidermonkey/apidoc/gen/api-DOUBLE_TO_JSVAL.html',
'http://developer.mozilla.org/en/DOUBLE_TO_JSVAL'),
url_test('/js/spidermonkey/apidoc/gen/api-INT_FITS_IN_JSVAL.html',
'http://developer.mozilla.org/en/INT_FITS_IN_JSVAL'),
url_test('/js/spidermonkey/apidoc/gen/api-INT_TO_JSVAL.html',
'http://developer.mozilla.org/en/INT_TO_JSVAL'),
url_test('/js/spidermonkey/apidoc/gen/api-JSCLASS_HAS_PRIVATE.html',
'http://developer.mozilla.org/en/JSCLASS_HAS_PRIVATE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSCLASS_NEW_ENUMERATE.html',
'http://developer.mozilla.org/en/JSCLASS_NEW_ENUMERATE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSCLASS_NEW_RESOLVE.html',
'http://developer.mozilla.org/en/JSCLASS_NEW_RESOLVE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSClass.html',
'http://developer.mozilla.org/en/JSClass'),
url_test('/js/spidermonkey/apidoc/gen/api-JSConstDoubleSpec.html',
'http://developer.mozilla.org/en/JSConstDoubleSpec'),
url_test('/js/spidermonkey/apidoc/gen/api-JSErrorReport.html',
'http://developer.mozilla.org/en/JSErrorReport'),
url_test('/js/spidermonkey/apidoc/gen/api-JSFUN_BOUND_METHOD.html',
'http://developer.mozilla.org/en/JSFUN_BOUND_METHOD'),
url_test('/js/spidermonkey/apidoc/gen/api-JSFUN_GLOBAL_PARENT.html',
'http://developer.mozilla.org/en/JSFUN_GLOBAL_PARENT'),
url_test('/js/spidermonkey/apidoc/gen/api-JSFunctionSpec.html',
'http://developer.mozilla.org/en/JSFunctionSpec'),
url_test('/js/spidermonkey/apidoc/gen/api-JSIdArray.html',
'http://developer.mozilla.org/en/JSIdArray'),
url_test('/js/spidermonkey/apidoc/gen/api-JSObjectOps.html',
'http://developer.mozilla.org/en/JSObjectOps'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPRINCIPALS_DROP.html',
'http://developer.mozilla.org/en/JSPRINCIPALS_DROP'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPRINCIPALS_HOLD.html',
'http://developer.mozilla.org/en/JSPRINCIPALS_HOLD'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPROP_ENUMERATE.html',
'http://developer.mozilla.org/en/JSPROP_ENUMERATE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPROP_EXPORTED.html',
'http://developer.mozilla.org/en/JSPROP_EXPORTED'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPROP_INDEX.html',
'http://developer.mozilla.org/en/JSPROP_INDEX'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPROP_PERMANENT.html',
'http://developer.mozilla.org/en/JSPROP_PERMANENT'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPROP_READONLY.html',
'http://developer.mozilla.org/en/JSPROP_READONLY'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPrincipals.html',
'http://developer.mozilla.org/en/JSPrincipals'),
url_test('/js/spidermonkey/apidoc/gen/api-JSProperty.html',
'http://developer.mozilla.org/en/JSProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JSPropertySpec.html',
'http://developer.mozilla.org/en/JSPropertySpec'),
url_test('/js/spidermonkey/apidoc/gen/api-JSRESOLVE_ASSIGNING.html',
'http://developer.mozilla.org/en/JSRESOLVE_ASSIGNING'),
url_test('/js/spidermonkey/apidoc/gen/api-JSRESOLVE_QUALIFIED.html',
'http://developer.mozilla.org/en/JSRESOLVE_QUALIFIED'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_FALSE.html',
'http://developer.mozilla.org/en/JSVAL_FALSE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_BOOLEAN.html',
'http://developer.mozilla.org/en/JSVAL_IS_BOOLEAN'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_DOUBLE.html',
'http://developer.mozilla.org/en/JSVAL_IS_DOUBLE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_GCTHING.html',
'http://developer.mozilla.org/en/JSVAL_IS_GCTHING'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_INT.html',
'http://developer.mozilla.org/en/JSVAL_IS_INT'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_NULL.html',
'http://developer.mozilla.org/en/JSVAL_IS_NULL'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_NUMBER.html',
'http://developer.mozilla.org/en/JSVAL_IS_NUMBER'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_OBJECT.html',
'http://developer.mozilla.org/en/JSVAL_IS_OBJECT'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_PRIMITIVE.html',
'http://developer.mozilla.org/en/JSVAL_IS_PRIMITIVE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_STRING.html',
'http://developer.mozilla.org/en/JSVAL_IS_STRING'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_IS_VOID.html',
'http://developer.mozilla.org/en/JSVAL_IS_VOID'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_LOCK.html',
'http://developer.mozilla.org/en/JSVAL_LOCK'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_NULL.html',
'http://developer.mozilla.org/en/JSVAL_NULL'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_ONE.html',
'http://developer.mozilla.org/en/JSVAL_ONE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_TO_BOOLEAN.html',
'http://developer.mozilla.org/en/JSVAL_TO_BOOLEAN'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_TO_DOUBLE.html',
'http://developer.mozilla.org/en/JSVAL_TO_DOUBLE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_TO_GCTHING.html',
'http://developer.mozilla.org/en/JSVAL_TO_GCTHING'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_TO_INT.html',
'http://developer.mozilla.org/en/JSVAL_TO_INT'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_TO_OBJECT.html',
'http://developer.mozilla.org/en/JSVAL_TO_OBJECT'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_TO_PRIVATE.html',
'http://developer.mozilla.org/en/JSVAL_TO_PRIVATE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_TO_STRING.html',
'http://developer.mozilla.org/en/JSVAL_TO_STRING'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_TRUE.html',
'http://developer.mozilla.org/en/JSVAL_TRUE'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_UNLOCK.html',
'http://developer.mozilla.org/en/JSVAL_UNLOCK'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_VOID.html',
'http://developer.mozilla.org/en/JSVAL_VOID'),
url_test('/js/spidermonkey/apidoc/gen/api-JSVAL_ZERO.html',
'http://developer.mozilla.org/en/JSVAL_ZERO'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_AddNamedRoot.html',
'http://developer.mozilla.org/en/JS_AddNamedRoot'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_AddRoot.html',
'http://developer.mozilla.org/en/JS_AddRoot'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_AliasElement.html',
'http://developer.mozilla.org/en/JS_AliasElement'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_AliasProperty.html',
'http://developer.mozilla.org/en/JS_AliasProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_BeginRequest.html',
'http://developer.mozilla.org/en/JS_BeginRequest'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CallFunction.html',
'http://developer.mozilla.org/en/JS_CallFunction'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CallFunctionName.html',
'http://developer.mozilla.org/en/JS_CallFunctionName'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CallFunctionValue.html',
'http://developer.mozilla.org/en/JS_CallFunctionValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CheckAccess.html',
'http://developer.mozilla.org/en/JS_CheckAccess'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ClearContextThread.html',
'http://developer.mozilla.org/en/JS_ClearContextThread'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ClearScope.html',
'http://developer.mozilla.org/en/JS_ClearScope'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CloneFunctionObject.html',
'http://developer.mozilla.org/en/JS_CloneFunctionObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompareStrings.html',
'http://developer.mozilla.org/en/JS_CompareStrings'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileFile.html',
'http://developer.mozilla.org/en/JS_CompileFile'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileFunction.html',
'http://developer.mozilla.org/en/JS_CompileFunction'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileFunctionForPrincipals.html',
'http://developer.mozilla.org/en/JS_CompileFunctionForPrincipals'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileScript.html',
'http://developer.mozilla.org/en/JS_CompileScript'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileScriptForPrincipals.html',
'http://developer.mozilla.org/en/JS_CompileScriptForPrincipals'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileUCFunction.html',
'http://developer.mozilla.org/en/JS_CompileUCFunction'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileUCFunctionForPrincipals.html',
'http://developer.mozilla.org/en/JS_CompileUCFunctionForPrincipals'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileUCScript.html',
'http://developer.mozilla.org/en/JS_CompileUCScript'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_CompileUCScriptForPrincipals.html',
'http://developer.mozilla.org/en/JS_CompileUCScriptForPrincipals'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ConstructObject.html',
'http://developer.mozilla.org/en/JS_ConstructObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ContextIterator.html',
'http://developer.mozilla.org/en/JS_ContextIterator'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ConvertArguments.html',
'http://developer.mozilla.org/en/JS_ConvertArguments'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ConvertStub.html',
'http://developer.mozilla.org/en/JS_ConvertStub'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ConvertValue.html',
'http://developer.mozilla.org/en/JS_ConvertValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DecompileFunction.html',
'http://developer.mozilla.org/en/JS_DecompileFunction'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DecompileFunctionBody.html',
'http://developer.mozilla.org/en/JS_DecompileFunctionBody'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DecompileScript.html',
'http://developer.mozilla.org/en/JS_DecompileScript'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineConstDoubles.html',
'http://developer.mozilla.org/en/JS_DefineConstDoubles'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineElement.html',
'http://developer.mozilla.org/en/JS_DefineElement'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineFunction.html',
'http://developer.mozilla.org/en/JS_DefineFunction'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineFunctions.html',
'http://developer.mozilla.org/en/JS_DefineFunctions'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineObject.html',
'http://developer.mozilla.org/en/JS_DefineObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineProperties.html',
'http://developer.mozilla.org/en/JS_DefineProperties'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineProperty.html',
'http://developer.mozilla.org/en/JS_DefineProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefinePropertyWithTinyId.html',
'http://developer.mozilla.org/en/JS_DefinePropertyWithTinyId'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineUCProperty.html',
'http://developer.mozilla.org/en/JS_DefineUCProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DefineUCPropertyWithTinyID.html',
'http://developer.mozilla.org/en/JS_DefineUCPropertyWithTinyID'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DeleteElement.html',
'http://developer.mozilla.org/en/JS_DeleteElement'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DeleteElement2.html',
'http://developer.mozilla.org/en/JS_DeleteElement2'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DeleteProperty.html',
'http://developer.mozilla.org/en/JS_DeleteProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DeleteProperty2.html',
'http://developer.mozilla.org/en/JS_DeleteProperty2'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DeleteUCProperty2.html',
'http://developer.mozilla.org/en/JS_DeleteUCProperty2'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DestroyContext.html',
'http://developer.mozilla.org/en/JS_DestroyContext'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DestroyIdArray.html',
'http://developer.mozilla.org/en/JS_DestroyIdArray'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DestroyRuntime.html',
'http://developer.mozilla.org/en/JS_DestroyRuntime'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DestroyScript.html',
'http://developer.mozilla.org/en/JS_DestroyScript'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_DumpNamedRoots.html',
'http://developer.mozilla.org/en/JS_DumpNamedRoots'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_EndRequest.html',
'http://developer.mozilla.org/en/JS_EndRequest'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_Enumerate.html',
'http://developer.mozilla.org/en/JS_Enumerate'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_EnumerateStub.html',
'http://developer.mozilla.org/en/JS_EnumerateStub'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_EvaluateScript.html',
'http://developer.mozilla.org/en/JS_EvaluateScript'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_EvaluateScriptForPrincipals.html',
'http://developer.mozilla.org/en/JS_EvaluateScriptForPrincipals'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_EvaluateUCScript.html',
'http://developer.mozilla.org/en/JS_EvaluateUCScript'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_EvaluateUCScriptForPrincipals.html',
'http://developer.mozilla.org/en/JS_EvaluateUCScriptForPrincipals'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ExecuteScript.html',
'http://developer.mozilla.org/en/JS_ExecuteScript'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_FinalizeStub.html',
'http://developer.mozilla.org/en/JS_FinalizeStub'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_Finish.html',
'http://developer.mozilla.org/en/JS_Finish'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GC.html',
'http://developer.mozilla.org/en/JS_GC'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetArrayLength.html',
'http://developer.mozilla.org/en/JS_GetArrayLength'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetClass.html',
'http://developer.mozilla.org/en/JS_GetClass'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetConstructor.html',
'http://developer.mozilla.org/en/JS_GetConstructor'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetContextPrivate.html',
'http://developer.mozilla.org/en/JS_GetContextPrivate'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetContextThread.html',
'http://developer.mozilla.org/en/JS_GetContextThread'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetElement.html',
'http://developer.mozilla.org/en/JS_GetElement'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetEmptyStringValue.html',
'http://developer.mozilla.org/en/JS_GetEmptyStringValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetFunctionName.html',
'http://developer.mozilla.org/en/JS_GetFunctionName'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetFunctionObject.html',
'http://developer.mozilla.org/en/JS_GetFunctionObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetGlobalObject.html',
'http://developer.mozilla.org/en/JS_GetGlobalObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetImplementationVersion.html',
'http://developer.mozilla.org/en/JS_GetImplementationVersion'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetInstancePrivate.html',
'http://developer.mozilla.org/en/JS_GetInstancePrivate'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetNaNValue.html',
'http://developer.mozilla.org/en/JS_GetNaNValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetNegativeInfinityValue.html',
'http://developer.mozilla.org/en/JS_GetNegativeInfinityValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetParent.html',
'http://developer.mozilla.org/en/JS_GetParent'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetPositiveInfinityValue.html',
'http://developer.mozilla.org/en/JS_GetPositiveInfinityValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetPrivate.html',
'http://developer.mozilla.org/en/JS_GetPrivate'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetProperty.html',
'http://developer.mozilla.org/en/JS_GetProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetPropertyAttributes.html',
'http://developer.mozilla.org/en/JS_GetPropertyAttributes'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetPrototype.html',
'http://developer.mozilla.org/en/JS_GetPrototype'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetRuntime.html',
'http://developer.mozilla.org/en/JS_GetRuntime'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetScopeChain.html',
'http://developer.mozilla.org/en/JS_GetScopeChain'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetStringBytes.html',
'http://developer.mozilla.org/en/JS_GetStringBytes'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetStringChars.html',
'http://developer.mozilla.org/en/JS_GetStringChars'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetStringLength.html',
'http://developer.mozilla.org/en/JS_GetStringLength'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetTypeName.html',
'http://developer.mozilla.org/en/JS_GetTypeName'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetUCProperty.html',
'http://developer.mozilla.org/en/JS_GetUCProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_GetVersion.html',
'http://developer.mozilla.org/en/JS_GetVersion'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_HasArrayLength.html',
'http://developer.mozilla.org/en/JS_HasArrayLength'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_IdToValue.html',
'http://developer.mozilla.org/en/JS_IdToValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_Init.html',
'http://developer.mozilla.org/en/JS_Init'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_InitClass.html',
'http://developer.mozilla.org/en/JS_InitClass'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_InitStandardClasses.html',
'http://developer.mozilla.org/en/JS_InitStandardClasses'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_InstanceOf.html',
'http://developer.mozilla.org/en/JS_InstanceOf'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_InternString.html',
'http://developer.mozilla.org/en/JS_InternString'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_InternUCString.html',
'http://developer.mozilla.org/en/JS_InternUCString'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_InternUCStringN.html',
'http://developer.mozilla.org/en/JS_InternUCStringN'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_IsArrayObject.html',
'http://developer.mozilla.org/en/JS_IsArrayObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_IsConstructing.html',
'http://developer.mozilla.org/en/JS_IsConstructing'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_IsRunning.html',
'http://developer.mozilla.org/en/JS_IsRunning'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_Lock.html',
'http://developer.mozilla.org/en/JS_Lock'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_LockGCThing.html',
'http://developer.mozilla.org/en/JS_LockGCThing'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_LookupElement.html',
'http://developer.mozilla.org/en/JS_LookupElement'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_LookupProperty.html',
'http://developer.mozilla.org/en/JS_LookupProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_LookupUCProperty.html',
'http://developer.mozilla.org/en/JS_LookupUCProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_MaybeGC.html',
'http://developer.mozilla.org/en/JS_MaybeGC'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewArrayObject.html',
'http://developer.mozilla.org/en/JS_NewArrayObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewContext.html',
'http://developer.mozilla.org/en/JS_NewContext'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewDouble.html',
'http://developer.mozilla.org/en/JS_NewDouble'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewDoubleValue.html',
'http://developer.mozilla.org/en/JS_NewDoubleValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewFunction.html',
'http://developer.mozilla.org/en/JS_NewFunction'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewIdArray.html',
'http://developer.mozilla.org/en/JS_NewIdArray'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewNumberValue.html',
'http://developer.mozilla.org/en/JS_NewNumberValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewObject.html',
'http://developer.mozilla.org/en/JS_NewObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewRuntime.html',
'http://developer.mozilla.org/en/JS_NewRuntime'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewScriptObject.html',
'http://developer.mozilla.org/en/JS_NewScriptObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewString.html',
'http://developer.mozilla.org/en/JS_NewString'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewStringCopyN.html',
'http://developer.mozilla.org/en/JS_NewStringCopyN'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewStringCopyZ.html',
'http://developer.mozilla.org/en/JS_NewStringCopyZ'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewUCString.html',
'http://developer.mozilla.org/en/JS_NewUCString'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewUCStringCopyN.html',
'http://developer.mozilla.org/en/JS_NewUCStringCopyN'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_NewUCStringCopyZ.html',
'http://developer.mozilla.org/en/JS_NewUCStringCopyZ'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_PropertyStub.html',
'http://developer.mozilla.org/en/JS_PropertyStub'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_RemoveRoot.html',
'http://developer.mozilla.org/en/JS_RemoveRoot'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ReportError.html',
'http://developer.mozilla.org/en/JS_ReportError'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ReportOutOfMemory.html',
'http://developer.mozilla.org/en/JS_ReportOutOfMemory'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ResolveStub.html',
'http://developer.mozilla.org/en/JS_ResolveStub'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ResumeRequest.html',
'http://developer.mozilla.org/en/JS_ResumeRequest'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetArrayLength.html',
'http://developer.mozilla.org/en/JS_SetArrayLength'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetBranchCallback.html',
'http://developer.mozilla.org/en/JS_SetBranchCallback'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetContextPrivate.html',
'http://developer.mozilla.org/en/JS_SetContextPrivate'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetContextThread.html',
'http://developer.mozilla.org/en/JS_SetContextThread'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetElement.html',
'http://developer.mozilla.org/en/JS_SetElement'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetErrorReporter.html',
'http://developer.mozilla.org/en/JS_SetErrorReporter'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetGCCallback.html',
'http://developer.mozilla.org/en/JS_SetGCCallback'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetGlobalObject.html',
'http://developer.mozilla.org/en/JS_SetGlobalObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetParent.html',
'http://developer.mozilla.org/en/JS_SetParent'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetPrivate.html',
'http://developer.mozilla.org/en/JS_SetPrivate'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetProperty.html',
'http://developer.mozilla.org/en/JS_SetProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetPropertyAttributes.html',
'http://developer.mozilla.org/en/JS_SetPropertyAttributes'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetPrototype.html',
'http://developer.mozilla.org/en/JS_SetPrototype'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetUCProperty.html',
'http://developer.mozilla.org/en/JS_SetUCProperty'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SetVersion.html',
'http://developer.mozilla.org/en/JS_SetVersion'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_SuspendRequest.html',
'http://developer.mozilla.org/en/JS_SuspendRequest'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_TypeOfValue.html',
'http://developer.mozilla.org/en/JS_TypeOfValue'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_Unlock.html',
'http://developer.mozilla.org/en/JS_Unlock'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_UnlockGCThing.html',
'http://developer.mozilla.org/en/JS_UnlockGCThing'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToBoolean.html',
'http://developer.mozilla.org/en/JS_ValueToBoolean'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToECMAInt32.html',
'http://developer.mozilla.org/en/JS_ValueToECMAInt32'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToECMAUint32.html',
'http://developer.mozilla.org/en/JS_ValueToECMAUint32'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToFunction.html',
'http://developer.mozilla.org/en/JS_ValueToFunction'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToId.html',
'http://developer.mozilla.org/en/JS_ValueToId'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToInt32.html',
'http://developer.mozilla.org/en/JS_ValueToInt32'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToNumber.html',
'http://developer.mozilla.org/en/JS_ValueToNumber'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToObject.html',
'http://developer.mozilla.org/en/JS_ValueToObject'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToString.html',
'http://developer.mozilla.org/en/JS_ValueToString'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_ValueToUint16.html',
'http://developer.mozilla.org/en/JS_ValueToUint16'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_free.html',
'http://developer.mozilla.org/en/JS_free'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_malloc.html',
'http://developer.mozilla.org/en/JS_malloc'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_realloc.html',
'http://developer.mozilla.org/en/JS_realloc'),
url_test('/js/spidermonkey/apidoc/gen/api-JS_strdup.html',
'http://developer.mozilla.org/en/JS_strdup'),
url_test('/js/spidermonkey/apidoc/gen/api-OBJECT_TO_JSVAL.html',
'http://developer.mozilla.org/en/OBJECT_TO_JSVAL'),
url_test('/js/spidermonkey/apidoc/gen/api-PRIVATE_TO_JSVAL.html',
'http://developer.mozilla.org/en/PRIVATE_TO_JSVAL'),
url_test('/js/spidermonkey/apidoc/gen/api-STRING_TO_JSVAL.html',
'http://developer.mozilla.org/en/STRING_TO_JSVAL'),
url_test('/js/spidermonkey/apidoc/gen/complete-toc-abc.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/gen/complete-toc-grp.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/gen/complete-toc.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/gen/complete.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/gen/sidebar-toc.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/gen/sparse-toc-abc.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/gen/sparse-toc-grp.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/apidoc/gen/sparse-toc.html',
'http://developer.mozilla.org/en/JSAPI_Reference'),
url_test('/js/spidermonkey/gctips.html',
'http://developer.mozilla.org/en/SpiderMonkey_Garbage_Collection_Tips'),
url_test('/mailman', 'https://mail.mozilla.org'),
url_test('/mailnews/ABSyncClientDesign.html',
'https://developer.mozilla.org/en/Thunderbird/Address_book_sync_client_design'),
url_test('/mailnews/arch/ABSyncClientDesign.html',
'https://developer.mozilla.org/en/Thunderbird/Address_book_sync_client_design'),
url_test('/mailnews/arch/accountmanager.html',
'https://developer.mozilla.org/en/Thunderbird/Using_the_Multiple_Accounts_API'),
url_test('/mailnews/arch/addrbook/hiddenprefs.html',
'https://developer.mozilla.org/en/Thunderbird/Hidden_address_book_prefs'),
url_test('/mailnews/arch/compose-backend.html',
'https://developer.mozilla.org/en/Thunderbird/Mail_composition_back_end'),
url_test('/mailnews/arch/compose/cached.html',
'https://developer.mozilla.org/en/Thunderbird/Cached_compose_window_FAQ'),
url_test('/mailnews/arch/compose/hiddenprefs.html',
'https://developer.mozilla.org/en/Thunderbird/Hidden_prefs'),
url_test('/mailnews/arch/events.html',
'https://developer.mozilla.org/en/Thunderbird/Mail_event_system'),
url_test('/mailnews/arch/hiddenprefs.html',
'https://developer.mozilla.org/en/Thunderbird/Hidden_prefs'),
url_test('/mailnews/arch/libmime-content-type-handlers.html',
'https://developer.mozilla.org/en/Thunderbird/libmime_content_type_handlers'),
url_test('/mailnews/arch/libmime-description.html',
'https://developer.mozilla.org/en/Thunderbird/The_libmime_module'),
url_test('/mailnews/arch/overview.html',
'https://developer.mozilla.org/en/Thunderbird/Mail_client_architecture_overview'),
url_test('/mailnews/arch/rdf.html',
'https://developer.mozilla.org/en/Thunderbird/Mail_and_RDF'),
url_test('/mailnews/arch/spam/',
'https://developer.mozilla.org/en/Thunderbird/Spam_filtering'),
url_test('/mailnews/compose-backend.html',
'https://developer.mozilla.org/en/Thunderbird/Mail_composition_back_end'),
url_test('/mailnews/libmime-content-type-handlers.html',
'https://developer.mozilla.org/en/Thunderbird/libmime_content_type_handlers'),
url_test('/mailnews/libmime-description.html',
'https://developer.mozilla.org/en/Thunderbird/The_libmime_module'),
url_test('/mailnews/review-mail.html',
'https://developer.mozilla.org/en/Mailnews_and_Mail_code_review_requirements'),
url_test('/mailnews/review.html',
'https://developer.mozilla.org/en/Mailnews_and_Mail_code_review_requirements'),
url_test('/mirroring.html', 'http://www-archive.mozilla.org/mirroring.html'),
url_test('/mirrors.html', 'http://www-archive.mozilla.org/mirrors.html'),
url_test('/mission.html', '/mission/'),
url_test('/mozilla1.x', '/firefox/'),
url_test('/my-mozilla.html', '/'),
url_test('/newlayout/bugathon.html', 'http://developer.mozilla.org/en/Gecko_BugAThon'),
url_test('/newlayout/codestock', '/docs/codestock99'),
url_test('/newlayout/codestock/slides.html', '/docs/codestock99/'),
url_test('/newlayout/faq.html', 'http://developer.mozilla.org/en/Gecko_FAQ'),
url_test('/newlayout/glossary.html', 'https://developer.mozilla.org/en/Gecko_Glossary'),
url_test('/newlayout/', 'http://developer.mozilla.org/en/Gecko'),
url_test('/newlayout/regress.html',
'/newlayout/doc/regression_tests.html'),
url_test('/newlayout/xml/', 'http://developer.mozilla.org/en/XML_in_Mozilla'),
url_test('/newsfeeds.html', '/about/forums/'),
url_test('/nglayout', 'https://developer.mozilla.org/en/Gecko'),
url_test('/NPL', '/MPL/NPL/1.1/'),
url_test('/old-roadmap.html', 'https://wiki.mozilla.org/Roadmap_Scratchpad'),
url_test('/other-projects.html', '/projects/other-projects.html'),
url_test('/owners-js.html', 'https://wiki.mozilla.org/Modules'),
url_test('/owners.html', 'https://wiki.mozilla.org/Modules'),
url_test('/performance/tinderbox-tests.html',
'http://wiki.mozilla.org/Performance:Tinderbox_Tests'),
url_test('/performance/leak-brownbag.html',
'http://wiki.mozilla.org/Performance:Leak_Tools'),
url_test('/privacy-policy{,.html}', '/privacy/websites/'),
url_test('/products/camino/badges/', 'http://caminobrowser.org/community/promotion/'),
url_test('/products/camino/features/searchCustomization.html',
'http://caminobrowser.org/help/'),
url_test('/products/camino/features/tipsTricks.html', 'http://caminobrowser.org/help/'),
url_test('/products/camino/', 'http://caminobrowser.org/'),
url_test('/products/camino/releases/0.8.1.html',
'http://caminobrowser.org/releases/0.8.1/'),
url_test('/products/camino/releases/0.8.2.html',
'http://caminobrowser.org/releases/0.8.2/'),
url_test('/products/camino/releases/0.8.3.html',
'http://caminobrowser.org/releases/0.8.3/'),
url_test('/products/camino/releases/0.8.4.html',
'http://caminobrowser.org/releases/0.8.4/'),
url_test('/products/camino/releases/0.8.5.html',
'http://caminobrowser.org/releases/0.8.5/'),
url_test('/products/camino/releases/0.8.html', 'http://caminobrowser.org/releases/0.8/'),
url_test('/products/camino/releases/0.8b.html', 'http://caminobrowser.org/releases/0.8b/'),
url_test('/products/camino/releases/0.9a1.html',
'http://caminobrowser.org/releases/0.9a1/'),
url_test('/products/camino/releases/0.9a2.html',
'http://caminobrowser.org/releases/0.9a2/'),
url_test('/products/camino/releases/1.0.1.html',
'http://caminobrowser.org/releases/1.0.1/'),
url_test('/products/camino/releases/1.0.2.html',
'http://caminobrowser.org/releases/1.0.2/'),
url_test('/products/camino/releases/1.0.3.html',
'http://caminobrowser.org/releases/1.0.3/'),
url_test('/products/camino/releases/1.0.4.html',
'http://caminobrowser.org/releases/1.0.4/'),
url_test('/products/camino/releases/1.0.5.html',
'http://caminobrowser.org/releases/1.0.5/'),
url_test('/products/camino/releases/1.0.6.html',
'http://caminobrowser.org/releases/1.0.6/'),
url_test('/products/camino/releases/1.0.html', 'http://caminobrowser.org/releases/1.0/'),
url_test('/products/camino/releases/1.0a1.html',
'http://caminobrowser.org/releases/1.0a1/'),
url_test('/products/camino/releases/1.0b1.html',
'http://caminobrowser.org/releases/1.0b1/'),
url_test('/products/camino/releases/1.0b2.html',
'http://caminobrowser.org/releases/1.0b2/'),
url_test('/products/camino/releases/1.0rc1.html',
'http://caminobrowser.org/releases/1.0rc1/'),
url_test('/products/camino/support/', 'http://caminobrowser.org/help/'),
url_test('/products/choosing-products.html', '/projects/'),
url_test('/projects/embedding/',
'https://developer.mozilla.org/docs/Gecko/Embedding_Mozilla'),
url_test('/projects/embedding/GRE.html',
'https://developer.mozilla.org/docs/Archive/Mozilla/GRE'),
url_test('/projects/embedding/windowAPIs.html',
'https://developer.mozilla.org/docs/Mozilla/Tech/Embedded_Dialog_API'),
url_test('/projects/embedding/howto/config.html',
'https://developer.mozilla.org/docs/Gecko/Embedding_Mozilla/Roll_your_own_browser'),
url_test('/projects/embedding/howto/Initializations.html',
'https://developer.mozilla.org/docs/Gecko/Embedding_Mozilla/Roll_your_own_browser'),
url_test('/projects/embedding/embedoverview/EmbeddingBasicsTOC.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics#toc'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics2.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics#Why_Gecko'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics3.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#What_You_Need_to_Embed'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics4.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#Getting_the_Code'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics5.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#Understanding_the_Coding_Environment'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics6.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics#XPCOM'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics7.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics#XPIDL'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics8.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#XPConnect_and_XPT_files'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics9.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#String_classes'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics10.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics#XUL.2FXBL'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics11.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#Choosing_Additional_Functionalities'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics12.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#What_Gecko_Provides'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics13.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#What_You_Provide'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics14.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#Common_Embedding_Tasks'),
url_test('/projects/embedding/embedoverview/EmbeddingBasics16.html',
'https://developer.mozilla.org/docs/Mozilla/Gecko/Gecko_Embedding_Basics'
'#Appendix:_Data_Flow_Inside_Gecko'),
url_test('/projects/embedding/examples/',
'https://developer.mozilla.org/docs/Gecko/Embedding_Mozilla/Roll_your_own_browser'),
url_test('/products/firebird', '/firefox/'),
url_test('/products/firebird/download/', '/firefox/new/'),
url_test('/products/firefox/add-engines.html',
'https://addons.mozilla.org/search-engines.php'),
url_test('/products/firefox/all', '/firefox/all/'),
url_test('/products/firefox/all.html', '/firefox/all/'),
url_test('/products/firefox/banners.html', '/contribute/friends/'),
url_test('/products/firefox/buttons.html', '/contribute/friends/'),
url_test('/products/firefox/download', '/firefox/new/'),
url_test('/products/firefox/download.html', '/firefox/new/'),
url_test('/products/firefox/get', '/firefox/new/'),
url_test('/products/firefox/', '/firefox/'),
url_test('/products/firefox/live-bookmarks', '/firefox/features/'),
url_test('/products/firefox/live-bookmarks.html', '/firefox/features/'),
url_test('/products/firefox/mirrors.html', 'http://www-archive.mozilla.org/mirrors.html'),
url_test('/products/firefox/releases/', '/firefox/releases/'),
url_test('/products/firefox/releases/0.9.2.html',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_releasenotes'
'/en-US/firefox/releases/0.9.1.html'),
url_test('/products/firefox/releases/0.10.1.html',
'http://website-archive.mozilla.org/www.mozilla.org/firefox_releasenotes'
'/en-US/firefox/releases/0.10.html'),
url_test('/products/firefox/search', '/firefox/features/'),
url_test('/products/firefox/search.html', '/firefox/features/'),
url_test('/products/firefox/shelf.html', 'https://blog.mozilla.org/press/awards/'),
url_test('/products/firefox/smart-keywords.html',
'https://support.mozilla.org/en-US/kb/Smart+keywords'),
url_test('/products/firefox/support/', 'https://support.mozilla.org/'),
url_test('/products/firefox/switch', '/firefox/new/'),
url_test('/products/firefox/system-requirements.html', '/firefox/system-requirements/'),
url_test('/products/firefox/tabbed-browsing.html', '/firefox/'),
url_test('/products/firefox/text-zoom.html',
'https://support.mozilla.org/kb/font-size-and-zoom-increase-size-of-web-pages'),
url_test('/products/firefox/themes', 'https://addons.mozilla.org/themes/'),
url_test('/products/firefox/themes.html', 'https://addons.mozilla.org/themes/'),
url_test('/products/firefox/ui-customize.html',
'https://support.mozilla.org/kb/customize-firefox-controls-buttons-and-toolbars'),
url_test('/products/firefox/upgrade/', '/firefox/new/'),
url_test('/products/firefox/why/', '/firefox/'),
url_test('/products/thunderbird/all-beta.html', '/thunderbird/all/'),
url_test('/products/thunderbird/all.html', '/thunderbird/all/'),
url_test('/products/thunderbird/global-inbox.html',
'http://kb.mozillazine.org/Global_Inbox'),
url_test('/products/thunderbird/', '/thunderbird/'),
url_test('/products/thunderbird/junkmail.html',
'http://kb.mozillazine.org/Junk_Mail_Controls'),
url_test('/products/thunderbird/message-grouping.html',
'http://kb.mozillazine.org/Message_Grouping'),
url_test('/products/thunderbird/privacy-protection.html',
'http://kb.mozillazine.org/Privacy_basics_%28Thunderbird%29'),
url_test('/products/thunderbird/releases/', '/thunderbird/releases/'),
url_test('/products/thunderbird/releases/0.1-release-notes.html', '/thunderbird/releases/'),
url_test('/products/thunderbird/rss.html',
'http://kb.mozillazine.org/RSS_basics_%28Thunderbird%29'),
url_test('/products/thunderbird/search-folders.html',
'http://kb.mozillazine.org/Saved_Search'),
url_test('/products/thunderbird/sysreq.html', '/thunderbird/system-requirements/'),
url_test('/profilemanager/isp-rdf-info.txt', 'https://developer.mozilla.org/docs/Isp_Data'),
url_test('/projects.html', 'https://www.mozilla.org/projects/'),
url_test('/projects/browsers.html', '/firefox/new/'),
url_test('/projects/bugzilla', 'https://www.bugzilla.org'),
url_test('/projects/deerpark/', '/firefox/channel/desktop/'),
url_test('/projects/distros.html', '/projects/mozilla-based.html'),
url_test('/projects/embedding/faq.html',
'https://developer.mozilla.org/docs/Gecko/Embedding_Mozilla/FAQ/How_do_I...'),
url_test('/projects/firebird/0.1-release-notes.html',
'/products/firefox/releases/0.1.html'),
url_test('/projects/firebird/0.2-release-notes.html',
'/products/firefox/releases/0.2.html'),
url_test('/projects/firebird/0.3-release-notes.html',
'/products/firefox/releases/0.3.html'),
url_test('/projects/firebird/0.4-release-notes.html',
'/products/firefox/releases/0.4.html'),
url_test('/projects/firebird/0.5-release-notes.html',
'/products/firefox/releases/0.5.html'),
url_test('/projects/firebird/0.6-release-notes.html',
'/products/firefox/releases/0.6.html'),
url_test('/projects/firebird/0.6.1-release-notes.html',
'/products/firefox/releases/0.6.1.html'),
url_test('/projects/firebird/0.7-release-notes.html',
'/products/firefox/releases/0.7.html'),
url_test('/projects/firebird/0.7.1-release-notes.html',
'/products/firefox/releases/0.7.1.html'),
url_test('/projects/firebird/build.html',
'/projects/firefox/build.html'),
url_test('/projects/firebird/charter.html',
'/projects/firefox/charter.html'),
url_test('/projects/firebird/', '/projects/firefox/'),
url_test('/projects/firebird/installer/build.html',
'/projects/firefox/installer/build.html'),
url_test('/projects/firebird/qa/downloads.html',
'/projects/firefox/qa/downloads.html'),
url_test('/projects/firebird/qa/', '/projects/firefox/qa/'),
url_test('/projects/firebird/release-notes.html',
'/products/firefox/releases/'),
url_test('/projects/firebird/releases.html', 'http://texturizer.net/firebird/download.html'),
url_test('/projects/firebird/review.html',
'/projects/firefox/review.html'),
url_test('/projects/firebird/roadmap.html',
'/projects/firefox/roadmap.html'),
url_test('/projects/firebird/ue/downloads/',
'/projects/firefox/ue/downloads/'),
url_test('/projects/firebird/ue/', '/projects/firefox/ue/'),
url_test('/projects/firebird/ue/installer/',
'/projects/firefox/ue/installer/'),
url_test('/projects/firebird/ue/migration/',
'/projects/firefox/ue/migration/'),
url_test('/projects/firebird/ue/philosophy/realities.html',
'/projects/firefox/ue/philosophy/realities.html'),
url_test('/projects/firebird/why/', '/firefox/'),
url_test('/projects/firefox/extensions/em-changes.html',
'http://developer.mozilla.org/en/Enhanced_Extension_Installation'),
url_test('/projects/firefox/extensions/update.html',
'http://developer.mozilla.org/en/Extension_Versioning%2C_Update_and_Compatibility'),
url_test('/projects/firefox/l10n/', 'https://wiki.mozilla.org/L10n'),
url_test('/projects/firefox/l10n/installer-encodings.html',
'http://developer.mozilla.org/en/Encodings_for_localization_files'),
url_test('/projects/firefox/l10n/l10n-step-by-step.html', 'https://wiki.mozilla.org/L10n'),
url_test('/projects/firefox/l10n/localize-release.html', 'https://wiki.mozilla.org/L10n'),
url_test('/projects/firefox/l10n/using-cvs.html', 'https://wiki.mozilla.org/L10n'),
url_test('/projects/foundation/', '/foundation/'),
url_test('/projects/inspector/faq.html',
'https://developer.mozilla.org/docs/Tools/Add-ons/DOM_Inspector/DOM_Inspector_FAQ'),
url_test('/projects/intl/xul-how2l10n.html',
'/projects/l10n/mlp_status.html'),
url_test('/projects/intl/xul-l10n.html',
'/projects/l10n/xul-l10n.html'),
url_test('/projects/intl/xul-styleguide.html',
'/projects/l10n/xul-styleguide.html'),
url_test('/projects/intl/fonts.html', 'http://wiki.mozilla.org/Font_selection'),
url_test('/projects/l10n/customizable-code.html',
'https://developer.mozilla.org/en/Writing_localizable_code'),
url_test('/projects/l10n/mlp_docs.html',
'https://wiki.mozilla.org/L10n:Localization_Process'),
url_test('/projects/l10n/mlp_howto_Firefox.html',
'https://wiki.mozilla.org/L10n:Localization_Process'),
url_test('/projects/l10n/mlp_status.html',
'https://wiki.mozilla.org/L10n:Localization_Teams'),
url_test('/projects/l10n/mlp_tools.html', 'https://wiki.mozilla.org/L10n:Tools'),
url_test('/projects/list.html', '/projects/'),
url_test('/projects/marketing/{buttons,banners}.html',
'https://wiki.mozilla.org/MarketingGuide'),
url_test('/projects/mathml/authoring.html',
'https://developer.mozilla.org/en/Mozilla_MathML_Project/Authoring'),
url_test('/projects/mathml/update.html',
'https://developer.mozilla.org/en/Mozilla_MathML_Project/Status'),
url_test('/projects/minefield/releases/', '/firefox/channel/desktop/'),
url_test('/projects/mstone', 'http://mstone.sourceforge.net/'),
url_test('/projects/netlib/http/http-debugging.html',
'https://developer.mozilla.org/docs/Mozilla/Debugging/HTTP_logging'),
url_test('/projects/netlib/integrated-auth.html',
'https://developer.mozilla.org/docs/Mozilla/Integrated_authentication'),
url_test('/projects/netlib/Link_Prefetching_FAQ.html',
'https://developer.mozilla.org/docs/Web/HTTP/Link_prefetching_FAQ'),
url_test('/projects/other-projects.html',
'/projects/mozilla-based.html'),
url_test('/projects/phoenix/0.1-release-notes.html',
'/products/firefox/releases/0.1.html'),
url_test('/projects/phoenix/0.2-release-notes.html',
'/products/firefox/releases/0.2.html'),
url_test('/projects/phoenix/0.3-release-notes.html',
'/products/firefox/releases/0.3.html'),
url_test('/projects/phoenix/0.4-release-notes.html',
'/products/firefox/releases/0.5.html'),
url_test('/projects/phoenix/0.5-release-notes.html',
'/products/firefox/releases/0.5.html'),
url_test('/projects/phoenix/0.6-release-notes.html',
'/products/firefox/releases/0.6.html'),
url_test('/projects/phoenix/extensions/', 'http://texturizer.net/firefox/extensions/'),
url_test('/projects/phoenix/', '/projects/firefox/'),
url_test('/projects/phoenix/phoenix-advantages.html',
'/products/firefox/releases/'),
url_test('/projects/phoenix/phoenix-roadmap.html',
'/projects/firefox/roadmap.html'),
url_test('/projects/phoenix/releases.html', '/products/firefox/'),
url_test('/projects/phoenix/why/', '/firefox/'),
url_test('/projects/plugins/first-install-problem.html',
'https://developer.mozilla.org/Add-ons/Plugins/The_First_Install_Problem'),
url_test('/projects/plugins/install-scheme.html',
'https://developer.mozilla.org/docs/'
'Installing_plugins_to_Gecko_embedding_browsers_on_Windows'),
url_test('/projects/plugins/npruntime-sample-in-visual-studio.html',
'https://developer.mozilla.org/docs/'
'Compiling_The_npruntime_Sample_Plugin_in_Visual_Studio'),
url_test('/projects/plugins/npruntime.html',
'https://developer.mozilla.org/docs/Plugins/Guide/Scripting_plugins'),
url_test('/projects/plugins/plugin-host-control.html',
'https://developer.mozilla.org/docs/'
'Archive/Mozilla/ActiveX_Control_for_Hosting_Netscape_Plug-ins_in_IE'),
url_test('/projects/plugins/xembed-plugin-extension.html',
'https://developer.mozilla.org/Add-ons/Plugins/XEmbed_Extension_for_Mozilla_Plugins'),
url_test('/projects/security/components/same-origin.html',
'https://developer.mozilla.org/docs/Web/Security/Same-origin_policy'),
url_test('/projects/sunbird', '/projects/calendar/sunbird/'),
url_test('/projects/svg/', 'https://developer.mozilla.org/en/SVG'),
url_test('/projects/svg/status.html', 'https://developer.mozilla.org/en/Mozilla_SVG_Status'),
url_test('/projects/svg/build.html',
'https://developer.mozilla.org/en/Developer_Guide/Build_Instructions'),
url_test('/projects/thunderbird/', 'https://wiki.mozilla.org/Thunderbird:Home_Page'),
url_test('/projects/thunderbird/build.html',
'http://developer.mozilla.org/docs/Build_Documentation'),
url_test('/projects/ui/accessibility/', '/access/'),
url_test('/projects/ui/accessibility/access-today.html',
'/access/today'),
url_test('/projects/ui/accessibility/slides/moz_accslides.html',
'/access/slideshow/'),
url_test('/projects/ui/accessibility/slides', '/access/slideshow'),
url_test('/projects/ui/accessibility/moz_accslides_text_version.html',
'/access/slideshow/text'),
url_test('/projects/ui/accessibility/index-users.html',
'/access/users'),
url_test('/projects/ui/accessibility/Accessibility_Features_in_Mozilla.html',
'/access/features'),
url_test('/projects/ui/accessibility/index-procurement.html',
'/access/evaluators'),
url_test('/projects/ui/accessibility/index-authors.html',
'/access/authors'),
url_test('/projects/ui/accessibility/dynamic-accessibility.html',
'/access/dynamic-content'),
url_test('/projects/ui/accessibility/index-atvendors.html',
'/access/at-vendors'),
url_test('/projects/ui/accessibility/index-qa-ui.html', '/access/qa'),
url_test('/projects/ui/accessibility/index-frontend-coders.html',
'/access/ui-developers'),
url_test('/projects/ui/accessibility/index-core-hackers.html',
'/access/core-developers'),
url_test('/projects/ui/accessibility/index-external-developers.html',
'/access/external-developers'),
url_test('/projects/ui/accessibility/toolkit-checklist.html',
'/access/toolkit-checklist'),
url_test('/projects/ui/accessibility/msaa-server-impl.html',
'/access/windows/msaa-server'),
url_test('/projects/ui/accessibility/accessible-xul-authoring.html',
'/access/xul-guidelines'),
url_test('/projects/ui/accessibility/window-eyes-status.html',
'/access/windows/window-eyes'),
url_test('/projects/ui/accessibility/typeaheadfind.html',
'/access/type-ahead'),
url_test('/projects/ui/accessibility/planning-ahead-for-accessibility.html',
'/access/planning'),
url_test('/projects/ui/accessibility/zoomtext-status.html',
'/access/windows/zoomtext'),
url_test('/projects/ui/accessibility/keyboard-status.html',
'/access/keyboard/testing'),
url_test('/projects/ui/accessibility/vendors-win.html',
'/access/windows/at-apis'),
url_test('/projects/ui/accessibility/section508.html',
'/access/section508'),
url_test('/projects/ui/accessibility/w3c-uaag.html',
'/access/w3c-uaag'),
url_test('/projects/ui/accessibility/span-checkbox.html',
'/access/samples/span-checkbox.html'),
url_test('/projects/ui/accessibility/ISimpleDOMNode.idl',
'https://dxr.mozilla.org/seamonkey/source/'
'accessible/public/msaa/ISimpleDOMNode.idl?raw=1'),
url_test('/projects/ui/accessibility/ISimpleDOMText.idl',
'https://dxr.mozilla.org/seamonkey/source/'
'accessible/public/msaa/ISimpleDOMText.idl?raw=1'),
url_test('/projects/ui/accessibility/ISimpleDOMDocument.idl',
'https://dxr.mozilla.org/seamonkey/source/'
'accessible/public/msaa/ISimpleDOMDocument.idl?raw=1'),
url_test('/projects/ui/accessibility/accesskey.html',
'/access/keyboard/accesskey'),
url_test('/projects/ui/accessibility/mozkeyintro.html',
'/access/keyboard/'),
url_test('/projects/ui/accessibility/Javascript-nsIAccessible.html',
'/access/samples/js-nsIAccessible'),
url_test('/projects/ui/accessibility/Javascript-nsIAccessible.js',
'/access/samples/js-nsIAccessible.js'),
url_test('/projects/ui/accessibility/mozkeyplan.html',
'/access/keyboard/interactive'),
url_test('/projects/ui/accessibility/mozkeylist.html',
'/access/keyboard/mozilla'),
url_test('/projects/ui/accessibility/mozkeyboard.html',
'/access/keyboard/layout'),
url_test('/projects/ui/accessibility/Javascript-nsIAccessible-notes.html',
'/access/samples/js-nsIAccessible-notes'),
url_test('/projects/ui/accessibility/accessible-architecture.html',
'/access/architecture'),
url_test('/projects/ui/accessibility/accessible-events.html',
'/access/event-flow'),
url_test('/projects/ui/accessibility/cross-ref-apis.html',
'/access/platform-apis'),
url_test('/projects/ui/accessibility/resources.html',
'/access/resources'),
url_test('/projects/ui/accessibility/access-mozilla.png',
'/access/access-mozilla.png'),
url_test('/projects/ui/accessibility/powerbraille.jpg',
'/access/powerbraille.jpg'),
url_test('/projects/ui/accessibility/vpduo2.jpg',
'/access/vpduo2.jpg'),
url_test('/projects/ui/accessibility/qa/taf_acceptance.html',
'/access/type-ahead/basic'),
url_test('/projects/ui/accessibility/qa/taf_functional.html',
'/access/type-ahead/full'),
url_test('/projects/ui/accessibility/qa/taf_qa.html',
'/access/type-ahead/testing'),
url_test('/projects/ui/accessibility/tabindex.html',
'/access/keyboard/tabindex'),
url_test('/projects/ui/accessibility/embedaccess.html',
'/access/prefs-and-apis'),
url_test('/projects/ui/accessibility/accessible-hierarchy.html',
'/projects/accessibility/images/accessible-hierarchy.jpg'),
url_test('/projects/ui/accessibility/unix/', '/access/unix/'),
url_test('/projects/ui/accessibility/unix/faq.html',
'/access/unix/faq'),
url_test('/projects/ui/accessibility/unix/introduction.html',
'/access/unix/team/'),
url_test('/projects/ui/accessibility/unix/photos/',
'/access/unix/team/photos'),
url_test('/projects/ui/accessibility/unix/photos', '/access/unix/team'),
url_test('/projects/ui/accessibility/unix/architecture.html',
'/access/unix/architecture'),
url_test('/projects/xmlextras/', 'http://developer.mozilla.org/en/XML_Extras'),
url_test('/projects/xslt/js-interface.html',
'https://developer.mozilla.org/docs/'
'Web/XSLT/Using_the_Mozilla_JavaScript_interface_to_XSL_Transformations'),
url_test('/projects/xslt/faq.html',
'https://developer.mozilla.org/docs/'
'Web/API/XSLTProcessor/XSL_Transformations_in_Mozilla_FAQ'),
url_test('/projects/xslt/standalone.html',
'https://developer.mozilla.org/docs/'
'Archive/Mozilla/Building_TransforMiiX_standalone'),
url_test('/projects/xul/joy-of-xul.html',
'https://developer.mozilla.org/docs/Mozilla/Tech/XUL/The_Joy_of_XUL'),
url_test('/projects/xul/xre{,old}.html',
'https://developer.mozilla.org/docs/Archive/Mozilla/XULRunner'),
url_test('/raptor/', 'http://developer.mozilla.org/en/Gecko'),
url_test('/README-cvs.html', '/contribute/writing/cvs'),
url_test('/README-style.html', '/contribute/writing/guidelines'),
url_test('/rdf/50-words.html', 'http://developer.mozilla.org/en/RDF_in_Fifty_Words_or_Less'),
url_test('/rdf/doc/aggregate.html',
'http://developer.mozilla.org/en/Aggregating_the_In-Memory_Datasource'),
url_test('/rdf/doc/faq.html', 'http://developer.mozilla.org/en/RDF_in_Mozilla_FAQ'),
url_test('/releases/cvstags.html', 'http://developer.mozilla.org/en/CVS_Tags'),
url_test('/releases/faq.html', '/projects/'),
url_test('/releases/', '/projects/'),
url_test('/releases/mozilla1.8b', '/releases/mozilla1.8b1'),
url_test('/releases/stable.html', '/projects/'),
url_test('/report.html', 'http://bugzilla.mozilla.org/enter_bug.cgi?format=guided'),
url_test('/roadmap.html', 'https://wiki.mozilla.org/Roadmap_Scratchpad'),
url_test('/scriptable/agnostic.html',
'http://wiki.mozilla.org/Roadmap_for_language_agnostic_scripting_support'),
url_test('/scriptable/avoiding-leaks.html',
'http://developer.mozilla.org/en/Using_XPCOM_in_JavaScript_without_leaking'),
url_test('/scriptable/components_object.html',
'http://developer.mozilla.org/en/Components_object'),
url_test('/scriptable/XPCShell.html', 'https://developer.mozilla.org/en/XPCShell_Reference'),
url_test('/scriptable/xpjs-components.html',
'http://developer.mozilla.org/en/XPJS_Components_Proposal'),
url_test('/scriptable/xptcall-faq.html', 'http://developer.mozilla.org/en/xptcall_FAQ'),
url_test('/search.html', '/'),
url_test('/sitemap.html', '/'),
url_test('/source-code.html',
'http://developer.mozilla.org/en/Download_Mozilla_Source_Code'),
url_test('/source.html', 'http://developer.mozilla.org/en/Download_Mozilla_Source_Code'),
url_test('/status/minutes.html', 'https://wiki.mozilla.org/WeeklyUpdates'),
url_test('/store', 'https://store.mozilla.org'),
url_test('/testdrivers', 'http://wiki.mozilla.org/B2G_Testdrivers_Program'),
url_test('/tinderbox.html', 'http://developer.mozilla.org/en/Tinderbox'),
url_test('/tools.html', 'http://developer.mozilla.org/en/Mozilla_Development_Tools'),
url_test('/university/courses.html', 'http://education.mozilla.org'),
url_test('/university/courses/appdev1/overview.html', 'http://education.mozilla.org'),
url_test('/university/courses/appdev2/overview.html', 'http://education.mozilla.org'),
url_test('/university/demos/xslt/demo_topic1.xml', 'http://education.mozilla.org'),
url_test('/university/demos/xslt/demo_topic2.xml', 'http://education.mozilla.org'),
url_test('/university/demos/xslt/demo_topic3.xml', 'http://education.mozilla.org'),
url_test('/university/demos/xslt/demo_topic4.xml', 'http://education.mozilla.org'),
url_test('/university/demos/xslt/demo_topic5.xml', 'http://education.mozilla.org'),
url_test('/university/demos/xslt/demo.xsl', 'http://education.mozilla.org'),
url_test('/university/HOF.html', '/projects/mozilla-based.html'),
url_test('/university/hof.xml', '/projects/mozilla-based.html'),
url_test('/university/resource_map.html', 'http://education.mozilla.org'),
url_test('/university/scc_roadshow_outline.html', 'http://education.mozilla.org'),
url_test('/unix/debugging-faq.html',
'http://developer.mozilla.org/en/Debugging_Mozilla_on_Linux_FAQ'),
url_test('/unix/solaris-build.html',
'http://developer.mozilla.org/en/Mozilla_Build_FAQ#Unix-specific_questions'),
url_test('/unix/customizing.html', 'https://www-archive.mozilla.org/unix/customizing.html'),
url_test('/webtools', 'https://developer.mozilla.org/en/Mozilla_Development_Tools'),
url_test('/xmlextras/', 'http://developer.mozilla.org/en/XML_Extras'),
url_test('/xmlextras/xmldataislands/example1.html',
'http://developer.mozilla.org/@api/deki/files/2861/=example1.html'),
url_test('/xmlextras/xmldataislands/',
'http://developer.mozilla.org/en/Using_XML_Data_Islands_in_Mozilla'),
url_test('/xmlextras/xmldataislands/MXX_Info.html',
'http://developer.mozilla.org/@api/deki/files/2863/=MXX_Info_(1).html'),
url_test('/xmlextras/xmldataislands/table.html',
'http://developer.mozilla.org/@api/deki/files/2864/=table.html'),
url_test('/xpfe/goodcss.html', 'http://developer.mozilla.org/en/Writing_Efficient_CSS'),
url_test('/xpfe/skins.html',
'http://developer.mozilla.org/en/Writing_Skinnable_XUL_and_CSS'),
url_test('/xpfe/xptoolkit/contents.html', '/xpfe/xptoolkit/'),
url_test('/xpfe/xptoolkit/overlays.html', 'http://developer.mozilla.org/en/XUL_Overlays'),
url_test('/xpfe/xptoolkit/xulintro.html',
'http://developer.mozilla.org/en/Introduction_to_XUL'),
url_test('/xpfe/xulrdf.htm',
'http://developer.mozilla.org/en/XUL_and_RDF:'
'_The_Implementation_of_the_Application_Object_Model'),
url_test('/xpfe/xulref', 'http://developer.mozilla.org/en/XUL_Reference'),
url_test('/xpfe/xulref-french',
'http://developer.mozilla.org/fr/docs/R%C3%A9f%C3%A9rence_XU'),
))
|
lxybox1/MissionPlanner
|
refs/heads/master
|
Lib/site-packages/scipy/signal/spectral.py
|
53
|
# Stub for scipy.signal.spectral under IronPython (sys.platform == 'cli'):
# load the precompiled "signal" CLR assembly and re-export its contents.
# On CPython this module intentionally does nothing.
import sys
if sys.platform == 'cli':
    import clr
    clr.AddReference("signal")
    from scipy__signal__spectral import *
|
systers/postorius
|
refs/heads/develop
|
example_project/__init__.py
|
12133432
| |
appapantula/scikit-learn
|
refs/heads/master
|
examples/applications/topics_extraction_with_nmf_lda.py
|
133
|
"""
========================================================================================
Topics extraction with Non-Negative Matrix Factorization And Latent Dirichlet Allocation
========================================================================================
This is an example of applying Non Negative Matrix Factorization
and Latent Dirichlet Allocation on a corpus of documents and
extract additive models of the topic structure of the corpus.
The output is a list of topics, each represented as a list of terms
(weights are not shown).
The default parameters (n_samples / n_features / n_topics) should make
the example runnable in a couple of tens of seconds. You can try to
increase the dimensions of the problem, but be aware that the time
complexity is polynomial in NMF. In LDA, the time complexity is
proportional to (n_samples * iterations).
"""
# Author: Olivier Grisel <olivier.grisel@ensta.org>
# Lars Buitinck <L.J.Buitinck@uva.nl>
# Chyi-Kwei Yau <chyikwei.yau@gmail.com>
# License: BSD 3 clause
from __future__ import print_function
from time import time
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.decomposition import NMF, LatentDirichletAllocation
from sklearn.datasets import fetch_20newsgroups
# Problem dimensions. Kept small so the example runs in seconds; per the
# module docstring, NMF's cost is polynomial in these and LDA's is
# proportional to n_samples * iterations.
n_samples = 2000     # documents taken from the corpus (dataset.data[:n_samples])
n_features = 1000    # vocabulary size kept by both vectorizers (max_features)
n_topics = 10        # number of topics extracted by both models
n_top_words = 20     # terms printed per topic by print_top_words
def print_top_words(model, feature_names, n_top_words):
    """Print, for every topic in *model*, its n_top_words highest-weighted terms.

    *model* must expose a ``components_`` 2-D array (topics x features);
    *feature_names* maps a feature index to its term string.
    """
    for idx, component in enumerate(model.components_):
        # argsort is ascending, so walk the last n_top_words indices backwards
        # to get the highest-weighted features first.
        top_indices = component.argsort()[:-n_top_words - 1:-1]
        terms = " ".join(feature_names[i] for i in top_indices)
        print("Topic #%d:" % idx)
        print(terms)
        print()
# Load the 20 newsgroups dataset and vectorize it. We use a few heuristics
# to filter out useless terms early on: the posts are stripped of headers,
# footers and quoted replies, and common English words, words occurring in
# only one document or in at least 95% of the documents are removed.
t0 = time()
print("Loading dataset and extracting features...")
dataset = fetch_20newsgroups(shuffle=True, random_state=1,
                             remove=('headers', 'footers', 'quotes'))
data_samples = dataset.data[:n_samples]

# use tf-idf features for the NMF model (min_df=2 drops terms seen in a
# single document; max_df=0.95 drops near-ubiquitous terms)
tfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=2, max_features=n_features,
                                   stop_words='english')
tfidf = tfidf_vectorizer.fit_transform(data_samples)

# use raw term-count (tf) features for the LDA model, same filtering
tf_vectorizer = CountVectorizer(max_df=0.95, min_df=2, max_features=n_features,
                                stop_words='english')
tf = tf_vectorizer.fit_transform(data_samples)
print("done in %0.3fs." % (time() - t0))

# Fit the NMF model on the tf-idf matrix and report its topics.
print("Fitting the NMF model with tf-idf feature, n_samples=%d and n_features=%d..."
      % (n_samples, n_features))
nmf = NMF(n_components=n_topics, random_state=1).fit(tfidf)
print("done in %0.3fs." % (time() - t0))

print("\nTopics in NMF model:")
tfidf_feature_names = tfidf_vectorizer.get_feature_names()
print_top_words(nmf, tfidf_feature_names, n_top_words)

# Fit the LDA model on the raw counts (online variational Bayes) and
# report its topics.  NOTE(review): all "done in" timings are measured
# from the single t0 above, so they are cumulative, not per-stage.
print("\nFitting LDA models with tf feature, n_samples=%d and n_features=%d..."
      % (n_samples, n_features))
lda = LatentDirichletAllocation(n_topics=n_topics, max_iter=5,
                                learning_method='online', learning_offset=50.,
                                random_state=0)
lda.fit(tf)
print("done in %0.3fs." % (time() - t0))

print("\nTopics in LDA model:")
tf_feature_names = tf_vectorizer.get_feature_names()
print_top_words(lda, tf_feature_names, n_top_words)
|
blazewicz/micropython
|
refs/heads/master
|
tests/wipy/wdt.py
|
69
|
'''
WDT test for the CC3200 based boards
'''
from machine import WDT
import time

# Test the invalid constructor cases first: each is expected to raise,
# and the test output records that by printing "Exception".
try:
    wdt = WDT(1)                    # presumably only WDT id 0 exists - TODO confirm
except Exception:
    print("Exception")
try:
    wdt = WDT(0, 500)               # timeout passed positionally - expected invalid
except Exception:
    print("Exception")
try:
    wdt = WDT(1, timeout=2000)      # bad id even with keyword timeout
except Exception:
    print("Exception")

# Valid construction: default id with a 1000 ms timeout.
wdt = WDT(timeout=1000)
print(wdt)

# Re-constructing after the watchdog is running is expected to raise.
try:
    wdt = WDT(0, timeout=2000)
except Exception:
    print("Exception")

# Feed before the 1000 ms timeout expires; each sleep stays under the
# deadline so the board must NOT reset between these feeds.
time.sleep_ms(500)
wdt.feed()
print(wdt)
time.sleep_ms(900)
wdt.feed()
print(wdt)
time.sleep_ms(950)
|
dext3r/lpc3xxx-ea3250v2
|
refs/heads/master
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
|
12980
|
# SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
# wxPython is a hard requirement for this GUI: fail fast with an
# instructive message instead of a bare ImportError.  (Python 2 raise
# syntax -- this script predates Python 3.)
try:
    import wx
except ImportError:
    raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
Y_OFFSET = 100
RECT_HEIGHT = 100
RECT_SPACE = 50
EVENT_MARKING_WIDTH = 5
def __init__(self, sched_tracer, title, parent = None, id = -1):
wx.Frame.__init__(self, parent, id, title)
(self.screen_width, self.screen_height) = wx.GetDisplaySize()
self.screen_width -= 10
self.screen_height -= 10
self.zoom = 0.5
self.scroll_scale = 20
self.sched_tracer = sched_tracer
self.sched_tracer.set_root_win(self)
(self.ts_start, self.ts_end) = sched_tracer.interval()
self.update_width_virtual()
self.nr_rects = sched_tracer.nr_rectangles() + 1
self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
# whole window panel
self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
# scrollable container
self.scroll = wx.ScrolledWindow(self.panel)
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
self.scroll.EnableScrolling(True, True)
self.scroll.SetFocus()
# scrollable drawing area
self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Fit()
self.Fit()
self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
self.txt = None
self.Show(True)
def us_to_px(self, val):
return val / (10 ** 3) * self.zoom
def px_to_us(self, val):
return (val / self.zoom) * (10 ** 3)
def scroll_start(self):
(x, y) = self.scroll.GetViewStart()
return (x * self.scroll_scale, y * self.scroll_scale)
def scroll_start_us(self):
(x, y) = self.scroll_start()
return self.px_to_us(x)
def paint_rectangle_zone(self, nr, color, top_color, start, end):
offset_px = self.us_to_px(start - self.ts_start)
width_px = self.us_to_px(end - self.ts_start)
offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
width_py = RootFrame.RECT_HEIGHT
dc = self.dc
if top_color is not None:
(r, g, b) = top_color
top_color = wx.Colour(r, g, b)
brush = wx.Brush(top_color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
width_py -= RootFrame.EVENT_MARKING_WIDTH
offset_py += RootFrame.EVENT_MARKING_WIDTH
(r ,g, b) = color
color = wx.Colour(r, g, b)
brush = wx.Brush(color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
def update_rectangles(self, dc, start, end):
start += self.ts_start
end += self.ts_start
self.sched_tracer.fill_zone(start, end)
def on_paint(self, event):
dc = wx.PaintDC(self.scroll_panel)
self.dc = dc
width = min(self.width_virtual, self.screen_width)
(x, y) = self.scroll_start()
start = self.px_to_us(x)
end = self.px_to_us(x + width)
self.update_rectangles(dc, start, end)
def rect_from_ypixel(self, y):
y -= RootFrame.Y_OFFSET
rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
return -1
return rect
def update_summary(self, txt):
if self.txt:
self.txt.Destroy()
self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
def on_mouse_down(self, event):
(x, y) = event.GetPositionTuple()
rect = self.rect_from_ypixel(y)
if rect == -1:
return
t = self.px_to_us(x) + self.ts_start
self.sched_tracer.mouse_down(rect, t)
def update_width_virtual(self):
self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
def __zoom(self, x):
self.update_width_virtual()
(xpos, ypos) = self.scroll.GetViewStart()
xpos = self.us_to_px(x) / self.scroll_scale
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
self.Refresh()
def zoom_in(self):
x = self.scroll_start_us()
self.zoom *= 2
self.__zoom(x)
def zoom_out(self):
x = self.scroll_start_us()
self.zoom /= 2
self.__zoom(x)
def on_key_press(self, event):
    """Keyboard handler: '+'/'-' zoom in/out, arrow keys scroll one unit."""
    raw = event.GetRawKeyCode()
    if raw == ord("+"):
        self.zoom_in()
        return
    if raw == ord("-"):
        self.zoom_out()
        return
    key = event.GetKeyCode()
    (x, y) = self.scroll.GetViewStart()
    # One scroll unit per arrow key press; other keys are ignored.
    deltas = {
        wx.WXK_RIGHT: (1, 0),
        wx.WXK_LEFT: (-1, 0),
        wx.WXK_DOWN: (0, 1),
        wx.WXK_UP: (0, -1),
    }
    if key in deltas:
        dx, dy = deltas[key]
        self.scroll.Scroll(x + dx, y + dy)
|
wndhydrnt/airflow
|
refs/heads/master
|
airflow/config_templates/default_celery.py
|
3
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import ssl

from airflow import configuration
from airflow.exceptions import AirflowConfigException, AirflowException
from airflow.utils.log.logging_mixin import LoggingMixin

log = LoggingMixin().log

# Broker transport options come from their own config section; fall back to
# a 6-hour visibility timeout when the section is absent.
broker_transport_options = configuration.conf.getsection(
    'celery_broker_transport_options'
)
if broker_transport_options is None:
    broker_transport_options = {'visibility_timeout': 21600}

# Default configuration dict handed to the Celery app by the executor.
DEFAULT_CELERY_CONFIG = {
    'accept_content': ['json', 'pickle'],
    'event_serializer': 'json',
    'worker_prefetch_multiplier': 1,
    'task_acks_late': True,
    'task_default_queue': configuration.conf.get('celery', 'DEFAULT_QUEUE'),
    'task_default_exchange': configuration.conf.get('celery', 'DEFAULT_QUEUE'),
    'broker_url': configuration.conf.get('celery', 'BROKER_URL'),
    'broker_transport_options': broker_transport_options,
    'result_backend': configuration.conf.get('celery', 'RESULT_BACKEND'),
    'worker_concurrency': configuration.conf.getint('celery', 'WORKER_CONCURRENCY'),
}

# SSL is opt-in: a missing SSL_ACTIVE option simply disables it.
celery_ssl_active = False
try:
    celery_ssl_active = configuration.conf.getboolean('celery', 'SSL_ACTIVE')
except AirflowConfigException:  # fix: the 'as e' binding was never used
    log.warning("Celery Executor will run without SSL")

try:
    if celery_ssl_active:
        broker_use_ssl = {'keyfile': configuration.conf.get('celery', 'SSL_KEY'),
                          'certfile': configuration.conf.get('celery', 'SSL_CERT'),
                          'ca_certs': configuration.conf.get('celery', 'SSL_CACERT'),
                          'cert_reqs': ssl.CERT_REQUIRED}
        DEFAULT_CELERY_CONFIG['broker_use_ssl'] = broker_use_ssl
except AirflowConfigException:  # fix: unused 'as e' binding removed here too
    # Raised when one of SSL_KEY / SSL_CERT / SSL_CACERT is missing.
    raise AirflowException('AirflowConfigException: SSL_ACTIVE is True, '
                           'please ensure SSL_KEY, '
                           'SSL_CERT and SSL_CACERT are set')
except Exception as e:
    raise AirflowException('Exception: There was an unknown Celery SSL Error. '
                           'Please ensure you want to use '
                           'SSL and/or have all necessary certs and key ({}).'.format(e))

result_backend = DEFAULT_CELERY_CONFIG['result_backend']
# amqp/redis/rpc result backends can lose results; warn in favour of a DB.
if 'amqp' in result_backend or 'redis' in result_backend or 'rpc' in result_backend:
    log.warning("You have configured a result_backend of %s, it is highly recommended "
                "to use an alternative result_backend (i.e. a database).", result_backend)
|
ambikeshwar1991/gnuradio-3.7.4
|
refs/heads/master
|
docs/sphinx/source/blocks/get_blocks.py
|
20
|
"""
This script regenerates the gnuradio.blocks sphinx source code.
"""
from gnuradio import blocks
import sys
import os
# Make the in-tree doxyxml package importable and point at the doxygen XML
# output produced by the build tree.
doxyxml_location = os.path.abspath("../../../doxygen")
xml_location = os.path.abspath("../../../../build/docs/doxygen/xml/")
sys.path.append(doxyxml_location)
from doxyxml import DoxyIndex, DoxyClass, DoxyFriend, DoxyFunction, DoxyFile, DoxyGroup
from doxyxml import DoxyOther, base
class Block(object):
    """
    Checks if doxyxml produced objects correspond to a new style
    gnuradio block.
    """
    @classmethod
    def includes(cls, item):
        """Return True when item is a cleanly-parsed DoxyClass exposing both
        a 'make' function and an 'sptr' member (the new-style block shape)."""
        if not isinstance(item, DoxyClass):
            return False
        # Skip anything doxyxml failed to parse.
        if item.error():
            return False
        has_make = item.has_member('make', DoxyFunction)
        has_sptr = item.has_member('sptr', DoxyOther)
        return has_make and has_sptr
class Group(object):
    """
    Checks if doxyxml produced objects correspond to a group.
    """
    @classmethod
    def includes(cls, item):
        """Return True when item is a cleanly-parsed DoxyGroup."""
        # Non-groups and parse failures are both excluded.
        return isinstance(item, DoxyGroup) and not item.error()
def main():
    """Regenerate the gnuradio.blocks sphinx sources: one index.rst plus one
    <groupname>.rst page per doxygen group containing gr::blocks blocks."""
    di = DoxyIndex(xml_location)
    # All doxygen classes that look like blocks, restricted to gr::blocks::.
    blocks = di.in_category(Block)
    block_blocks = []
    for block in blocks:
        if block.name().startswith("gr::blocks::"):
            block_blocks.append(block)
    # Walk the doxygen groups, collecting the gr::blocks members of each.
    all_grouped_blocks = []
    groups = di.in_category(Group)
    groupinfo = []
    for group in groups:
        contains_block_blocks = False
        block_list = []
        members = group.members()
        for member in members:
            if member.name().startswith("gr::blocks"):
                all_grouped_blocks.append(member)
                # Register the group the first time a block member is seen.
                # NOTE(review): group.title is not called here -- if title is
                # a method, a bound method object (not a string) ends up in
                # groupinfo and the len()/'-'* lines below would fail.
                # Confirm against the doxyxml API.
                if not contains_block_blocks:
                    contains_block_blocks = True
                    groupinfo.append((group.name(), group.title, block_list))
                block_list.append(member)
    # Warn about blocks that belong to no group (they won't be documented).
    for block in block_blocks:
        if block not in all_grouped_blocks:
            print("Didn't find block {0}".format(block.name()))
    blockindex = ["""gnuradio.blocks
===============
.. automodule:: gnuradio.blocks"""]
    # Emit one index section and one standalone page per group.
    for groupname, grouptitle, blocks in groupinfo:
        blockindex.append("")
        blockindex.append(grouptitle)
        blockindex.append('-'*len(grouptitle))
        blockindex.append("""
.. autosummary::
:nosignatures:
""")
        for block in blocks:
            blockindex.append(" gnuradio.blocks.{0}".format(
                block.name()[len("gr::blocks::"):]))
        grouppage = []
        title = "gnuradio.blocks: {0}".format(grouptitle)
        grouppage.append(title)
        grouppage.append('='*len(title))
        grouppage.append('')
        for block in blocks:
            shortname = block.name()[len("gr::blocks::"):]
            grouppage.append(".. autoblock:: gnuradio.blocks.{0}".format(shortname))
        text = '\n'.join(grouppage)
        f = open("{0}.rst".format(groupname), 'w')
        f.write(text)
        f.close()
    # Finally write the top-level index.
    text = "\n".join(blockindex)
    f = open("index.rst", 'w')
    f.write(text)
    f.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
yinchunlong/abelkhan-1
|
refs/heads/master
|
ext/c++/thirdpart/c++/boost/tools/build/test/custom_generator.py
|
17
|
#!/usr/bin/python
# Copyright 2003, 2004, 2005 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Attempt to declare a generator for creating OBJ from RC files. That generator
# should be considered together with standard CPP->OBJ generators and
# successfully create the target. Since we do not have a RC compiler everywhere,
# we fake the action. The resulting OBJ will be unusable, but it must be
# created.
import BoostBuild

t = BoostBuild.Tester()

# Project root: just load the custom 'rcc' toolset module.
t.write("jamroot.jam", """
import rcc ;
""")

# Jam implementation of the fake RCC -> OBJ generator: registers the RCC
# type and a resource-compile rule that writes a marker string instead of
# running a real resource compiler.
t.write("rcc.jam", """
import type ;
import generators ;
import print ;
# Use 'RCC' to avoid conflicts with definitions in the standard rc.jam and
# msvc.jam
type.register RCC : rcc ;
rule resource-compile ( targets * : sources * : properties * )
{
print.output $(targets[1]) ;
print.text "rc-object" ;
}
generators.register-standard rcc.resource-compile : RCC : OBJ ;
""")

# Python port of the same module, for the Python build-system backend.
t.write("rcc.py", """
import b2.build.type as type
import b2.build.generators as generators
from b2.manager import get_manager
# Use 'RCC' to avoid conflicts with definitions in the standard rc.jam and
# msvc.jam
type.register('RCC', ['rcc'])
generators.register_standard("rcc.resource-compile", ["RCC"], ["OBJ"])
get_manager().engine().register_action(
"rcc.resource-compile",
'@($(STDOUT):E=rc-object) > "$(<)"')
""")

# A project that builds an OBJ from an (empty) RCC source.
t.write("jamfile.jam", """
obj r : r.rcc ;
""")
t.write("r.rcc", """
""")

# The build must succeed and the fake generator's marker must appear in
# the produced object file.
t.run_build_system()
t.expect_content("bin/$toolset/debug/r.obj", "rc-object")
t.cleanup()
|
Nuclearfossil/ATF
|
refs/heads/master
|
Test/FunctionalTests/FsmEditorTestScripts/InsertTransitions.py
|
10
|
#Copyright (c) 2014 Sony Computer Entertainment America LLC. See License.txt.

# Functional test (Python 2, run inside the ATF scripting host, which
# provides atfDocService, editor and editingContext as globals): create two
# columns of states, connect them with labelled transitions, and verify the
# labels stuck.
import sys
sys.path.append("./CommonTestScripts")

import Test
import FsmUtil

atfDocService.OpenNewDocument(editor)

statesLeft = []    # states placed in the left column (x = 100)
statesRight = []   # states placed in the right column (x = 300)
transitions = []
trnCnt = 10        # number of state pairs / transitions to create

print "First create a bunch of states"
for i in range(trnCnt):
    statesLeft.append(editingContext.InsertState(100, 100 + 50*i, "Left#" + unicode(i), 64))
for i in range(trnCnt):
    statesRight.append(editingContext.InsertState(300, 100 + 50*i, "Right#" + unicode(i), 64))

print "Now add the transitions"
for i in range(trnCnt):
    transitions.append(FsmUtil.AddNewTransitionAndVerify(editingContext, statesLeft[i], statesRight[i]))
    transitions[i].Label = "Transition#" + unicode(i)

# Verify every transition kept the label it was assigned.
for i in range(trnCnt):
    Test.Equal("Transition#" + unicode(i), transitions[i].Label)

print Test.SUCCESS
|
altsen/diandiyun-platform
|
refs/heads/master
|
lms/djangoapps/psychometrics/management/__init__.py
|
12133432
| |
marcsit/spiops
|
refs/heads/master
|
spiops/test/__init__.py
|
12133432
| |
Kungbib/CIPAC
|
refs/heads/master
|
webapp/kortkatalogen/hsnominal/management/__init__.py
|
12133432
| |
zouyapeng/horizon
|
refs/heads/stable/juno
|
openstack_dashboard/dashboards/project/data_processing/data_image_registry/__init__.py
|
12133432
| |
hamtamtots/sweetshopwebsite
|
refs/heads/master
|
sweetshop_site/home/views/__init__.py
|
12133432
| |
kisel/trex-core
|
refs/heads/master
|
scripts/external_libs/pyzmq-14.5.0/python3/ucs4/64bit/zmq/tests/test_error.py
|
41
|
# -*- coding: utf8 -*-
# Copyright (C) PyZMQ Developers
# Distributed under the terms of the Modified BSD License.
import sys
import time
import zmq
from zmq import ZMQError, strerror, Again, ContextTerminated
from zmq.tests import BaseZMQTestCase
# Python 3 has no separate 'long' type; alias it so tests can use long().
if sys.version_info[0] >= 3:
    long = int
class TestZMQError(BaseZMQTestCase):
    """Tests for zmq error types, errno propagation, and error strings."""
    def test_strerror(self):
        """test that strerror gets the right type."""
        for i in range(10):
            e = strerror(i)
            self.assertTrue(isinstance(e, str))
    def test_zmqerror(self):
        # ZMQError must carry the errno and stringify via strerror.
        for errno in range(10):
            e = ZMQError(errno)
            self.assertEqual(e.errno, errno)
            self.assertEqual(str(e), strerror(errno))
    def test_again(self):
        # Non-blocking recv with no pending message raises Again (EAGAIN).
        s = self.context.socket(zmq.REP)
        self.assertRaises(Again, s.recv, zmq.NOBLOCK)
        self.assertRaisesErrno(zmq.EAGAIN, s.recv, zmq.NOBLOCK)
        s.close()
    def atest_ctxterm(self):
        # Disabled test ('atest_' prefix keeps unittest from collecting it):
        # recv should raise ContextTerminated once the context terminates.
        # NOTE(review): 'Thread' is never imported in this module, and
        # zmq.TERM looks like it should be zmq.ETERM (the errno constant) --
        # confirm both before re-enabling.
        s = self.context.socket(zmq.REP)
        t = Thread(target=self.context.term)
        t.start()
        self.assertRaises(ContextTerminated, s.recv, zmq.NOBLOCK)
        self.assertRaisesErrno(zmq.TERM, s.recv, zmq.NOBLOCK)
        s.close()
        t.join()
|
petabytekr/namebench
|
refs/heads/master
|
nb_third_party/dns/rdtypes/ANY/SOA.py
|
246
|
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import struct
import dns.exception
import dns.rdata
import dns.name
class SOA(dns.rdata.Rdata):
    """SOA record
    @ivar mname: the SOA MNAME (master name) field
    @type mname: dns.name.Name object
    @ivar rname: the SOA RNAME (responsible name) field
    @type rname: dns.name.Name object
    @ivar serial: The zone's serial number
    @type serial: int
    @ivar refresh: The zone's refresh value (in seconds)
    @type refresh: int
    @ivar retry: The zone's retry value (in seconds)
    @type retry: int
    @ivar expire: The zone's expiration value (in seconds)
    @type expire: int
    @ivar minimum: The zone's negative caching time (in seconds, called
    "minimum" for historical reasons)
    @type minimum: int
    @see: RFC 1035"""

    __slots__ = ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire',
                 'minimum']

    def __init__(self, rdclass, rdtype, mname, rname, serial, refresh, retry,
                 expire, minimum):
        super(SOA, self).__init__(rdclass, rdtype)
        self.mname = mname
        self.rname = rname
        self.serial = serial
        self.refresh = refresh
        self.retry = retry
        self.expire = expire
        self.minimum = minimum

    def to_text(self, origin=None, relativize=True, **kw):
        # Master-file order: MNAME RNAME SERIAL REFRESH RETRY EXPIRE MINIMUM.
        mname = self.mname.choose_relativity(origin, relativize)
        rname = self.rname.choose_relativity(origin, relativize)
        return '%s %s %d %d %d %d %d' % (
            mname, rname, self.serial, self.refresh, self.retry,
            self.expire, self.minimum )

    def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
        # Parse the two names, then the five 32-bit counter/timer fields.
        mname = tok.get_name()
        rname = tok.get_name()
        mname = mname.choose_relativity(origin, relativize)
        rname = rname.choose_relativity(origin, relativize)
        serial = tok.get_uint32()
        refresh = tok.get_ttl()
        retry = tok.get_ttl()
        expire = tok.get_ttl()
        minimum = tok.get_ttl()
        tok.get_eol()
        return cls(rdclass, rdtype, mname, rname, serial, refresh, retry,
                   expire, minimum )

    # Pre-decorator idiom: make from_text a classmethod after definition.
    from_text = classmethod(from_text)

    def to_wire(self, file, compress = None, origin = None):
        # Names may be compressed; the five counters are packed as
        # network-order unsigned 32-bit integers.
        self.mname.to_wire(file, compress, origin)
        self.rname.to_wire(file, compress, origin)
        five_ints = struct.pack('!IIIII', self.serial, self.refresh,
                                self.retry, self.expire, self.minimum)
        file.write(five_ints)

    def to_digestable(self, origin = None):
        # Canonical (uncompressed) wire form, used for digest computation.
        return self.mname.to_digestable(origin) + \
               self.rname.to_digestable(origin) + \
               struct.pack('!IIIII', self.serial, self.refresh,
                           self.retry, self.expire, self.minimum)

    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
        (mname, cused) = dns.name.from_wire(wire[: current + rdlen], current)
        current += cused
        rdlen -= cused
        (rname, cused) = dns.name.from_wire(wire[: current + rdlen], current)
        current += cused
        rdlen -= cused
        # After both names, exactly 20 bytes (5 x uint32) must remain.
        if rdlen != 20:
            raise dns.exception.FormError
        five_ints = struct.unpack('!IIIII',
                                  wire[current : current + rdlen])
        if not origin is None:
            mname = mname.relativize(origin)
            rname = rname.relativize(origin)
        return cls(rdclass, rdtype, mname, rname,
                   five_ints[0], five_ints[1], five_ints[2], five_ints[3],
                   five_ints[4])

    from_wire = classmethod(from_wire)

    def choose_relativity(self, origin = None, relativize = True):
        # Mutates in place: re-relativize both names against origin.
        self.mname = self.mname.choose_relativity(origin, relativize)
        self.rname = self.rname.choose_relativity(origin, relativize)

    def _cmp(self, other):
        # NOTE: uses the builtin cmp(), which is Python 2 only. Names
        # compare first; if equal, the five counters compare via their
        # packed big-endian byte strings (equivalent to numeric order).
        v = cmp(self.mname, other.mname)
        if v == 0:
            v = cmp(self.rname, other.rname)
            if v == 0:
                self_ints = struct.pack('!IIIII', self.serial, self.refresh,
                                        self.retry, self.expire, self.minimum)
                other_ints = struct.pack('!IIIII', other.serial, other.refresh,
                                         other.retry, other.expire,
                                         other.minimum)
                v = cmp(self_ints, other_ints)
        return v
|
GoogleCloudPlatform/python-docs-samples
|
refs/heads/master
|
appengine/standard/storage/appengine-client/main.py
|
1
|
#!/usr/bin/env python
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START sample]
"""A sample app that uses GCS client to operate on bucket and file."""
# [START imports]
import os
import cloudstorage
from google.appengine.api import app_identity
import webapp2
# [END imports]
# [START retries]
# Default retry policy for every GCS call made from this module.
cloudstorage.set_default_retry_params(
    cloudstorage.RetryParams(
        initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15
    ))
# [END retries]
class MainPage(webapp2.RequestHandler):
    """Main page for GCS demo application."""

    # [START get_default_bucket]
    def get(self):
        """Run the whole demo: create, read, stat, list and delete files,
        writing a plain-text transcript to the response."""
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(
            'Demo GCS Application running from Version: {}\n'.format(
                os.environ['CURRENT_VERSION_ID']))
        self.response.write('Using bucket name: {}\n\n'.format(bucket_name))
        # [END get_default_bucket]
        bucket = '/' + bucket_name
        filename = bucket + '/demo-testfile'
        # Every file created below is recorded here and removed at the end.
        self.tmp_filenames_to_clean_up = []
        self.create_file(filename)
        self.response.write('\n\n')
        self.read_file(filename)
        self.response.write('\n\n')
        self.stat_file(filename)
        self.response.write('\n\n')
        self.create_files_for_list_bucket(bucket)
        self.response.write('\n\n')
        self.list_bucket(bucket)
        self.response.write('\n\n')
        self.list_bucket_directory_mode(bucket)
        self.response.write('\n\n')
        self.delete_files()
        self.response.write('\n\nThe demo ran successfully!\n')

    # [START write]
    def create_file(self, filename):
        """Create a file."""
        self.response.write('Creating file {}\n'.format(filename))
        # The retry_params specified in the open call will override the default
        # retry params for this particular file handle.
        write_retry_params = cloudstorage.RetryParams(backoff_factor=1.1)
        with cloudstorage.open(
            filename, 'w', content_type='text/plain', options={
                'x-goog-meta-foo': 'foo', 'x-goog-meta-bar': 'bar'},
                retry_params=write_retry_params) as cloudstorage_file:
            cloudstorage_file.write('abcde\n')
            cloudstorage_file.write('f'*1024*4 + '\n')
        self.tmp_filenames_to_clean_up.append(filename)
    # [END write]

    # [START read]
    def read_file(self, filename):
        """Write an abbreviated view of the file to the response."""
        self.response.write(
            'Abbreviated file content (first line and last 1K):\n')
        with cloudstorage.open(filename) as cloudstorage_file:
            self.response.write(cloudstorage_file.readline())
            # Seek to 1 KiB before the end and dump the remainder.
            cloudstorage_file.seek(-1024, os.SEEK_END)
            self.response.write(cloudstorage_file.read())
    # [END read]

    def stat_file(self, filename):
        """Write the GCS metadata of the file to the response."""
        self.response.write('File stat:\n')
        stat = cloudstorage.stat(filename)
        self.response.write(repr(stat))

    def create_files_for_list_bucket(self, bucket):
        """Create a handful of files so the listing demos have content."""
        self.response.write('Creating more files for listbucket...\n')
        filenames = [bucket + n for n in [
            '/foo1', '/foo2', '/bar', '/bar/1', '/bar/2', '/boo/']]
        for f in filenames:
            self.create_file(f)

    # [START list_bucket]
    def list_bucket(self, bucket):
        """Create several files and paginate through them."""
        self.response.write('Listbucket result:\n')
        # Production apps should set page_size to a practical value.
        page_size = 1
        stats = cloudstorage.listbucket(bucket + '/foo', max_keys=page_size)
        while True:
            count = 0
            for stat in stats:
                count += 1
                self.response.write(repr(stat))
                self.response.write('\n')
            # A short (or empty) page means we've seen everything.
            if count != page_size or count == 0:
                break
            # Resume listing after the last filename seen on this page.
            stats = cloudstorage.listbucket(
                bucket + '/foo', max_keys=page_size, marker=stat.filename)
    # [END list_bucket]

    def list_bucket_directory_mode(self, bucket):
        """List with a '/' delimiter, descending one level into directories."""
        self.response.write('Listbucket directory mode result:\n')
        for stat in cloudstorage.listbucket(bucket + '/b', delimiter='/'):
            self.response.write(stat)
            self.response.write('\n')
            if stat.is_dir:
                for subdir_file in cloudstorage.listbucket(
                        stat.filename, delimiter='/'):
                    self.response.write(' {}'.format(subdir_file))
                    self.response.write('\n')

    # [START delete_files]
    def delete_files(self):
        """Delete every file created during this request."""
        self.response.write('Deleting files...\n')
        for filename in self.tmp_filenames_to_clean_up:
            self.response.write('Deleting file {}\n'.format(filename))
            try:
                cloudstorage.delete(filename)
            except cloudstorage.NotFoundError:
                # Already gone -- deletion is best-effort by design.
                pass
    # [END delete_files]
# WSGI application entry point served by App Engine.
app = webapp2.WSGIApplication(
    [('/', MainPage)], debug=True)
# [END sample]
|
savoirfairelinux/santropol-feast
|
refs/heads/dev
|
src/dataexec.py
|
2
|
# usage : python dataexec.py [santropolFeast.settingsSPECIAL]
import os
import sys
def run():
    """Configure Django from the optional argv[1] settings module (default
    'sous_chef.settings'), then load all fixture data via dataload."""
    if len(sys.argv) > 1:
        settings = sys.argv[1]
    else:
        settings = 'sous_chef.settings'
    os.environ['DJANGO_SETTINGS_MODULE'] = settings
    # Django must be set up before dataload (which imports models) runs.
    import django
    django.setup()
    from dataload import insert_all
    insert_all()

# Executes immediately when the script is run (or imported).
run()
|
sprymix/python-dateutil
|
refs/heads/master
|
dateutil/test/__init__.py
|
12133432
| |
tfroehlich82/erpnext
|
refs/heads/develop
|
erpnext/healthcare/doctype/normal_test_template/__init__.py
|
12133432
| |
jimi-c/ansible
|
refs/heads/devel
|
lib/ansible/module_utils/network/ios/__init__.py
|
12133432
| |
dattatreya303/zulip
|
refs/heads/master
|
zerver/webhooks/heroku/__init__.py
|
12133432
| |
sicklem/google-python-exercises
|
refs/heads/master
|
basic/string2.py
|
1
|
#!/usr/bin/python2.4 -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Additional basic string exercises
# D. verbing
# Given a string, if its length is at least 3,
# add 'ing' to its end.
# Unless it already ends in 'ing', in which case
# add 'ly' instead.
# If the string length is less than 3, leave it unchanged.
# Return the resulting string.
def verbing(s):
    """Append 'ing' to s (or 'ly' if s already ends in 'ing'); strings
    shorter than 3 characters are returned unchanged."""
    if len(s) < 3:
        return s
    suffix = 'ly' if s.endswith('ing') else 'ing'
    return s + suffix
# E. not_bad
# Given a string, find the first appearance of the
# substring 'not' and 'bad'. If the 'bad' follows
# the 'not', replace the whole 'not'...'bad' substring
# with 'good'.
# Return the resulting string.
# So 'This dinner is not that bad!' yields:
# This dinner is good!
def not_bad(s):
    """Replace the span from the first 'not' through the first 'bad'
    (inclusive) with 'good', when 'bad' appears after 'not'; otherwise
    return s unchanged.

    Fixes two defects in the original: (1) when 'not' was absent,
    find() returned -1 and the -1 < badIdx test passed, so a bogus
    s[-1:badIdx+3] span got replaced; (2) str.replace substituted every
    occurrence of the span, not just the intended one."""
    not_idx = s.find('not')
    bad_idx = s.find('bad')
    # Both substrings must exist and 'bad' must follow 'not'.
    if not_idx != -1 and bad_idx > not_idx:
        return s[:not_idx] + 'good' + s[bad_idx + 3:]
    return s
# F. front_back
# Consider dividing a string into two halves.
# If the length is even, the front and back halves are the same length.
# If the length is odd, we'll say that the extra char goes in the front half.
# e.g. 'abcde', the front half is 'abc', the back half 'de'.
# Given 2 strings, a and b, return a string of the form
# a-front + b-front + a-back + b-back
def front_back(a, b):
    """Interleave the halves of a and b: a-front + b-front + a-back + b-back
    (odd-length strings put the extra character in the front half)."""
    a_front, a_back = evenOddSplit(a)
    b_front, b_back = evenOddSplit(b)
    return a_front + b_front + a_back + b_back
def evenOddSplit(s):
    """Split s into [front, back] halves; for odd lengths the front half
    gets the extra character (e.g. 'abcde' -> ['abc', 'de'])."""
    lenS = len(s)
    # Floor division ('//') instead of '/': identical for ints in Python 2,
    # and avoids the float slice index (TypeError) under Python 3.
    if lenS % 2 == 0:
        sFront = s[:lenS // 2]
        sBack = s[lenS // 2:]
    else:
        sFront = s[:lenS // 2 + 1]
        sBack = s[lenS // 2 + 1:]
    return [sFront, sBack]
# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
    # Print ' OK ' when got equals expected, ' X ' otherwise, along with
    # both values (Python 2 print statement).
    if got == expected:
        prefix = ' OK '
    else:
        prefix = ' X '
    print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# main() calls the above functions with interesting inputs,
# using the above test() to check if the result is correct or not.
def main():
    # Exercise each exercise function with the provided sample inputs,
    # using test() to report pass/fail for each case.
    print 'verbing'
    test(verbing('hail'), 'hailing')
    test(verbing('swiming'), 'swimingly')
    test(verbing('do'), 'do')
    print
    print 'not_bad'
    test(not_bad('This movie is not so bad'), 'This movie is good')
    test(not_bad('This dinner is not that bad!'), 'This dinner is good!')
    test(not_bad('This tea is not hot'), 'This tea is not hot')
    test(not_bad("It's bad yet not"), "It's bad yet not")
    print
    print 'front_back'
    test(front_back('abcd', 'xy'), 'abxcdy')
    test(front_back('abcde', 'xyz'), 'abcxydez')
    test(front_back('Kitten', 'Donut'), 'KitDontenut')

# Script entry point.
if __name__ == '__main__':
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.