code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
import openerp
from openerp import http, SUPERUSER_ID
from openerp.addons.web.controllers.main import Binary
import functools
from openerp.http import request, serialize_exception as _serialize_exception
from openerp.modules import get_module_resource
from cStringIO import StringIO
db_monodb = http.db_monodb
class BinaryCustom(Binary):
    """Binary controller override that serves a debrandable company logo."""

    @http.route([
        '/web/binary/company_logo',
        '/logo',
        '/logo.png',
    ], type='http', auth="none")
    def company_logo(self, dbname=None, **kw):
        """Serve the current company's logo, falling back to a placeholder.

        Looks up the logged-in user's company logo in the database; when no
        database or no logo is available, serves the placeholder image from
        the configured logo module (default: this module's static assets).
        """
        imgname = 'logo.png'
        default_logo_module = 'web_debranding'
        if request.session.db:
            # BUGFIX: the original discarded the get_param() result, so the
            # configurable placeholder module was never actually used.
            default_logo_module = request.env['ir.config_parameter'].get_param(
                'web_debranding.default_logo_module') or default_logo_module
        placeholder = functools.partial(
            get_module_resource, default_logo_module, 'static', 'src', 'img')
        uid = None
        if request.session.db:
            dbname = request.session.db
            uid = request.session.uid
        elif dbname is None:
            dbname = db_monodb()
        if not uid:
            # No authenticated user: read the logo as the superuser.
            uid = openerp.SUPERUSER_ID
        if not dbname:
            response = http.send_file(placeholder(imgname))
        else:
            try:
                # create an empty registry
                registry = openerp.modules.registry.Registry(dbname)
                with registry.cursor() as cr:
                    cr.execute("""SELECT c.logo_web, c.write_date
FROM res_users u
LEFT JOIN res_company c
ON c.id = u.company_id
WHERE u.id = %s
""", (uid,))
                    row = cr.fetchone()
                    if row and row[0]:
                        # logo_web is base64-encoded in the database.
                        image_data = StringIO(str(row[0]).decode('base64'))
                        response = http.send_file(
                            image_data, filename=imgname, mtime=row[1])
                    else:
                        response = http.send_file(placeholder('nologo.png'))
            except Exception:
                # Any DB failure degrades to the placeholder rather than a 500.
                response = http.send_file(placeholder(imgname))
        return response
|
litnimax/addons-yelizariev
|
web_debranding/controllers/main.py
|
Python
|
lgpl-3.0
| 2,180
|
"""Support for Vera scenes."""
from __future__ import annotations
from typing import Any
import pyvera as veraApi
from homeassistant.components.scene import Scene
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import slugify
from .common import ControllerData, get_controller_data
from .const import VERA_ID_FORMAT
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the sensor config entry."""
    data = get_controller_data(hass, entry)
    entities = []
    for scene in data.scenes:
        entities.append(VeraScene(scene, data))
    async_add_entities(entities, True)
class VeraScene(Scene):
    """A Home Assistant scene backed by a Vera controller scene."""

    def __init__(
        self, vera_scene: veraApi.VeraScene, controller_data: ControllerData
    ) -> None:
        """Initialize the scene."""
        self.vera_scene = vera_scene
        self.controller = controller_data.controller
        scene_name = vera_scene.name
        self._name = scene_name
        # Suffix the device id so equally-named scenes stay unique in HA.
        self.vera_id = VERA_ID_FORMAT.format(
            slugify(scene_name), vera_scene.scene_id
        )

    def update(self) -> None:
        """Refresh the scene status from the controller."""
        self.vera_scene.refresh()

    def activate(self, **kwargs: Any) -> None:
        """Run the scene on the Vera controller."""
        self.vera_scene.activate()

    @property
    def name(self) -> str:
        """Return the name of the scene."""
        return self._name

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return the state attributes of the scene."""
        return {"vera_scene_id": self.vera_scene.vera_scene_id}
|
mezz64/home-assistant
|
homeassistant/components/vera/scene.py
|
Python
|
apache-2.0
| 1,873
|
# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""This module defines the data structures used to represent a grammar.
These are a bit arcane because they are derived from the data
structures used by Python's 'pgen' parser generator.
There's also a table here mapping operators to their names in the
token module; the Python tokenize module reports all operators as the
fallback token code OP, but the parser needs the actual token code.
"""
# Python imports
import pickle
# Local imports
from sphinx.pycode.pgen2 import token
class Grammar(object):
    """Pgen parsing tables conversion class.

    Once initialized, this class supplies the grammar tables for the
    parsing engine implemented by parse.py.  The parsing engine
    accesses the instance variables directly.  The class here does not
    provide initialization of the tables; several subclasses exist to
    do this (see the conv and pgen modules).

    The load() method reads the tables from a pickle file, which is
    much faster than the other ways offered by subclasses.  The pickle
    file is written by calling dump() (after loading the grammar
    tables using a subclass).  The report() method prints a readable
    representation of the tables to stdout, for debugging.

    The instance variables are as follows:

    symbol2number -- a dict mapping symbol names to numbers.  Symbol
                     numbers are always 256 or higher, to distinguish
                     them from token numbers, which are between 0 and
                     255 (inclusive).

    number2symbol -- a dict mapping numbers to symbol names;
                     these two are each other's inverse.

    states        -- a list of DFAs, where each DFA is a list of
                     states, each state is a list of arcs, and each
                     arc is a (i, j) pair where i is a label and j is
                     a state number.  The DFA number is the index into
                     this list.  (This name is slightly confusing.)
                     Final states are represented by a special arc of
                     the form (0, j) where j is its own state number.

    dfas          -- a dict mapping symbol numbers to (DFA, first)
                     pairs, where DFA is an item from the states list
                     above, and first is a set of tokens that can
                     begin this grammar rule (represented by a dict
                     whose values are always 1).

    labels        -- a list of (x, y) pairs where x is either a token
                     number or a symbol number, and y is either None
                     or a string; the strings are keywords.  The label
                     number is the index in this list; label numbers
                     are used to mark state transitions (arcs) in the
                     DFAs.

    start         -- the number of the grammar's start symbol.

    keywords      -- a dict mapping keyword strings to arc labels.

    tokens        -- a dict mapping token numbers to arc labels.
    """

    def __init__(self):
        self.symbol2number = {}
        self.number2symbol = {}
        self.states = []
        self.dfas = {}
        # Label 0 is reserved for the EMPTY label.
        self.labels = [(0, "EMPTY")]
        self.keywords = {}
        self.tokens = {}
        self.symbol2label = {}
        # 256 is the conventional number of the grammar's start symbol.
        self.start = 256

    def dump(self, filename):
        """Dump the grammar tables to a pickle file."""
        # "with" guarantees the file is closed even if pickling raises.
        with open(filename, "wb") as f:
            pickle.dump(self.__dict__, f, 2)

    def load(self, filename):
        """Load the grammar tables from a pickle file."""
        with open(filename, "rb") as f:
            d = pickle.load(f)
        self.__dict__.update(d)

    def report(self):
        """Dump the grammar tables to standard output, for debugging."""
        from pprint import pprint
        # print() with a single parenthesized argument behaves identically
        # under Python 2 and keeps this module importable under Python 3.
        print("s2n")
        pprint(self.symbol2number)
        print("n2s")
        pprint(self.number2symbol)
        print("states")
        pprint(self.states)
        print("dfas")
        pprint(self.dfas)
        print("labels")
        pprint(self.labels)
        print("start %s" % self.start)
# Map from operator to number (since tokenize doesn't do this)
opmap_raw = """
( LPAR
) RPAR
[ LSQB
] RSQB
: COLON
, COMMA
; SEMI
+ PLUS
- MINUS
* STAR
/ SLASH
| VBAR
& AMPER
< LESS
> GREATER
= EQUAL
. DOT
% PERCENT
` BACKQUOTE
{ LBRACE
} RBRACE
@ AT
== EQEQUAL
!= NOTEQUAL
<> NOTEQUAL
<= LESSEQUAL
>= GREATEREQUAL
~ TILDE
^ CIRCUMFLEX
<< LEFTSHIFT
>> RIGHTSHIFT
** DOUBLESTAR
+= PLUSEQUAL
-= MINEQUAL
*= STAREQUAL
/= SLASHEQUAL
%= PERCENTEQUAL
&= AMPEREQUAL
|= VBAREQUAL
^= CIRCUMFLEXEQUAL
<<= LEFTSHIFTEQUAL
>>= RIGHTSHIFTEQUAL
**= DOUBLESTAREQUAL
// DOUBLESLASH
//= DOUBLESLASHEQUAL
-> RARROW
... ELLIPSIS
"""

# Resolve each "operator NAME" pair against the token module.
opmap = {}
for line in opmap_raw.splitlines():
    if not line:
        continue
    op, name = line.split()
    opmap[op] = getattr(token, name)
|
havard024/prego
|
venv/lib/python2.7/site-packages/sphinx/pycode/pgen2/grammar.py
|
Python
|
mit
| 4,968
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Makefile execution.
Multiple `makes` can be run within the same process. Each one has an entirely data.Makefile and .Target
structure, environment, and working directory. Typically they will all share a parallel execution context,
except when a submake specifies -j1 when the parent make is building in parallel.
"""
import os, subprocess, sys, logging, time, traceback, re
from optparse import OptionParser
import data, parserdata, process, util
# TODO: If this ever goes from relocatable package to system-installed, this may need to be
# a configured-in path.
# Absolute path to the make.py entry point, advertised to submakes via $(MAKE).
makepypath = util.normaljoin(os.path.dirname(__file__), '../make.py')
# MAKEFLAGS may begin with bare single-letter options ("ks"); detect that form.
_simpleopts = re.compile(r'^[a-zA-Z]+(\s|$)')

def parsemakeflags(env):
    """
    Parse MAKEFLAGS from the environment into a sequence of command-line arguments.
    """
    makeflags = env.get('MAKEFLAGS', '').strip()
    if not makeflags:
        return []

    if _simpleopts.match(makeflags):
        # Bare option letters: "ks" becomes "-ks".
        makeflags = '-' + makeflags

    opts = []
    curopt = ''
    i = 0
    flaglen = len(makeflags)
    while i < flaglen:
        c = makeflags[i]
        if c.isspace():
            # Whitespace terminates the current argument; swallow the run.
            opts.append(curopt)
            curopt = ''
            i += 1
            while i < flaglen and makeflags[i].isspace():
                i += 1
            continue
        if c == '\\':
            # A backslash escapes the next character (including spaces).
            i += 1
            if i == flaglen:
                raise data.DataError("MAKEFLAGS has trailing backslash")
            c = makeflags[i]
        curopt += c
        i += 1

    if curopt:
        opts.append(curopt)
    return opts
def _version(*args):
print """pymake: GNU-compatible make program
Copyright (C) 2009 The Mozilla Foundation <http://www.mozilla.org/>
This is free software; see the source for copying conditions.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE."""
# Module-level logger shared by the execution machinery below.
_log = logging.getLogger('pymake.execution')
class _MakeContext(object):
    """Drives one complete makefile execution.

    Holds the state for parsing, restart-on-remake, and sequential target
    building; reports completion by invoking ``cb`` with an exit code
    (0 success, 2 failure) through the parallel-execution context.
    """

    def __init__(self, makeflags, makelevel, workdir, context, env, targets, options, ostmts, overrides, cb):
        self.makeflags = makeflags
        self.makelevel = makelevel
        self.workdir = workdir
        self.context = context
        self.env = env
        self.targets = targets
        self.options = options
        self.ostmts = ostmts
        self.overrides = overrides
        self.cb = cb
        self.restarts = 0
        # Kick off the first parse as if the makefiles had just been remade.
        self.remakecb(True)

    def remakecb(self, remade, error=None):
        # Callback for remakemakefiles(): reparse while makefiles keep being
        # rebuilt, then fall through to building the requested targets.
        if error is not None:
            print error
            self.context.defer(self.cb, 2)
            return
        if remade:
            if self.restarts > 0:
                _log.info("make.py[%i]: Restarting makefile parsing", self.makelevel)
            self.makefile = data.Makefile(restarts=self.restarts,
                                          make='%s %s' % (sys.executable.replace('\\', '/'), makepypath.replace('\\', '/')),
                                          makeflags=self.makeflags,
                                          makeoverrides=self.overrides,
                                          workdir=self.workdir,
                                          context=self.context,
                                          env=self.env,
                                          makelevel=self.makelevel,
                                          targets=self.targets,
                                          keepgoing=self.options.keepgoing,
                                          silent=self.options.silent,
                                          justprint=self.options.justprint)
            self.restarts += 1
            try:
                self.ostmts.execute(self.makefile)
                for f in self.options.makefiles:
                    self.makefile.include(f)
                self.makefile.finishparsing()
                # Re-enters this method when the remake pass completes.
                self.makefile.remakemakefiles(self.remakecb)
            except util.MakeError, e:
                print e
                self.context.defer(self.cb, 2)
            return
        if len(self.targets) == 0:
            if self.makefile.defaulttarget is None:
                print "No target specified and no default target found."
                self.context.defer(self.cb, 2)
                return
            _log.info("Making default target %s", self.makefile.defaulttarget)
            self.realtargets = [self.makefile.defaulttarget]
            self.tstack = ['<default-target>']
        else:
            self.realtargets = self.targets
            self.tstack = ['<command-line>']
        # Build targets one at a time; makecb chains to the next.
        self.makefile.gettarget(self.realtargets.pop(0)).make(self.makefile, self.tstack, cb=self.makecb)

    def makecb(self, error, didanything):
        # Completion callback for each target build.
        assert error in (True, False)
        if error:
            self.context.defer(self.cb, 2)
            return
        if not len(self.realtargets):
            # All targets built; report success.
            if self.options.printdir:
                print "make.py[%i]: Leaving directory '%s'" % (self.makelevel, self.workdir)
            sys.stdout.flush()
            self.context.defer(self.cb, 0)
        else:
            self.makefile.gettarget(self.realtargets.pop(0)).make(self.makefile, self.tstack, self.makecb)
def main(args, env, cwd, cb):
    """
    Start a single makefile execution, given a command line, working directory, and environment.
    @param cb a callback to notify with an exit code when make execution is finished.
    """
    try:
        # Submake nesting depth, propagated through the environment.
        makelevel = int(env.get('MAKELEVEL', '0'))
        op = OptionParser()
        op.add_option('-f', '--file', '--makefile',
                      action='append',
                      dest='makefiles',
                      default=[])
        op.add_option('-d',
                      action="store_true",
                      dest="verbose", default=False)
        op.add_option('-k', '--keep-going',
                      action="store_true",
                      dest="keepgoing", default=False)
        op.add_option('--debug-log',
                      dest="debuglog", default=None)
        op.add_option('-C', '--directory',
                      dest="directory", default=None)
        op.add_option('-v', '--version', action="store_true",
                      dest="printversion", default=False)
        op.add_option('-j', '--jobs', type="int",
                      dest="jobcount", default=1)
        op.add_option('-w', '--print-directory', action="store_true",
                      dest="printdir")
        op.add_option('--no-print-directory', action="store_false",
                      dest="printdir", default=True)
        op.add_option('-s', '--silent', action="store_true",
                      dest="silent", default=False)
        op.add_option('-n', '--just-print', '--dry-run', '--recon',
                      action="store_true",
                      dest="justprint", default=False)
        # Parse inherited MAKEFLAGS first, then let explicit args override.
        options, arguments1 = op.parse_args(parsemakeflags(env))
        options, arguments2 = op.parse_args(args, values=options)
        op.destroy()
        arguments = arguments1 + arguments2
        if options.printversion:
            _version()
            cb(0)
            return
        # Rebuild MAKEFLAGS to pass the effective options down to submakes.
        shortflags = []
        longflags = []
        if options.keepgoing:
            shortflags.append('k')
        if options.printdir:
            shortflags.append('w')
        if options.silent:
            shortflags.append('s')
            # Silent mode implies no directory announcements.
            options.printdir = False
        if options.justprint:
            shortflags.append('n')
        loglevel = logging.WARNING
        if options.verbose:
            loglevel = logging.DEBUG
            shortflags.append('d')
        logkwargs = {}
        if options.debuglog:
            logkwargs['filename'] = options.debuglog
            longflags.append('--debug-log=%s' % options.debuglog)
        if options.directory is None:
            workdir = cwd
        else:
            workdir = util.normaljoin(cwd, options.directory)
        if options.jobcount != 1:
            longflags.append('-j%i' % (options.jobcount,))
        makeflags = ''.join(shortflags)
        if len(longflags):
            makeflags += ' ' + ' '.join(longflags)
        logging.basicConfig(level=loglevel, **logkwargs)
        # Shared parallel-execution context (sized by -j).
        context = process.getcontext(options.jobcount)
        if options.printdir:
            print "make.py[%i]: Entering directory '%s'" % (makelevel, workdir)
            sys.stdout.flush()
        if len(options.makefiles) == 0:
            if os.path.exists(util.normaljoin(workdir, 'Makefile')):
                options.makefiles.append('Makefile')
            else:
                print "No makefile found"
                cb(2)
                return
        ostmts, targets, overrides = parserdata.parsecommandlineargs(arguments)
        # _MakeContext starts executing immediately and reports through cb.
        _MakeContext(makeflags, makelevel, workdir, context, env, targets, options, ostmts, overrides, cb)
    except (util.MakeError), e:
        print e
        if options.printdir:
            print "make.py[%i]: Leaving directory '%s'" % (makelevel, workdir)
        sys.stdout.flush()
        cb(2)
        return
|
lizh06/pymake
|
pymake/command.py
|
Python
|
mit
| 9,665
|
# -*- coding: utf-8 -*-
"""
Scales: Classes to define Vega scales
"""
from .core import grammar, GrammarClass
from ._compat import str_types
class DataRef(GrammarClass):
    """Definitions for how data is referenced by scales

    Data can be referenced in multiple ways, and sometimes it makes sense to
    reference multiple data fields at once.
    """
    # NOTE: the decorated functions below are grammar property declarations;
    # their docstrings document the Vega property, and @grammar supplies the
    # actual getter/setter with type validation.
    @grammar(str_types)
    def data(value):
        """string : Name of data-set containing the domain values"""

    @grammar((list,) + str_types)
    def field(value):
        """string or list of strings : Reference to desired data field(s)

        If multiple fields are given, then the values of all fields are
        included in the domain.
        """
class Scale(GrammarClass):
    """Definitions for mapping from data space to visual space

    Scales determine the way in which data is mapped from a data space (such
    as numbers, time stamps, etc.) to a visual space (length of a line,
    height of a bar, etc.), for both independent and dependent variables.
    """
    # NOTE: each decorated function below declares one Vega scale property;
    # @grammar supplies the validated getter/setter and the docstring is the
    # property's documentation.
    @grammar(str_types)
    def name(value):
        """string : Unique name for the scale

        This is used for referencing by other components (mainly ``Mark``).
        """

    @grammar(str_types)
    def type(value):
        """string : Type of the scale

        Valid types are as follows:

        * ``'ordinal'``: ordinal scale types
        * ``'time'`` or ``'utc'``: time scale types
        * ``'linear'``, ``'log'``, ``'pow'``, ``'sqrt'``, ``'quantile'``,
          ``'quantize'``, and ``'threshold'``: quantitative scale types

        For time scales, the value should be a Javascript-style numeric
        value of seconds.  ``'time'`` implies the value is in local time.

        If unspecified, then the scale is assumed to be linear. See the d3
        documentation for scale type details.
        """

    @grammar((list, DataRef))
    def domain(value):
        """list or DataRef : Domain of the scale
        """

    @grammar(grammar_type=(float, int, DataRef), grammar_name='domainMin')
    def domain_min(value):
        """float, int, or DataRef : Minimum domain value

        Only used for quantitative/time scales. This takes precedence over
        the minimum of the ``domain`` property.
        """

    @grammar(grammar_type=(float, int, DataRef),
             grammar_name='domainMax')
    def domain_max(value):
        """float, int, or DataRef : Maximum domain value

        Only used for quantitative/time scales. This takes precedence over
        the maximum of the ``domain`` property.
        """

    @grammar((list,) + str_types)
    def range(value):
        """list or string : Range of the scale

        For quantitative scales, the range may be specified as a two-element
        list of min/max values. For ordinal scales, the range should be a
        list of output values mapped to the input values.

        String values may be used to automatically set a range:
            - ``'width'`` - Set the range to the width of the visualization
            - ``'height'`` - Set the range to the height of the visualization
            - ``'shapes'`` - Equivalent to the symbol types ``['circle',
              'cross', 'diamond', 'square', 'triangle-down',
              'triangle-up']``
            - ``'category10'`` - A pre-determined 10-color pallet
            - ``'category20'`` - A pre-determined 20-color pallet
        """

    @grammar(grammar_type=(float, int, DataRef), grammar_name='rangeMin')
    def range_min(value):
        """float, int, or DataRef : Minimum range value

        Only used for quantitative/time scales. This takes precedence over
        the minimum of the ``range`` property.
        """

    @grammar(grammar_type=(float, int, DataRef), grammar_name='rangeMax')
    def range_max(value):
        """float, int, or DataRef : Maximum range value

        Only used for quantitative/time scales. This takes precedence over
        the maximum of the ``range`` property.
        """

    @grammar(bool)
    def reverse(value):
        """boolean : If True, flip the scale range"""

    @grammar(bool)
    def round(value):
        """boolean : If True, numeric output values are rounded to
        integers"""

    @grammar(bool)
    def points(value):
        """boolean : If True, distribute ordinal values over evenly spaced
        points between ``range_min`` and ``range_max``

        Ignored for non-ordinal scales.
        """

    @grammar(bool)
    def clamp(value):
        """boolean : If True, values that exceed the domain are clamped to
        within the domain

        Ignored for ordinal scales.
        """

    @grammar((bool,) + str_types)
    def nice(value):
        """boolean or string : scale the domain to a more human-friendly set

        If the scale ``type`` is ``'time'`` or ``'utc'``, then the value
        should be one of ``'second'``, ``'minute'``, ``'hour'``, ``'day'``,
        ``'week'``, ``'month'``, or ``'year'``.

        If the scale ``type`` is a quantitative scale, then the value should
        be a boolean. The input values are rounded to a more human-friendly
        value. The details of the rounding are in the d3 documentation.

        Ignored for ordinal scales.
        """

    @grammar((float, int))
    def exponent(value):
        """float or int : Exponent for ``'pow'`` scale types

        Ignored for all scale types other than ``'pow'``.
        """

    @grammar(bool)
    def zero(value):
        """boolean : If True, include zero in the domain

        Only valid for quantitative scale types. This is useful if the
        domain is defined as a DataRef that may not include 0 exactly.
        """

    @grammar((float, int))
    def padding(value):
        """float or int : Ordinal element padding

        Only valid for ordinal scale types
        """
|
taohaoge/vincent
|
vincent/scales.py
|
Python
|
mit
| 5,858
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Joseph Callen <jcallen () csc.com>
# Copyright: (c) 2017-2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible metadata describing module maturity and support channel.
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_dvs_portgroup
short_description: Create or remove a Distributed vSwitch portgroup.
description:
- Create or remove a Distributed vSwitch portgroup.
version_added: 2.0
author:
- Joseph Callen (@jcpowermac)
- Philippe Dellaert (@pdellaert) <philippe@dellaert.org>
notes:
- Tested on vSphere 5.5
- Tested on vSphere 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
portgroup_name:
description:
- The name of the portgroup that is to be created or deleted.
required: True
type: str
switch_name:
description:
- The name of the distributed vSwitch the port group should be created on.
required: True
type: str
vlan_id:
description:
- The VLAN ID that should be configured with the portgroup, use 0 for no VLAN.
- 'If C(vlan_trunk) is configured to be I(true), this can be a combination of multiple ranges and numbers, example: 1-200, 205, 400-4094.'
- The valid C(vlan_id) range is from 0 to 4094. Overlapping ranges are allowed.
required: True
type: str
num_ports:
description:
- The number of ports the portgroup should contain.
required: True
type: int
portgroup_type:
description:
- See VMware KB 1022312 regarding portgroup types.
required: True
choices:
- 'earlyBinding'
- 'lateBinding'
- 'ephemeral'
type: str
state:
description:
- Determines if the portgroup should be present or not.
required: True
type: str
choices:
- 'present'
- 'absent'
version_added: '2.5'
vlan_trunk:
description:
- Indicates whether this is a VLAN trunk or not.
required: False
default: False
type: bool
version_added: '2.5'
network_policy:
description:
- Dictionary which configures the different security values for portgroup.
- 'Valid attributes are:'
- '- C(promiscuous) (bool): indicates whether promiscuous mode is allowed. (default: false)'
- '- C(forged_transmits) (bool): indicates whether forged transmits are allowed. (default: false)'
- '- C(mac_changes) (bool): indicates whether mac changes are allowed. (default: false)'
required: False
version_added: '2.5'
default: {
promiscuous: False,
forged_transmits: False,
mac_changes: False,
}
type: dict
teaming_policy:
description:
- Dictionary which configures the different teaming values for portgroup.
- 'Valid attributes are:'
- '- C(load_balance_policy) (string): Network adapter teaming policy. (default: loadbalance_srcid)'
- ' - choices: [ loadbalance_ip, loadbalance_srcmac, loadbalance_srcid, loadbalance_loadbased, failover_explicit]'
- ' - "loadbalance_loadbased" is available from version 2.6 and onwards'
- '- C(inbound_policy) (bool): Indicate whether or not the teaming policy is applied to inbound frames as well. (default: False)'
- '- C(notify_switches) (bool): Indicate whether or not to notify the physical switch if a link fails. (default: True)'
- '- C(rolling_order) (bool): Indicate whether or not to use a rolling policy when restoring links. (default: False)'
required: False
version_added: '2.5'
default: {
'notify_switches': True,
'load_balance_policy': 'loadbalance_srcid',
'inbound_policy': False,
'rolling_order': False
}
type: dict
port_policy:
description:
- Dictionary which configures the advanced policy settings for the portgroup.
- 'Valid attributes are:'
- '- C(block_override) (bool): indicates if the block policy can be changed per port. (default: true)'
- '- C(ipfix_override) (bool): indicates if the ipfix policy can be changed per port. (default: false)'
- '- C(live_port_move) (bool): indicates if a live port can be moved in or out of the portgroup. (default: false)'
- '- C(network_rp_override) (bool): indicates if the network resource pool can be changed per port. (default: false)'
- '- C(port_config_reset_at_disconnect) (bool): indicates if the configuration of a port is reset automatically after disconnect. (default: true)'
- '- C(security_override) (bool): indicates if the security policy can be changed per port. (default: false)'
- '- C(shaping_override) (bool): indicates if the shaping policy can be changed per port. (default: false)'
- '- C(traffic_filter_override) (bool): indicates if the traffic filter can be changed per port. (default: false)'
- '- C(uplink_teaming_override) (bool): indicates if the uplink teaming policy can be changed per port. (default: false)'
- '- C(vendor_config_override) (bool): indicates if the vendor config can be changed per port. (default: false)'
- '- C(vlan_override) (bool): indicates if the vlan can be changed per port. (default: false)'
required: False
version_added: '2.5'
default: {
'traffic_filter_override': False,
'network_rp_override': False,
'live_port_move': False,
'security_override': False,
'vendor_config_override': False,
'port_config_reset_at_disconnect': True,
'uplink_teaming_override': False,
'block_override': True,
'shaping_override': False,
'vlan_override': False,
'ipfix_override': False
}
type: dict
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Create vlan portgroup
vmware_dvs_portgroup:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
portgroup_name: vlan-123-portrgoup
switch_name: dvSwitch
vlan_id: 123
num_ports: 120
portgroup_type: earlyBinding
state: present
delegate_to: localhost
- name: Create vlan trunk portgroup
vmware_dvs_portgroup:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
portgroup_name: vlan-trunk-portrgoup
switch_name: dvSwitch
vlan_id: 1-1000, 1005, 1100-1200
vlan_trunk: True
num_ports: 120
portgroup_type: earlyBinding
state: present
delegate_to: localhost
- name: Create no-vlan portgroup
vmware_dvs_portgroup:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
portgroup_name: no-vlan-portrgoup
switch_name: dvSwitch
vlan_id: 0
num_ports: 120
portgroup_type: earlyBinding
state: present
delegate_to: localhost
- name: Create vlan portgroup with all security and port policies
vmware_dvs_portgroup:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
portgroup_name: vlan-123-portrgoup
switch_name: dvSwitch
vlan_id: 123
num_ports: 120
portgroup_type: earlyBinding
state: present
network_policy:
promiscuous: yes
forged_transmits: yes
mac_changes: yes
port_policy:
block_override: yes
ipfix_override: yes
live_port_move: yes
network_rp_override: yes
port_config_reset_at_disconnect: yes
security_override: yes
shaping_override: yes
traffic_filter_override: yes
uplink_teaming_override: yes
vendor_config_override: yes
vlan_override: yes
delegate_to: localhost
'''
try:
from pyVmomi import vim, vmodl
except ImportError as e:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import (PyVmomi, find_dvs_by_name, find_dvspg_by_name,
vmware_argument_spec, wait_for_task)
class VMwareDvsPortgroup(PyVmomi):
    def __init__(self, module):
        """Store the AnsibleModule and initialize lookup placeholders."""
        super(VMwareDvsPortgroup, self).__init__(module)
        # Both are resolved later in check_dvspg_state().
        self.dvs_portgroup = None
        self.dv_switch = None
def create_vlan_list(self):
vlan_id_list = []
for vlan_id_splitted in self.module.params['vlan_id'].split(','):
vlans = vlan_id_splitted.split('-')
if len(vlans) > 2:
self.module.fail_json(msg="Invalid VLAN range %s." % vlan_id_splitted)
if len(vlans) == 2:
vlan_id_start = vlans[0].strip()
vlan_id_end = vlans[1].strip()
if not vlan_id_start.isdigit():
self.module.fail_json(msg="Invalid VLAN %s." % vlan_id_start)
if not vlan_id_end.isdigit():
self.module.fail_json(msg="Invalid VLAN %s." % vlan_id_end)
vlan_id_start = int(vlan_id_start)
vlan_id_end = int(vlan_id_end)
if vlan_id_start not in range(0, 4095) or vlan_id_end not in range(0, 4095):
self.module.fail_json(msg="vlan_id range %s specified is incorrect. The valid vlan_id range is from 0 to 4094." % vlan_id_splitted)
vlan_id_list.append((vlan_id_start, vlan_id_end))
else:
vlan_id = vlans[0].strip()
if not vlan_id.isdigit():
self.module.fail_json(msg="Invalid VLAN %s." % vlan_id)
vlan_id = int(vlan_id)
vlan_id_list.append((vlan_id, vlan_id))
vlan_id_list.sort()
return vlan_id_list
    def build_config(self):
        """Build a DVPortgroup ConfigSpec from the module parameters."""
        config = vim.dvs.DistributedVirtualPortgroup.ConfigSpec()
        # Basic config
        config.name = self.module.params['portgroup_name']
        config.numPorts = self.module.params['num_ports']
        # Default port config
        config.defaultPortConfig = vim.dvs.VmwareDistributedVirtualSwitch.VmwarePortConfigPolicy()
        if self.module.params['vlan_trunk']:
            # Trunk: vlan_id is a list of ranges converted to NumericRange objects.
            config.defaultPortConfig.vlan = vim.dvs.VmwareDistributedVirtualSwitch.TrunkVlanSpec()
            config.defaultPortConfig.vlan.vlanId = list(map(lambda x: vim.NumericRange(start=x[0], end=x[1]), self.create_vlan_list()))
        else:
            # Single VLAN id (0 means untagged).
            config.defaultPortConfig.vlan = vim.dvs.VmwareDistributedVirtualSwitch.VlanIdSpec()
            config.defaultPortConfig.vlan.vlanId = int(self.module.params['vlan_id'])
        config.defaultPortConfig.vlan.inherited = False
        config.defaultPortConfig.securityPolicy = vim.dvs.VmwareDistributedVirtualSwitch.SecurityPolicy()
        config.defaultPortConfig.securityPolicy.allowPromiscuous = vim.BoolPolicy(value=self.module.params['network_policy']['promiscuous'])
        config.defaultPortConfig.securityPolicy.forgedTransmits = vim.BoolPolicy(value=self.module.params['network_policy']['forged_transmits'])
        config.defaultPortConfig.securityPolicy.macChanges = vim.BoolPolicy(value=self.module.params['network_policy']['mac_changes'])
        # Teaming Policy
        teamingPolicy = vim.dvs.VmwareDistributedVirtualSwitch.UplinkPortTeamingPolicy()
        teamingPolicy.policy = vim.StringPolicy(value=self.module.params['teaming_policy']['load_balance_policy'])
        teamingPolicy.reversePolicy = vim.BoolPolicy(value=self.module.params['teaming_policy']['inbound_policy'])
        teamingPolicy.notifySwitches = vim.BoolPolicy(value=self.module.params['teaming_policy']['notify_switches'])
        teamingPolicy.rollingOrder = vim.BoolPolicy(value=self.module.params['teaming_policy']['rolling_order'])
        config.defaultPortConfig.uplinkTeamingPolicy = teamingPolicy
        # PG policy (advanced_policy)
        config.policy = vim.dvs.VmwareDistributedVirtualSwitch.VMwarePortgroupPolicy()
        config.policy.blockOverrideAllowed = self.module.params['port_policy']['block_override']
        config.policy.ipfixOverrideAllowed = self.module.params['port_policy']['ipfix_override']
        config.policy.livePortMovingAllowed = self.module.params['port_policy']['live_port_move']
        config.policy.networkResourcePoolOverrideAllowed = self.module.params['port_policy']['network_rp_override']
        config.policy.portConfigResetAtDisconnect = self.module.params['port_policy']['port_config_reset_at_disconnect']
        config.policy.securityPolicyOverrideAllowed = self.module.params['port_policy']['security_override']
        config.policy.shapingOverrideAllowed = self.module.params['port_policy']['shaping_override']
        config.policy.trafficFilterOverrideAllowed = self.module.params['port_policy']['traffic_filter_override']
        config.policy.uplinkTeamingOverrideAllowed = self.module.params['port_policy']['uplink_teaming_override']
        config.policy.vendorConfigOverrideAllowed = self.module.params['port_policy']['vendor_config_override']
        config.policy.vlanOverrideAllowed = self.module.params['port_policy']['vlan_override']
        # PG Type
        config.type = self.module.params['portgroup_type']
        return config
    def process_state(self):
        """Dispatch to the create/update/destroy handler for the desired state."""
        # Outer key: requested state; inner key: current state on the switch.
        dvspg_states = {
            'absent': {
                'present': self.state_destroy_dvspg,
                'absent': self.state_exit_unchanged,
            },
            'present': {
                'update': self.state_update_dvspg,
                'present': self.state_exit_unchanged,
                'absent': self.state_create_dvspg,
            }
        }
        try:
            dvspg_states[self.module.params['state']][self.check_dvspg_state()]()
        except vmodl.RuntimeFault as runtime_fault:
            self.module.fail_json(msg=runtime_fault.msg)
        except vmodl.MethodFault as method_fault:
            self.module.fail_json(msg=method_fault.msg)
        except Exception as e:
            self.module.fail_json(msg=str(e))
def update_port_group(self):
config = self.build_config()
config.configVersion = self.dvs_portgroup.config.configVersion
task = self.dvs_portgroup.ReconfigureDVPortgroup_Task(config)
changed, result = wait_for_task(task)
return changed, result
def create_port_group(self):
config = self.build_config()
task = self.dv_switch.AddDVPortgroup_Task([config])
changed, result = wait_for_task(task)
return changed, result
def state_destroy_dvspg(self):
changed = True
result = None
if not self.module.check_mode:
task = self.dvs_portgroup.Destroy_Task()
changed, result = wait_for_task(task)
self.module.exit_json(changed=changed, result=str(result))
    def state_exit_unchanged(self):
        """No-op handler: report that nothing needed to change."""
        self.module.exit_json(changed=False)
def state_update_dvspg(self):
changed = True
result = None
if not self.module.check_mode:
changed, result = self.update_port_group()
self.module.exit_json(changed=changed, result=str(result))
def state_create_dvspg(self):
changed = True
result = None
if not self.module.check_mode:
changed, result = self.create_port_group()
self.module.exit_json(changed=changed, result=str(result))
def check_dvspg_state(self):
self.dv_switch = find_dvs_by_name(self.content, self.module.params['switch_name'])
if self.dv_switch is None:
self.module.fail_json(msg="A distributed virtual switch with name %s does not exist" % self.module.params['switch_name'])
self.dvs_portgroup = find_dvspg_by_name(self.dv_switch, self.module.params['portgroup_name'])
if self.dvs_portgroup is None:
return 'absent'
# Check config
# Basic config
if self.dvs_portgroup.config.numPorts != self.module.params['num_ports']:
return 'update'
# Default port config
defaultPortConfig = self.dvs_portgroup.config.defaultPortConfig
if self.module.params['vlan_trunk']:
if not isinstance(defaultPortConfig.vlan, vim.dvs.VmwareDistributedVirtualSwitch.TrunkVlanSpec):
return 'update'
if map(lambda x: (x.start, x.end), defaultPortConfig.vlan.vlanId) != self.create_vlan_list():
return 'update'
else:
if not isinstance(defaultPortConfig.vlan, vim.dvs.VmwareDistributedVirtualSwitch.VlanIdSpec):
return 'update'
if defaultPortConfig.vlan.vlanId != int(self.module.params['vlan_id']):
return 'update'
if defaultPortConfig.securityPolicy.allowPromiscuous.value != self.module.params['network_policy']['promiscuous'] or \
defaultPortConfig.securityPolicy.forgedTransmits.value != self.module.params['network_policy']['forged_transmits'] or \
defaultPortConfig.securityPolicy.macChanges.value != self.module.params['network_policy']['mac_changes']:
return 'update'
# Teaming Policy
teamingPolicy = self.dvs_portgroup.config.defaultPortConfig.uplinkTeamingPolicy
if teamingPolicy.policy.value != self.module.params['teaming_policy']['load_balance_policy'] or \
teamingPolicy.reversePolicy.value != self.module.params['teaming_policy']['inbound_policy'] or \
teamingPolicy.notifySwitches.value != self.module.params['teaming_policy']['notify_switches'] or \
teamingPolicy.rollingOrder.value != self.module.params['teaming_policy']['rolling_order']:
return 'update'
# PG policy (advanced_policy)
policy = self.dvs_portgroup.config.policy
if policy.blockOverrideAllowed != self.module.params['port_policy']['block_override'] or \
policy.ipfixOverrideAllowed != self.module.params['port_policy']['ipfix_override'] or \
policy.livePortMovingAllowed != self.module.params['port_policy']['live_port_move'] or \
policy.networkResourcePoolOverrideAllowed != self.module.params['port_policy']['network_rp_override'] or \
policy.portConfigResetAtDisconnect != self.module.params['port_policy']['port_config_reset_at_disconnect'] or \
policy.securityPolicyOverrideAllowed != self.module.params['port_policy']['security_override'] or \
policy.shapingOverrideAllowed != self.module.params['port_policy']['shaping_override'] or \
policy.trafficFilterOverrideAllowed != self.module.params['port_policy']['traffic_filter_override'] or \
policy.uplinkTeamingOverrideAllowed != self.module.params['port_policy']['uplink_teaming_override'] or \
policy.vendorConfigOverrideAllowed != self.module.params['port_policy']['vendor_config_override'] or \
policy.vlanOverrideAllowed != self.module.params['port_policy']['vlan_override']:
return 'update'
# PG Type
if self.dvs_portgroup.config.type != self.module.params['portgroup_type']:
return 'update'
return 'present'
def main():
    """Module entry point: declare the argument spec and run the state machine."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        dict(
            portgroup_name=dict(required=True, type='str'),
            switch_name=dict(required=True, type='str'),
            vlan_id=dict(required=True, type='str'),
            num_ports=dict(required=True, type='int'),
            portgroup_type=dict(required=True, choices=['earlyBinding', 'lateBinding', 'ephemeral'], type='str'),
            state=dict(required=True, choices=['present', 'absent'], type='str'),
            vlan_trunk=dict(type='bool', default=False),
            # Security policy for the portgroup's default port configuration.
            network_policy=dict(
                type='dict',
                options=dict(
                    promiscuous=dict(type='bool', default=False),
                    forged_transmits=dict(type='bool', default=False),
                    mac_changes=dict(type='bool', default=False)
                ),
                default=dict(
                    promiscuous=False,
                    forged_transmits=False,
                    mac_changes=False
                )
            ),
            # Uplink teaming / load balancing settings.
            teaming_policy=dict(
                type='dict',
                options=dict(
                    inbound_policy=dict(type='bool', default=False),
                    notify_switches=dict(type='bool', default=True),
                    rolling_order=dict(type='bool', default=False),
                    load_balance_policy=dict(type='str',
                                             default='loadbalance_srcid',
                                             choices=[
                                                 'loadbalance_ip',
                                                 'loadbalance_srcmac',
                                                 'loadbalance_srcid',
                                                 'loadbalance_loadbased',
                                                 'failover_explicit',
                                             ],
                                             )
                ),
                default=dict(
                    inbound_policy=False,
                    notify_switches=True,
                    rolling_order=False,
                    load_balance_policy='loadbalance_srcid',
                ),
            ),
            # Per-port override permissions (advanced policy).
            port_policy=dict(
                type='dict',
                options=dict(
                    block_override=dict(type='bool', default=True),
                    ipfix_override=dict(type='bool', default=False),
                    live_port_move=dict(type='bool', default=False),
                    network_rp_override=dict(type='bool', default=False),
                    port_config_reset_at_disconnect=dict(type='bool', default=True),
                    security_override=dict(type='bool', default=False),
                    shaping_override=dict(type='bool', default=False),
                    traffic_filter_override=dict(type='bool', default=False),
                    uplink_teaming_override=dict(type='bool', default=False),
                    vendor_config_override=dict(type='bool', default=False),
                    vlan_override=dict(type='bool', default=False)
                ),
                default=dict(
                    block_override=True,
                    ipfix_override=False,
                    live_port_move=False,
                    network_rp_override=False,
                    port_config_reset_at_disconnect=True,
                    security_override=False,
                    shaping_override=False,
                    traffic_filter_override=False,
                    uplink_teaming_override=False,
                    vendor_config_override=False,
                    vlan_override=False
                )
            )
        )
    )

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    vmware_dvs_portgroup = VMwareDvsPortgroup(module)
    vmware_dvs_portgroup.process_state()


if __name__ == '__main__':
    main()
|
roadmapper/ansible
|
lib/ansible/modules/cloud/vmware/vmware_dvs_portgroup.py
|
Python
|
gpl-3.0
| 23,760
|
# Copyright (c) 2014-2015, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class Criterion:
    """Base class holding attributes common to all criterion types."""

    @classmethod
    def allowedValues(cls):
        """Return the list of lexical states this criterion may take.

        :return: the ``_allowedValues`` attribute, a list of strings.
        :rtype: string list

        The ``_allowedValues`` class attribute is injected by
        CriterionClassFactory when the concrete subclasses are generated
        dynamically.
        """
        return cls._allowedValues

    @property
    def noValue(self):
        # Sentinel string meaning "no state selected".
        return '0'
class InvalidCriterionException(Exception):
    """Exception raised in case of problem with a criterion.

    Fix: forward the message to ``Exception.__init__`` so that ``e.args``
    is populated (correct repr, pickling, and generic handlers); the
    formatted ``__str__`` output is unchanged.
    """

    def __init__(self, msg):
        super(InvalidCriterionException, self).__init__(msg)
        self.__msg = msg

    def __str__(self):
        return "Invalid Criterion Error : " + self.__msg
|
soaresm/parameter-framework
|
tools/clientSimulator/criterion/Criterion.py
|
Python
|
bsd-3-clause
| 2,351
|
__version__ = (0, 3, 5)
import logging
import gevent
log = logging.getLogger(__name__)
def socketio_manage(environ, namespaces, request=None, error_handler=None,
                    json_loads=None, json_dumps=None):
    """Main SocketIO management function; call it from your framework's view.

    ``environ`` is the WSGI environ; the underlying server stores the
    virtual Socket object in it under the ``'socketio'`` key, and the
    environ is attached to both the Socket and Namespace objects.

    ``namespaces`` maps namespace strings to ``BaseNamespace`` subclasses;
    the empty string (``Socket.GLOBAL_NS``) is the global namespace, e.g.::

        namespaces={'': GlobalNamespace, '/chat': ChatNamespace}

    ``request`` is optional and framework-specific: it is simply attached
    to the Socket and Namespace objects (``self.request``) and never used
    by gevent-socketio itself.

    ``error_handler`` overrides
    :func:`socketio.virtsocket.default_error_handler` (same signature).
    ``json_loads`` / ``json_dumps`` override the default ``json.loads`` /
    ``json.dumps`` for all sockets created through this manager and all
    namespaces inside.

    This call blocks the current view/controller to run the socket's
    receive loop and dispatch incoming messages to your namespaces.

    NOTE: this function runs *only once* per socket opening, even with a
    long-polling transport: subsequent polling requests are hooked at the
    server level against the active ``Socket``, so later ``request`` /
    ``environ`` objects never reach your namespaces.  Cookie-based sessions
    (e.g. Beaker) are therefore opened once at socket open and not closed
    until the socket closes — open/close them yourself if you need them in
    sync with your other GET/POST calls.
    """
    socket = environ['socketio']
    socket._set_environ(environ)
    socket._set_namespaces(namespaces)

    # Optional collaborators are only installed when provided.
    optional_setters = (
        (request, socket._set_request),
        (error_handler, socket._set_error_handler),
        (json_loads, socket._set_json_loads),
        (json_dumps, socket._set_json_dumps),
    )
    for value, setter in optional_setters:
        if value:
            setter(value)

    # Block until the receiver-loop greenlet finishes.
    gevent.joinall([socket._spawn_receiver_loop()])

    # TODO: double check, what happens to the WSGI request here ? it vanishes ?
    return
|
grokcore/dev.lexycross
|
wordsmithed/src/gevent-socketio/socketio/__init__.py
|
Python
|
mit
| 3,463
|
from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('details')
class PluginDetails(object):
    """Built-in plugin that logs a summary of entries produced and accepted by a task."""

    def on_task_start(self, task, config):
        # Make a flag for tasks to declare if it is ok not to produce entries
        task.no_entries_ok = False

    # Very low priority so every input plugin has already run by now.
    @plugin.priority(-512)
    def on_task_input(self, task, config):
        if not task.entries:
            if task.no_entries_ok:
                log.verbose('Task didn\'t produce any entries.')
            else:
                # NOTE: the backslash continuation makes the following line's
                # leading whitespace part of the logged message.
                log.warning('Task didn\'t produce any entries.\
                    This is likely due to a mis-configured or non-functional input.')
        else:
            log.verbose('Produced %s entries.' % (len(task.entries)))

    @plugin.priority(-512)
    def on_task_download(self, task, config):
        # Needs to happen as the first in download, so it runs after urlrewrites
        # and IMDB queue acceptance.
        log.verbose('Summary - Accepted: %s (Rejected: %s Undecided: %s Failed: %s)' %
                    (len(task.accepted), len(task.rejected),
                     len(task.entries) - len(task.accepted), len(task.failed)))
class NoEntriesOk(object):
    """Allows manually silencing the warning message for tasks that regularly produce no entries."""

    # Task option is a plain boolean: `no_entries_ok: yes`
    schema = {'type': 'boolean'}

    # Run after details plugin task_start
    @plugin.priority(127)
    def on_task_start(self, task, config):
        # Overrides the False default set by PluginDetails.on_task_start.
        task.no_entries_ok = config
@event('plugin.register')
def register_plugin():
    """Register both plugins with the flexget plugin system."""
    # 'details' is builtin so it runs for every task without explicit config.
    registrations = (
        (PluginDetails, 'details', dict(builtin=True, api_ver=2)),
        (NoEntriesOk, 'no_entries_ok', dict(api_ver=2)),
    )
    for plugin_class, plugin_name, kwargs in registrations:
        plugin.register(plugin_class, plugin_name, **kwargs)
|
ratoaq2/Flexget
|
flexget/plugins/plugin_verbose_details.py
|
Python
|
mit
| 1,696
|
import serial
import random, time, math

# Windows named-pipe style path for a virtual COM port (com0com "CNCB0").
port = "\\\\.\\CNCB0"
ser = serial.Serial(port, 38400)

incycle = 0

# Bug fix: the original ser.close() sat after an infinite `while True:` loop
# and was unreachable; the port was never released.  Wrapping the loop in
# try/finally guarantees the port is closed on Ctrl-C or any error.
try:
    # Emit one byte every 20 ms: a sine wave in [0, ~160] with random jitter.
    while True:
        t = int(random.randint(60, 80) * (1 + math.sin(incycle)))
        ser.write(chr(t))  # NOTE(review): Python 2 str; py3 would need bytes([t])
        time.sleep(0.02)
        incycle += 0.01
        if incycle >= 2 * math.pi:
            incycle = 0
finally:
    ser.close()
|
xzmagic/code-for-blog
|
2009/plotting_data_monitor/sender_sim.py
|
Python
|
unlicense
| 344
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""AbsoluteValue bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.contrib.distributions.python.ops.bijectors.absolute_value_impl import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
# Public API of this shim module; everything else pulled in by the wildcard
# import above is stripped by remove_undocumented below.
_allowed_symbols = ["AbsoluteValue"]

remove_undocumented(__name__, _allowed_symbols)
|
dyoung418/tensorflow
|
tensorflow/contrib/distributions/python/ops/bijectors/absolute_value.py
|
Python
|
apache-2.0
| 1,160
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
import re
import logging
_logger = logging.getLogger(__name__)
class partner_massmail_wizard(osv.osv_memory):
    """ Mass Mailing """
    _name = "partner.massmail.wizard"
    _description = "Mass Mailing"
    _columns = {
        'email_from': fields.char("Sender's email", size=256, required=True),
        'subject': fields.char('Subject', size=256,required=True),
        'text': fields.text('Message',required=True),
    }

    def mass_mail_send(self, cr, uid, ids, context):
        """Send the given mail to all partners whose ids
           are present in ``context['active_ids']``, to
           all addresses with an email set.

           :param dict context: ``context['active_ids']``
                                should contain the list of
                                ids of the partners who should
                                receive the mail.
           :return: dict with key ``email_sent`` holding the number of
                    emails actually delivered.
        """
        nbr = 0
        partner_pool = self.pool.get('res.partner')
        # Wizard is a singleton: only the first record's values are used.
        data = self.browse(cr, uid, ids[0], context=context)
        event_pool = self.pool.get('res.partner.event')
        assert context['active_model'] == 'res.partner', 'This wizard must be started on a list of Partners'
        active_ids = context.get('active_ids', [])
        partners = partner_pool.browse(cr, uid, active_ids, context)
        # Heuristic: if the body looks like it contains HTML tags, send as html.
        subtype = 'plain'
        if re.search('(<(pre)|[pubi].*>)', data.text):
            subtype = 'html'
        ir_mail_server = self.pool.get('ir.mail_server')
        # Deduplicate addresses so a shared email only receives one copy.
        emails_seen = set()
        for partner in partners:
            for adr in partner.address:
                if adr.email and not adr.email in emails_seen:
                    try:
                        emails_seen.add(adr.email)
                        name = adr.name or partner.name
                        to = '"%s" <%s>' % (name, adr.email)
                        msg = ir_mail_server.build_email(data.email_from, [to], data.subject, data.text, subtype=subtype)
                        if ir_mail_server.send_email(cr, uid, msg):
                            nbr += 1
                    except Exception:
                        #ignore failed deliveries, will be logged anyway
                        pass
            # NOTE(review): an event is recorded per partner even when every
            # delivery for that partner failed — confirm this is intended.
            event_pool.create(cr, uid,
                    {'name': 'Email(s) sent through mass mailing',
                     'partner_id': partner.id,
                     'description': data.text })
        _logger.info('Mass-mailing wizard sent %s emails', nbr)
        return {'email_sent': nbr}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
BorgERP/borg-erp-6of3
|
server/openerp/addons/base/res/wizard/partner_wizard_massmail.py
|
Python
|
agpl-3.0
| 3,567
|
# Copyright 2012 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for host operations.
"""
import datetime
import os
import platform
import time
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import units
from nova.compute import arch
from nova.compute import hv_type
from nova.compute import vm_mode
from nova.i18n import _
from nova.virt.hyperv import constants
from nova.virt.hyperv import utilsfactory
CONF = cfg.CONF
CONF.import_opt('my_ip', 'nova.netconf')
LOG = logging.getLogger(__name__)
class HostOps(object):
    """Hyper-V host inventory, resource reporting and power operations."""

    def __init__(self):
        # Platform-appropriate utils are resolved by the factory (WMI-backed).
        self._hostutils = utilsfactory.get_hostutils()
        self._pathutils = utilsfactory.get_pathutils()

    def _get_cpu_info(self):
        """Get the CPU information.

        :returns: A dictionary containing the main properties
                  of the central processor in the hypervisor.
        """
        cpu_info = dict()
        processors = self._hostutils.get_cpus_info()

        # Map the WMI Win32_Processor architecture code to a readable name.
        w32_arch_dict = constants.WMI_WIN32_PROCESSOR_ARCHITECTURE
        cpu_info['arch'] = w32_arch_dict.get(processors[0]['Architecture'],
                                             'Unknown')
        cpu_info['model'] = processors[0]['Name']
        cpu_info['vendor'] = processors[0]['Manufacturer']

        topology = dict()
        topology['sockets'] = len(processors)
        topology['cores'] = processors[0]['NumberOfCores']
        # NOTE(review): Python 2 integer division assumed here; on Python 3
        # this would produce a float — confirm target interpreter.
        topology['threads'] = (processors[0]['NumberOfLogicalProcessors'] /
                               processors[0]['NumberOfCores'])
        cpu_info['topology'] = topology

        features = list()
        for fkey, fname in constants.PROCESSOR_FEATURE.items():
            if self._hostutils.is_cpu_feature_present(fkey):
                features.append(fname)
        cpu_info['features'] = features

        return cpu_info

    def _get_memory_info(self):
        """Return (total_mb, free_mb, used_mb) converted from the KB figures."""
        (total_mem_kb, free_mem_kb) = self._hostutils.get_memory_info()
        total_mem_mb = total_mem_kb / 1024
        free_mem_mb = free_mem_kb / 1024
        return (total_mem_mb, free_mem_mb, total_mem_mb - free_mem_mb)

    def _get_local_hdd_info_gb(self):
        """Return (total_gb, free_gb, used_gb) for the drive holding instances."""
        drive = os.path.splitdrive(self._pathutils.get_instances_dir())[0]
        (size, free_space) = self._hostutils.get_volume_info(drive)

        total_gb = size / units.Gi
        free_gb = free_space / units.Gi
        used_gb = total_gb - free_gb
        return (total_gb, free_gb, used_gb)

    def _get_hypervisor_version(self):
        """Get hypervisor version.

        :returns: hypervisor version (ex. 6003)
        """

        # NOTE(claudiub): The hypervisor_version will be stored in the database
        # as an Integer and it will be used by the scheduler, if required by
        # the image property 'hypervisor_version_requires'.
        # The hypervisor_version will then be converted back to a version
        # by splitting the int in groups of 3 digits.
        # E.g.: hypervisor_version 6003 is converted to '6.3'.
        version = self._hostutils.get_windows_version().split('.')
        version = int(version[0]) * 1000 + int(version[1])
        LOG.debug('Windows version: %s ', version)
        return version

    def get_available_resource(self):
        """Retrieve resource info.

        This method is called when nova-compute launches, and
        as part of a periodic task.

        :returns: dictionary describing resources

        """
        LOG.debug('get_available_resource called')

        (total_mem_mb,
         free_mem_mb,
         used_mem_mb) = self._get_memory_info()

        (total_hdd_gb,
         free_hdd_gb,
         used_hdd_gb) = self._get_local_hdd_info_gb()

        cpu_info = self._get_cpu_info()
        cpu_topology = cpu_info['topology']
        vcpus = (cpu_topology['sockets'] *
                 cpu_topology['cores'] *
                 cpu_topology['threads'])

        # vcpus_used is reported as 0: Hyper-V overcommit is not tracked here.
        dic = {'vcpus': vcpus,
               'memory_mb': total_mem_mb,
               'memory_mb_used': used_mem_mb,
               'local_gb': total_hdd_gb,
               'local_gb_used': used_hdd_gb,
               'hypervisor_type': "hyperv",
               'hypervisor_version': self._get_hypervisor_version(),
               'hypervisor_hostname': platform.node(),
               'vcpus_used': 0,
               'cpu_info': jsonutils.dumps(cpu_info),
               'supported_instances': jsonutils.dumps(
                   [(arch.I686, hv_type.HYPERV, vm_mode.HVM),
                    (arch.X86_64, hv_type.HYPERV, vm_mode.HVM)]),
               'numa_topology': None,
               }

        return dic

    def host_power_action(self, action):
        """Reboots, shuts down or powers up the host."""
        if action in [constants.HOST_POWER_ACTION_SHUTDOWN,
                      constants.HOST_POWER_ACTION_REBOOT]:
            self._hostutils.host_power_action(action)
        else:
            # Power-on cannot be performed from the host being powered on.
            if action == constants.HOST_POWER_ACTION_STARTUP:
                raise NotImplementedError(
                    _("Host PowerOn is not supported by the Hyper-V driver"))

    def get_host_ip_addr(self):
        """Return CONF.my_ip, falling back to the first local address."""
        host_ip = CONF.my_ip
        if not host_ip:
            # Return the first available address
            host_ip = self._hostutils.get_local_ips()[0]
        LOG.debug("Host IP address is: %s", host_ip)
        return host_ip

    def get_host_uptime(self):
        """Returns the host uptime."""
        tick_count64 = self._hostutils.get_host_tick_count64()

        # format the string to match libvirt driver uptime
        # Libvirt uptime returns a combination of the following
        # - current host time
        # - time since host is up
        # - number of logged in users
        # - cpu load
        # Since the Windows function GetTickCount64 returns only
        # the time since the host is up, returning 0s for cpu load
        # and number of logged in users.
        # This is done to ensure the format of the returned
        # value is same as in libvirt
        # NOTE(review): long() is Python 2 only.
        return "%s up %s,  0 users,  load average: 0, 0, 0" % (
                   str(time.strftime("%H:%M:%S")),
                   str(datetime.timedelta(milliseconds=long(tick_count64))))
|
scripnichenko/nova
|
nova/virt/hyperv/hostops.py
|
Python
|
apache-2.0
| 6,811
|
#!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: azure_rm_storageblob
short_description: Manage blob containers and blob objects.
version_added: "2.1"
description:
- Create, update and delete blob containers and blob objects. Use to upload a file and store it as a blob object,
or download a blob object to a file.
options:
storage_account_name:
description:
- Name of the storage account to use.
required: true
aliases:
- account_name
- storage_account
blob:
description:
- Name of a blob object within the container.
aliases:
- blob_name
blob_type:
description:
- Type of Blob Object.
default: block
choices:
- block
- page
version_added: "2.5"
container:
description:
- Name of a blob container within the storage account.
required: true
aliases:
- container_name
content_type:
description:
- Set the blob content-type header. For example, 'image/png'.
cache_control:
description:
- Set the blob cache-control header.
content_disposition:
description:
- Set the blob content-disposition header.
content_encoding:
description:
- Set the blob encoding header.
content_language:
description:
- Set the blob content-language header.
content_md5:
description:
- Set the blob md5 hash value.
dest:
description:
- Destination file path. Use with state 'present' to download a blob.
aliases:
- destination
force:
description:
- Overwrite existing blob or file when uploading or downloading. Force deletion of a container
that contains blobs.
type: bool
default: no
resource_group:
description:
- Name of the resource group to use.
required: true
aliases:
- resource_group_name
src:
description:
- Source file path. Use with state 'present' to upload a blob.
aliases:
- source
state:
description:
- Assert the state of a container or blob.
- Use state 'absent' with a container value only to delete a container. Include a blob value to remove
a specific blob. A container will not be deleted, if it contains blobs. Use the force option to override,
deleting the container and all associated blobs.
- Use state 'present' to create or update a container and upload or download a blob. If the container
does not exist, it will be created. If it exists, it will be updated with configuration options. Provide
a blob name and either src or dest to upload or download. Provide a src path to upload and a dest path
to download. If a blob (uploading) or a file (downloading) already exists, it will not be overwritten
unless the force parameter is true.
default: present
choices:
- absent
- present
public_access:
description:
- Determine a container's level of public access. By default containers are private. Can only be set at
time of container creation.
choices:
- container
- blob
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
- name: Remove container foo
azure_rm_storageblob:
resource_group: testing
storage_account_name: clh0002
container: foo
state: absent
- name: Create container foo and upload a file
azure_rm_storageblob:
resource_group: Testing
storage_account_name: clh0002
container: foo
blob: graylog.png
src: ./files/graylog.png
public_access: container
content_type: 'application/image'
- name: Download the file
azure_rm_storageblob:
resource_group: Testing
storage_account_name: clh0002
container: foo
blob: graylog.png
dest: ~/tmp/images/graylog.png
'''
RETURN = '''
blob:
description: Facts about the current state of the blob.
returned: when a blob is operated on
type: dict
sample: {
"content_length": 136532,
"content_settings": {
"cache_control": null,
"content_disposition": null,
"content_encoding": null,
"content_language": null,
"content_md5": null,
"content_type": "application/image"
},
"last_modified": "09-Mar-2016 22:08:25 +0000",
"name": "graylog.png",
"tags": {},
"type": "BlockBlob"
}
container:
description: Facts about the current state of the selected container.
returned: always
type: dict
sample: {
"last_mdoified": "09-Mar-2016 19:28:26 +0000",
"name": "foo",
"tags": {}
}
'''
import os
try:
from azure.storage.blob.models import ContentSettings
from azure.common import AzureMissingResourceHttpError, AzureHttpError
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
class AzureRMStorageBlob(AzureRMModuleBase):
    def __init__(self):
        """Declare the module argument spec and initialize instance state."""

        self.module_arg_spec = dict(
            storage_account_name=dict(required=True, type='str', aliases=['account_name', 'storage_account']),
            blob=dict(type='str', aliases=['blob_name']),
            blob_type=dict(type='str', default='block', choices=['block', 'page']),
            container=dict(required=True, type='str', aliases=['container_name']),
            dest=dict(type='path'),
            force=dict(type='bool', default=False),
            resource_group=dict(required=True, type='str', aliases=['resource_group_name']),
            src=dict(type='str'),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            public_access=dict(type='str', choices=['container', 'blob']),
            content_type=dict(type='str'),
            content_encoding=dict(type='str'),
            content_language=dict(type='str'),
            content_disposition=dict(type='str'),
            cache_control=dict(type='str'),
            content_md5=dict(type='str'),
        )

        # A single call cannot both upload (src) and download (dest).
        mutually_exclusive = [('src', 'dest')]

        # Instance attributes populated from kwargs in exec_module().
        self.blob_client = None
        self.blob_details = None
        self.storage_account_name = None
        self.blob = None
        self.blob_obj = None
        self.blob_type = None
        self.container = None
        self.container_obj = None
        self.dest = None
        self.force = None
        self.resource_group = None
        self.src = None
        self.state = None
        self.tags = None
        self.public_access = None

        self.results = dict(
            changed=False,
            actions=[],
            container=dict(),
            blob=dict()
        )

        super(AzureRMStorageBlob, self).__init__(derived_arg_spec=self.module_arg_spec,
                                                 supports_check_mode=True,
                                                 mutually_exclusive=mutually_exclusive,
                                                 supports_tags=True)
    def exec_module(self, **kwargs):
        """Reconcile the requested container/blob state and return the results dict."""
        # Copy every declared parameter (plus tags) onto the instance.
        for key in list(self.module_arg_spec.keys()) + ['tags']:
            setattr(self, key, kwargs[key])

        self.results['check_mode'] = self.check_mode

        # add file path validation

        self.blob_client = self.get_blob_client(self.resource_group, self.storage_account_name, self.blob_type)
        self.container_obj = self.get_container()

        if self.blob is not None:
            self.blob_obj = self.get_blob()

        if self.state == 'present':
            if not self.container_obj:
                # create the container
                self.create_container()
            elif self.container_obj and not self.blob:
                # update container attributes
                update_tags, self.container_obj['tags'] = self.update_tags(self.container_obj.get('tags'))
                if update_tags:
                    self.update_container_tags(self.container_obj['tags'])

            if self.blob:
                # create, update or download blob
                if self.src and self.src_is_valid():
                    if self.blob_obj and not self.force:
                        self.log("Cannot upload to {0}. Blob with that name already exists. "
                                 "Use the force option".format(self.blob))
                    else:
                        self.upload_blob()
                elif self.dest and self.dest_is_valid():
                    self.download_blob()

                # Blob-level tags and content settings are reconciled on every
                # run, not just when uploading.
                update_tags, self.blob_obj['tags'] = self.update_tags(self.blob_obj.get('tags'))
                if update_tags:
                    self.update_blob_tags(self.blob_obj['tags'])

                if self.blob_content_settings_differ():
                    self.update_blob_content_settings()

        elif self.state == 'absent':
            if self.container_obj and not self.blob:
                # Delete container
                if self.container_has_blobs():
                    if self.force:
                        self.delete_container()
                    else:
                        self.log("Cannot delete container {0}. It contains blobs. Use the force option.".format(
                            self.container))
                else:
                    self.delete_container()
            elif self.container_obj and self.blob_obj:
                # Delete blob
                self.delete_blob()

        # until we sort out how we want to do this globally
        del self.results['actions']
        return self.results
def get_container(self):
result = {}
container = None
if self.container:
try:
container = self.blob_client.get_container_properties(self.container)
except AzureMissingResourceHttpError:
pass
if container:
result = dict(
name=container.name,
tags=container.metadata,
last_mdoified=container.properties.last_modified.strftime('%d-%b-%Y %H:%M:%S %z'),
)
return result
    def get_blob(self):
        """Fetch properties of the requested blob.

        Returns a dict with name, tags, last_modified, type, content_length
        and a nested content_settings dict, or an empty dict when the blob
        (or blob name) is absent.
        """
        result = dict()
        blob = None
        if self.blob:
            try:
                blob = self.blob_client.get_blob_properties(self.container, self.blob)
            except AzureMissingResourceHttpError:
                # Blob does not exist -- treated as absent, not an error.
                pass
        if blob:
            result = dict(
                name=blob.name,
                tags=blob.metadata,
                last_modified=blob.properties.last_modified.strftime('%d-%b-%Y %H:%M:%S %z'),
                type=blob.properties.blob_type,
                content_length=blob.properties.content_length,
                content_settings=dict(
                    content_type=blob.properties.content_settings.content_type,
                    content_encoding=blob.properties.content_settings.content_encoding,
                    content_language=blob.properties.content_settings.content_language,
                    content_disposition=blob.properties.content_settings.content_disposition,
                    cache_control=blob.properties.content_settings.cache_control,
                    content_md5=blob.properties.content_settings.content_md5
                )
            )
        return result
    def create_container(self):
        """Create the container, applying module tags as container metadata
        only when no blob is being managed (blob-level tags win otherwise).

        No-op under check mode apart from recording the intended action.
        """
        self.log('Create container %s' % self.container)
        tags = None
        if not self.blob and self.tags:
            # when a blob is present, then tags are assigned at the blob level
            tags = self.tags
        if not self.check_mode:
            try:
                self.blob_client.create_container(self.container, metadata=tags, public_access=self.public_access)
            except AzureHttpError as exc:
                self.fail("Error creating container {0} - {1}".format(self.container, str(exc)))
        self.container_obj = self.get_container()
        self.results['changed'] = True
        self.results['actions'].append('created container {0}'.format(self.container))
        self.results['container'] = self.container_obj
    def upload_blob(self):
        """Upload the local file ``src`` to the managed blob.

        Builds a ContentSettings object only when at least one content_*
        option was supplied; module tags become blob metadata. No-op under
        check mode apart from recording results.
        """
        content_settings = None
        if self.content_type or self.content_encoding or self.content_language or self.content_disposition or \
                self.cache_control or self.content_md5:
            content_settings = ContentSettings(
                content_type=self.content_type,
                content_encoding=self.content_encoding,
                content_language=self.content_language,
                content_disposition=self.content_disposition,
                cache_control=self.cache_control,
                content_md5=self.content_md5
            )
        if not self.check_mode:
            try:
                self.blob_client.create_blob_from_path(self.container, self.blob, self.src,
                                                       metadata=self.tags, content_settings=content_settings)
            except AzureHttpError as exc:
                self.fail("Error creating blob {0} - {1}".format(self.blob, str(exc)))
        self.blob_obj = self.get_blob()
        self.results['changed'] = True
        self.results['actions'].append('created blob {0} from {1}'.format(self.blob, self.src))
        self.results['container'] = self.container_obj
        self.results['blob'] = self.blob_obj
    def download_blob(self):
        """Download the managed blob to the local path ``dest``.

        ``dest`` is expected to have been validated/normalized by
        dest_is_valid() before this is called. No-op under check mode apart
        from recording results.
        """
        if not self.check_mode:
            try:
                self.blob_client.get_blob_to_path(self.container, self.blob, self.dest)
            except Exception as exc:
                self.fail("Failed to download blob {0}:{1} to {2} - {3}".format(self.container,
                                                                                self.blob,
                                                                                self.dest,
                                                                                exc))
        self.results['changed'] = True
        self.results['actions'].append('downloaded blob {0}:{1} to {2}'.format(self.container,
                                                                               self.blob,
                                                                               self.dest))
        self.results['container'] = self.container_obj
        self.results['blob'] = self.blob_obj
def src_is_valid(self):
if not os.path.isfile(self.src):
self.fail("The source path must be a file.")
try:
fp = open(self.src, 'r')
fp.close()
except IOError:
self.fail("Failed to access {0}. Make sure the file exists and that you have "
"read access.".format(self.src))
return True
def dest_is_valid(self):
if not self.check_mode:
if not os.path.basename(self.dest):
# dest is a directory
if os.path.isdir(self.dest):
self.log("Path is dir. Appending blob name.")
self.dest += self.blob
else:
try:
self.log('Attempting to makedirs {0}'.format(self.dest))
os.makddirs(self.dest)
except IOError as exc:
self.fail("Failed to create directory {0} - {1}".format(self.dest, str(exc)))
self.dest += self.blob
else:
# does path exist without basename
file_name = os.path.basename(self.dest)
path = self.dest.replace(file_name, '')
self.log('Checking path {0}'.format(path))
if not os.path.isdir(path):
try:
self.log('Attempting to makedirs {0}'.format(path))
os.makedirs(path)
except IOError as exc:
self.fail("Failed to create directory {0} - {1}".format(path, str(exc)))
self.log('Checking final path {0}'.format(self.dest))
if os.path.isfile(self.dest) and not self.force:
# dest already exists and we're not forcing
self.log("Dest {0} already exists. Cannot download. Use the force option.".format(self.dest))
return False
return True
def delete_container(self):
if not self.check_mode:
try:
self.blob_client.delete_container(self.container)
except AzureHttpError as exc:
self.fail("Error deleting container {0} - {1}".format(self.container, str(exc)))
self.results['changed'] = True
self.results['actions'].append('deleted container {0}'.format(self.container))
def container_has_blobs(self):
try:
list_generator = self.blob_client.list_blobs(self.container)
except AzureHttpError as exc:
self.fail("Error list blobs in {0} - {1}".format(self.container, str(exc)))
if len(list_generator.items) > 0:
return True
return False
def delete_blob(self):
if not self.check_mode:
try:
self.blob_client.delete_blob(self.container, self.blob)
except AzureHttpError as exc:
self.fail("Error deleting blob {0}:{1} - {2}".format(self.container, self.blob, str(exc)))
self.results['changed'] = True
self.results['actions'].append('deleted blob {0}:{1}'.format(self.container, self.blob))
self.results['container'] = self.container_obj
def update_container_tags(self, tags):
if not self.check_mode:
try:
self.blob_client.set_container_metadata(self.container, metadata=tags)
except AzureHttpError as exc:
self.fail("Error updating container tags {0} - {1}".format(self.container, str(exc)))
self.container_obj = self.get_container()
self.results['changed'] = True
self.results['actions'].append("updated container {0} tags.".format(self.container))
self.results['container'] = self.container_obj
def update_blob_tags(self, tags):
if not self.check_mode:
try:
self.blob_client.set_blob_metadata(self.container, self.blob, metadata=tags)
except AzureHttpError as exc:
self.fail("Update blob tags {0}:{1} - {2}".format(self.container, self.blob, str(exc)))
self.blob_obj = self.get_blob()
self.results['changed'] = True
self.results['actions'].append("updated blob {0}:{1} tags.".format(self.container, self.blob))
self.results['container'] = self.container_obj
self.results['blob'] = self.blob_obj
def blob_content_settings_differ(self):
if self.content_type or self.content_encoding or self.content_language or self.content_disposition or \
self.cache_control or self.content_md5:
settings = dict(
content_type=self.content_type,
content_encoding=self.content_encoding,
content_language=self.content_language,
content_disposition=self.content_disposition,
cache_control=self.cache_control,
content_md5=self.content_md5
)
if self.blob_obj['content_settings'] != settings:
return True
return False
    def update_blob_content_settings(self):
        """Push the module's content_* options to the blob as its new
        ContentSettings, then refresh the cached blob object and results.

        No-op under check mode apart from recording results.
        """
        content_settings = ContentSettings(
            content_type=self.content_type,
            content_encoding=self.content_encoding,
            content_language=self.content_language,
            content_disposition=self.content_disposition,
            cache_control=self.cache_control,
            content_md5=self.content_md5
        )
        if not self.check_mode:
            try:
                self.blob_client.set_blob_properties(self.container, self.blob, content_settings=content_settings)
            except AzureHttpError as exc:
                self.fail("Update blob content settings {0}:{1} - {2}".format(self.container, self.blob, str(exc)))
        self.blob_obj = self.get_blob()
        self.results['changed'] = True
        self.results['actions'].append("updated blob {0}:{1} content settings.".format(self.container, self.blob))
        self.results['container'] = self.container_obj
        self.results['blob'] = self.blob_obj
def main():
    """Module entry point: instantiating the class runs exec_module()."""
    AzureRMStorageBlob()
if __name__ == '__main__':
    main()
|
bregman-arie/ansible
|
lib/ansible/modules/cloud/azure/azure_rm_storageblob.py
|
Python
|
gpl-3.0
| 21,360
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_alertconfig
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of AlertConfig Avi RESTful Object
description:
- This module is used to configure AlertConfig object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
action_group_ref:
description:
- The alert config will trigger the selected alert action, which can send notifications and execute a controlscript.
- It is a reference to an object of type actiongroupconfig.
alert_rule:
description:
- List of filters matching on events or client logs used for triggering alerts.
required: true
autoscale_alert:
description:
- This alert config applies to auto scale alerts.
category:
description:
- Determines whether an alert is raised immediately when event occurs (realtime) or after specified number of events occurs within rolling time
- window.
- Enum options - REALTIME, ROLLINGWINDOW, WATERMARK.
- Default value when not specified in API or module is interpreted by Avi Controller as REALTIME.
required: true
description:
description:
- A custom description field.
enabled:
description:
- Enable or disable this alert config from generating new alerts.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
expiry_time:
description:
- An alert is expired and deleted after the expiry time has elapsed.
- The original event triggering the alert remains in the event's log.
- Allowed values are 1-31536000.
- Default value when not specified in API or module is interpreted by Avi Controller as 86400.
- Units(SEC).
name:
description:
- Name of the alert configuration.
required: true
obj_uuid:
description:
- Uuid of the resource for which alert was raised.
object_type:
description:
- The object type to which the alert config is associated with.
- Valid object types are - virtual service, pool, service engine.
- Enum options - VIRTUALSERVICE, POOL, HEALTHMONITOR, NETWORKPROFILE, APPLICATIONPROFILE, HTTPPOLICYSET, DNSPOLICY, IPADDRGROUP, STRINGGROUP,
- SSLPROFILE, SSLKEYANDCERTIFICATE, NETWORKSECURITYPOLICY, APPLICATIONPERSISTENCEPROFILE, ANALYTICSPROFILE, VSDATASCRIPTSET, TENANT, PKIPROFILE,
- AUTHPROFILE, CLOUD, SERVERAUTOSCALEPOLICY, AUTOSCALELAUNCHCONFIG, MICROSERVICEGROUP, IPAMPROFILE, HARDWARESECURITYMODULEGROUP, POOLGROUP,
- PRIORITYLABELS, POOLGROUPDEPLOYMENTPOLICY, GSLBSERVICE, GSLBSERVICERUNTIME, SCHEDULER, GSLBGEODBPROFILE, GSLBAPPLICATIONPERSISTENCEPROFILE,
- TRAFFICCLONEPROFILE, VSVIP, WAFPOLICY, WAFPROFILE, ERRORPAGEPROFILE, ERRORPAGEBODY, SERVICEENGINE, DEBUGSERVICEENGINE, DEBUGCONTROLLER,
- DEBUGVIRTUALSERVICE, SERVICEENGINEGROUP, SEPROPERTIES, NETWORK, CONTROLLERNODE, CONTROLLERPROPERTIES, SYSTEMCONFIGURATION, VRFCONTEXT, USER,
- ALERTCONFIG, ALERTSYSLOGCONFIG, ALERTEMAILCONFIG, ALERTTYPECONFIG, APPLICATION, ROLE, CLOUDPROPERTIES, SNMPTRAPPROFILE, ACTIONGROUPPROFILE,
- MICROSERVICE, ALERTPARAMS, ACTIONGROUPCONFIG, CLOUDCONNECTORUSER, GSLB, GSLBDNSUPDATE, GSLBSITEOPS, GLBMGRWARMSTART, IPAMDNSRECORD,
- GSLBDNSGSSTATUS, GSLBDNSGEOFILEOPS, GSLBDNSGEOUPDATE, GSLBDNSGEOCLUSTEROPS, GSLBDNSCLEANUP, GSLBSITEOPSRESYNC, TCPSTATRUNTIME, UDPSTATRUNTIME,
- IPSTATRUNTIME, ARPSTATRUNTIME, MBSTATRUNTIME, IPSTKQSTATSRUNTIME, MALLOCSTATRUNTIME, SHMALLOCSTATRUNTIME, CPUUSAGERUNTIME, L7GLOBALSTATSRUNTIME,
- L7VIRTUALSERVICESTATSRUNTIME, SEAGENTVNICDBRUNTIME, SEAGENTGRAPHDBRUNTIME, SEAGENTSTATERUNTIME, INTERFACERUNTIME, ARPTABLERUNTIME,
- DISPATCHERSTATRUNTIME, DISPATCHERSTATCLEARRUNTIME, DISPATCHERTABLEDUMPRUNTIME, DISPATCHERREMOTETIMERLISTDUMPRUNTIME, METRICSAGENTMESSAGE,
- HEALTHMONITORSTATRUNTIME, METRICSENTITYRUNTIME, PERSISTENCEINTERNAL, HTTPPOLICYSETINTERNAL, DNSPOLICYINTERNAL, CONNECTIONDUMPRUNTIME,
- SHAREDDBSTATS, SHAREDDBSTATSCLEAR, ICMPSTATRUNTIME, ROUTETABLERUNTIME, VIRTUALMACHINE, POOLSERVER, SEVSLIST, MEMINFORUNTIME, RTERINGSTATRUNTIME,
- ALGOSTATRUNTIME, HEALTHMONITORRUNTIME, CPUSTATRUNTIME, SEVM, HOST, PORTGROUP, CLUSTER, DATACENTER, VCENTER, HTTPPOLICYSETSTATS, DNSPOLICYSTATS,
- METRICSSESTATS, RATELIMITERSTATRUNTIME, NETWORKSECURITYPOLICYSTATS, TCPCONNRUNTIME, POOLSTATS, CONNPOOLINTERNAL, CONNPOOLSTATS, VSHASHSHOWRUNTIME,
- SELOGSTATSRUNTIME, NETWORKSECURITYPOLICYDETAIL, LICENSERUNTIME, SERVERRUNTIME, METRICSRUNTIMESUMMARY, METRICSRUNTIMEDETAIL,
- DISPATCHERSEHMPROBETEMPDISABLERUNTIME, POOLDEBUG, VSLOGMGRMAP, SERUMINSERTIONSTATS, HTTPCACHE, HTTPCACHESTATS, SEDOSSTATRUNTIME, VSDOSSTATRUNTIME,
- SERVERUPDATEREQ, VSSCALEOUTLIST, SEMEMDISTRUNTIME, TCPCONNRUNTIMEDETAIL, SEUPGRADESTATUS, SEUPGRADEPREVIEW, SEFAULTINJECTEXHAUSTM,
- SEFAULTINJECTEXHAUSTMCL, SEFAULTINJECTEXHAUSTMCLSMALL, SEFAULTINJECTEXHAUSTCONN, SEHEADLESSONLINEREQ, SEUPGRADE, SEUPGRADESTATUSDETAIL,
- SERESERVEDVS, SERESERVEDVSCLEAR, VSCANDIDATESEHOSTLIST, SEGROUPUPGRADE, REBALANCE, SEGROUPREBALANCE, SEAUTHSTATSRUNTIME, AUTOSCALESTATE,
- VIRTUALSERVICEAUTHSTATS, NETWORKSECURITYPOLICYDOS, KEYVALINTERNAL, KEYVALSUMMARYINTERNAL, SERVERSTATEUPDATEINFO, CLTRACKINTERNAL,
- CLTRACKSUMMARYINTERNAL, MICROSERVICERUNTIME, SEMICROSERVICE, VIRTUALSERVICEANALYSIS, CLIENTINTERNAL, CLIENTSUMMARYINTERNAL,
- MICROSERVICEGROUPRUNTIME, BGPRUNTIME, REQUESTQUEUERUNTIME, MIGRATEALL, MIGRATEALLSTATUSSUMMARY, MIGRATEALLSTATUSDETAIL, INTERFACESUMMARYRUNTIME,
- INTERFACELACPRUNTIME, DNSTABLE, GSLBSERVICEDETAIL, GSLBSERVICEINTERNAL, GSLBSERVICEHMONSTAT, SETROLESREQUEST, TRAFFICCLONERUNTIME,
- GEOLOCATIONINFO, SEVSHBSTATRUNTIME, GEODBINTERNAL, GSLBSITEINTERNAL, WAFSTATS, USERDEFINEDDATASCRIPTCOUNTERS, LLDPRUNTIME, VSESSHARINGPOOL,
- SEVSSPLACEMENT, SERESOURCEPROTO, SECONSUMERPROTO, SECREATEPENDINGPROTO, PLACEMENTSTATS, SEVIPPROTO, RMVRFPROTO, VCENTERMAP, VIMGRVCENTERRUNTIME,
- INTERESTEDVMS, INTERESTEDHOSTS, VCENTERSUPPORTEDCOUNTERS, ENTITYCOUNTERS, TRANSACTIONSTATS, SEVMCREATEPROGRESS, PLACEMENTSTATUS, VISUBFOLDERS,
- VIDATASTORE, VIHOSTRESOURCES, CLOUDCONNECTOR, VINETWORKSUBNETVMS, VIDATASTORECONTENTS, VIMGRVCENTERCLOUDRUNTIME, VIVCENTERPORTGROUPS,
- VIVCENTERDATACENTERS, VIMGRHOSTRUNTIME, PLACEMENTGLOBALS, APICCONFIGURATION, CIFTABLE, APICTRANSACTION, VIRTUALSERVICESTATEDBCACHESUMMARY,
- POOLSTATEDBCACHESUMMARY, SERVERSTATEDBCACHESUMMARY, APICAGENTINTERNAL, APICTRANSACTIONFLAP, APICGRAPHINSTANCES, APICEPGS, APICEPGEPS,
- APICDEVICEPKGVER, APICTENANTS, APICVMMDOMAINS, NSXCONFIGURATION, NSXSGTABLE, NSXAGENTINTERNAL, NSXSGINFO, NSXSGIPS, NSXAGENTINTERNALCLI,
- MAXOBJECTS.
recommendation:
description:
- Recommendation of alertconfig.
rolling_window:
description:
- Only if the number of events is reached or exceeded within the time window will an alert be generated.
- Allowed values are 1-31536000.
- Default value when not specified in API or module is interpreted by Avi Controller as 300.
- Units(SEC).
source:
description:
- Signifies system events or the type of client logsused in this alert configuration.
- Enum options - CONN_LOGS, APP_LOGS, EVENT_LOGS, METRICS.
required: true
summary:
description:
- Summary of reason why alert is generated.
tenant_ref:
description:
- It is a reference to an object of type tenant.
threshold:
description:
- An alert is created only when the number of events meets or exceeds this number within the chosen time frame.
- Allowed values are 1-65536.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.
throttle:
description:
- Alerts are suppressed (throttled) for this duration of time since the last alert was raised for this alert config.
- Allowed values are 0-31536000.
- Default value when not specified in API or module is interpreted by Avi Controller as 600.
- Units(SEC).
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Unique object identifier of the object.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create AlertConfig object
avi_alertconfig:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_alertconfig
"""
RETURN = '''
obj:
description: AlertConfig (api/alertconfig) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
    """Build the AnsibleModule for the Avi alertconfig resource and run it.

    Fails cleanly (module.fail_json) when the avisdk package is missing.
    """
    argument_specs = dict(
        state=dict(default='present',
                   choices=['absent', 'present']),
        avi_api_update_method=dict(default='put',
                                   choices=['put', 'patch']),
        avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
        action_group_ref=dict(type='str',),
        alert_rule=dict(type='dict', required=True),
        autoscale_alert=dict(type='bool',),
        category=dict(type='str', required=True),
        description=dict(type='str',),
        enabled=dict(type='bool',),
        expiry_time=dict(type='int',),
        name=dict(type='str', required=True),
        obj_uuid=dict(type='str',),
        object_type=dict(type='str',),
        recommendation=dict(type='str',),
        rolling_window=dict(type='int',),
        source=dict(type='str', required=True),
        summary=dict(type='str',),
        tenant_ref=dict(type='str',),
        threshold=dict(type='int',),
        throttle=dict(type='int',),
        url=dict(type='str',),
        uuid=dict(type='str',),
    )
    if HAS_AVI:
        # Only extend the spec when the avisdk import succeeded: when it
        # fails, avi_common_argument_spec is never defined and calling it
        # unconditionally raised NameError before fail_json could report
        # the real problem (missing avisdk).
        argument_specs.update(avi_common_argument_spec())
    module = AnsibleModule(
        argument_spec=argument_specs, supports_check_mode=True)
    if not HAS_AVI:
        return module.fail_json(msg=(
            'Avi python API SDK (avisdk>=17.1) is not installed. '
            'For more details visit https://github.com/avinetworks/sdk.'))
    return avi_ansible_api(module, 'alertconfig',
                           set([]))
if __name__ == '__main__':
    main()
|
ravibhure/ansible
|
lib/ansible/modules/network/avi/avi_alertconfig.py
|
Python
|
gpl-3.0
| 12,090
|
"""The tests for the Home Assistant HTTP component."""
import asyncio
import requests
from homeassistant import setup, const
import homeassistant.components.http as http
from tests.common import get_test_instance_port, get_test_home_assistant
API_PASSWORD = 'test1234'
SERVER_PORT = get_test_instance_port()
HTTP_BASE = '127.0.0.1:{}'.format(SERVER_PORT)
HTTP_BASE_URL = 'http://{}'.format(HTTP_BASE)
HA_HEADERS = {
const.HTTP_HEADER_HA_AUTH: API_PASSWORD,
const.HTTP_HEADER_CONTENT_TYPE: const.CONTENT_TYPE_JSON,
}
CORS_ORIGINS = [HTTP_BASE_URL, HTTP_BASE]
hass = None
def _url(path=''):
    """Return an absolute URL for *path* on the test server."""
    full_url = HTTP_BASE_URL + path
    return full_url
# pylint: disable=invalid-name
def setUpModule():
    """Initialize a Home Assistant server (module-level fixture).

    Starts an HTTP server with an API password and the CORS origins that
    the TestCors cases below exercise; stopped in tearDownModule().
    """
    global hass
    hass = get_test_home_assistant()
    setup.setup_component(
        hass, http.DOMAIN, {
            http.DOMAIN: {
                http.CONF_API_PASSWORD: API_PASSWORD,
                http.CONF_SERVER_PORT: SERVER_PORT,
                http.CONF_CORS_ORIGINS: CORS_ORIGINS,
            }
        }
    )
    setup.setup_component(hass, 'api')
    # Registering static path as it caused CORS to blow up
    hass.http.register_static_path(
        '/custom_components', hass.config.path('custom_components'))
    hass.start()
# pylint: disable=invalid-name
def tearDownModule():
    """Stop the Home Assistant server started in setUpModule()."""
    hass.stop()
class TestCors:
    """Cross-origin resource sharing tests against the live test server."""
    def test_cors_allowed_with_password_in_url(self):
        """Test cross origin resource sharing with password in url."""
        req = requests.get(_url(const.URL_API),
                           params={'api_password': API_PASSWORD},
                           headers={const.HTTP_HEADER_ORIGIN: HTTP_BASE_URL})
        allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN
        assert req.status_code == 200
        # The request's Origin must be echoed back in the CORS header.
        assert req.headers.get(allow_origin) == HTTP_BASE_URL
    def test_cors_allowed_with_password_in_header(self):
        """Test cross origin resource sharing with password in header."""
        headers = {
            const.HTTP_HEADER_HA_AUTH: API_PASSWORD,
            const.HTTP_HEADER_ORIGIN: HTTP_BASE_URL
        }
        req = requests.get(_url(const.URL_API), headers=headers)
        allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN
        assert req.status_code == 200
        assert req.headers.get(allow_origin) == HTTP_BASE_URL
    def test_cors_denied_without_origin_header(self):
        """Without an Origin header no CORS headers may be emitted."""
        headers = {
            const.HTTP_HEADER_HA_AUTH: API_PASSWORD
        }
        req = requests.get(_url(const.URL_API), headers=headers)
        allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN
        allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS
        assert req.status_code == 200
        assert allow_origin not in req.headers
        assert allow_headers not in req.headers
    def test_cors_preflight_allowed(self):
        """Test cross origin resource sharing preflight (OPTIONS) request."""
        headers = {
            const.HTTP_HEADER_ORIGIN: HTTP_BASE_URL,
            'Access-Control-Request-Method': 'GET',
            'Access-Control-Request-Headers': 'x-ha-access'
        }
        req = requests.options(_url(const.URL_API), headers=headers)
        allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN
        allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS
        assert req.status_code == 200
        assert req.headers.get(allow_origin) == HTTP_BASE_URL
        # Preflight must whitelist the auth header the client asked about.
        assert req.headers.get(allow_headers) == \
            const.HTTP_HEADER_HA_AUTH.upper()
class TestView(http.HomeAssistantView):
    """Minimal view used to exercise view registration."""
    name = 'test'  # route name
    url = '/hello'  # URL path served by this view
    @asyncio.coroutine
    def get(self, request):
        """Return a get request."""
        return 'hello'
@asyncio.coroutine
def test_registering_view_while_running(hass, test_client):
    """Test that we can register a view while the server is running."""
    yield from setup.async_setup_component(
        hass, http.DOMAIN, {
            http.DOMAIN: {
                http.CONF_SERVER_PORT: get_test_instance_port(),
            }
        }
    )
    yield from setup.async_setup_component(hass, 'api')
    yield from hass.async_start()
    # Block until startup has settled so the server is actually running
    # before the late registration below.
    yield from hass.async_block_till_done()
    hass.http.register_view(TestView)
    client = yield from test_client(hass.http.app)
    resp = yield from client.get('/hello')
    assert resp.status == 200
    text = yield from resp.text()
    assert text == 'hello'
@asyncio.coroutine
def test_api_base_url_with_domain(hass):
    """Test that a bare domain in base_url gains an http:// scheme."""
    result = yield from setup.async_setup_component(hass, 'http', {
        'http': {
            'base_url': 'example.com'
        }
    })
    assert result
    assert hass.config.api.base_url == 'http://example.com'
@asyncio.coroutine
def test_api_base_url_with_ip(hass):
    """Test that server_host alone yields an IP base_url with port 8123."""
    result = yield from setup.async_setup_component(hass, 'http', {
        'http': {
            'server_host': '1.1.1.1'
        }
    })
    assert result
    assert hass.config.api.base_url == 'http://1.1.1.1:8123'
@asyncio.coroutine
def test_api_base_url_with_ip_port(hass):
    """Test that an explicit host:port base_url is preserved."""
    result = yield from setup.async_setup_component(hass, 'http', {
        'http': {
            'base_url': '1.1.1.1:8124'
        }
    })
    assert result
    assert hass.config.api.base_url == 'http://1.1.1.1:8124'
@asyncio.coroutine
def test_api_no_base_url(hass):
    """Test the default loopback base_url when no http config is given."""
    result = yield from setup.async_setup_component(hass, 'http', {
        'http': {
        }
    })
    assert result
    assert hass.config.api.base_url == 'http://127.0.0.1:8123'
|
JshWright/home-assistant
|
tests/components/http/test_init.py
|
Python
|
apache-2.0
| 5,897
|
from IOCommon import IOCommon
|
sanjeevtripurari/HiBench
|
src/sparkbench/src/main/python/IOCommon/__init__.py
|
Python
|
apache-2.0
| 29
|
import time
class get_rate_limited_function(object):
    """
    Close over a function and a time limit in seconds. The resulting object can
    be called like the function, but will not delegate to the function if that
    function was called through the object in the time limit.
    Clients can ignore the time limit by calling the function directly as the
    func attribute of the object.
    """
    def __init__(self, func, limit):
        self.func = func
        self.limit = limit
        # False acts as "never called" (compares as 0 in the elapsed check).
        self.last_called = False
    def __call__(self, *args, **kwargs):
        now = time.time()
        if now - self.last_called < self.limit:
            # Rate-limited: swallow the call and return None.
            return None
        self.last_called = now
        return self.func(*args, **kwargs)
    def __repr__(self):
        return '{cls_name}(func={func}, limit={limit}, last_called={last_called})'.format(
            cls_name=type(self).__name__,
            func=self.func,
            limit=self.limit,
            last_called=self.last_called,
        )
def merge_dicts(*dict_args):
    """
    Given any number of dicts, shallow copy and merge into a new dict,
    precedence goes to key value pairs in latter dicts.
    """
    merged = {}
    for mapping in dict_args:
        merged.update(mapping)
    return merged
|
mambocab/cassandra-dtest
|
utils/funcutils.py
|
Python
|
apache-2.0
| 1,280
|
#!/usr/bin/env python
from bezmisc import *
from ffgeom import *
def maxdist(bez):
    """Return the max distance of the two control points from the chord.

    *bez* is a 4-tuple of (x, y) pairs describing a cubic Bezier segment
    (p0, p1, p2, p3). The distance of the inner control points from the
    straight segment p0-p3 bounds how far the curve deviates from it.

    Rewritten without Python-2-only tuple parameter unpacking (removed by
    PEP 3113) so the function also parses on Python 3; callers still pass
    a single 4-tuple, so the interface is unchanged.
    """
    (p0x, p0y), (p1x, p1y), (p2x, p2y), (p3x, p3y) = bez
    p0 = Point(p0x, p0y)
    p1 = Point(p1x, p1y)
    p2 = Point(p2x, p2y)
    p3 = Point(p3x, p3y)
    s1 = Segment(p0, p3)
    return max(s1.distanceToPoint(p1), s1.distanceToPoint(p2))
def cspsubdiv(csp, flat):
    """Flatten every subpath of *csp* in place to within tolerance *flat*."""
    for subpath in csp:
        subdiv(subpath, flat)
def subdiv(sp, flat, i=1):
    """Recursively split the Bezier segments of subpath *sp* in place until
    every segment is flat to within *flat*.

    *sp* is a list of [ctrl_before, anchor, ctrl_after] point triples (the
    csp representation); *i* indexes the first segment to examine and must
    be >= 1 (segment i runs from anchor i-1 to anchor i).
    """
    while i < len(sp):
        p0 = sp[i - 1][1]
        p1 = sp[i - 1][2]
        p2 = sp[i][0]
        p3 = sp[i][1]
        b = (p0, p1, p2, p3)
        m = maxdist(b)
        if m <= flat:
            # Flat enough: advance to the next segment.
            i += 1
        else:
            # Split at t=0.5 and patch the control points on both sides.
            one, two = beziersplitatt(b, 0.5)
            sp[i - 1][2] = one[1]
            sp[i][0] = two[2]
            p = [one[2], one[3], two[1]]
            # Insert the new anchor triple before segment i. The original
            # wrote sp[i:1], which only behaves like an insert-at-i because
            # i >= 1 makes that slice empty; sp[i:i] states the intent.
            sp[i:i] = [p]
# vim: expandtab shiftwidth=4 tabstop=8 softtabstop=4 fileencoding=utf-8 textwidth=99
|
gtfierro/BAS
|
web/smapgeo/inkscape/cspsubdiv.py
|
Python
|
gpl-3.0
| 892
|
"""Abstract base classes related to import."""
from . import _bootstrap
from . import _bootstrap_external
from . import machinery
try:
import _frozen_importlib
# import _frozen_importlib_external
except ImportError as exc:
if exc.name != '_frozen_importlib':
raise
_frozen_importlib = None
try:
import _frozen_importlib_external
except ImportError as exc:
_frozen_importlib_external = _bootstrap_external
import abc
def _register(abstract_cls, *classes):
    """Register *classes* on *abstract_cls*, plus their frozen twins when
    the frozen importlib modules are available."""
    for cls in classes:
        abstract_cls.register(cls)
        if _frozen_importlib is not None:
            frozen_cls = getattr(_frozen_importlib, cls.__name__, None)
            if frozen_cls is None:
                # Not in _frozen_importlib; fall back to the external module.
                frozen_cls = getattr(_frozen_importlib_external, cls.__name__)
            abstract_cls.register(frozen_cls)
class Finder(metaclass=abc.ABCMeta):
    """Legacy abstract base class for import finders.
    It may be subclassed for compatibility with legacy third party
    reimplementations of the import system. Otherwise, finder
    implementations should derive from the more specific MetaPathFinder
    or PathEntryFinder ABCs.
    """
    @abc.abstractmethod
    def find_module(self, fullname, path=None):
        """An abstract method that should find a module.
        The fullname is a str and the optional path is a str or None.
        Returns a Loader object or None.
        """
        # Abstract: concrete subclasses supply the body; returning None
        # signals "module not found".
class MetaPathFinder(Finder):
    """Abstract base class for import finders on sys.meta_path."""
    # We don't define find_spec() here since that would break
    # hasattr checks we do to support backward compatibility.
    def find_module(self, fullname, path):
        """Return a loader for the module.
        If no module is found, return None. The fullname is a str and
        the path is a list of strings or None.
        This method is deprecated in favor of finder.find_spec(). If find_spec()
        exists then backwards-compatible functionality is provided for this
        method.
        """
        # Bridge to the newer API: delegate to find_spec() when the subclass
        # defines it and unwrap the loader from the returned spec.
        if not hasattr(self, 'find_spec'):
            return None
        found = self.find_spec(fullname, path)
        return found.loader if found is not None else None
    def invalidate_caches(self):
        """An optional method for clearing the finder's cache, if any.
        This method is used by importlib.invalidate_caches().
        """
_register(MetaPathFinder, machinery.BuiltinImporter, machinery.FrozenImporter,
          machinery.PathFinder, machinery.WindowsRegistryFinder)
class PathEntryFinder(Finder):
    """Abstract base class for path entry finders used by PathFinder."""
    # We don't define find_spec() here since that would break
    # hasattr checks we do to support backward compatibility.
    def find_loader(self, fullname):
        """Return (loader, namespace portion) for the path entry.
        The fullname is a str. The namespace portion is a sequence of
        path entries contributing to part of a namespace package. The
        sequence may be empty. If loader is not None, the portion will
        be ignored.
        The portion will be discarded if another path entry finder
        locates the module as a normal module or package.
        This method is deprecated in favor of finder.find_spec(). If find_spec()
        is provided than backwards-compatible functionality is provided.
        """
        # Bridge to the newer API: pull loader and namespace portions out
        # of the spec returned by find_spec(), when available.
        if not hasattr(self, 'find_spec'):
            return None, []
        found = self.find_spec(fullname)
        if found is not None:
            if not found.submodule_search_locations:
                portions = []
            else:
                portions = found.submodule_search_locations
            return found.loader, portions
        else:
            return None, []
    # Reuse the shared shim that adapts find_loader() to find_module().
    find_module = _bootstrap_external._find_module_shim
    def invalidate_caches(self):
        """An optional method for clearing the finder's cache, if any.
        This method is used by PathFinder.invalidate_caches().
        """
_register(PathEntryFinder, machinery.FileFinder)
class Loader(metaclass=abc.ABCMeta):
    """Abstract base class for import loaders."""
    def create_module(self, spec):
        """Return a module to initialize and into which to load.
        This method should raise ImportError if anything prevents it
        from creating a new module. It may return None to indicate
        that the spec should create the new module.
        """
        # By default, defer to default semantics for the new module.
        return None
    # We don't define exec_module() here since that would break
    # hasattr checks we do to support backward compatibility.
    def load_module(self, fullname):
        """Return the loaded module.
        The module must be added to sys.modules and have import-related
        attributes set properly. The fullname is a str.
        ImportError is raised on failure.
        This method is deprecated in favor of loader.exec_module(). If
        exec_module() exists then it is used to provide a backwards-compatible
        functionality for this method.
        """
        # Without exec_module() there is no way to run the module's code.
        if not hasattr(self, 'exec_module'):
            raise ImportError
        return _bootstrap._load_module_shim(self, fullname)
    def module_repr(self, module):
        """Return a module's repr.
        Used by the module type when the method does not raise
        NotImplementedError.
        This method is deprecated.
        """
        # The exception will cause ModuleType.__repr__ to ignore this method.
        raise NotImplementedError
class ResourceLoader(Loader):
    """Abstract base class for loaders which can return data from their
    back-end storage.
    This ABC represents one of the optional protocols specified by PEP 302.
    """
    @abc.abstractmethod
    def get_data(self, path):
        """Abstract method which when implemented should return the bytes for
        the specified path. The path must be a str."""
        # Default body: signal "no data available" until overridden.
        raise IOError
class InspectLoader(Loader):
    """Abstract base class for loaders which support inspection about the
    modules they can load.
    This ABC represents one of the optional protocols specified by PEP 302.
    """
    def is_package(self, fullname):
        """Optional method which when implemented should return whether the
        module is a package. The fullname is a str. Returns a bool.
        Raises ImportError if the module cannot be found.
        """
        raise ImportError
    def get_code(self, fullname):
        """Method which returns the code object for the module.
        The fullname is a str. Returns a types.CodeType if possible, else
        returns None if a code object does not make sense
        (e.g. built-in module). Raises ImportError if the module cannot be
        found.
        """
        # Compile from source when available; None means "no code object".
        source = self.get_source(fullname)
        if source is None:
            return None
        return self.source_to_code(source)
    @abc.abstractmethod
    def get_source(self, fullname):
        """Abstract method which should return the source code for the
        module. The fullname is a str. Returns a str.
        Raises ImportError if the module cannot be found.
        """
        raise ImportError
    @staticmethod
    def source_to_code(data, path='<string>'):
        """Compile 'data' into a code object.
        The 'data' argument can be anything that compile() can handle. The'path'
        argument should be where the data was retrieved (when applicable)."""
        return compile(data, path, 'exec', dont_inherit=True)
    # Borrow the concrete implementations from the bootstrap machinery.
    exec_module = _bootstrap_external._LoaderBasics.exec_module
    load_module = _bootstrap_external._LoaderBasics.load_module
_register(InspectLoader, machinery.BuiltinImporter, machinery.FrozenImporter)
class ExecutionLoader(InspectLoader):

    """ABC for loaders that wish to support executing modules as scripts.

    This represents one of the optional protocols specified in PEP 302.
    """

    @abc.abstractmethod
    def get_filename(self, fullname):
        """Return the value that __file__ is to be set to for *fullname*.

        Abstract; raises ImportError if the module cannot be found.
        """
        raise ImportError

    def get_code(self, fullname):
        """Return the code object for *fullname* (None for e.g. built-ins).

        Raises ImportError if the module cannot be found.
        """
        source = self.get_source(fullname)
        if source is None:
            return None
        # Compile against the real filename when one is available; fall
        # back to the anonymous form when get_filename cannot provide one.
        try:
            path = self.get_filename(fullname)
        except ImportError:
            return self.source_to_code(source)
        return self.source_to_code(source, path)


_register(ExecutionLoader, machinery.ExtensionFileLoader)
class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader):

    """ABC giving file-backed loaders a partial implementation of the
    ResourceLoader and ExecutionLoader protocols."""


_register(FileLoader, machinery.SourceFileLoader,
          machinery.SourcelessFileLoader)
class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader):

    """Abstract base class for loading source code (and optionally any
    corresponding bytecode).

    To support loading from source code, the abstractmethods inherited from
    ResourceLoader and ExecutionLoader need to be implemented. To also support
    loading from bytecode, the optional methods specified directly by this ABC
    is required.

    Inherited abstractmethods not implemented in this ABC:

        * ResourceLoader.get_data
        * ExecutionLoader.get_filename
    """

    def path_mtime(self, path):
        """Return the (int) modification time for the path (str)."""
        # Guard against infinite mutual recursion: path_mtime and path_stats
        # are defined in terms of each other, so if neither has been
        # overridden by the subclass, bail out with IOError instead of
        # recursing forever.
        if self.path_stats.__func__ is SourceLoader.path_stats:
            raise IOError
        return int(self.path_stats(path)['mtime'])

    def path_stats(self, path):
        """Return a metadata dict for the source pointed to by the path (str).

        Possible keys:
        - 'mtime' (mandatory) is the numeric timestamp of last source
          code modification;
        - 'size' (optional) is the size in bytes of the source code.
        """
        # Mirror-image guard of path_mtime: a subclass only has to override
        # one of the two methods; this identity check detects the "neither
        # overridden" case.
        if self.path_mtime.__func__ is SourceLoader.path_mtime:
            raise IOError
        return {'mtime': self.path_mtime(path)}

    def set_data(self, path, data):
        """Write the bytes to the path (if possible).

        Accepts a str path and data as bytes.

        Any needed intermediary directories are to be created. If for some
        reason the file cannot be written because of permissions, fail
        silently.
        """
        # Intentionally a no-op by default: persisting (e.g. cached
        # bytecode) is an optional optimization, so loaders that cannot
        # write simply inherit this empty implementation.


_register(SourceLoader, machinery.SourceFileLoader)
|
Microvellum/Fluid-Designer
|
win64-vc/2.78/python/lib/importlib/abc.py
|
Python
|
gpl-3.0
| 10,821
|
# coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..utils import (
compat_parse_qs,
compat_urlparse,
)
class FranceCultureIE(InfoExtractor):
    """Extractor for France Culture radio replays (player/reecouter pages)."""

    _VALID_URL = r'(?P<baseurl>http://(?:www\.)?franceculture\.fr/)player/reecouter\?play=(?P<id>[0-9]+)'
    _TEST = {
        'url': 'http://www.franceculture.fr/player/reecouter?play=4795174',
        'info_dict': {
            'id': '4795174',
            'ext': 'mp3',
            'title': 'Rendez-vous au pays des geeks',
            'vcodec': 'none',
            'uploader': 'Colette Fellous',
            'upload_date': '20140301',
            'duration': 3601,
            'thumbnail': r're:^http://www\.franceculture\.fr/.*/images/player/Carnet-nomade\.jpg$',
            'description': 'Avec :Jean-Baptiste Péretié pour son documentaire sur Arte "La revanche des « geeks », une enquête menée aux Etats-Unis dans la S ...',
        }
    }

    def _real_extract(self, url):
        m = re.match(self._VALID_URL, url)
        video_id = m.group('id')
        baseurl = m.group('baseurl')

        webpage = self._download_webpage(url, video_id)

        # The Flash player embeds its configuration as a query string on
        # the loader.swf param; parse it for the audio URL and metadata.
        params = compat_parse_qs(self._search_regex(
            r"<param name='movie' value='/sites/all/modules/rf/rf_player/swf/loader.swf\?([^']+)' />",
            webpage, 'parameter code'))
        video_url = compat_urlparse.urljoin(baseurl, params['urlAOD'][0])

        title = self._html_search_regex(
            r'<h1 class="title[^"]+">(.+?)</h1>', webpage, 'title')
        uploader = self._html_search_regex(
            r'(?s)<div id="emission".*?<span class="author">(.*?)</span>',
            webpage, 'uploader', fatal=False)
        description = self._html_search_regex(
            r'(?s)<p class="desc">(.*?)</p>', webpage, 'description')

        thumbnail_part = self._html_search_regex(
            r'(?s)<div id="emission".*?<img src="([^"]+)"', webpage,
            'thumbnail', fatal=False)
        thumbnail = (None if thumbnail_part is None
                     else compat_urlparse.urljoin(baseurl, thumbnail_part))

        info = json.loads(params['infoData'][0])[0]
        duration = info.get('media_length')
        # media_section5 holds the date only when it looks like YYYYMMDD.
        upload_date = info.get('media_section5')
        if upload_date is None or not re.match(r'[0-9]{8}$', upload_date):
            upload_date = None

        return {
            'id': video_id,
            'url': video_url,
            'vcodec': 'none' if video_url.lower().endswith('.mp3') else None,
            'duration': duration,
            'uploader': uploader,
            'upload_date': upload_date,
            'title': title,
            'thumbnail': thumbnail,
            'description': description,
        }
|
svagionitis/youtube-dl
|
youtube_dl/extractor/franceculture.py
|
Python
|
unlicense
| 2,924
|
from datetime import datetime
from nose.tools import eq_
from kitsune.announcements.models import Announcement
from kitsune.announcements.tests import announcement
from kitsune.sumo.tests import TestCase
from kitsune.sumo.urlresolvers import reverse
from kitsune.users.tests import user, add_permission
from kitsune.wiki.tests import locale
class TestCreateLocaleAnnouncement(TestCase):
    """Permission matrix for creating a locale announcement."""

    def setUp(self):
        self.locale = locale(save=True, locale='es')

    def _create_test(self, status, count):
        """Login, or other setup, then call this.

        POSTs a new announcement and checks both the HTTP status code and
        the resulting number of Announcement rows.
        """
        url = reverse('announcements.create_for_locale', locale='es')
        resp = self.client.post(url, {
            'content': 'Look at me!',
            'show_after': '2012-01-01',
        })
        eq_(resp.status_code, status)
        eq_(Announcement.objects.count(), count)

    def test_create(self):
        # Superusers may always create announcements.
        acct = user(save=True, is_superuser=1)
        self.client.login(username=acct.username, password='testpass')
        self._create_test(200, 1)

    def test_leader(self):
        # Locale leaders may create announcements for their locale.
        acct = user(save=True)
        self.locale.leaders.add(acct)
        self.locale.save()
        self.client.login(username=acct.username, password='testpass')
        self._create_test(200, 1)

    def test_has_permission(self):
        # The add_announcement permission is sufficient on its own.
        acct = user(save=True)
        add_permission(acct, Announcement, 'add_announcement')
        self.client.login(username=acct.username, password='testpass')
        self._create_test(200, 1)

    def test_no_perms(self):
        # An ordinary authenticated user is forbidden.
        acct = user(save=True)
        self.client.login(username=acct.username, password='testpass')
        self._create_test(403, 0)

    def test_anon(self):
        # Anonymous users are redirected to login.
        self._create_test(302, 0)
class TestDeleteAnnouncement(TestCase):
    """Permission matrix for deleting a locale announcement."""

    def setUp(self):
        self.locale = locale(save=True, locale='es')
        self.u = user(save=True)
        self.locale.leaders.add(self.u)
        self.locale.save()
        # FIX: the month/day were written as `01`, a leading-zero (octal
        # style) literal that is a SyntaxError on Python 3; plain `1` is
        # value-identical on Python 2.
        self.announcement = announcement(
            creator=self.u, locale=self.locale, content="Look at me!",
            show_after=datetime(2012, 1, 1, 0, 0, 0), save=True)

    def _delete_test(self, id, status, count):
        """Login, or other setup, then call this.

        POSTs the delete view for announcement *id* and checks both the
        HTTP status code and the remaining number of Announcement rows.
        """
        url = reverse('announcements.delete', locale='es', args=(id,))
        resp = self.client.post(url)
        eq_(resp.status_code, status)
        eq_(Announcement.objects.count(), count)

    def test_delete(self):
        # Superusers may always delete announcements.
        u = user(save=True, is_superuser=1)
        self.client.login(username=u.username, password='testpass')
        self._delete_test(self.announcement.id, 204, 0)

    def test_leader(self):
        # Use the user that was created in setUp (a locale leader).
        self.client.login(username=self.u.username, password='testpass')
        self._delete_test(self.announcement.id, 204, 0)

    def test_has_permission(self):
        # The add_announcement permission is sufficient on its own.
        u = user(save=True)
        add_permission(u, Announcement, 'add_announcement')
        self.client.login(username=u.username, password='testpass')
        self._delete_test(self.announcement.id, 204, 0)

    def test_no_perms(self):
        # An ordinary authenticated user is forbidden; the row survives.
        u = user(save=True)
        self.client.login(username=u.username, password='testpass')
        self._delete_test(self.announcement.id, 403, 1)

    def test_anon(self):
        # Anonymous users are redirected to login; the row survives.
        self._delete_test(self.announcement.id, 302, 1)
|
orvi2014/kitsune
|
kitsune/announcements/tests/test_views.py
|
Python
|
bsd-3-clause
| 3,299
|
from south.db import db
from django.db import models
from apps.feed_import.models import *
# Auto-generated South schema migration: adds the OAuthToken table.
# NOTE(review): the `models` dict below is South's frozen ORM snapshot —
# do not hand-edit it; regenerate the migration instead.
class Migration:

    def forwards(self, orm):
        """Apply: create the feed_import_oauthtoken table."""
        # Adding model 'OAuthToken'
        db.create_table('feed_import_oauthtoken', (
            ('id', orm['feed_import.oauthtoken:id']),
            ('user', orm['feed_import.oauthtoken:user']),
            ('request_token', orm['feed_import.oauthtoken:request_token']),
            ('request_token_secret', orm['feed_import.oauthtoken:request_token_secret']),
            ('access_token', orm['feed_import.oauthtoken:access_token']),
            ('access_token_secret', orm['feed_import.oauthtoken:access_token_secret']),
        ))
        db.send_create_signal('feed_import', ['OAuthToken'])

    def backwards(self, orm):
        """Unapply: drop the feed_import_oauthtoken table."""
        # Deleting model 'OAuthToken'
        db.delete_table('feed_import_oauthtoken')

    # Frozen ORM definitions used by South to build `orm` above.
    models = {
        'auth.group': {
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)"},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'feed_import.oauthtoken': {
            'access_token': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'access_token_secret': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'request_token': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'request_token_secret': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        }
    }

    complete_apps = ['feed_import']
|
lucidbard/NewsBlur
|
apps/feed_import/migrations/0002_oauth.py
|
Python
|
mit
| 4,508
|
"""tzlocal for OS X."""
import os
import dateutil.tz
import subprocess
# Lazily-computed cache of the timezone name; populated by get_localzone().
_cache_tz = None
def _get_localzone():
    """Determine the OS X system timezone name.

    Returns the zone name (e.g. 'America/Los_Angeles') or None when it
    cannot be determined or does not resolve to a known timezone.
    """
    tzname = subprocess.check_output(["systemsetup", "-gettimezone"]).decode('utf-8')
    tzname = tzname.replace("Time Zone: ", "")
    # On OS X 10.9+ `systemsetup -gettimezone` is root-only and its failure
    # message contains 'exiting!'; treat that as "no answer".
    if 'exiting!' in tzname:
        tzname = ''
    if not tzname:
        # /etc/localtime is a symlink such as
        # /usr/share/zoneinfo/America/Los_Angeles; recover the zone name
        # from the link target.
        link = os.readlink("/etc/localtime")
        tzname = link.split('zoneinfo/')[-1]
    tzname = tzname.strip()
    # FIX: the original "validated" the name with a bare `assert` (stripped
    # under -O) and ignored the gettz() result inside a bare except, so an
    # unknown zone name was returned unchecked. Validate explicitly instead.
    if tzname and dateutil.tz.gettz(tzname) is not None:
        return tzname
    return None
def get_localzone():
    """Get the computers configured local timezone, if any.

    The detection is performed once and memoized in the module-level
    `_cache_tz`; call reload_localzone() to refresh it.
    """
    global _cache_tz
    tz = _cache_tz
    if tz is None:
        tz = _get_localzone()
        _cache_tz = tz
    return tz
def reload_localzone():
    """Reload the cached localzone.

    You need to call this if the system timezone has changed since the
    last call to get_localzone().
    """
    global _cache_tz
    tz = _get_localzone()
    _cache_tz = tz
    return tz
|
s2hc-johan/nikola
|
nikola/packages/tzlocal/darwin.py
|
Python
|
mit
| 1,082
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Python worker logging."""
import json
import logging
import threading
import traceback
from apache_beam.runners.common import LoggingContext
# This module is experimental. No backwards-compatibility guarantees.
# Per-thread worker information. This is used only for logging to set
# context information that changes while work items get executed:
# work_item_id, step_name, stage_name.
class _PerThreadWorkerData(threading.local):
def __init__(self):
super(_PerThreadWorkerData, self).__init__()
# TODO(robertwb): Consider starting with an initial (ignored) ~20 elements
# in the list, as going up and down all the way to zero incurs several
# reallocations.
self.stack = []
def get_data(self):
all_data = {}
for datum in self.stack:
all_data.update(datum)
return all_data
per_thread_worker_data = _PerThreadWorkerData()
class PerThreadLoggingContext(LoggingContext):
    """Context manager that pushes per-thread logging attributes.

    The keyword arguments given at construction are pushed onto the
    current thread's context stack on entry and popped on exit.
    """

    def __init__(self, **kwargs):
        self.kwargs = kwargs
        self.stack = per_thread_worker_data.stack

    def __enter__(self):
        self.enter()

    def enter(self):
        # Push our attributes; get_data() merges them over earlier frames.
        self.stack.append(self.kwargs)

    def __exit__(self, exn_type, exn_value, exn_traceback):
        self.exit()

    def exit(self):
        self.stack.pop()
class JsonLogFormatter(logging.Formatter):
    """A JSON formatter class as expected by the logging standard module.

    Each record is serialized to a single-line JSON object; the emitted
    keys (timestamp, severity, message, thread, job, worker, work, stage,
    step, logger, exception) form a wire format — do not rename them.
    """

    def __init__(self, job_id, worker_id):
        # job_id/worker_id are constant for the lifetime of this worker and
        # are stamped onto every record.
        super(JsonLogFormatter, self).__init__()
        self.job_id = job_id
        self.worker_id = worker_id

    def format(self, record):
        """Returns a JSON string based on a LogRecord instance.

        Args:
          record: A LogRecord instance. See below for details.

        Returns:
          A JSON string representing the record.

        A LogRecord instance has the following attributes and is used for
        formatting the final message.

        Attributes:
          created: A double representing the timestamp for record creation
            (e.g., 1438365207.624597). Note that the number contains also msecs and
            microsecs information. Part of this is also available in the 'msecs'
            attribute.
          msecs: A double representing the msecs part of the record creation
            (e.g., 624.5970726013184).
          msg: Logging message containing formatting instructions or an arbitrary
            object. This is the first argument of a log call.
          args: A tuple containing the positional arguments for the logging call.
          levelname: A string. Possible values are: INFO, WARNING, ERROR, etc.
          exc_info: None or a 3-tuple with exception information as it is
            returned by a call to sys.exc_info().
          name: Logger's name. Most logging is done using the default root logger
            and therefore the name will be 'root'.
          filename: Basename of the file where logging occurred.
          funcName: Name of the function where logging occurred.
          process: The PID of the process running the worker.
          thread: An id for the thread where the record was logged. This is not a
            real TID (the one provided by OS) but rather the id (address) of a
            Python thread object. Nevertheless having this value can allow to
            filter log statement from only one specific thread.
        """
        output = {}
        output['timestamp'] = {
            'seconds': int(record.created),
            'nanos': int(record.msecs * 1000000)}
        # ERROR. INFO, DEBUG log levels translate into the same for severity
        # property. WARNING becomes WARN.
        output['severity'] = (
            record.levelname if record.levelname != 'WARNING' else 'WARN')

        # msg could be an arbitrary object, convert it to a string first.
        record_msg = str(record.msg)

        # Prepare the actual message using the message formatting string and the
        # positional arguments as they have been used in the log call.
        if record.args:
            try:
                output['message'] = record_msg % record.args
            except (TypeError, ValueError):
                # Mismatched format string/args must never break logging;
                # fall back to showing both verbatim.
                output['message'] = '%s with args (%s)' % (record_msg, record.args)
        else:
            output['message'] = record_msg

        # The thread ID is logged as a combination of the process ID and thread ID
        # since workers can run in multiple processes.
        output['thread'] = '%s:%s' % (record.process, record.thread)
        # job ID and worker ID. These do not change during the lifetime of a worker.
        output['job'] = self.job_id
        output['worker'] = self.worker_id
        # Stage, step and work item ID come from thread local storage since they
        # change with every new work item leased for execution. If there is no
        # work item ID then we make sure the step is undefined too.
        data = per_thread_worker_data.get_data()
        if 'work_item_id' in data:
            output['work'] = data['work_item_id']
        if 'stage_name' in data:
            output['stage'] = data['stage_name']
        if 'step_name' in data:
            output['step'] = data['step_name']
        # All logging happens using the root logger. We will add the basename of the
        # file and the function name where the logging happened to make it easier
        # to identify who generated the record.
        output['logger'] = '%s:%s:%s' % (
            record.name, record.filename, record.funcName)
        # Add exception information if any is available.
        if record.exc_info:
            output['exception'] = ''.join(
                traceback.format_exception(*record.exc_info))

        return json.dumps(output)
def initialize(job_id, worker_id, log_path):
    """Initialize root logger so that we log JSON to a file and text to stdout."""
    handler = logging.FileHandler(log_path)
    handler.setFormatter(JsonLogFormatter(job_id, worker_id))
    root = logging.getLogger()
    root.addHandler(handler)
    # Default to INFO so the various DEBUG-level calls sprinkled throughout
    # the code stay quiet.
    root.setLevel(logging.INFO)
|
dhalperi/beam
|
sdks/python/apache_beam/runners/worker/logger.py
|
Python
|
apache-2.0
| 6,644
|
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import numpy as np
import argparse
import struct
def process_dlrm_data(embedding_rows_bound, data_file, dest_dir, num_samples=0):
    """Convert the Criteo DLRM test_data.bin into .npy tensors.

    Args:
        embedding_rows_bound: upper bound applied (via modulo) to every
            categorical feature so it indexes into the embedding tables.
        data_file: path to the binary input; each sample is 40 int32 values
            (1 label, 13 numeric features, 26 categorical features).
        dest_dir: output root directory; all files are written under
            <dest_dir>/build/criteo.
        num_samples: number of samples to process; 0 means the whole file.
    """
    # FIX: this line previously read the undefined name `output_dir` (a
    # local variable of main()), raising NameError whenever the function
    # was called; the intent was clearly the `dest_dir` parameter.
    dest_dir = os.path.join(dest_dir, "build", "criteo")
    if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)

    # Number of samples in the file: 40 int32 values = 160 bytes per line.
    if num_samples == 0:
        n_lines = os.path.getsize(data_file) // 40 // 4
    else:
        n_lines = num_samples

    with open(os.path.join(dest_dir, "val_map.txt"), "w") as f:
        for i in range(n_lines):
            print("{:08d}".format(i), file=f)

    ground_truth_list = []
    int_features_list = []
    int_features_int8_list = []
    cat_features_list = []
    with open(str(data_file), "rb") as f:
        for n in range(n_lines):
            if n % 1000 == 0:
                print("Processing No.{:d}/{:d}...".format(n, n_lines))
            # One sample: label, 13 numeric ints, 26 categorical ints.
            nums = struct.unpack_from("40i", f.read(40 * 4))
            ground_truth_list.append(nums[0])
            int_features = nums[1:14]
            # In reference implementation, we do log(max(0, feature) + 1).
            # TODO: should this be in timed path?
            int_features = [np.log(max(0.0, i) + 1.0) for i in int_features]
            int_features_list.append(int_features)
            # Using [-14.2313, 14.2313] as the range for the numerical input
            # according to calibration cache.
            int8_factor = 127.0 / 14.2313
            int_features_int8 = [min(max(i * int8_factor, -128.0), 127.0) for i in int_features]
            int_features_int8_list.append(int_features_int8)
            cat_features = np.array(nums[14:40], dtype=np.int32)
            cat_features = [x % embedding_rows_bound for x in cat_features]
            cat_features_list.append(cat_features)

    # (Pointless `.format(n)` calls on placeholder-free names were removed.)
    np.save(os.path.join(dest_dir, "ground_truth.npy"), np.array(ground_truth_list, dtype=np.int32))
    np.save(os.path.join(dest_dir, "numeric_fp32.npy"), np.array(int_features_list, dtype=np.float32))
    np.save(os.path.join(dest_dir, "numeric_fp16.npy"), np.array(int_features_list, dtype=np.float16))
    np.save(os.path.join(dest_dir, "numeric_int8_linear.npy"), np.array(int_features_int8_list, dtype=np.int8))
    # The chw4/chw32 layouts pad the 13 numeric features with zeros up to
    # 16 and 32 channels respectively.
    np.save(os.path.join(dest_dir, "numeric_int8_chw4.npy"),
            np.array([i + [0 for j in range(16 - 13)] for i in int_features_int8_list], dtype=np.int8))
    np.save(os.path.join(dest_dir, "numeric_int8_chw32.npy"),
            np.array([i + [0 for j in range(32 - 13)] for i in int_features_int8_list], dtype=np.int8))
    np.save(os.path.join(dest_dir, "categorical_int32.npy"), np.array(cat_features_list, dtype=np.int32))
def main():
    """Parse command-line arguments and run the DLRM data conversion."""
    parser = argparse.ArgumentParser()
    # FIX: type=int is required here — argparse yields strings by default,
    # and the bound is used as the right operand of `%` against int32
    # values, which would raise TypeError for a CLI-supplied bound.
    parser.add_argument("--embedding_rows_bound", "-b", help="Specifies the upper bound on the number of embedding rows", type=int, default=40000000)
    parser.add_argument("--data_file", "-d", help="Specifies the input data file test_data.bin")
    parser.add_argument("--output_dir", "-o", help="Specifies the output directory for the npy files")
    parser.add_argument("--num_samples", "-n", help="Specifies the number of samples to be processed. Default: all", type=int, default=0)
    args = parser.parse_args()
    data_file = args.data_file
    output_dir = args.output_dir
    embedding_rows_bound = args.embedding_rows_bound
    num_samples = args.num_samples
    process_dlrm_data(embedding_rows_bound, data_file, output_dir, num_samples)


if __name__ == "__main__":
    main()
|
mlperf/inference_results_v0.7
|
open/Inspur/code/dlrm/tensorrt/scripts/convert_dlrm_data.py
|
Python
|
apache-2.0
| 4,174
|
#!/usr/bin/env python
import re
import sys
import os
import codecs
# Matches an SSI directive like <!--#include virtual="path"-->; group 2 is
# the included path.
include_pat = r'(<!--\s*#include\s*virtual\s*=\s*"([^"]+)"\s*-->)'
include_regex = re.compile(include_pat)
# Matches href="path#fragment" attributes; group 2 is the path, group 3 the
# optional fragment.
url_pat = r'(\s+href\s*=\s*")([^"#]+)(#[^"]+)?(")'
url_regex = re.compile(url_pat)
# Placeholder substituted with the Slurm version given on the command line.
version_pat = r'(@SLURM_VERSION@)'
version_regex = re.compile(version_pat)
# Directory of the file currently being converted (set in the main loop).
dirname = ''
def include_virtual(matchobj):
    """re.sub callback: expand an SSI include with the referenced file.

    Resolves the include path relative to the global `dirname`; when the
    file does not exist the directive is left untouched.
    """
    global dirname
    target = matchobj.group(2)
    filename = dirname + '/' + target if dirname else target
    if not os.access(filename, os.F_OK):
        # Missing file: keep the original directive text.
        return matchobj.group(0)
    #print 'Including file', filename
    return open(filename, 'r').read()
def url_rewrite(matchobj):
    """re.sub callback: rewrite a local .shtml href to .html.

    Only rewrites when the target ends in .shtml and exists locally
    (relative to the global `dirname`); a trailing #fragment is preserved.
    """
    global dirname
    location = matchobj.group(2)
    localpath = dirname + '/' + location if dirname else location
    if not (location.endswith('.shtml') and os.access(localpath, os.F_OK)):
        # Not a rewritable local page: keep the attribute as-is.
        return matchobj.group(0)
    fragment = matchobj.group(3) or ''
    newname = location[:-6] + '.html' + fragment
    #print 'Rewriting', location, 'to', newname
    return matchobj.group(1) + newname + matchobj.group(4)
def version_rewrite(matchobj):
    """re.sub callback: replace the @SLURM_VERSION@ placeholder.

    The match object is ignored; the global `version` (set from the
    command line) is substituted verbatim.
    """
    global version
    return version
# Make sure all of the files on the command line have the .shtml extension.
# argv[1] is the Slurm version string; argv[2:] are the pages to convert.
version = sys.argv[1]
files = []
for f in sys.argv[2:]:
    if f[-6:] == '.shtml':
        files.append(f)
    else:
        #print 'Skipping file %s (extension is not .shtml)' % f
        pass
# Convert each page: expand SSI includes, substitute the version
# placeholder, then rewrite internal .shtml links to .html.
for filename in files:
    dirname, basefilename = os.path.split(filename)
    newfilename = basefilename[:-6] + '.html'
    print('Converting', filename, '->', newfilename)
    shtml = codecs.open(filename, 'r', encoding='utf-8')
    html = codecs.open(newfilename, 'w', encoding='utf-8')
    for line in shtml.readlines():
        line = include_regex.sub(include_virtual, line)
        line = version_regex.sub(version_rewrite, line)
        line = url_regex.sub(url_rewrite, line)
        html.write(line)
    html.close()
    shtml.close()
|
pompp/slurm-power-management
|
slurm/doc/html/shtml2html.py
|
Python
|
gpl-2.0
| 2,196
|
#!/usr/bin/env python3
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
# A database of locale data for all the languages known to glibc
# To regenerate run:
# sudo locale-gen -A && python3 lc_time.py
import locale, os, pprint, sys
def generate_data():
    """Collect day/month names and formatting info for the current locale.

    Returns a list of (key, value) pairs: the four day/month entries map
    to tuples of strings, the remaining entries to single strings.
    """
    nl = locale.nl_langinfo
    ans = []
    # Abbreviated and full names: 7 days (DAY_1..DAY_7 style constants)
    # and 12 months (MON_1..MON_12).
    for base, limit in (('day', 8), ('mon', 13)):
        for attr in ('ab' + base, base):
            codes = [getattr(locale, '%s_%d' % (attr.upper(), i))
                     for i in range(1, limit)]
            ans.append((attr, tuple(nl(c) for c in codes)))
    # Scalar formatting entries.
    for key in ('d_t_fmt', 'd_fmt', 't_fmt', 't_fmt_ampm',
                'radixchar', 'thousep', 'yesexpr', 'noexpr'):
        ans.append((key, nl(getattr(locale, key.upper()))))
    return ans
def main():
    """Regenerate the `data` block at the bottom of this very file.

    Iterates every locale definition under /usr/share/i18n/locales,
    collects its date/time data, and rewrites everything after the
    data marker in this script in place.
    """
    if sys.version_info[0] < 3:
        raise RuntimeError('Must be run using python 3.x')
    locale.setlocale(locale.LC_ALL, '')
    # Remember our own absolute path before chdir'ing away: the output is
    # written back into this script.
    dest = os.path.abspath(__file__)
    os.chdir('/usr/share/i18n/locales')
    data = []
    for f in sorted(os.listdir('.')):
        try:
            locale.setlocale(locale.LC_ALL, (f, 'utf-8'))
        except locale.Error:
            # Locale not generated/installed on this system; skip it.
            continue
        data.append((f, generate_data()))
    with open(dest, 'r+b') as f:
        raw = f.read()
        # Built by concatenation so this literal does not itself match the
        # marker when the script scans its own source.
        marker = b'# The data {{' + b'{'
        pos = raw.find(marker)
        data = pprint.pformat(data, width=160)
        if not isinstance(data, bytes):
            data = data.encode('utf-8')
        # Truncate at the marker and append the freshly generated data.
        f.seek(pos)
        f.truncate()
        f.write(marker + b'\ndata = ' + data + b'\n' + b'# }}' + b'}')


if __name__ == '__main__':
    main()
# The data {{{
data = [('aa_DJ',
[('abday', ('aca', 'etl', 'tal', 'arb', 'kam', 'gum', 'sab')),
('day', ('Acaada', 'Etleeni', 'Talaata', 'Arbaqa', 'Kamiisi', 'Gumqata', 'Sabti')),
('abmon', ('qun', 'nah', 'cig', 'agd', 'cax', 'qas', 'qad', 'leq', 'way', 'dit', 'xim', 'kax')),
('mon',
('Qunxa Garablu',
'Kudo',
'Ciggilta Kudo',
'Agda Baxisso',
'Caxah Alsa',
'Qasa Dirri',
'Qado Dirri',
'Liiqen',
'Waysu',
'Diteli',
'Ximoli',
'Kaxxa Garablu')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ''),
('yesexpr', '^[oOyY].*'),
('noexpr', '^[mnMN].*')]),
('aa_ER',
[('abday', ('Aca', 'Etl', 'Tal', 'Arb', 'Kam', 'Gum', 'Sab')),
('day', ('Acaada', 'Etleeni', 'Talaata', 'Arbaqa', 'Kamiisi', 'Gumqata', 'Sabti')),
('abmon', ('Qun', 'Nah', 'Cig', 'Agd', 'Cax', 'Qas', 'Qad', 'Leq', 'Way', 'Dit', 'Xim', 'Kax')),
('mon',
('Qunxa Garablu',
'Naharsi Kudo',
'Ciggilta Kudo',
'Agda Baxisso',
'Caxah Alsa',
'Qasa Dirri',
'Qado Dirri',
'Leqeeni',
'Waysu',
'Diteli',
'Ximoli',
'Kaxxa Garablu')),
('d_t_fmt', '%A, %B %e, %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ''),
('yesexpr', '^[yY].*'),
('noexpr', '^[mnMN].*')]),
('aa_ET',
[('abday', ('Aca', 'Etl', 'Tal', 'Arb', 'Kam', 'Gum', 'Sab')),
('day', ('Acaada', 'Etleeni', 'Talaata', 'Arbaqa', 'Kamiisi', 'Gumqata', 'Sabti')),
('abmon', ('Qun', 'Kud', 'Cig', 'Agd', 'Cax', 'Qas', 'Qad', 'Leq', 'Way', 'Dit', 'Xim', 'Kax')),
('mon',
('Qunxa Garablu',
'Kudo',
'Ciggilta Kudo',
'Agda Baxisso',
'Caxah Alsa',
'Qasa Dirri',
'Qado Dirri',
'Liiqen',
'Waysu',
'Diteli',
'Ximoli',
'Kaxxa Garablu')),
('d_t_fmt', '%A, %B %e, %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[mnMN].*')]),
('af_ZA',
[('abday', ('So', 'Ma', 'Di', 'Wo', 'Do', 'Vr', 'Sa')),
('day', ('Sondag', 'Maandag', 'Dinsdag', 'Woensdag', 'Donderdag', 'Vrydag', 'Saterdag')),
('abmon', ('Jan', 'Feb', 'Mrt', 'Apr', 'Mei', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Des')),
('mon', ('Januarie', 'Februarie', 'Maart', 'April', 'Mei', 'Junie', 'Julie', 'Augustus', 'September', 'Oktober', 'November', 'Desember')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[jJyY]'),
('noexpr', '^[nN]')]),
('ak_GH',
[('abday', ('Kwe', 'Dwo', 'Ben', 'Wuk', 'Yaw', 'Fia', 'Mem')),
('day', ('Kwesida', 'Dwowda', 'Benada', 'Wukuda', 'Yawda', 'Fida', 'Memeneda')),
('abmon', ('S-Ɔ', 'K-Ɔ', 'E-Ɔ', 'E-O', 'E-K', 'O-A', 'A-K', 'D-Ɔ', 'F-Ɛ', 'Ɔ-A', 'Ɔ-O', 'M-Ɔ')),
('mon',
('Sanda-Ɔpɛpɔn',
'Kwakwar-Ɔgyefuo',
'Ebɔw-Ɔbenem',
'Ebɔbira-Oforisuo',
'Esusow Aketseaba-Kɔtɔnimba',
'Obirade-Ayɛwohomumu',
'Ayɛwoho-Kitawonsa',
'Difuu-Ɔsandaa',
'Fankwa-Ɛbɔ',
'Ɔbɛsɛ-Ahinime',
'Ɔberɛfɛw-Obubuo',
'Mumu-Ɔpɛnimba')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%Y/%m/%d'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[dDnN].*')]),
('am_ET',
[('abday', ('እሑድ', 'ሰኞ ', 'ማክሰ', 'ረቡዕ', 'ሐሙስ', 'ዓርብ', 'ቅዳሜ')),
('day', ('እሑድ', 'ሰኞ', 'ማክሰኞ', 'ረቡዕ', 'ሐሙስ', 'ዓርብ', 'ቅዳሜ')),
('abmon', ('ጃንዩ', 'ፌብሩ', 'ማርች', 'ኤፕረ', 'ሜይ ', 'ጁን ', 'ጁላይ', 'ኦገስ', 'ሴፕቴ', 'ኦክተ', 'ኖቬም', 'ዲሴም')),
('mon', ('ጃንዩወሪ', 'ፌብሩወሪ', 'ማርች', 'ኤፕረል', 'ሜይ', 'ጁን', 'ጁላይ', 'ኦገስት', 'ሴፕቴምበር', 'ኦክተውበር', 'ኖቬምበር', 'ዲሴምበር')),
('d_t_fmt', '%A፣ %B %e ቀን %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('an_ES',
[('abday', ('dom', 'lun', 'mar', 'mie', 'chu', 'bie', 'sab')),
('day', ('domingo', 'luns', 'martes', 'miecols', 'chuebes', 'biernes', 'sabado')),
('abmon', ('chi', 'fre', 'mar', 'abr', 'may', 'chn', 'chl', 'ago', 'set', 'oct', 'nob', 'abi')),
('mon', ('chinero', 'frebero', 'marzo', 'abril', 'mayo', 'chunio', 'chulio', 'agosto', 'setiembre', 'octubre', 'nobiembre', 'abiento')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('anp_IN',
[('abday', ('रवि ', 'सोम ', 'मंगल ', 'बुध ', 'बृहस्पति ', 'शुक्र ', 'शनि ')),
('day', ('रविवार ', 'सोमवार ', 'मंगलवार ', 'बुधवार ', 'बृहस्पतिवार ', 'शुक्रवार ', 'शनिवार ')),
('abmon', ('जनवरी', 'फरवरी', 'मार्च', 'अप्रैल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितंबर', 'अक्टूबर', 'नवंबर', 'दिसंबर')),
('mon', ('जनवरी', 'फरवरी', 'मार्च', 'अप्रैल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितंबर', 'अक्टूबर', 'नवंबर', 'दिसंबर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[हवyY]'),
('noexpr', '^[नइnN]')]),
('ar_AE',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت ')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_BH',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_DZ',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_EG',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_IN',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%A %d %B %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %B %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_IQ',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_JO',
[('abday', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('كانون الثاني', 'شباط', 'آذار', 'نيسان', 'نوار', 'حزيران', 'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول')),
('mon', ('كانون الثاني', 'شباط', 'آذار', 'نيسان', 'نوار', 'حزيران', 'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_KW',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_LB',
[('abday', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('كانون الثاني', 'شباط', 'آذار', 'نيسان', 'نوار', 'حزيران', 'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول')),
('mon', ('كانون الثاني', 'شباط', 'آذار', 'نيسان', 'نوار', 'حزيران', 'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_LY',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_MA',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_OM',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_QA',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_SA',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('الأحد', 'الإثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعـة', 'السبت')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('كانون الثاني', 'شباط', 'آذار', 'نيسـان', 'أيار', 'حزيران', 'تـمـوز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول')),
('d_t_fmt', '%A %e %B %Y %k:%M:%S'),
('d_fmt', '%A %e %B %Y'),
('t_fmt', '%k:%M:%S'),
('t_fmt_ampm', '%k:%M:%S'),
('radixchar', '.'),
('thousep', ''),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_SD',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_SS',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_SY',
[('abday', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('كانون الثاني', 'شباط', 'آذار', 'نيسان', 'نوار', 'حزيران', 'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول')),
('mon', ('كانون الثاني', 'شباط', 'آذار', 'نيسان', 'نواران', 'حزير', 'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني', 'كانون الأول')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_TN',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('ar_YE',
[('abday', ('ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س')),
('day', ('الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس', 'الجمعة', 'السبت')),
('abmon', ('ينا', 'فبر', 'مار', 'أبر', 'ماي', 'يون', 'يول', 'أغس', 'سبت', 'أكت', 'نوف', 'ديس')),
('mon', ('يناير', 'فبراير', 'مارس', 'أبريل', 'مايو', 'يونيو', 'يوليو', 'أغسطس', 'سبتمبر', 'أكتوبر', 'نوفمبر', 'ديسمبر')),
('d_t_fmt', '%d %b, %Y %Z %I:%M:%S %p'),
('d_fmt', '%d %b, %Y'),
('t_fmt', '%Z %I:%M:%S '),
('t_fmt_ampm', '%Z %I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('as_IN',
[('abday', ('দেও', 'সোম', 'মঙ্গল', 'বুধ', 'বৃহষ্পতি', 'শুক্ৰ', 'শনি')),
('day', ('দেওবাৰ', 'সোমবাৰ', 'মঙ্গলবাৰ', 'বুধবাৰ', 'বৃহষ্পতিবাৰ', 'শুক্ৰবাৰ', 'শনিবাৰ')),
('abmon', ('জানুৱাৰী', 'ফেব্ৰুৱাৰী', 'মাৰ্চ', 'এপ্ৰিল', 'মে', 'জুন', 'জুলাই', 'আগ', 'চেপ্তেম্বৰ', 'অক্টোবৰ', 'নভেম্বৰ', 'ডিচেম্বৰ')),
('mon', ('জানুৱাৰী', 'ফেব্ৰুৱাৰী', 'মাৰ্চ', 'এপ্ৰিল', 'মে', 'জুন', 'জুলাই', 'আগষ্ট', 'চেপ্তেম্বৰ', 'অক্টোবৰ', 'নভেম্বৰ', 'ডিচেম্বৰ')),
('d_t_fmt', '%e %B, %Y %I.%M.%S %p %Z'),
('d_fmt', '%e-%m-%Y'),
('t_fmt', '%I.%M.%S %p'),
('t_fmt_ampm', '%I.%M.%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYহ].*'),
('noexpr', '^[nNন].*')]),
('ast_ES',
[('abday', ('dom', 'llu', 'mar', 'mié', 'xue', 'vie', 'sáb')),
('day', ('domingu', 'llunes', 'martes', 'miércoles', 'xueves', 'vienres', 'sábadu')),
('abmon', ('xin', 'feb', 'mar', 'abr', 'may', 'xun', 'xnt', 'ago', 'set', 'och', 'pay', 'avi')),
('mon', ('xineru', 'febreru', 'marzu', 'abril', 'mayu', 'xunu', 'xunetu', 'agostu', 'setiembre', 'ochobre', 'payares', 'avientu')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('ayc_PE',
[('abday', ('tum', 'lun', 'mar', 'mir', 'juy', 'wir', 'saw')),
('day', ('tuminku', 'lunisa', 'martisa', 'mirkulisa', 'juywisa', 'wirnisa', 'sawäru')),
('abmon', ('ini', 'phi', 'mar', 'awr', 'may', 'jun', 'jul', 'awu', 'sit', 'ukt', 'nuw', 'ris')),
('mon', ('inïru', 'phiwriru', 'marsu', 'awrila', 'mayu', 'junyu', 'julyu', 'awustu', 'sitimri', 'uktuwri', 'nuwimri', 'risimri')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[uUsSyY].*'),
('noexpr', '^[jJnN].*')]),
('az_AZ',
[('abday', ('baz', 'ber', 'çax', 'çər', 'cax', 'cüm', 'şnb')),
('day', ('bazar günü', 'bazar ertəsi', 'çərşənbə axşamı', 'çərşənbə', 'cümə axşamı', 'cümə', 'şənbə')),
('abmon', ('Yan', 'Fev', 'Mar', 'Apr', 'May', 'İyn', 'İyl', 'Avq', 'Sen', 'Okt', 'Noy', 'Dek')),
('mon', ('yanvar', 'fevral', 'mart', 'aprel', 'may', 'iyun', 'iyul', 'avqust', 'sentyabr', 'oktyabr', 'noyabr', 'dekabr')),
('d_t_fmt', '%A, %d %B %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[Bb].*'),
('noexpr', '^[YyNn].*')]),
('be_BY',
[('abday', ('Няд', 'Пан', 'Аўт', 'Срд', 'Чцв', 'Пят', 'Суб')),
('day', ('Нядзеля', 'Панядзелак', 'Аўторак', 'Серада', 'Чацвер', 'Пятніца', 'Субота')),
('abmon', ('Стд', 'Лют', 'Сак', 'Крс', 'Тра', 'Чэр', 'Ліп', 'Жнв', 'Врс', 'Кст', 'Ліс', 'Снж')),
('mon', ('Студзень', 'Люты', 'Сакавік', 'Красавік', 'Травень', 'Чэрвень', 'Ліпень', 'Жнівень', 'Верасень', 'Кастрычнік', 'Лістапад', 'Снежань')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[ТтYy].*'),
('noexpr', '^[НнNn].*')]),
('bem_ZM',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Pa Mulungu', 'Palichimo', 'Palichibuli', 'Palichitatu', 'Palichine', 'Palichisano', 'Pachibelushi')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'July', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('Januari', 'Februari', 'Machi', 'Epreo', 'Mei', 'Juni', 'Julai', 'Ogasti', 'Septemba', 'Oktoba', 'Novemba', 'Disemba')),
('d_t_fmt', '%a %d %b %Y %R %Z'),
('d_fmt', '%m/%d/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYeE].*'),
('noexpr', '^[nNaA].*')]),
('ber_DZ',
[('abday', ('baz', 'bir', 'iki', 'üçü', 'dör', 'beş', 'alt')),
('day', ('bazar günü', 'birinci gün', 'ikinci gün', 'üçüncü gün', 'dördüncü gün', 'beşinci gün', 'altıncı gün')),
('abmon', ('Yan', 'Fev', 'Mar', 'Apr', 'May', 'İyn', 'İyl', 'Avq', 'Sen', 'Okt', 'Noy', 'Dek')),
('mon', ('yanvar', 'fevral', 'mart', 'aprel', 'may', 'iyun', 'iyul', 'avqust', 'sentyabr', 'oktyabr', 'noyabr', 'dekabr')),
('d_t_fmt', '%A, %d %B %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[Bb].*'),
('noexpr', '^[YyNn].*')]),
('ber_MA',
[('abday', ('baz', 'bir', 'iki', 'üçü', 'dör', 'beş', 'alt')),
('day', ('bazar günü', 'birinci gün', 'ikinci gün', 'üçüncü gün', 'dördüncü gün', 'beşinci gün', 'altıncı gün')),
('abmon', ('Yan', 'Fev', 'Mar', 'Apr', 'May', 'İyn', 'İyl', 'Avq', 'Sen', 'Okt', 'Noy', 'Dek')),
('mon', ('yanvar', 'fevral', 'mart', 'aprel', 'may', 'iyun', 'iyul', 'avqust', 'sentyabr', 'oktyabr', 'noyabr', 'dekabr')),
('d_t_fmt', '%A, %d %B %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[Bb].*'),
('noexpr', '^[YyNn].*')]),
('bg_BG',
[('abday', ('нд', 'пн', 'вт', 'ср', 'чт', 'пт', 'сб')),
('day', ('неделя', 'понеделник', 'вторник', 'сряда', 'четвъртък', 'петък', 'събота')),
('abmon', ('яну', 'фев', 'мар', 'апр', 'май', 'юни', 'юли', 'авг', 'сеп', 'окт', 'ное', 'дек')),
('mon', ('януари', 'февруари', 'март', 'април', 'май', 'юни', 'юли', 'август', 'септември', 'октомври', 'ноември', 'декември')),
('d_t_fmt', '%x (%a) %X %Z'),
('d_fmt', '%e.%m.%Y'),
('t_fmt', '%k,%M,%S'),
('t_fmt_ampm', '%l,%M,%S'),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[+1ДдDdYyOo].*'),
('noexpr', '^[-0НнNnKk].*')]),
('bho_IN',
[('abday', ('रवि ', 'सोम ', 'मंगल ', 'बुध ', 'गुरु ', 'शुक्र ', 'शनि ')),
('day', ('रविवार ', 'सोमवार ', 'मंगलवार ', 'बुधवार ', 'गुरुवार ', 'शुक्रवार ', 'शनिवार ')),
('abmon', ('जनवरी', 'फरवरी', 'मार्च', 'अप्रैल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('mon', ('जनवरी', 'फरवरी', 'मार्च', 'अप्रैल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('bn_BD',
[('abday', ('রবি', 'সোম', 'মঙ্গল', 'বুধ', 'বৃহঃ', 'শুক্র', 'শনি')),
('day', ('রবিবার', 'সোমবার', 'মঙ্গলবার', 'বুধবার', 'বৃহস্পতিবার', 'শুক্রবার', 'শনিবার')),
('abmon', ('জানু', 'ফেব্রু', 'মার্চ', 'এপ্রি', 'মে', 'জুন', 'জুল', 'আগ', 'সেপ্টে', 'অক্টো', 'নভে', 'ডিসে')),
('mon', ('জানুয়ারি', 'ফেব্রুয়ারি', 'মার্চ', 'এপ্রিল', 'মে', 'জুন', 'জুলাই', 'আগস্ট', 'সেপ্টেম্বর', 'অক্টোবর', 'নভেম্বর', 'ডিসেম্বর')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[হ্যাঁyY]'),
('noexpr', '^[নাnN]')]),
('bn_IN',
[('abday', ('রবি', 'সোম', 'মঙ্গল', 'বুধ', 'বৃহস্পতি', 'শুক্র', 'শনি')),
('day', ('রবিবার', 'সোমবার', 'মঙ্গলবার', 'বুধবার', 'বৃহস্পতিবার', 'শুক্রবার', 'শনিবার')),
('abmon', ('জানুয়ারি', 'ফেব্রুয়ারি', 'মার্চ', 'এপ্রিল', 'মে', 'জুন', 'জুলাই', 'আগস্ট', 'সেপ্টেম্বর', 'অক্টোবর', 'নভেম্বর', 'ডিসেম্বর')),
('mon', ('জানুয়ারি', 'ফেব্রুয়ারি', 'মার্চ', 'এপ্রিল', 'মে', 'জুন', 'জুলাই', 'আগস্ট', 'সেপ্টেম্বর', 'অক্টোবর', 'নভেম্বর', 'ডিসেম্বর')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[হ্যাঁyY]'),
('noexpr', '^[নাnN]')]),
('bo_CN',
[('abday', ('ཉི་', 'ཟླ་', 'མིར་', 'ལྷག་', 'པུར་', 'སངས་', 'སྤེན་')),
('day', ('གཟའ་ཉི་མ་', 'གཟའ་ཟླ་བ་', 'གཟའ་མིག་དམར་', 'གཟའ་ལྷག་ཕ་', 'གཟའ་པུར་བུ་', 'གཟའ་པ་སངས་', 'གཟའ་སྤེན་ཕ་')),
('abmon', ('ཟླ་༡', 'ཟླ་༢', 'ཟླ་༣', 'ཟླ་༤', 'ཟླ་༥', 'ཟླ་༦', 'ཟླ་༧', 'ཟླ་༨', 'ཟླ་༩', 'ཟླ་༡༠', 'ཟླ་༡༡', 'ཟླ་༡༢')),
('mon',
('ཟླ་བ་དང་པ་',
'ཟླ་བ་གཉིས་པ་',
'ཟླ་བ་གསུམ་པ་',
'ཟླ་བ་བཞི་པ་',
'ཟླ་བ་ལྔ་ཕ་',
'ཟླ་བ་དྲུག་པ་',
'ཟླ་བ་བདུནཔ་',
'ཟླ་བ་བརྒྱད་པ་',
'ཟླ་བ་དགུ་པ་',
'ཟླ་བ་བཅུ་པ་',
'ཟླ་བ་བཅུ་གཅིག་པ་',
'ཟླ་བ་བཅུ་གཉིས་པ་')),
('d_t_fmt', 'པསྱི་ལོ%yཟལ%mཚེས%dཆུ་ཚོད%Hཀསར་མ%Mཀསར་ཆ%S'),
('d_fmt', 'པསྱི་ལོ%yཟལ%mཚེས%d'),
('t_fmt', 'ཆུ་ཚོད%Hཀསར་མ%Mཀསར་ཆ%S'),
('t_fmt_ampm', 'ཆུ་ཚོད%Iཀསར་མ%Mཀསར་ཆ%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ཨYy].*'),
('noexpr', '^[མNn].*')]),
('bo_IN',
[('abday', ('ཉི་', 'ཟླ་', 'མིར་', 'ལྷག་', 'པུར་', 'སངས་', 'སྤེན་')),
('day', ('གཟའ་ཉི་མ་', 'གཟའ་ཟླ་བ་', 'གཟའ་མིག་དམར་', 'གཟའ་ལྷག་ཕ་', 'གཟའ་པུར་བུ་', 'གཟའ་པ་སངས་', 'གཟའ་སྤེན་ཕ་')),
('abmon', ('ཟླ་༡', 'ཟླ་༢', 'ཟླ་༣', 'ཟླ་༤', 'ཟླ་༥', 'ཟླ་༦', 'ཟླ་༧', 'ཟླ་༨', 'ཟླ་༩', 'ཟླ་༡༠', 'ཟླ་༡༡', 'ཟླ་༡༢')),
('mon',
('ཟླ་བ་དང་པ་',
'ཟླ་བ་གཉིས་པ་',
'ཟླ་བ་གསུམ་པ་',
'ཟླ་བ་བཞི་པ་',
'ཟླ་བ་ལྔ་ཕ་',
'ཟླ་བ་དྲུག་པ་',
'ཟླ་བ་བདུནཔ་',
'ཟླ་བ་བརྒྱད་པ་',
'ཟླ་བ་དགུ་པ་',
'ཟླ་བ་བཅུ་པ་',
'ཟླ་བ་བཅུ་གཅིག་པ་',
'ཟླ་བ་བཅུ་གཉིས་པ་')),
('d_t_fmt', 'པསྱི་ལོ%yཟལ%mཚེས%dཆུ་ཚོད%Hཀསར་མ%Mཀསར་ཆ%S'),
('d_fmt', 'པསྱི་ལོ%yཟལ%mཚེས%d'),
('t_fmt', 'ཆུ་ཚོད%Hཀསར་མ%Mཀསར་ཆ%S'),
('t_fmt_ampm', 'ཆུ་ཚོད%Iཀསར་མ%Mཀསར་ཆ%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ཨYy].*'),
('noexpr', '^[མNn].*')]),
('br_FR',
[('abday', ('sul', 'lun', 'meu', 'mer', 'yao', 'gwe', 'sad')),
('day', ('sul', 'lun', 'meurzh', "merc'her", 'yaou', 'gwener', 'sadorn')),
('abmon', ('Gen ', "C'hw", 'Meu ', 'Ebr ', 'Mae ', 'Eve ', 'Gou ', 'Eos ', 'Gwe ', 'Her ', 'Du ', 'Ker ')),
('mon', ('Genver', "C'hwevrer", 'Meurzh', 'Ebrel', 'Mae', 'Mezheven', 'Gouere', 'Eost', 'Gwengolo', 'Here', 'Du', 'Kerzu')),
('d_t_fmt', "D'ar %A %d a viz %B %Y"),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%Ie%M:%S %p'),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[oOyY].*'),
('noexpr', '^[nN].*')]),
('brx_IN',
[('abday', ('रबि', 'सम', 'मंगल', 'बुद', 'बिसथि', 'सुखुर', 'सुनि')),
('day', ('रबिबार', 'सोबार', 'मंगलबार', 'बुदबार', 'बिसथिबार', 'सुखुरबार', 'सुनिबार')),
('abmon', ('जानुवारी', 'फेब्रुवारी', 'मार्स', 'एप्रिल', 'मे', 'जुन', 'जुलाइ', 'आगस्थ', 'सेबथेज्ब़र', 'अखथबर', 'नबेज्ब़र', 'दिसेज्ब़र')),
('mon', ('जानुवारी', 'फेब्रुवारी', 'मार्स', 'एप्रिल', 'मे', 'जुन', 'जुलाइ', 'आगस्थ', 'सेबथेज्ब़र', 'अखथबर', 'नबेज्ब़र', 'दिसेज्ब़र')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^(नंगौ|[yY])'),
('noexpr', '^(नङा|[nN])')]),
('bs_BA',
[('abday', ('Ned', 'Pon', 'Uto', 'Sri', 'Čet', 'Pet', 'Sub')),
('day', ('Nedjelja', 'Ponedjeljak', 'Utorak', 'Srijeda', 'Četvrtak', 'Petak', 'Subota')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'Maj', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dec')),
('mon', ('Januar', 'Februar', 'Mart', 'April', 'Maj', 'Juni', 'Juli', 'August', 'Septembar', 'Oktobar', 'Novembar', 'Decembar')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[dDyY]*.'),
('noexpr', '^[nN]*.')]),
('byn_ER',
[('abday', ('ሰ/ቅ', 'ሰኑ', 'ሰሊጝ', 'ለጓ', 'ኣምድ', 'ኣርብ', 'ሰ/ሽ')),
('day', ('ሰንበር ቅዳዅ', 'ሰኑ', 'ሰሊጝ', 'ለጓ ወሪ ለብዋ', 'ኣምድ', 'ኣርብ', 'ሰንበር ሽጓዅ')),
('abmon', ('ልደት', 'ካብኽ', 'ክብላ', 'ፋጅኺ', 'ክቢቅ', 'ም/ት', 'ኰር', 'ማርያ', 'ያኸኒ', 'መተሉ', 'ም/ም', 'ተሕሳ')),
('mon', ('ልደትሪ', 'ካብኽብቲ', 'ክብላ', 'ፋጅኺሪ', 'ክቢቅሪ', 'ምኪኤል ትጓ̅ኒሪ', 'ኰርኩ', 'ማርያም ትሪ', 'ያኸኒ መሳቅለሪ', 'መተሉ', 'ምኪኤል መሽወሪ', 'ተሕሳስሪ')),
('d_t_fmt', '%A፡ %B %e ግርጋ %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ''),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('ca_AD',
[('abday', ('dg', 'dl', 'dt', 'dc', 'dj', 'dv', 'ds')),
('day', ('diumenge', 'dilluns', 'dimarts', 'dimecres', 'dijous', 'divendres', 'dissabte')),
('abmon', ('gen', 'feb', 'mar', 'abr', 'mai', 'jun', 'jul', 'ago', 'set', 'oct', 'nov', 'des')),
('mon', ('gener', 'febrer', 'març', 'abril', 'maig', 'juny', 'juliol', 'agost', 'setembre', 'octubre', 'novembre', 'desembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('ca_ES',
[('abday', ('dg', 'dl', 'dt', 'dc', 'dj', 'dv', 'ds')),
('day', ('diumenge', 'dilluns', 'dimarts', 'dimecres', 'dijous', 'divendres', 'dissabte')),
('abmon', ('gen', 'feb', 'mar', 'abr', 'mai', 'jun', 'jul', 'ago', 'set', 'oct', 'nov', 'des')),
('mon', ('gener', 'febrer', 'març', 'abril', 'maig', 'juny', 'juliol', 'agost', 'setembre', 'octubre', 'novembre', 'desembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('ca_FR',
[('abday', ('dg', 'dl', 'dt', 'dc', 'dj', 'dv', 'ds')),
('day', ('diumenge', 'dilluns', 'dimarts', 'dimecres', 'dijous', 'divendres', 'dissabte')),
('abmon', ('gen', 'feb', 'mar', 'abr', 'mai', 'jun', 'jul', 'ago', 'set', 'oct', 'nov', 'des')),
('mon', ('gener', 'febrer', 'març', 'abril', 'maig', 'juny', 'juliol', 'agost', 'setembre', 'octubre', 'novembre', 'desembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('ca_IT',
[('abday', ('dg', 'dl', 'dt', 'dc', 'dj', 'dv', 'ds')),
('day', ('diumenge', 'dilluns', 'dimarts', 'dimecres', 'dijous', 'divendres', 'dissabte')),
('abmon', ('gen', 'feb', 'mar', 'abr', 'mai', 'jun', 'jul', 'ago', 'set', 'oct', 'nov', 'des')),
('mon', ('gener', 'febrer', 'març', 'abril', 'maig', 'juny', 'juliol', 'agost', 'setembre', 'octubre', 'novembre', 'desembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('cmn_TW',
[('abday', ('日', '一', '二', '三', '四', '五', '六')),
('day', ('星期日', '星期一', '星期二', '星期三', '星期四', '星期五', '星期六')),
('abmon', (' 1月', ' 2月', ' 3月', ' 4月', ' 5月', ' 6月', ' 7月', ' 8月', ' 9月', '10月', '11月', '12月')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '%Y年%m月%d日 (%A) %H點%M分%S秒'),
('d_fmt', '%Y年%m月%d日'),
('t_fmt', '%H點%M分%S秒'),
('t_fmt_ampm', '%p %I點%M分%S秒'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY是]'),
('noexpr', '^[nN不否]')]),
('crh_UA',
[('abday', ('Baz', 'Ber', 'Sal', 'Çar', 'Caq', 'Cum', 'Cer')),
('day', ('Bazar', 'Bazarertesi', 'Salı', 'Çarşembe', 'Cumaaqşamı', 'Cuma', 'Cumaertesi')),
('abmon', ('Yan', 'Fev', 'Mar', 'Apr', 'May', 'İyn', 'İyl', 'Avg', 'Sen', 'Okt', 'Noy', 'Dek')),
('mon', ('Yanvar', 'Fevral', 'Mart', 'Aprel', 'Mayıs', 'İyun', 'İyul', 'Avgust', 'Sentâbr', 'Oktâbr', 'Noyabr', 'Dekabr')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[yYeE]'),
('noexpr', '^[nNhH]')]),
('cs_CZ',
[('abday', ('Ne', 'Po', 'Út', 'St', 'Čt', 'Pá', 'So')),
('day', ('Neděle', 'Pondělí', 'Úterý', 'Středa', 'Čtvrtek', 'Pátek', 'Sobota')),
('abmon', ('led', 'úno', 'bře', 'dub', 'kvě', 'čen', 'čec', 'srp', 'zář', 'říj', 'lis', 'pro')),
('mon', ('leden', 'únor', 'březen', 'duben', 'květen', 'červen', 'červenec', 'srpen', 'září', 'říjen', 'listopad', 'prosinec')),
('d_t_fmt', '%a\xa0%-d.\xa0%B\xa0%Y,\xa0%H:%M:%S\xa0%Z'),
('d_fmt', '%-d.%-m.%Y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S'),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[aAyY].*'),
('noexpr', '^[nN].*')]),
('csb_PL',
[('abday', ('nie', 'pòn', 'wtó', 'str', 'czw', 'pią', 'sob')),
('day', ('niedzela', 'pòniedzôłk', 'wtórk', 'strzoda', 'czwiôrtk', 'piątk', 'sobòta')),
('abmon', ('stë', 'gro', 'stm', 'łżë', 'môj', 'cze', 'lëp', 'zél', 'séw', 'ruj', 'lës', 'gòd')),
('mon', ('stëcznik', 'gromicznik', 'strumiannik', 'łżëkwiôt', 'môj', 'czerwińc', 'lëpinc', 'zélnik', 'séwnik', 'rujan', 'lëstopadnik', 'gòdnik')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[JjTtYy].*'),
('noexpr', '^[nN].*')]),
('cv_RU',
[('abday', ('vr', 'tn', 'yt', 'jn', 'kş', 'er', 'šm')),
('day', ('vyrsarnikun', 'tuntikun', 'ytlarikun', 'junkun', 'kĕşnernikun', 'ernekun', 'šămatkun')),
('abmon', ('KĂR', 'NAR', 'PUŠ', 'AKA', 'ŞU', 'ŞĔR', 'UTĂ', 'ŞUR', 'AVĂ', 'JUP', 'CÜK', 'RAŠ')),
('mon', ('kărlac', 'narăs', 'puš', 'aka', 'şu', 'şĕrtme', 'ută', 'şurla', 'avăn', 'jupa', 'cük', 'raštav')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('cy_GB',
[('abday', ('Sul', 'Llu', 'Maw', 'Mer', 'Iau', 'Gwe', 'Sad')),
('day', ('Sul', 'Llun', 'Mawrth', 'Mercher', 'Iau', 'Gwener', 'Sadwrn')),
('abmon', ('Ion', 'Chw', 'Maw', 'Ebr', 'Mai', 'Meh', 'Gor', 'Aws', 'Med', 'Hyd', 'Tach', 'Rha')),
('mon', ('Ionawr', 'Chwefror', 'Mawrth', 'Ebrill', 'Mai', 'Mehefin', 'Gorffennaf', 'Awst', 'Medi', 'Hydref', 'Tachwedd', 'Rhagfyr')),
('d_t_fmt', 'Dydd %A %d mis %B %Y %T %Z'),
('d_fmt', '%d.%m.%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%l:%M:%S %P %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[iItTyY].*'),
('noexpr', '^[nN].*')]),
('da_DK',
[('abday', ('søn', 'man', 'tir', 'ons', 'tor', 'fre', 'lør')),
('day', ('søndag', 'mandag', 'tirsdag', 'onsdag', 'torsdag', 'fredag', 'lørdag')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'maj', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('januar', 'februar', 'marts', 'april', 'maj', 'juni', 'juli', 'august', 'september', 'oktober', 'november', 'december')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[1JjYy].*'),
('noexpr', '^[0Nn].*')]),
('de_AT',
[('abday', ('Son', 'Mon', 'Die', 'Mit', 'Don', 'Fre', 'Sam')),
('day', ('Sonntag', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag')),
('abmon', ('Jän', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez')),
('mon', ('Jänner', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('de_BE',
[('abday', ('Son', 'Mon', 'Die', 'Mit', 'Don', 'Fre', 'Sam')),
('day', ('Sonntag', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag')),
('abmon', ('Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez')),
('mon', ('Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('de_CH',
[('abday', ('Son', 'Mon', 'Die', 'Mit', 'Don', 'Fre', 'Sam')),
('day', ('Sonntag', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag')),
('abmon', ('Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez')),
('mon', ('Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', "'"),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('de_DE',
[('abday', ('So', 'Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa')),
('day', ('Sonntag', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag')),
('abmon', ('Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez')),
('mon', ('Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('de_LU',
[('abday', ('Son', 'Mon', 'Die', 'Mit', 'Don', 'Fre', 'Sam')),
('day', ('Sonntag', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag')),
('abmon', ('Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez')),
('mon', ('Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('doi_IN',
[('abday', ('ऐत ', 'सोम ', 'मंगल ', 'बुध ', 'बीर ', 'शुक्कर ', 'श्नीचर ')),
('day', ('ऐतबार ', 'सोमबार ', 'मंगलबर ', 'बुधबार ', 'बीरबार ', 'शुक्करबार ', 'श्नीचरबार ')),
('abmon', ('जनवरी', 'फरवरी', 'मार्च', 'एप्रैल', 'मेई', 'जून', 'जूलै', 'अगस्त', 'सितंबर', 'अक्तूबर', 'नवंबर', 'दिसंबर')),
('mon', ('जनवरी', 'फरवरी', 'मार्च', 'एप्रैल', 'मेई', 'जून', 'जूलै', 'अगस्त', 'सितंबर', 'अक्तूबर', 'नवंबर', 'दिसंबर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^(ऑह|[yY])'),
('noexpr', '^(ना|[nN])')]),
('dv_MV',
[('abday', ('އާދީއްތަ', 'ހޯމަ', 'އަންގާރަ', 'ބުދަ', 'ބުރާސްފަތި', 'ހުކުރު', 'ހޮނިހިރު')),
('day', ('އާދީއްތަ', 'ހޯމަ', 'އަންގާރަ', 'ބުދަ', 'ބުރާސްފަތި', 'ހުކުރު', 'ހޮނިހިރު')),
('abmon', ('ޖެނުއަރީ', 'ފެބްރުއަރީ', 'މާރޗް', 'އެޕްރީލް', 'މެއި', 'ޖޫން', 'ޖުލައި', 'އޮގަސްޓް', 'ސެޕްޓެންބަރ', 'އޮކްޓޫބަރ', 'ނޮވެންބަރ', 'ޑިސެންބަރ')),
('mon', ('ޖެނުއަރީ', 'ފެބްރުއަރީ', 'މާރޗް', 'އެޕްރީލް', 'މެއި', 'ޖޫން', 'ޖުލައި', 'އޮގަސްޓް', 'ސެޕްޓެންބަރ', 'އޮކްޓޫބަރ', 'ނޮވެންބަރ', 'ޑިސެންބަރ')),
('d_t_fmt', '%Z %H:%M:%S %Y %b %d %a'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%P %I:%M:%S'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('dz_BT',
[('abday', ('ཟླ་', 'མིར་', 'ལྷག་', 'པུར་', 'སངས་', 'སྤེན་', 'ཉི་')),
('day', ('གཟའ་ཟླ་བ་', 'གཟའ་མིག་དམར་', 'གཟའ་ལྷག་ཕ་', 'གཟའ་པུར་བུ་', 'གཟའ་པ་སངས་', 'གཟའ་སྤེན་ཕ་', 'གཟའ་ཉི་མ་')),
('abmon', ('ཟླ་༡', 'ཟླ་༢', 'ཟླ་༣', 'ཟླ་༤', 'ཟླ་༥', 'ཟླ་༦', 'ཟླ་༧', 'ཟླ་༨', 'ཟླ་༩', 'ཟླ་༡༠', 'ཟླ་༡༡', 'ཟླ་༡༢')),
('mon',
('ཟླ་བ་དང་པ་',
'ཟླ་བ་གཉིས་པ་',
'ཟླ་བ་གསུམ་པ་',
'ཟླ་བ་བཞི་པ་',
'ཟླ་བ་ལྔ་ཕ་',
'ཟླ་བ་དྲུག་པ་',
'ཟླ་བ་བདུནཔ་',
'ཟླ་བ་བརྒྱད་པ་',
'ཟླ་བ་དགུ་པ་',
'ཟླ་བ་བཅུ་པ་',
'ཟླ་བ་བཅུ་གཅིག་པ་',
'ཟླ་བ་བཅུ་གཉིས་པ་')),
('d_t_fmt', 'པསྱི་ལོ%yཟལ%mཚེས%dཆུ་ཚོད%Hཀསར་མ%Mཀསར་ཆ%S'),
('d_fmt', 'པསྱི་ལོ%yཟལ%mཚེས%d'),
('t_fmt', 'ཆུ་ཚོད%Hཀསར་མ%Mཀསར་ཆ%S'),
('t_fmt_ampm', 'ཆུ་ཚོད%Iཀསར་མ%Mཀསར་ཆ%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ཨYy].*'),
('noexpr', '^[མNn].*')]),
('el_CY',
[('abday', ('Κυρ', 'Δευ', 'Τρι', 'Τετ', 'Πεμ', 'Παρ', 'Σαβ')),
('day', ('Κυριακή', 'Δευτέρα', 'Τρίτη', 'Τετάρτη', 'Πέμπτη', 'Παρασκευή', 'Σάββατο')),
('abmon', ('Ιαν', 'Φεβ', 'Μάρ', 'Απρ', 'Μάι', 'Ιούν', 'Ιούλ', 'Αύγ', 'Σεπ', 'Οκτ', 'Νοέ', 'Δεκ')),
('mon',
('Ιανουάριος', 'Φεβρουάριος', 'Μάρτιος', 'Απρίλιος', 'Μάιος', 'Ιούνιος', 'Ιούλιος', 'Αύγουστος', 'Σεπτέμβριος', 'Οκτώβριος', 'Νοέμβριος', 'Δεκέμβριος')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[νΝyY].*'),
('noexpr', '^[οΟnN].*')]),
('el_GR',
[('abday', ('Κυρ', 'Δευ', 'Τρι', 'Τετ', 'Πεμ', 'Παρ', 'Σαβ')),
('day', ('Κυριακή', 'Δευτέρα', 'Τρίτη', 'Τετάρτη', 'Πέμπτη', 'Παρασκευή', 'Σάββατο')),
('abmon', ('Ιαν', 'Φεβ', 'Μάρ', 'Απρ', 'Μάι', 'Ιούν', 'Ιούλ', 'Αύγ', 'Σεπ', 'Οκτ', 'Νοέ', 'Δεκ')),
('mon',
('Ιανουάριος', 'Φεβρουάριος', 'Μάρτιος', 'Απρίλιος', 'Μάιος', 'Ιούνιος', 'Ιούλιος', 'Αύγουστος', 'Σεπτέμβριος', 'Οκτώβριος', 'Νοέμβριος', 'Δεκέμβριος')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[νΝyY].*'),
('noexpr', '^[οΟnN].*')]),
('en_AG',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%l:%M:%S %P %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('en_AU',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('en_BW',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('en_CA',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYoO].*'),
('noexpr', '^[nN].*')]),
('en_DK',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%Y-%m-%dT%T %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[1JjsSyYoO].*'),
('noexpr', '^[0nN].*')]),
('en_GB',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%l:%M:%S %P %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('en_HK',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%A, %B %d, %Y %p%I:%M:%S %Z'),
('d_fmt', '%A, %B %d, %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%p%I:%M:%S %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('en_IE',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('en_IN',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%A %d %B %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %B %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('en_NG',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('en_NZ',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('en_PH',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%A, %d %B, %Y %I:%M:%S %p %Z'),
('d_fmt', '%A, %d %B, %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('en_SG',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %r'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('en_US',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%m/%d/%Y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('en_ZA',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('en_ZM',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%l:%M:%S %P %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYeE].*'),
('noexpr', '^[nNaA].*')]),
('en_ZW',
[('abday', ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat')),
('day', ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('es_AR',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_BO',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_CL',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_CO',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_CR',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_CU',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_DO',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_EC',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_ES',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_GT',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_HN',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_MX',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', '\u2009'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_NI',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_PA',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_PE',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_PR',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_PY',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_SV',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_US',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_UY',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('es_VE',
[('abday', ('dom', 'lun', 'mar', 'mié', 'jue', 'vie', 'sáb')),
('day', ('domingo', 'lunes', 'martes', 'miércoles', 'jueves', 'viernes', 'sábado')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('et_EE',
[('abday', ('P', 'E', 'T', 'K', 'N', 'R', 'L')),
('day', ('pühapäev', 'esmaspäev', 'teisipäev', 'kolmapäev', 'neljapäev', 'reede', 'laupäev')),
('abmon', ('jaan ', 'veebr', 'märts', 'apr ', 'mai ', 'juuni', 'juuli', 'aug ', 'sept ', 'okt ', 'nov ', 'dets ')),
('mon', ('jaanuar', 'veebruar', 'märts', 'aprill', 'mai', 'juuni', 'juuli', 'august', 'september', 'oktoober', 'november', 'detsember')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[JjYy].*'),
('noexpr', '^[EeNn].*')]),
('eu_ES',
[('abday', ('ig.', 'al.', 'ar.', 'az.', 'og.', 'or.', 'lr.')),
('day', ('igandea', 'astelehena', 'asteartea', 'asteazkena', 'osteguna', 'ostirala', 'larunbata')),
('abmon', ('urt', 'ots', 'mar', 'api', 'mai', 'eka', 'uzt', 'abu', 'ira', 'urr', 'aza', 'abe')),
('mon', ('urtarrila', 'otsaila', 'martxoa', 'apirila', 'maiatza', 'ekaina', 'uztaila', 'abuztua', 'iraila', 'urria', 'azaroa', 'abendua')),
('d_t_fmt', '%y-%m-%d %T %Z'),
('d_fmt', '%a, %Y.eko %bren %da'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[bBsSyY].*'),
('noexpr', '^[eEnN].*')]),
('fa_IR',
[('abday', ('یکشنبه', 'دوشنبه', 'سه\u200cشنبه', 'چهارشنبه', 'پنجشنبه', 'جمعه', 'شنبه')),
('day', ('یکشنبه', 'دوشنبه', 'سه\u200cشنبه', 'چهارشنبه', 'پنجشنبه', 'جمعه', 'شنبه')),
('abmon', ('ژانویه', 'فوریه', 'مارس', 'آوریل', 'مه', 'ژوئن', 'ژوئیه', 'اوت', 'سپتامبر', 'اكتبر', 'نوامبر', 'دسامبر')),
('mon', ('ژانویه', 'فوریه', 'مارس', 'آوریل', 'مه', 'ژوئن', 'ژوئیه', 'اوت', 'سپتامبر', 'اكتبر', 'نوامبر', 'دسامبر')),
('d_t_fmt', '\u202b%A %Oe %B %Oy، %OH:%OM:%OS\u202c'),
('d_fmt', '%Oy/%Om/%Od'),
('t_fmt', '%OH:%OM:%OS'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYآبHf].*'),
('noexpr', '^[nNخنok].*')]),
('ff_SN',
[('abday', ('dew', 'aaɓ', 'maw', 'nje', 'naa', 'mwd', 'hbi')),
('day', ('dewo', 'aaɓnde', 'mawbaare', 'njeslaare', 'naasaande', 'mawnde', 'hoore-biir')),
('abmon', ('sii', 'col', 'mbo', 'see', 'duu', 'kor', 'mor', 'juk', 'slt', 'yar', 'jol', 'bow')),
('mon', ('siilo', 'colte', 'mbooy', 'seeɗto', 'duujal', 'korse', 'morso', 'juko', 'siilto', 'yarkomaa', 'jolal', 'bowte')),
('d_t_fmt', '%a %d %b %Y %R %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%R'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[yYeE].*'),
('noexpr', '^[nNaA].*')]),
('fi_FI',
[('abday', ('su', 'ma', 'ti', 'ke', 'to', 'pe', 'la')),
('day', ('sunnuntai', 'maanantai', 'tiistai', 'keskiviikko', 'torstai', 'perjantai', 'lauantai')),
('abmon', ('tammi', 'helmi', 'maalis', 'huhti', 'touko', 'kesä', 'heinä', 'elo', 'syys', 'loka', 'marras', 'joulu')),
('mon', ('tammikuu', 'helmikuu', 'maaliskuu', 'huhtikuu', 'toukokuu', 'kesäkuu', 'heinäkuu', 'elokuu', 'syyskuu', 'lokakuu', 'marraskuu', 'joulukuu')),
('d_t_fmt', '%a %e. %Bta %Y %H.%M.%S'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%H.%M.%S'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[KkYy].*'),
('noexpr', '^[EeNn].*')]),
('fil_PH',
[('abday', ('Lin', 'Lun', 'Mar', 'Miy', 'Huw', 'Biy', 'Sab')),
('day', ('Linggo', 'Lunes', 'Martes', 'Miyerkoles', 'Huwebes', 'Biyernes', 'Sabado')),
('abmon', ('Ene', 'Peb', 'Mar', 'Abr', 'May', 'Hun', 'Hul', 'Ago', 'Sep', 'Okt', 'Nob', 'Dis')),
('mon', ('Enero', 'Pebrero', 'Marso', 'Abril', 'Mayo', 'Hunyo', 'Hulyo', 'Agosto', 'Septiyembre', 'Oktubre', 'Nobiyembre', 'Disyembre')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%m/%d/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '[oOyY].*'),
('noexpr', '[hHnN].*')]),
('fo_FO',
[('abday', ('sun', 'mán', 'týs', 'mik', 'hós', 'frí', 'ley')),
('day', ('sunnudagur', 'mánadagur', 'týsdagur', 'mikudagur', 'hósdagur', 'fríggjadagur', 'leygardagur')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'mai', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'des')),
('mon', ('januar', 'februar', 'mars', 'apríl', 'mai', 'juni', 'juli', 'august', 'september', 'oktober', 'november', 'desember')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m-%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[JjYy].*'),
('noexpr', '^[Nn].*')]),
('fr_BE',
[('abday', ('dim', 'lun', 'mar', 'mer', 'jeu', 'ven', 'sam')),
('day', ('dimanche', 'lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi')),
('abmon', ('jan', 'fév', 'mar', 'avr', 'mai', 'jun', 'jui', 'aoû', 'sep', 'oct', 'nov', 'déc')),
('mon', ('janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[oOjJyY1].*'),
('noexpr', '^[nN0].*')]),
('fr_CA',
[('abday', ('dim', 'lun', 'mar', 'mer', 'jeu', 'ven', 'sam')),
('day', ('dimanche', 'lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi')),
('abmon', ('jan', 'fév', 'mar', 'avr', 'mai', 'jun', 'jui', 'aoû', 'sep', 'oct', 'nov', 'déc')),
('mon', ('janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[oOyY].*'),
('noexpr', '^[nN].*')]),
('fr_CH',
[('abday', ('dim', 'lun', 'mar', 'mer', 'jeu', 'ven', 'sam')),
('day', ('dimanche', 'lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi')),
('abmon', ('jan', 'fév', 'mar', 'avr', 'mai', 'jun', 'jui', 'aoû', 'sep', 'oct', 'nov', 'déc')),
('mon', ('janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d. %m. %y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', "'"),
('yesexpr', '^[OojJsSyY].*'),
('noexpr', '^[nN].*')]),
('fr_FR',
[('abday', ('dim.', 'lun.', 'mar.', 'mer.', 'jeu.', 'ven.', 'sam.')),
('day', ('dimanche', 'lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi')),
('abmon', ('janv.', 'févr.', 'mars', 'avril', 'mai', 'juin', 'juil.', 'août', 'sept.', 'oct.', 'nov.', 'déc.')),
('mon', ('janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[oOyY].*'),
('noexpr', '^[nN].*')]),
('fr_LU',
[('abday', ('dim', 'lun', 'mar', 'mer', 'jeu', 'ven', 'sam')),
('day', ('dimanche', 'lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi')),
('abmon', ('jan', 'fév', 'mar', 'avr', 'mai', 'jun', 'jui', 'aoû', 'sep', 'oct', 'nov', 'déc')),
('mon', ('janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet', 'août', 'septembre', 'octobre', 'novembre', 'décembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[oOyY].*'),
('noexpr', '^[nN].*')]),
('fur_IT',
[('abday', ('Dom', 'Lun', 'Mar', 'Mia', 'Joi', 'Vin', 'Sab')),
('day', ('Domenie', 'Lunis', 'Martars', 'Miarcus', 'Joibe', 'Vinars', 'Sabide')),
('abmon', ('Zen', 'Fev', 'Mar', 'Avr', 'Mai', 'Jug', 'Lui', 'Avo', 'Set', 'Otu', 'Nov', 'Dec')),
('mon', ('Zenâr', 'Fevrâr', 'Març', 'Avrîl', 'Mai', 'Jugn', 'Lui', 'Avost', 'Setembar', 'Otubar', 'Novembar', 'Decembar')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d. %m. %y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSjJoOyY].*'),
('noexpr', '^[nN].*')]),
('fy_DE',
[('abday', ('Sdg', 'Mdg', 'Dsg', 'Mwk', 'Ddg', 'Fdg', 'Swd')),
('day', ('Sinndag', 'Mondag', 'Dingsdag', 'Meddwäakj', 'Donnadag', 'Friedag', 'Sinnowend')),
('abmon', ('Jan', 'Feb', 'Moz', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Now', 'Dez')),
('mon', ('Jaunuwoa', 'Februwoa', 'Moaz', 'Aprell', 'Mai', 'Juni', 'Juli', 'August', 'Septamba', 'Oktoba', 'Nowamba', 'Dezamba')),
('d_t_fmt', '%a %d. %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('fy_NL',
[('abday', ('Sn', 'Mo', 'Ti', 'Wo', 'To', 'Fr', 'Sn')),
('day', ('Snein', 'Moandei', 'Tiisdei', 'Woansdei', 'Tongersdei', 'Freed', 'Sneon')),
('abmon', ('Jan', 'Feb', 'Maa', 'Apr', 'Maa', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Des')),
('mon', ('Janaris', 'Febrewaris', 'Maart', 'April', 'Maaie', 'Juny', 'July', 'Augustus', 'Septimber', 'Oktober', 'Novimber', 'Desimber')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('ga_IE',
[('abday', ('Domh', 'Luan', 'Máirt', 'Céad', 'Déar', 'Aoine', 'Sath')),
('day', ('Dé Domhnaigh', 'Dé Luain', 'Dé Máirt', 'Dé Céadaoin', 'Déardaoin', 'Dé hAoine', 'Dé Sathairn')),
('abmon', ('Ean', 'Feabh', 'Márta', 'Aib', 'Beal', 'Meith', 'Iúil', 'Lún', 'MFómh', 'DFómh', 'Samh', 'Noll')),
('mon', ('Eanáir', 'Feabhra', 'Márta', 'Aibreán', 'Bealtaine', 'Meitheamh', 'Iúil', 'Lúnasa', 'Meán Fómhair', 'Deireadh Fómhair', 'Samhain', 'Nollaig')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[tTyY].*'),
('noexpr', '^[nN].*')]),
('gd_GB',
[('abday', ('DiD', 'DiL', 'DiM', 'DiC', 'Dia', 'Dih', 'DiS')),
('day', ('DiDòmhnaich', 'DiLuain', 'DiMàirt', 'DiCiadain', 'DiarDaoin', 'DihAoine', 'DiSathairne')),
('abmon', ('Faoi', 'Gearr', 'Màrt', 'Gibl', 'Mhàrt', 'Ògmh', 'Iuch', 'Lùna', 'Sult', 'Dàmh', 'Samh', 'Dùbh')),
('mon',
('Am Faoilleach',
'An Gearran',
'Am Màrt',
'An Giblean',
'An Cèitean',
'An t-Ògmhios',
'An t-Iuchar',
'An Lùnastal',
'An t-Sultain',
'An Dàmhair',
'An t-Samhain',
'An Dùbhlachd')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[tTyY].*'),
('noexpr', '^[cCnN].*')]),
('gez_ER',
[('abday', ('እኁድ', 'ሰኑይ', 'ሠሉስ', 'ራብዕ', 'ሐሙስ', 'ዓርበ', 'ቀዳሚ')),
('day', ('እኁድ', 'ሰኑይ', 'ሠሉስ', 'ራብዕ', 'ሐሙስ', 'ዓርበ', 'ቀዳሚት')),
('abmon', ('ጠሐረ', 'ከተተ', 'መገበ', 'አኀዘ', 'ግንባ', 'ሠንየ', 'ሐመለ', 'ነሐሰ', 'ከረመ', 'ጠቀመ', 'ኀደረ', 'ኀሠሠ')),
('mon', ('ጠሐረ', 'ከተተ', 'መገበ', 'አኀዘ', 'ግንባት', 'ሠንየ', 'ሐመለ', 'ነሐሰ', 'ከረመ', 'ጠቀመ', 'ኀደረ', 'ኀሠሠ')),
('d_t_fmt', '%A፥%B፡%e፡መዓልት፡%Y፡%r፡%Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X፡%p'),
('radixchar', '.'),
('thousep', ''),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('gez_ET',
[('abday', ('እኁድ', 'ሰኑይ', 'ሠሉስ', 'ራብዕ', 'ሐሙስ', 'ዓርበ', 'ቀዳሚ')),
('day', ('እኁድ', 'ሰኑይ', 'ሠሉስ', 'ራብዕ', 'ሐሙስ', 'ዓርበ', 'ቀዳሚት')),
('abmon', ('ጃንዩ', 'ፌብሩ', 'ማርች', 'ኤፕረ', 'ሜይ ', 'ጁን ', 'ጁላይ', 'ኦገስ', 'ሴፕቴ', 'ኦክተ', 'ኖቬም', 'ዲሴም')),
('mon', ('ጃንዩወሪ', 'ፌብሩወሪ', 'ማርች', 'ኤፕረል', 'ሜይ', 'ጁን', 'ጁላይ', 'ኦገስት', 'ሴፕቴምበር', 'ኦክተውበር', 'ኖቬምበር', 'ዲሴምበር')),
('d_t_fmt', '%A፥%B፡%e፡መዓልት፡%Y፡%r፡%Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X፡%p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('gl_ES',
[('abday', ('Dom', 'Lun', 'Mar', 'Mér', 'Xov', 'Ven', 'Sáb')),
('day', ('Domingo', 'Luns', 'Martes', 'Mércores', 'Xoves', 'Venres', 'Sábado')),
('abmon', ('Xan', 'Feb', 'Mar', 'Abr', 'Mai', 'Xuñ', 'Xul', 'Ago', 'Set', 'Out', 'Nov', 'Dec')),
('mon', ('Xaneiro', 'Febreiro', 'Marzo', 'Abril', 'Maio', 'Xuño', 'Xullo', 'Agosto', 'Setembro', 'Outubro', 'Novembro', 'Decembro')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('gu_IN',
[('abday', ('રવિ', 'સોમ', 'મંગળ', 'બુધ', 'ગુરુ', 'શુક્ર', 'શનિ')),
('day', ('રવિવાર', 'સોમવાર', 'મંગળવાર', 'બુધવાર', 'ગુરુવાર', 'શુક્રવાર', 'શનિવાર')),
('abmon', ('જાન', 'ફેબ', 'માર', 'એપ્ર', 'મે', 'જુન', 'જુલ', 'ઓગ', 'સપ્ટ', 'ઓક્ટ', 'નોવ', 'ડિસ')),
('mon', ('જાન્યુઆરી', 'ફેબ્રુઆરી', 'માર્ચ', 'એપ્રિલ', 'મે', 'જુન', 'જુલાઇ', 'ઓગસ્ટ', 'સપ્ટેમ્બર', 'ઓક્ટોબર', 'નવેમ્બર', 'ડિસેમ્બર')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYહ]'),
('noexpr', '^[nNન]')]),
('gv_GB',
[('abday', ('Jed', 'Jel', 'Jem', 'Jerc', 'Jerd', 'Jeh', 'Jes')),
('day', ('Jedoonee', 'Jelhein', 'Jemayrt', 'Jercean', 'Jerdein', 'Jeheiney', 'Jesarn')),
('abmon', ('J-guer', 'T-arree', 'Mayrnt', 'Avrril', 'Boaldyn', 'M-souree', 'J-souree', 'Luanistyn', 'M-fouyir', 'J-fouyir', 'M.Houney', 'M.Nollick')),
('mon',
('Jerrey-geuree',
'Toshiaght-arree',
'Mayrnt',
'Averil',
'Boaldyn',
'Mean-souree',
'Jerrey-souree',
'Luanistyn',
'Mean-fouyir',
'Jerrey-fouyir',
'Mee Houney',
'Mee ny Nollick')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('ha_NG',
[('abday', ('Lah', 'Lit', 'Tal', 'Lar', 'Alh', 'Jum', 'Asa')),
('day', ('Lahadi', 'Litini', 'Talata', 'Laraba', 'Alhamis', "Juma'a", 'Asabar')),
('abmon', ('Jan', 'Fab', 'Mar', 'Afr', 'May', 'Yun', 'Yul', 'Agu', 'Sat', 'Okt', 'Nuw', 'Dis')),
('mon', ('Janairu', 'Fabrairu', 'Maris', 'Afrilu', 'Mayu', 'Yuni', 'Yuli', 'Agusta', 'Satumba', 'Oktoba', 'Nuwamba', 'Disamba')),
('d_t_fmt', 'ranar %A, %d ga %B cikin %r %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[TtiIYy].*'),
('noexpr', '^[bBaAnN].*')]),
('hak_TW',
[('abday', ('日', '一', '二', '三', '四', '五', '六')),
('day', ('禮拜日', '禮拜一', '禮拜二', '禮拜三', '禮拜四', '禮拜五', '禮拜六')),
('abmon', (' 1月', ' 2月', ' 3月', ' 4月', ' 5月', ' 6月', ' 7月', ' 8月', ' 9月', '10月', '11月', '12月')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '%Y年%m月%d日 (%A) %H點%M分%S秒'),
('d_fmt', '%Y年%m月%d日'),
('t_fmt', '%H點%M分%S秒'),
('t_fmt_ampm', '%p %I點%M分%S秒'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY係]'),
('noexpr', '^[nN毋]')]),
('he_IL',
[('abday', ("א'", "ב'", "ג'", "ד'", "ה'", "ו'", "ש'")),
('day', ('ראשון', 'שני', 'שלישי', 'רביעי', 'חמישי', 'שישי', 'שבת')),
('abmon', ('ינו', 'פבר', 'מרץ', 'אפר', 'מאי', 'יונ', 'יול', 'אוג', 'ספט', 'אוק', 'נוב', 'דצמ')),
('mon', ('ינואר', 'פברואר', 'מרץ', 'אפריל', 'מאי', 'יוני', 'יולי', 'אוגוסט', 'ספטמבר', 'אוקטובר', 'נובמבר', 'דצמבר')),
('d_t_fmt', '%Z %H:%M:%S %Y %b %d %a'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S %P'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[Yyכ].*'),
('noexpr', '^[Nnל].*')]),
('hi_IN',
[('abday', ('रवि ', 'सोम ', 'मंगल ', 'बुध ', 'गुरु ', 'शुक्र ', 'शनि ')),
('day', ('रविवार ', 'सोमवार ', 'मंगलवार ', 'बुधवार ', 'गुरुवार ', 'शुक्रवार ', 'शनिवार ')),
('abmon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('mon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('hne_IN',
[('abday', ('इत ', 'सोम ', 'मंग ', 'बुध ', 'बिर ', 'सुक', 'सनि')),
('day', ('इतवार ', 'सोमवार ', 'मंगलवार ', 'बुधवार ', 'बिरसपत ', 'सुकरवार ', 'सनिवार ')),
('abmon', ('जन', 'फर', 'मार्च', 'अप', 'मई', 'जून', 'जुला', 'अग', 'सित', 'अकटू', 'नव', 'दिस')),
('mon', ('जनवरी', 'फरवरी', 'मार्च', 'अपरेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितमबर', 'अकटूबर', 'नवमबर', 'दिसमबर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[हवyY]'),
('noexpr', '^[नइnN]')]),
('hr_HR',
[('abday', ('Ned', 'Pon', 'Uto', 'Sri', 'Čet', 'Pet', 'Sub')),
('day', ('Nedjelja', 'Ponedjeljak', 'Utorak', 'Srijeda', 'Četvrtak', 'Petak', 'Subota')),
('abmon', ('Sij', 'Vel', 'Ožu', 'Tra', 'Svi', 'Lip', 'Srp', 'Kol', 'Ruj', 'Lis', 'Stu', 'Pro')),
('mon', ('Siječanj', 'Veljača', 'Ožujak', 'Travanj', 'Svibanj', 'Lipanj', 'Srpanj', 'Kolovoz', 'Rujan', 'Listopad', 'Studeni', 'Prosinac')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[dDyY].*'),
('noexpr', '^[nN].*')]),
('hsb_DE',
[('abday', ('Nj', 'Pó', 'Wu', 'Sr', 'Št', 'Pj', 'So')),
('day', ('Njedźela', 'Póndźela', 'Wutora', 'Srjeda', 'Štvórtk', 'Pjatk', 'Sobota')),
('abmon', ('Jan', 'Feb', 'Měr', 'Apr', 'Mej', 'Jun', 'Jul', 'Awg', 'Sep', 'Okt', 'Now', 'Dec')),
('mon', ('Januar', 'Februar', 'Měrc', 'Apryl', 'Meja', 'Junij', 'Julij', 'Awgust', 'September', 'Oktober', 'Nowember', 'December')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[hHyY].*'),
('noexpr', '^[nN].*')]),
('ht_HT',
[('abday', ('dim', 'len ', 'mad', 'mèk', 'jed', 'van', 'sam')),
('day', ('dimanch', 'lendi ', 'madi', 'mèkredi', 'jedi', 'vandredi', 'samdi')),
('abmon', ('jan', 'fev', 'mas', 'avr', 'me', 'jen', 'jiy', 'out', 'sep', 'okt', 'nov', 'des')),
('mon', ('janvye', 'fevriye', 'mas', 'avril', 'me', 'jen', 'jiyè', 'out', 'septanm', 'oktòb', 'novanm', 'desanm')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[wWoOyY].*'),
('noexpr', '^[nN].*')]),
('hu_HU',
[('abday', ('v', 'h', 'k', 'sze', 'cs', 'p', 'szo')),
('day', ('vasárnap', 'hétfő', 'kedd', 'szerda', 'csütörtök', 'péntek', 'szombat')),
('abmon', ('jan', 'febr', 'márc', 'ápr', 'máj', 'jún', 'júl', 'aug', 'szept', 'okt', 'nov', 'dec')),
('mon', ('január', 'február', 'március', 'április', 'május', 'június', 'július', 'augusztus', 'szeptember', 'október', 'november', 'december')),
('d_t_fmt', '%Y. %b. %e., %A, %H.%M.%S %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%H.%M.%S'),
('t_fmt_ampm', '%H.%M.%S'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[IiYy].*'),
('noexpr', '^[nN].*')]),
('hy_AM',
[('abday', ('Կրկ', 'Երկ', 'Երք', 'Չրք', 'Հնգ', 'Ուր', 'Շբթ')),
('day', ('Կիրակի', 'Երկուշաբթի', 'Երեքշաբթի', 'Չորեքշաբթի', 'Հինգշաբթի', 'Ուրբաթ', 'Շաբաթ')),
('abmon', ('Հնվ', 'Փտր', 'Մար', 'Ապր', 'Մայ', 'Հնս', 'Հլս', 'Օգս', 'Սեպ', 'Հոկ', 'Նմբ', 'Դեկ')),
('mon', ('Հունվարի', 'Փետրվարի', 'Մարտի', 'Ապրիլի', 'Մայիսի', 'Հունիսի', 'Հուլիսի', 'Օգոստոսի', 'Սեպտեմբերի', 'Հոկտեմբերի', 'Նոյեմբերի', 'Դեկտեմբերի')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%m/%d/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYsSաԱ]'),
('noexpr', '^[nNոՈ]')]),
('ia_FR',
[('abday', ('dom', 'lun', 'mar', 'mer', 'jov', 'ven', 'sab')),
('day', ('dominica', 'lunedi', 'martedi', 'mercuridi', 'jovedi', 'venerdi', 'sabbato')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'mai', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec')),
('mon', ('januario', 'februario', 'martio', 'april', 'maio', 'junio', 'julio', 'augusto', 'septembre', 'octobre', 'novembre', 'decembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('id_ID',
[('abday', ('Min', 'Sen', 'Sel', 'Rab', 'Kam', 'Jum', 'Sab')),
('day', ('Minggu', 'Senin', 'Selasa', 'Rabu', 'Kamis', 'Jumat', 'Sabtu')),
('abmon', ('Jan', 'Peb', 'Mar', 'Apr', 'Mei', 'Jun', 'Jul', 'Agu', 'Sep', 'Okt', 'Nov', 'Des')),
('mon', ('Januari', 'Pebruari', 'Maret', 'April', 'Mei', 'Juni', 'Juli', 'Agustus', 'September', 'Oktober', 'November', 'Desember')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[yY].*'),
('noexpr', '^[tTnN].*')]),
('ig_NG',
[('abday', ('sọn', 'mọn', 'tuz', 'wen', 'tọs', 'fra', 'sat')),
('day', ('sọnde', 'mọnde', 'tuzde', 'wenzde', 'tọsde', 'fraịde', 'satọde')),
('abmon', ('jen', 'feb', 'maa', 'epr', 'mee', 'juu', 'jul', 'ọgọ', 'sep', 'ọkt', 'nọv', 'dis')),
('mon', ('jenụwarị', 'febụrụwarị', 'maachị', 'epreel', 'mee', 'juun', 'julaị', 'ọgọstụ', 'septemba', 'ọktoba', 'nọvemba', 'disemba')),
('d_t_fmt', '%A, %d %B %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[EeIiYy].*'),
('noexpr', '^[0MmNn].*')]),
('ik_CA',
[('abday', ('Min', 'Sav', 'Ila', 'Qit', 'Sis', 'Tal', 'Maq')),
('day', ('Minġuiqsioiq', 'Savałłiq', 'Ilaqtchiioiq', 'Qitchiioiq', 'Sisamiioiq', 'Tallimmiioiq', 'Maqinġuoiq')),
('abmon', ('Sñt', 'Sñs', 'Pan', 'Qil', 'Sup', 'Iġñ', 'Itc', 'Tiñ', 'Ami', 'Sik', 'Nip', 'Siq')),
('mon',
('Siqiññaatchiaq',
'Siqiññaasrugruk',
'Paniqsiqsiivik',
'Qilġich Tatqiat',
'Suppivik',
'Iġñivik',
'Itchavik',
'Tiññivik',
'Amiġaiqsivik',
'Sikkuvik',
'Nippivik',
'Siqiñġiḷaq')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '[yYiIaA].*'),
('noexpr', '[nNqQ].*')]),
('is_IS',
[('abday', ('sun', 'mán', 'þri', 'mið', 'fim', 'fös', 'lau')),
('day', ('sunnudagur', 'mánudagur', 'þriðjudagur', 'miðvikudagur', 'fimmtudagur', 'föstudagur', 'laugardagur')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'maí', 'jún', 'júl', 'ágú', 'sep', 'okt', 'nóv', 'des')),
('mon', ('janúar', 'febrúar', 'mars', 'apríl', 'maí', 'júní', 'júlí', 'ágúst', 'september', 'október', 'nóvember', 'desember')),
('d_t_fmt', '%a %e.%b %Y, %T %Z'),
('d_fmt', '%a %e.%b %Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('it_CH',
[('abday', ('dom', 'lun', 'mar', 'mer', 'gio', 'ven', 'sab')),
('day', ('domenica', 'lunedì', 'martedì', 'mercoledì', 'giovedì', 'venerdì', 'sabato')),
('abmon', ('gen', 'feb', 'mar', 'apr', 'mag', 'giu', 'lug', 'ago', 'set', 'ott', 'nov', 'dic')),
('mon', ('gennaio', 'febbraio', 'marzo', 'aprile', 'maggio', 'giugno', 'luglio', 'agosto', 'settembre', 'ottobre', 'novembre', 'dicembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d. %m. %y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', "'"),
('yesexpr', '^[sSjJoOyY].*'),
('noexpr', '^[nN].*')]),
('it_IT',
[('abday', ('dom', 'lun', 'mar', 'mer', 'gio', 'ven', 'sab')),
('day', ('domenica', 'lunedì', 'martedì', 'mercoledì', 'giovedì', 'venerdì', 'sabato')),
('abmon', ('gen', 'feb', 'mar', 'apr', 'mag', 'giu', 'lug', 'ago', 'set', 'ott', 'nov', 'dic')),
('mon', ('gennaio', 'febbraio', 'marzo', 'aprile', 'maggio', 'giugno', 'luglio', 'agosto', 'settembre', 'ottobre', 'novembre', 'dicembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('iu_CA',
[('abday', ('ᓈ', 'ᓇ', 'ᓕ', 'ᐱ', 'ᕿ', 'ᐅ', 'ᓯ')),
('day', ('ᓈᑦᑎᖑᔭᕐᕕᒃ', 'ᓇᒡᒐᔾᔭᐅ', 'ᓇᒡᒐᔾᔭᐅᓕᖅᑭᑦ', 'ᐱᖓᓲᓕᖅᓯᐅᑦ', 'ᕿᑎᖅᑰᑦ', 'ᐅᓪᓗᕈᓘᑐᐃᓇᖅ', 'ᓯᕙᑖᕕᒃ')),
('abmon', ('ᔮᓄ', 'ᕕᕗ', 'ᒪᔅ', 'ᐃᐳ', 'ᒪᐃ', 'ᔪᓂ', 'ᔪᓚ', 'ᐊᒋ', 'ᓯᑎ', 'ᐊᑦ', 'ᓄᕕ', 'ᑎᓯ')),
('mon', ('ᔮᓄᐊᓕ', 'ᕕᕗᐊᓕ', 'ᒪᔅᓯ', 'ᐃᐳᓗ', 'ᒪᐃ', 'ᔪᓂ', 'ᔪᓚᐃ', 'ᐊᒋᓯ', 'ᓯᑎᕙ', 'ᐊᑦᑐᕙ', 'ᓄᕕᕙ', 'ᑎᓯᕝᕙ')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%m/%d/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '[yYsS].*'),
('noexpr', '[nN].*')]),
('iw_IL',
[('abday', ("א'", "ב'", "ג'", "ד'", "ה'", "ו'", "ש'")),
('day', ('ראשון', 'שני', 'שלישי', 'רביעי', 'חמישי', 'שישי', 'שבת')),
('abmon', ('ינו', 'פבר', 'מרץ', 'אפר', 'מאי', 'יונ', 'יול', 'אוג', 'ספט', 'אוק', 'נוב', 'דצמ')),
('mon', ('ינואר', 'פברואר', 'מרץ', 'אפריל', 'מאי', 'יוני', 'יולי', 'אוגוסט', 'ספטמבר', 'אוקטובר', 'נובמבר', 'דצמבר')),
('d_t_fmt', '%Z %H:%M:%S %Y %b %d %a'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S %P'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[Yyכ].*'),
('noexpr', '^[Nnל].*')]),
('ja_JP',
[('abday', ('日', '月', '火', '水', '木', '金', '土')),
('day', ('日曜日', '月曜日', '火曜日', '水曜日', '木曜日', '金曜日', '土曜日')),
('abmon', (' 1月', ' 2月', ' 3月', ' 4月', ' 5月', ' 6月', ' 7月', ' 8月', ' 9月', '10月', '11月', '12月')),
('mon', ('1月', '2月', '3月', '4月', '5月', '6月', '7月', '8月', '9月', '10月', '11月', '12月')),
('d_t_fmt', '%Y年%m月%d日 %H時%M分%S秒'),
('d_fmt', '%Y年%m月%d日'),
('t_fmt', '%H時%M分%S秒'),
('t_fmt_ampm', '%p%I時%M分%S秒'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^([yYyY]|はい|ハイ)'),
('noexpr', '^([nNnN]|いいえ|イイエ)')]),
('ka_GE',
[('abday', ('კვი', 'ორშ', 'სამ', 'ოთხ', 'ხუთ', 'პარ', 'შაბ')),
('day', ('კვირა', 'ორშაბათი', 'სამშაბათი', 'ოთხშაბათი', 'ხუთშაბათი', 'პარასკევი', 'შაბათი')),
('abmon', ('იან', 'თებ', 'მარ', 'აპრ', 'მაი', 'ივნ', 'ივლ', 'აგვ', 'სექ', 'ოქტ', 'ნოე', 'დეკ')),
('mon', ('იანვარი', 'თებერვალი', 'მარტი', 'აპრილი', 'მაისი', 'ივნისი', 'ივლისი', 'აგვისტო', 'სექტემბერი', 'ოქტომბერი', 'ნოემბერი', 'დეკემბერი')),
('d_t_fmt', '%Y წლის %d %B, %T %Z'),
('d_fmt', '%m/%d/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[1yYkKxXხ].*'),
('noexpr', '^[0nNaAა].*')]),
('kk_KZ',
[('abday', ('Жк', 'Дс', 'Сс', 'Ср', 'Бс', 'Жм', 'Сн')),
('day', ('Жексенбі', 'Дүйсенбі', 'Сейсенбі', 'Сәрсенбі', 'Бейсенбі', 'Жұма', 'Сенбі')),
('abmon', ('Қаң', 'Ақп', 'Нау', 'Сәу', 'Мам', 'Мау', 'Шіл', 'Там', 'Қыр', 'Қаз', 'Қар', 'Жел')),
('mon', ('Қаңтар', 'Ақпан', 'Наурыз', 'Сәуір', 'Мамыр', 'Маусым', 'Шілде', 'Тамыз', 'Қыркүйек', 'Қазан', 'Қараша', 'Желтоқсан')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[ИиYy].*'),
('noexpr', '^[ЖжNn].*')]),
('kl_GL',
[('abday', ('sab', 'ata', 'mar', 'pin', 'sis', 'tal', 'arf')),
('day', ('sabaat', 'ataasinngorneq', 'marlunngorneq', 'pingasunngorneq', 'sisamanngorneq', 'tallimanngorneq', 'arfininngorneq')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'maj', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('januari', 'februari', 'martsi', 'aprili', 'maji', 'juni', 'juli', 'augustusi', 'septemberi', 'oktoberi', 'novemberi', 'decemberi')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d %b %Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[JjYyAa].*'),
('noexpr', '^[Nn].*')]),
('km_KH',
[('abday', ('អា', 'ច', 'អ', 'ពុ', 'ព្រ', 'សុ', 'ស')),
('day', ('ថ្ងៃ\u200bអាទិត្យ', 'ថ្ងៃ\u200bច័ន្ទ', 'ថ្ងៃ\u200bអង្គារ', 'ថ្ងៃ\u200bពុធ', 'ថ្ងៃ\u200bព្រហស្បតិ៍', 'ថ្ងៃ\u200bសុក្រ', 'ថ្ងៃ\u200bសៅរ៍')),
('abmon', ('១', '២', '៣', '៤', '៥', '៦', '៧', '៨', '៩', '១០', '១១', '១២')),
('mon', ('មករា', 'កុម្ភៈ', 'មិនា', 'មេសា', 'ឧសភា', 'មិថុនា', 'កក្កដា', 'សីហា', 'កញ្ញា', 'តុលា', 'វិច្ឆិកា', 'ធ្នូ')),
('d_t_fmt', '%A ថ្ងៃ %e ខែ %B ឆ្នាំ %Y, %H ម៉ោង m នាទី %S វិនាទី\u200b'),
('d_fmt', '%e %B %Y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]([eE][sS])?'),
('noexpr', '^[nN][oO]?')]),
('kn_IN',
[('abday', ('ರ', 'ಸೋ', 'ಮಂ', 'ಬು', 'ಗು', 'ಶು', 'ಶ')),
('day', ('ರವಿವಾರ', 'ಸೋಮವಾರ', 'ಮಂಗಳವಾರ', 'ಬುಧವಾರ', 'ಗುರುವಾರ', 'ಶುಕ್ರವಾರ', 'ಶನಿವಾರ')),
('abmon', ('ಜ', 'ಫೆ', 'ಮಾ', 'ಏ', 'ಮೇ', 'ಜೂ', 'ಜು', 'ಆ', 'ಸೆ', 'ಅ', 'ನ', 'ದ')),
('mon', ('ಜನವರಿ', 'ಫೆಬ್ರವರಿ', 'ಮಾರ್ಚ', 'ಏಪ್ರಿಲ್', 'ಮೇ', 'ಜೂನ್', 'ಜುಲಾಯಿ', 'ಆಗಸ್ತು', 'ಸೆಪ್ಟೆಂಬರ', 'ಅಕ್ತೂಬರ', 'ನವೆಂಬರ', 'ದಶಂಬರ')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('ko_KR',
[('abday', ('일', '월', '화', '수', '목', '금', '토')),
('day', ('일요일', '월요일', '화요일', '수요일', '목요일', '금요일', '토요일')),
('abmon', (' 1월', ' 2월', ' 3월', ' 4월', ' 5월', ' 6월', ' 7월', ' 8월', ' 9월', '10월', '11월', '12월')),
('mon', ('1월', '2월', '3월', '4월', '5월', '6월', '7월', '8월', '9월', '10월', '11월', '12월')),
('d_t_fmt', '%x (%a) %r'),
('d_fmt', '%Y년 %m월 %d일'),
('t_fmt', '%H시 %M분 %S초'),
('t_fmt_ampm', '%p %I시 %M분 %S초'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY예]'),
('noexpr', '^[nN아]')]),
('kok_IN',
[('abday', ('आयतार', 'सोमार', 'मंगळवार', 'बुधवार', 'बेरेसतार', 'शुकरार', 'शेनवार')),
('day', ('आयतार', 'सोमार', 'मंगळवार', 'बुधवार', 'बेरेसतार', 'शुकरार', 'शेनवार')),
('abmon', ('जेनवरी', 'फेब्ररी', 'मारच', 'एप्रील', 'में', 'जुन', 'जुलै', 'ओगस्ट', 'सेपटेंबर', 'ओकटोबर', 'नोवेंबर', 'दिसेंबर')),
('mon', ('जेनवरी', 'फेब्ररी', 'मारच', 'एप्रील', 'में', 'जुन', 'जुलै', 'ओगस्ट', 'सेपटेंबर', 'ओकटोबर', 'नोवेंबर', 'दिसेंबर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^(हय|[yY])'),
('noexpr', '^(न्ही|[nN])')]),
('ks_IN',
[('abday', ('آتهوار', 'ژءنتروار', 'بوءںوار', 'بودهوار', 'برىسوار', 'جمع', 'بٹوار')),
('day', ('آتهوار', 'ژءندروار', 'بوءںوار', 'بودهوار', 'برىسوار', 'جمع', 'بٹوار')),
('abmon', ('جنوری', 'فروری', 'مارچ', 'اپریل', 'مئ', 'جون', 'جُلئ', 'اگست', 'ستنبر', 'اکتوبر', 'نوںبر', 'دسنبر')),
('mon', ('جنوری', 'فروری', 'مارچ', 'اپریل', 'مئ', 'جون', 'جُلئ', 'اگست', 'ستنبر', 'اکتوبر', 'نوںبر', 'دسنبر')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[آyY].*'),
('noexpr', '^[نnN].*')]),
('ku_TR',
[('abday', ('yêk', 'dus', 'sês', 'çar', 'pên', 'înî', 'sep')),
('day', ('yêksêm', 'dusêm', 'sêsêm', 'çarsêm', 'pêncsêm', 'înî', 'sept')),
('abmon', ('Çil', 'Sib', 'Ada', 'Nîs', 'Gul', 'Hez', 'Tîr', 'Teb', 'Îlo', 'Cot', 'Mij', 'Kan')),
('mon', ('Çile', 'Sibat', 'Adar', 'Nîsan', 'Gulan', 'Hezîran', 'Tîrmeh', 'Tebax', 'Îlon', 'Cotmeh', 'Mijdar', 'Kanûn')),
('d_t_fmt', '%A %d %B %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[eEdDyY].*'),
('noexpr', '^[nN].*')]),
('kw_GB',
[('abday', ('Sul', 'Lun', 'Mth', 'Mhr', 'Yow', 'Gwe', 'Sad')),
('day', ('De Sul', 'De Lun', 'De Merth', 'De Merher', 'De Yow', 'De Gwener', 'De Sadorn')),
('abmon', ('Gen', 'Whe>', 'Mer', 'Ebr', 'Me', 'Evn', 'Gor', 'Est', 'Gwn', 'Hed', 'Du', 'Kev')),
('mon',
('Mys Genver',
'Mys Whevrel',
'Mys Merth',
'Mys Ebrel',
'Mys Me',
'Mys Evan',
'Mys Gortheren',
'Mye Est',
'Mys Gwyngala',
'Mys Hedra',
'Mys Du',
'Mys Kevardhu')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[eEyY].*'),
('noexpr', '^[nN].*')]),
('ky_KG',
[('abday', ('жк', 'дш', 'ше', 'ша', 'бш', 'жм', 'иш')),
('day', ('жекшемби', 'дүйшөмбү', 'шейшемби', 'шаршемби', 'бейшемби', 'жума', 'ишемби')),
('abmon', ('янв', 'фев', 'мар', 'апр', 'май', 'июн', 'июл', 'авг', 'сен', 'окт', 'ноя', 'дек')),
('mon', ('январь', 'февраль', 'март', 'апрель', 'май', 'июнь', 'июль', 'август', 'сентябрь', 'октябрь', 'ноябрь', 'декабрь')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[ОоYy].*'),
('noexpr', '^[ЖжNn].*')]),
('lb_LU',
[('abday', ('So', 'Mé', 'Dë', 'Më', 'Do', 'Fr', 'Sa')),
('day', ('Sonndeg', 'Méindeg', 'Dënschdeg', 'Mëttwoch', 'Donneschdeg', 'Freideg', 'Samschdeg')),
('abmon', ('Jan', 'Feb', 'Mäe', 'Abr', 'Mee', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez')),
('mon', ('Januar', 'Februar', 'Mäerz', 'Abrëll', 'Mee', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember')),
('d_t_fmt', '%a %d. %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('lg_UG',
[('abday', ('Sab', 'Bal', 'Lw2', 'Lw3', 'Lw4', 'Lw5', 'Lw6')),
('day', ('Sabiiti', 'Balaza', 'Lwakubiri', 'Lwakusatu', 'Lwakuna', 'Lwakutaano', 'Lwamukaaga')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apu', 'Maa', 'Jun', 'Jul', 'Agu', 'Seb', 'Oki', 'Nov', 'Des')),
('mon', ('Janwaliyo', 'Febwaliyo', 'Marisi', 'Apuli', 'Maayi', 'Juuni', 'Julaai', 'Agusito', 'Sebuttemba', 'Okitobba', 'Novemba', 'Desemba')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('li_BE',
[('abday', ('zón', 'mao', 'dae', 'goo', 'dón', 'vri', 'z\x91o')),
('day', ('zóndig', 'maondig', 'daensdig', 'goonsdig', 'dónderdig', 'vriedig', 'zaoterdig')),
('abmon', ('jan', 'fib', 'mie', 'epr', 'mei', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'des')),
('mon', ('jannewarie', 'fibberwarie', 'miert', 'eprèl', 'meij', 'junie', 'julie', 'augustus', 'september', 'oktober', 'november', 'desember')),
('d_t_fmt', '%a %d. %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('li_NL',
[('abday', ('zón', 'mao', 'dae', 'goo', 'dón', 'vri', 'z\x91o')),
('day', ('zóndig', 'maondig', 'daensdig', 'goonsdig', 'dónderdig', 'vriedig', 'zaoterdig')),
('abmon', ('jan', 'fib', 'mie', 'epr', 'mei', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'des')),
('mon', ('jannewarie', 'fibberwarie', 'miert', 'eprèl', 'meij', 'junie', 'julie', 'augustus', 'september', 'oktober', 'november', 'desember')),
('d_t_fmt', '%a %d. %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('lij_IT',
[('abday', ('dom', 'lûn', 'mar', 'mer', 'zêu', 'ven', 'sab')),
('day', ('domenega', 'lûnedì', 'martedì', 'mercUrdì', 'zêggia', 'venardì', 'sabbo')),
('abmon', ('zen', 'fev', 'mar', 'arv', 'maz', 'zûg', 'lûg', 'ago', 'set', 'ött', 'nov', 'dix')),
('mon', ('zenâ', 'fevrâ', 'marzo', 'avrî', 'mazzo', 'zûgno', 'lûggio', 'agosto', 'settembre', 'ottobre', 'novembre', 'dixembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSyY].*'),
('noexpr', '^[nN].*')]),
('lo_LA',
[('abday', ('ອາ.', 'ຈ.', 'ຄ.', 'ພ.', 'ພຫ.', 'ສ.', 'ສ.')),
('day', ('ອາທິດ', 'ຈັນ', 'ອັງຄານ', 'ພຸດ', 'ພະຫັດ', 'ສຸກ', 'ເສົາ')),
('abmon', ('ມ.ກ.', 'ກ.ພ.', 'ມ.ນ.', 'ມ.ສ.', 'ພ.ພ.', 'ມິ.ຖ.', 'ກ.ລ.', 'ສ.ຫ.', 'ກ.ຍ.', 'ຕ.ລ.', 'ພ.ຈ.', 'ທ.ວ.')),
('mon', ('ມັງກອນ', 'ກຸມຟາ', 'ມີນາ', 'ເມສາ', 'ພຶດສະພາ', 'ມິຖຸນາ', 'ກໍລະກົດ', 'ສິງຫາ', 'ກັນຍາ', 'ຕຸລາ', 'ພະຈິກ', 'ທັນວາ')),
('d_t_fmt', '%a %e %b %Ey, %H:%M:%S'),
('d_fmt', '%d/%m/%Ey'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYມ]'),
('noexpr', '^[nNບ]')]),
('lt_LT',
[('abday', ('Sk', 'Pr', 'An', 'Tr', 'Kt', 'Pn', 'Št')),
('day', ('Sekmadienis', 'Pirmadienis', 'Antradienis', 'Trečiadienis', 'Ketvirtadienis', 'Penktadienis', 'Šeštadienis')),
('abmon', ('Sau', 'Vas', 'Kov', 'Bal', 'Geg', 'Bir', 'Lie', 'Rgp', 'Rgs', 'Spa', 'Lap', 'Grd')),
('mon', ('sausio', 'vasario', 'kovo', 'balandžio', 'gegužės', 'birželio', 'liepos', 'rugpjūčio', 'rugsėjo', 'spalio', 'lapkričio', 'gruodžio')),
('d_t_fmt', '%Y m. %B %d d. %T'),
('d_fmt', '%Y.%m.%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[TtYy].*'),
('noexpr', '^[Nn].*')]),
('lv_LV',
[('abday', ('Sv', 'P\xa0', 'O\xa0', 'T\xa0', 'C\xa0', 'Pk', 'S\xa0')),
('day', ('svētdiena', 'pirmdiena', 'otrdiena', 'trešdiena', 'ceturtdiena', 'piektdiena', 'sestdiena')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'mai', 'jūn', 'jūl', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('janvāris', 'februāris', 'marts', 'aprīlis', 'maijs', 'jūnijs', 'jūlijs', 'augusts', 'septembris', 'oktobris', 'novembris', 'decembris')),
('d_t_fmt', '%A, %Y. gada %e. %B, plkst. %H un %M'),
('d_fmt', '%Y.%m.%d.'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[JjYy].*'),
('noexpr', '^[Nn].*')]),
('lzh_TW',
[('abday', ('日', '一', '二', '三', '四', '五', '六')),
('day', ('週日', '週一', '週二', '週三', '週四', '週五', '週六')),
('abmon', (' 一 ', ' 二 ', ' 三 ', ' 四 ', ' 五 ', ' 六 ', ' 七 ', ' 八 ', ' 九 ', ' 十 ', '十一', '十二')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '%OC%Oy年%B%Od日 (%A) %OH時%OM分%OS秒'),
('d_fmt', '%OC%Oy年%B%Od日'),
('t_fmt', '%OH時%OM分%OS秒'),
('t_fmt_ampm', '%p %OI時%OM分%OS秒'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY是]'),
('noexpr', '^[nN非]')]),
('mag_IN',
[('abday', ('एतवार ', 'सोमार ', 'मंगर ', 'बुध ', 'बिफे ', 'सूक ', 'सनिचर ')),
('day', ('एतवार ', 'सोमार ', 'मंगर ', 'बुध ', 'बिफे ', 'सूक ', 'सनिचर ')),
('abmon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('mon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('mai_IN',
[('abday', ('रवि ', 'सोम ', 'मंगल ', 'बुध ', 'गुरु ', 'शुक्र ', 'शनि ')),
('day', ('रविवार ', 'सोमवार ', 'मंगलवार ', 'बुधवार ', 'गुरुवार ', 'शुक्रवार ', 'शनिवार ')),
('abmon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('mon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('mg_MG',
[('abday', ('lhd', 'lts', 'tlt', 'lrb', 'lkm', 'zom', 'sab')),
('day', ('alahady', 'alatsinainy', 'talata', 'alarobia', 'alakamisy', 'zoma', 'sabotsy')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'mey', 'jon', 'jol', 'aog', 'sep', 'okt', 'nov', 'des')),
('mon', ('janoary', 'febroary', 'martsa', 'aprily', 'mey', 'jona', 'jolay', 'aogositra', 'septambra', 'oktobra', 'novambra', 'desambra')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[eEyY].*'),
('noexpr', '^[tTnN].*')]),
('mhr_RU',
[('abday', ('Ршр', 'Шчм', 'Кжм', 'Вгч', 'Изр', 'Кгр', 'Шмт')),
('day', ('Рушарня', 'Шочмо', 'Кушкыжмо', 'Вӱргече', 'Изарня', 'Кугарня', 'Шуматкече')),
('abmon', ('Шрк', 'Пгж', 'Ӱрн', 'Вшр', 'Ага', 'Пдш', 'Срм', 'Срл', 'Идм', 'Шыж', 'Клм', 'Тел')),
('mon', ('Шорыкйол', 'Пургыж', 'Ӱярня', 'Вӱдшор', 'Ага', 'Пеледыш', 'Сӱрем', 'Сорла', 'Идым', 'Шыжа', 'Кылме', 'Теле')),
('d_t_fmt', '%A %Y %B %d %T'),
('d_fmt', '%Y.%m.%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[ТтYy].*'),
('noexpr', '^[УуNn].*')]),
('mi_NZ',
[('abday', ('Ta', 'Ma', 'Tū', 'We', 'Tāi', 'Pa', 'Hā')),
('day', ('Rātapu', 'Mane', 'Tūrei', 'Wenerei', 'Tāite', 'Paraire', 'Hātarei')),
('abmon', ('Kohi', 'Hui', 'Pou', 'Pae', 'Hara', 'Pipi', 'Hōngoi', 'Here', 'Mahu', 'Whi-nu', 'Whi-ra', 'Haki')),
('mon',
('Kohi-tātea',
'Hui-tanguru',
'Poutū-te-rangi',
'Paenga-whāwhā',
'Haratua',
'Pipiri',
'Hōngoingoi',
'Here-turi-kōkā',
'Mahuru',
'Whiringa-ā-nuku',
'Whiringa-ā-rangi',
'Hakihea')),
('d_t_fmt', 'Te %A, te %d o %B, %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[1yYāĀäÄaA].*'),
('noexpr', '^[0nNkK].*')]),
('mk_MK',
[('abday', ('нед', 'пон', 'вто', 'сре', 'чет', 'пет', 'саб')),
('day', ('недела', 'понеделник', 'вторник', 'среда', 'четврток', 'петок', 'сабота')),
('abmon', ('јан', 'фев', 'мар', 'апр', 'мај', 'јун', 'јул', 'авг', 'сеп', 'окт', 'ное', 'дек')),
('mon', ('јануари', 'февруари', 'март', 'април', 'мај', 'јуни', 'јули', 'август', 'септември', 'октомври', 'ноември', 'декември')),
('d_t_fmt', '%a, %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[ДдDdYy1].*'),
('noexpr', '^[НнNn0].*')]),
('ml_IN',
[('abday', ('ഞാ', 'തി', 'ചൊ', 'ബു', 'വ്യാ', 'വെ', 'ശ')),
('day', ('ഞായര്\u200d', 'തിങ്കള്\u200d', 'ചൊവ്വ', 'ബുധന്\u200d', 'വ്യാഴം', 'വെള്ളി', 'ശനി')),
('abmon', ('ജനു', 'ഫെബ്', 'മാര്\u200d', 'ഏപ്ര', 'മെ', 'ജൂണ്\u200d', 'ജൂലൈ', 'ആഗ്', 'സെപ്', 'ഒക്ടോ', 'നവം', 'ഡിസം')),
('mon',
('ജനുവരി',
'ഫെബ്രുവരി',
'മാര്\u200dച്ച്',
'ഏപ്രില്\u200d ',
'മെയ്',
'ജൂണ്\u200d',
'ജൂലൈ',
'ആഗസ്റ്റ്',
'സെപ്റ്റംബര്\u200d',
'ഒക്ടോബര്\u200d',
'നവംബര്\u200d',
'ഡിസംബര്\u200d')),
('d_t_fmt', '%A %d %B %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %B %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[അതെyY]'),
('noexpr', '^[അല്ലnN]')]),
('mn_MN',
[('abday', ('Ня', 'Да', 'Мя', 'Лх', 'Пү', 'Ба', 'Бя')),
('day', ('Ням', 'Даваа', 'Мягмар', 'Лхагва', 'Пүрэв', 'Баасан', 'Бямба')),
('abmon', ('Хул', 'Үхэ', 'Бар', 'Туу', 'Луу', 'Мог', 'Мор', 'Хон', 'Бич', 'Тах', 'Нох', 'Гах')),
('mon',
('Хулгана сарын',
'Үхэр сарын',
'Бар сарын',
'Туулай сарын',
'Луу сарын',
'Могой сарын',
'Морь сарын',
'Хонь сарын',
'Бич сарын',
'Тахиа сарын',
'Нохой сарын',
'Гахай сарын')),
('d_t_fmt', '%Y %b %d, %a %T'),
('d_fmt', '%Y.%m.%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[тТyY].*'),
('noexpr', '^[үҮnN].*')]),
('mni_IN',
[('abday', ('নোং', 'নিং', 'লৈবাক', 'য়ুম', 'শগোল', 'ইরা', 'থাং')),
('day', ('নোংমাইজিং', 'নিংথৌকাবা', 'লৈবাকপোকপা', 'য়ুমশকৈশা', 'শগোলশেন', 'ইরাই', 'থাংজ')),
('abmon', ('জান', 'ফেব', 'মার', 'এপ্রি', 'মে', 'জুন', 'জুল', 'আগ', 'সেপ', 'ওক্ত', 'নবে', 'ডিস')),
('mon', ('জানুৱারি', 'ফেব্রুৱারি', 'মার্চ', 'এপ্রিল', 'মে', 'জুন', 'জুলাই', 'আগষ্ট', 'সেপ্তেম্বর', 'ওক্তোবর', 'নবেম্বর', 'ডিসেম্বর')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('mr_IN',
[('abday', ('रवि', 'सोम', 'मंगळ', 'बुध', 'गुरु', 'शुक्र', 'शनि')),
('day', ('रविवार', 'सोमवार', 'मंगळवार', 'बुधवार', 'गुरुवार', 'शुक्रवार', 'शनिवार')),
('abmon', ('जानेवारी', 'फेबृवारी', 'मार्च', 'एप्रिल', 'मे', 'जून', 'जुलै', 'ऑगस्ट', 'सप्टेंबर', 'ऑक्टोबर', 'नोव्हेंबर', 'डिसेंबर')),
('mon', ('जानेवारी', 'फेबृवारी', 'मार्च', 'एप्रिल', 'मे', 'जून', 'जुलै', 'ऑगस्ट', 'सप्टेंबर', 'ऑक्टोबर', 'नोव्हेंबर', 'डिसेंबर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^(Yes|[yY])'),
('noexpr', '^(No|[nN])')]),
('ms_MY',
[('abday', ('Ahd', 'Isn', 'Sel', 'Rab', 'Kha', 'Jum', 'Sab')),
('day', ('Ahad', 'Isnin', 'Selasa', 'Rabu', 'Khamis', 'Jumaat', 'Sabtu')),
('abmon', ('Jan', 'Feb', 'Mac', 'Apr', 'Mei', 'Jun', 'Jul', 'Ogos', 'Sep', 'Okt', 'Nov', 'Dis')),
('mon', ('Januari', 'Februari', 'Mac', 'April', 'Mei', 'Jun', 'Julai', 'Ogos', 'September', 'Oktober', 'November', 'Disember')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[tT]')]),
('mt_MT',
[('abday', ('Ħad', 'Tne', 'Tli', 'Erb', 'Ħam', 'Ġim', 'Sib')),
('day', ('il-Ħadd', 'it-Tnejn', 'it-Tlieta', 'l-Erbgħa', 'il-Ħamis', 'il-Ġimgħa', 'is-Sibt')),
('abmon', ('Jan', 'Fra', 'Mar', 'Apr', 'Mej', 'Ġun', 'Lul', 'Aww', 'Set', 'Ott', 'Nov', 'Diċ')),
('mon', ('Jannar', 'Frar', 'Marzu', 'April', 'Mejju', 'Ġunju', 'Lulju', 'Awwissu', 'Settembru', 'Ottubru', 'Novembru', 'Diċembru ')),
('d_t_fmt', '%A, %d ta %b, %Y %I:%M:%S %p %Z'),
('d_fmt', '%A, %d ta %b, %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^(Yes|[yY])'),
('noexpr', '^(No|[nN])')]),
('my_MM',
[('abday', ('နွေ', 'လာ', 'ဂါ', 'ဟူး', 'တေး', 'သော', 'နေ')),
('day', ('တနင်္ဂနွေ', 'တနင်္လာ', 'အင်္ဂါ', 'ဗုဒ္ဓဟူး', 'ကြာသပတေး', 'သောကြာ', 'စနေ')),
('abmon', ('ဇန်', 'ဖေ', 'မတ်', 'ဧပြီ', 'မေ', 'ဇွန်', 'ဇူ', 'ဩ', 'စက်', 'အောက်', 'နို', 'ဒီ')),
('mon', ('ဇန်နဝါရီ', 'ဖေဖော်ဝါရီ', 'မတ်', 'ဧပြီ', 'မေ', 'ဇွန်', 'ဇူလိုင်', 'ဩဂုတ်', 'စက်တင်ဘာ', 'အောက်တိုဘာ', 'နိုဝင်ဘာ', 'ဒီဇင်ဘာ')),
('d_t_fmt', '%OC%Oy %b %Od %A %OI:%OM:%OS %Op %Z'),
('d_fmt', '%OC%Oy %b %Od %A'),
('t_fmt', '%OI:%OM:%OS %p'),
('t_fmt_ampm', '%OI:%OM:%OS %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYဟ].*'),
('noexpr', '^[nNမ].*')]),
('nan_TW',
[('abday', ('日', '一', '二', '三', '四', '五', '六')),
('day', ('禮拜日', '禮拜一', '禮拜二', '禮拜三', '禮拜四', '禮拜五', '禮拜六')),
('abmon', (' 1月', ' 2月', ' 3月', ' 4月', ' 5月', ' 6月', ' 7月', ' 8月', ' 9月', '10月', '11月', '12月')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '%Y年%m月%d日 (%A) %H點%M分%S秒'),
('d_fmt', '%Y年%m月%d日'),
('t_fmt', '%H點%M分%S秒'),
('t_fmt_ampm', '%p %I點%M分%S秒'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY是]'),
('noexpr', '^[nN伓]')]),
('nb_NO',
[('abday', ('sø.', 'ma.', 'ti.', 'on.', 'to.', 'fr.', 'lø.')),
('day', ('søndag', 'mandag', 'tirsdag', 'onsdag', 'torsdag', 'fredag', 'lørdag')),
('abmon', ('jan.', 'feb.', 'mars', 'april', 'mai', 'juni', 'juli', 'aug.', 'sep.', 'okt.', 'nov.', 'des.')),
('mon', ('januar', 'februar', 'mars', 'april', 'mai', 'juni', 'juli', 'august', 'september', 'oktober', 'november', 'desember')),
('d_t_fmt', '%a %d. %b %Y kl. %H.%M %z'),
('d_fmt', '%d. %b %Y'),
('t_fmt', 'kl. %H.%M %z'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[JjYy].*'),
('noexpr', '^[Nn].*')]),
('nds_DE',
[('abday', ('Sdag', 'Maan', 'Ding', 'Migg', 'Dunn', 'Free', 'Svd.')),
('day', ('Sünndag', 'Maandag', 'Dingsdag', 'Middeweek', 'Dunnersdag', 'Freedag', 'Sünnavend')),
('abmon', ('Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez')),
('mon', ('Jannuaar', 'Feberwaar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember')),
('d_t_fmt', '%a %d. %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('nds_NL',
[('abday', ('Sdg', 'Mdg', 'Dsg', 'Mwk', 'Ddg', 'Fdg', 'Swd')),
('day', ('Sinndag', 'Mondag', 'Dingsdag', 'Meddwäakj', 'Donnadag', 'Friedag', 'Sinnowend')),
('abmon', ('Jan', 'Feb', 'Moz', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Now', 'Dez')),
('mon', ('Jaunuwoa', 'Februwoa', 'Moaz', 'Aprell', 'Mai', 'Juni', 'Juli', 'August', 'Septamba', 'Oktoba', 'Nowamba', 'Dezamba')),
('d_t_fmt', '%a %d. %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('ne_NP',
[('abday', ('आइत ', 'सोम ', 'मंगल ', 'बुध ', 'बिहि ', 'शुक्र ', 'शनि ')),
('day', ('आइतबार ', 'सोमबार ', 'मंगलबार ', 'बुधबार ', 'बिहिबार ', 'शुक्रबार ', 'शनिबार ')),
('abmon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('mon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('nhn_MX',
[('abday', ('teo', 'cei', 'ome', 'yei', 'nau', 'mac', 'chi')),
('day', ('teoilhuitl', 'ceilhuitl', 'omeilhuitl', 'yeilhuitl', 'nahuilhuitl', 'macuililhuitl', 'chicuaceilhuitl')),
('abmon', ('ene', 'feb', 'mar', 'abr', 'may', 'jun', 'jul', 'ago', 'sep', 'oct', 'nov', 'dic')),
('mon', ('enero', 'febrero', 'marzo', 'abril', 'mayo', 'junio', 'julio', 'agosto', 'septiembre', 'octubre', 'noviembre', 'diciembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', '\u2009'),
('yesexpr', '^[sSqQyY].*'),
('noexpr', '^[nNaA].*')]),
('niu_NU',
[('abday', ('Tapu', 'Gofua', 'Ua', 'Lotu', 'Tuloto', 'Falaile', 'Faiumu')),
('day', ('Aho Tapu', 'Aho Gofua', 'Aho Ua', 'Aho Lotu', 'Aho Tuloto', 'Aho Falaile', 'Aho Faiumu')),
('abmon', ('Ian', 'Fep', 'Mas', 'Ape', 'Me', 'Iun', 'Iul', 'Aok', 'Sep', 'Oke', 'Nov', 'Tes')),
('mon', ('Ianuali', 'Fepuali', 'Masi', 'Apelila', 'Me', 'Iuni', 'Iulai', 'Aokuso', 'Sepetema', 'Oketopa', 'Novema', 'Tesemo')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ēĒyY].*'),
('noexpr', '^[nN].*')]),
('niu_NZ',
[('abday', ('Tapu', 'Gofua', 'Ua', 'Lotu', 'Tuloto', 'Falaile', 'Faiumu')),
('day', ('Aho Tapu', 'Aho Gofua', 'Aho Ua', 'Aho Lotu', 'Aho Tuloto', 'Aho Falaile', 'Aho Faiumu')),
('abmon', ('Ian', 'Fep', 'Mas', 'Ape', 'Me', 'Iun', 'Iul', 'Aok', 'Sep', 'Oke', 'Nov', 'Tes')),
('mon', ('Ianuali', 'Fepuali', 'Masi', 'Apelila', 'Me', 'Iuni', 'Iulai', 'Aokuso', 'Sepetema', 'Oketopa', 'Novema', 'Tesemo')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ēĒyY].*'),
('noexpr', '^[nN].*')]),
('nl_AW',
[('abday', ('zo', 'ma', 'di', 'wo', 'do', 'vr', 'za')),
('day', ('zondag', 'maandag', 'dinsdag', 'woensdag', 'donderdag', 'vrijdag', 'zaterdag')),
('abmon', ('jan', 'feb', 'mrt', 'apr', 'mei', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('januari', 'februari', 'maart', 'april', 'mei', 'juni', 'juli', 'augustus', 'september', 'oktober', 'november', 'december')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('nl_BE',
[('abday', ('zo', 'ma', 'di', 'wo', 'do', 'vr', 'za')),
('day', ('zondag', 'maandag', 'dinsdag', 'woensdag', 'donderdag', 'vrijdag', 'zaterdag')),
('abmon', ('jan', 'feb', 'mrt', 'apr', 'mei', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('januari', 'februari', 'maart', 'april', 'mei', 'juni', 'juli', 'augustus', 'september', 'oktober', 'november', 'december')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('nl_NL',
[('abday', ('zo', 'ma', 'di', 'wo', 'do', 'vr', 'za')),
('day', ('zondag', 'maandag', 'dinsdag', 'woensdag', 'donderdag', 'vrijdag', 'zaterdag')),
('abmon', ('jan', 'feb', 'mrt', 'apr', 'mei', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('januari', 'februari', 'maart', 'april', 'mei', 'juni', 'juli', 'augustus', 'september', 'oktober', 'november', 'december')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('nn_NO',
[('abday', ('su.', 'må.', 'ty.', 'on.', 'to.', 'fr.', 'la.')),
('day', ('sundag ', 'måndag ', 'tysdag ', 'onsdag ', 'torsdag ', 'fredag ', 'laurdag ')),
('abmon', ('jan.', 'feb.', 'mars', 'april', 'mai', 'juni', 'juli', 'aug.', 'sep.', 'okt.', 'nov.', 'des.')),
('mon', ('januar', 'februar', 'mars', 'april', 'mai', 'juni', 'juli', 'august', 'september', 'oktober', 'november', 'desember')),
('d_t_fmt', '%a %d. %b %Y kl. %H.%M %z'),
('d_fmt', '%d. %b %Y'),
('t_fmt', 'kl. %H.%M %z'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[JjYy].*'),
('noexpr', '^[Nn].*')]),
('nr_ZA',
[('abday', ('Son', 'Mvu', 'Bil', 'Tha', 'Ne', 'Hla', 'Gqi')),
('day', ('uSonto', 'uMvulo', 'uLesibili', 'lesithathu', 'uLesine', 'ngoLesihlanu', 'umGqibelo')),
('abmon', ('Jan', 'Feb', 'Mat', 'Apr', 'Mey', 'Jun', 'Jul', 'Arh', 'Sep', 'Okt', 'Usi', 'Dis')),
('mon', ('Janabari', 'uFeberbari', 'uMatjhi', 'u-Apreli', 'Meyi', 'Juni', 'Julayi', 'Arhostosi', 'Septemba', 'Oktoba', 'Usinyikhaba', 'Disemba')),
('d_t_fmt', '%a %-e %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('nso_ZA',
[('abday', ('Son', 'Moš', 'Bed', 'Rar', 'Ne', 'Hla', 'Mok')),
('day', ('LaMorena', 'Mošupologo', 'Labobedi', 'Laboraro', 'Labone', 'Labohlano', 'Mokibelo')),
('abmon', ('Jan', 'Feb', 'Mat', 'Apr', 'Mei', 'Jun', 'Jul', 'Ago', 'Set', 'Okt', 'Nof', 'Dis')),
('mon', ('Janaware', 'Febereware', 'Matšhe', 'Aprele', 'Mei', 'June', 'Julae', 'Agostose', 'Setemere', 'Oktobere', 'Nofemere', 'Disemere')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYeE]'),
('noexpr', '^[nNaA]')]),
('oc_FR',
[('abday', ('dim', 'lun', 'mar', 'mec', 'jòu', 'ven', 'sab')),
('day', ('dimenge', 'diluns', 'dimars', 'dimecres', 'dijóus', 'divendres', 'disabte')),
('abmon', ('gen', 'feb', 'mar', 'abr', 'mai', 'jun', 'jul', 'ago', 'set', 'oct', 'nov', 'dec')),
('mon', ('genièr', 'febrièr', 'març', 'abrial', 'mai', 'junh', 'julhet', 'agost', 'setembre', 'octobre', 'novembre', 'decembre')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[oOsSyY].*'),
('noexpr', '^[nN].*')]),
('om_ET',
[('abday', ('Dil', 'Wix', 'Qib', 'Rob', 'Kam', 'Jim', 'San')),
('day', ('Dilbata', 'Wiixata', 'Qibxata', 'Roobii', 'Kamiisa', 'Jimaata', 'Sanbata')),
('abmon', ('Ama', 'Gur', 'Bit', 'Elb', 'Cam', 'Wax', 'Ado', 'Hag', 'Ful', 'Onk', 'Sad', 'Mud')),
('mon',
('Amajjii', 'Guraandhala', 'Bitooteessa', 'Elba', 'Caamsa', 'Waxabajjii', 'Adooleessa', 'Hagayya', 'Fuulbana', 'Onkololeessa', 'Sadaasa', 'Muddee')),
('d_t_fmt', '%A, %B %e, %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('om_KE',
[('abday', ('Dil', 'Wix', 'Qib', 'Rob', 'Kam', 'Jim', 'San')),
('day', ('Dilbata', 'Wiixata', 'Qibxata', 'Roobii', 'Kamiisa', 'Jimaata', 'Sanbata')),
('abmon', ('Ama', 'Gur', 'Bit', 'Elb', 'Cam', 'Wax', 'Ado', 'Hag', 'Ful', 'Onk', 'Sad', 'Mud')),
('mon',
('Amajjii', 'Guraandhala', 'Bitooteessa', 'Elba', 'Caamsa', 'Waxabajjii', 'Adooleessa', 'Hagayya', 'Fuulbana', 'Onkololeessa', 'Sadaasa', 'Muddee')),
('d_t_fmt', '%A, %B %e, %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('or_IN',
[('abday', ('ରବି', 'ସୋମ', 'ମଙ୍ଗଳ', 'ବୁଧ', 'ଗୁରୁ', 'ଶୁକ୍ର', 'ଶନି')),
('day', ('ରବିବାର', 'ସୋମବାର', 'ମଙ୍ଗଳବାର', 'ବୁଧବାର', 'ଗୁରୁବାର', 'ଶୁକ୍ରବାର', 'ଶନିବାର')),
('abmon', ('1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12')),
('mon', ('ଜାନୁଆରୀ', 'ଫେବୃଆରୀ', 'ମାର୍ଚ୍ଚ', 'ଅପ୍ରେଲ', 'ମଇ', 'ଜୁନ', 'ଜୁଲାଇ', 'ଅଗଷ୍ଟ', 'ସେପ୍ଟେମ୍ବର', 'ଅକ୍ଟୋବର', 'ନଭେମ୍ବର', 'ଡିସେମ୍ବର')),
('d_t_fmt', '%Oe %B %Oy %OI:%OM:%OS %p %Z'),
('d_fmt', '%Od-%Om-%Oy'),
('t_fmt', '%OI:%OM:%OS %p'),
('t_fmt_ampm', '%OI:%OM:%OS %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('os_RU',
[('abday', ('Хцб', 'Крс', 'Дцг', 'Æрт', 'Цпр', 'Мрб', 'Сбт')),
('day', ('Хуыцаубон', 'Къуырисæр', 'Дыццæг', 'Æртыццæг', 'Цыппæрæм', 'Майрæмбон', 'Сабат')),
('abmon', ('Янв', 'Фев', 'Мар', 'Апр', 'Май', 'Июн', 'Июл', 'Авг', 'Сен', 'Окт', 'Ноя', 'Дек')),
('mon', ('Январь', 'Февраль', 'Мартъи', 'Апрель', 'Май', 'Июнь', 'Июль', 'Август', 'Сентябрь', 'Октябрь', 'Ноябрь', 'Декабрь')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[УдYy].*'),
('noexpr', '^[унNn].*')]),
('pa_IN',
[('abday', ('ਐਤ ', 'ਸੋਮ ', 'ਮੰਗਲ ', 'ਬੁੱਧ ', 'ਵੀਰ ', 'ਸ਼ੁੱਕਰ ', 'ਸ਼ਨਿੱਚਰ ')),
('day', ('ਐਤਵਾਰ ', 'ਸੋਮਵਾਰ ', 'ਮੰਗਲਵਾਰ ', 'ਬੁੱਧਵਾਰ ', 'ਵੀਰਵਾਰ ', 'ਸ਼ੁੱਕਰਵਾਰ ', 'ਸ਼ਨਿੱਚਰਵਾਰ ')),
('abmon', ('ਜਨਵਰੀ', 'ਫ਼ਰਵਰੀ', 'ਮਾਰਚ', 'ਅਪਰੈਲ', 'ਮਈ', 'ਜੂਨ', 'ਜੁਲਾਈ', 'ਅਗਸਤ', 'ਸਤੰਬਰ', 'ਅਕਤੂਬਰ', 'ਨਵੰਬਰ', 'ਦਸੰਬਰ')),
('mon', ('ਜਨਵਰੀ', 'ਫ਼ਰਵਰੀ', 'ਮਾਰਚ', 'ਅਪਰੈਲ', 'ਮਈ', 'ਜੂਨ', 'ਜੁਲਾਈ', 'ਅਗਸਤ', 'ਸਤੰਬਰ', 'ਅਕਤੂਬਰ', 'ਨਵੰਬਰ', 'ਦਸੰਬਰ')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('pa_PK',
[('abday', ('اتوار', 'پير', 'منگل', 'بدھ', 'جمعرات', 'جمعه', 'هفته')),
('day', ('اتوار', 'پير', 'منگل', 'بدھ', 'جمعرات', 'جمعه', 'هفته')),
('abmon', ('جنوري', 'فروري', 'مارچ', 'اپريل', 'مٓی', 'جون', 'جولاي', 'اگست', 'ستمبر', 'اكتوبر', 'نومبر', 'دسمبر')),
('mon', ('جنوري', 'فروري', 'مارچ', 'اپريل', 'مٓی', 'جون', 'جولاي', 'اگست', 'ستمبر', 'اكتوبر', 'نومبر', 'دسمبر')),
('d_t_fmt', 'و %H:%M:%S %Z ت %d %B %Y'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%P %I:%M:%S'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYهبf].*'),
('noexpr', '^[nNنo].*')]),
('pap_AN',
[('abday', ('do', 'lu', 'ma', 'we', 'ra', 'bi', 'sa')),
('day', ('Djadomingo', 'Djaluna', 'Djamars', 'Djawebs', 'Djarason', 'Djabierne', 'Djasabra')),
('abmon', ('Yan', 'Feb', 'Mar', 'Apr', 'Mei', 'Yün', 'Yül', 'Oug', 'Sèp', 'Okt', 'Nov', 'Des')),
('mon', ('Yanüari', 'Febrüari', 'Mart', 'Aprel', 'Mei', 'Yüni', 'Yüli', 'Ougùstùs', 'Sèptèmber', 'Oktober', 'Novèmber', 'Desèmber')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('pap_AW',
[('abday', ('do', 'lu', 'ma', 'we', 'ra', 'bi', 'sa')),
('day', ('Djadomingo', 'Djaluna', 'Djamars', 'Djawebs', 'Djarason', 'Djabierne', 'Djasabra')),
('abmon', ('Yan', 'Feb', 'Mar', 'Apr', 'Mei', 'Yün', 'Yül', 'Oug', 'Sèp', 'Okt', 'Nov', 'Des')),
('mon', ('Yanüari', 'Febrüari', 'Mart', 'Aprel', 'Mei', 'Yüni', 'Yüli', 'Ougùstùs', 'Sèptèmber', 'Oktober', 'Novèmber', 'Desèmber')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('pap_CW',
[('abday', ('do', 'lu', 'ma', 'we', 'ra', 'bi', 'sa')),
('day', ('Djadomingo', 'Djaluna', 'Djamars', 'Djawebs', 'Djarason', 'Djabierne', 'Djasabra')),
('abmon', ('Yan', 'Feb', 'Mar', 'Apr', 'Mei', 'Yün', 'Yül', 'Oug', 'Sèp', 'Okt', 'Nov', 'Des')),
('mon', ('Yanüari', 'Febrüari', 'Mart', 'Aprel', 'Mei', 'Yüni', 'Yüli', 'Ougùstùs', 'Sèptèmber', 'Oktober', 'Novèmber', 'Desèmber')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('pl_PL',
[('abday', ('nie', 'pon', 'wto', 'śro', 'czw', 'pią', 'sob')),
('day', ('niedziela', 'poniedziałek', 'wtorek', 'środa', 'czwartek', 'piątek', 'sobota')),
('abmon', ('sty', 'lut', 'mar', 'kwi', 'maj', 'cze', 'lip', 'sie', 'wrz', 'paź', 'lis', 'gru')),
('mon', ('styczeń', 'luty', 'marzec', 'kwiecień', 'maj', 'czerwiec', 'lipiec', 'sierpień', 'wrzesień', 'październik', 'listopad', 'grudzień')),
('d_t_fmt', '%a, %-d %b %Y, %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[TtYy].*'),
('noexpr', '^[nN].*')]),
('ps_AF',
[('abday', ('ی.', 'د.', 'س.', 'چ.', 'پ.', 'ج.', 'ش.')),
('day', ('یکشنبه', 'دوشنبه', 'سه\u200cشنبه', 'چارشنبه', 'پنجشنبه', 'جمعه', 'شنبه')),
('abmon', ('جنو', 'فبر', 'مار', 'اپر', 'مـې', 'جون', 'جول', 'اګس', 'سپت', 'اکت', 'نوم', 'دسم')),
('mon', ('جنوري', 'فبروري', 'مارچ', 'اپریل', 'مې', 'جون', 'جولاي', 'اګست', 'سپتمبر', 'اکتوبر', 'نومبر', 'دسمبر')),
('d_t_fmt', '%A د %Y د %B %e، %Z %H:%M:%S'),
('d_fmt', 'د %Y د %B %e'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '\u202b%I:%M:%S %p\u202c'),
('radixchar', '٫'),
('thousep', '٬'),
('yesexpr', '^[yYبf].*'),
('noexpr', '^[nNخنo].*')]),
('pt_BR',
[('abday', ('Dom', 'Seg', 'Ter', 'Qua', 'Qui', 'Sex', 'Sáb')),
('day', ('domingo', 'segunda', 'terça', 'quarta', 'quinta', 'sexta', 'sábado')),
('abmon', ('Jan', 'Fev', 'Mar', 'Abr', 'Mai', 'Jun', 'Jul', 'Ago', 'Set', 'Out', 'Nov', 'Dez')),
('mon', ('janeiro', 'fevereiro', 'março', 'abril', 'maio', 'junho', 'julho', 'agosto', 'setembro', 'outubro', 'novembro', 'dezembro')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[SsyY].*'),
('noexpr', '^[nN].*')]),
('pt_PT',
[('abday', ('Dom', 'Seg', 'Ter', 'Qua', 'Qui', 'Sex', 'Sáb')),
('day', ('Domingo', 'Segunda', 'Terça', 'Quarta', 'Quinta', 'Sexta', 'Sábado')),
('abmon', ('Jan', 'Fev', 'Mar', 'Abr', 'Mai', 'Jun', 'Jul', 'Ago', 'Set', 'Out', 'Nov', 'Dez')),
('mon', ('Janeiro', 'Fevereiro', 'Março', 'Abril', 'Maio', 'Junho', 'Julho', 'Agosto', 'Setembro', 'Outubro', 'Novembro', 'Dezembro')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[SsyY].*'),
('noexpr', '^[nN].*')]),
('quz_PE',
[('abday', ('tum', 'lun', 'mar', 'miy', 'juy', 'wiy', 'saw')),
('day', ('tuminku', 'lunis', 'martis', 'miyirkulis', 'juywis', 'wiyirnis', 'sawatu')),
('abmon', ('ini', 'phi', 'mar', 'awr', 'may', 'hun', 'hul', 'agu', 'sip', 'ukt', 'nuw', 'tis')),
('mon', ('iniru', 'phiwriru', 'marsu', 'awril', 'mayu', 'huniyu', 'huliyu', 'agustu', 'siptiyimri', 'uktuwri', 'nuwiyimri', 'tisiyimri')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%I:%M:%S %p'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[aAsSyY].*'),
('noexpr', '^[mMnN].*')]),
('ro_RO',
[('abday', ('Du', 'Lu', 'Ma', 'Mi', 'Jo', 'Vi', 'Sb')),
('day', ('duminică', 'luni', 'marţi', 'miercuri', 'joi', 'vineri', 'sâmbătă')),
('abmon', ('ian', 'feb', 'mar', 'apr', 'mai', 'iun', 'iul', 'aug', 'sep', 'oct', 'nov', 'dec')),
('mon', ('ianuarie', 'februarie', 'martie', 'aprilie', 'mai', 'iunie', 'iulie', 'august', 'septembrie', 'octombrie', 'noiembrie', 'decembrie')),
('d_t_fmt', '%a %d %b %Y %T %z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[DdYy].*'),
('noexpr', '^[nN].*')]),
('ru_RU',
[('abday', ('Вс', 'Пн', 'Вт', 'Ср', 'Чт', 'Пт', 'Сб')),
('day', ('Воскресенье', 'Понедельник', 'Вторник', 'Среда', 'Четверг', 'Пятница', 'Суббота')),
('abmon', ('янв', 'фев', 'мар', 'апр', 'май', 'июн', 'июл', 'авг', 'сен', 'окт', 'ноя', 'дек')),
('mon', ('Январь', 'Февраль', 'Март', 'Апрель', 'Май', 'Июнь', 'Июль', 'Август', 'Сентябрь', 'Октябрь', 'Ноябрь', 'Декабрь')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[ДдYy].*'),
('noexpr', '^[НнNn].*')]),
('ru_UA',
[('abday', ('Вск', 'Пнд', 'Вто', 'Срд', 'Чтв', 'Птн', 'Суб')),
('day', ('Воскресенье', 'Понедельник', 'Вторник', 'Среда', 'Четверг', 'Пятница', 'Суббота')),
('abmon', ('Янв', 'Фев', 'Мар', 'Апр', 'Май', 'Июн', 'Июл', 'Авг', 'Сен', 'Окт', 'Ноя', 'Дек')),
('mon', ('Январь', 'Февраль', 'Март', 'Апрель', 'Май', 'Июнь', 'Июль', 'Август', 'Сентябрь', 'Октябрь', 'Ноябрь', 'Декабрь')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[ДдYy].*'),
('noexpr', '^[НнNn].*')]),
('rw_RW',
[('abday', ('Mwe', 'Mbe', 'Kab', 'Gtu', 'Kan', 'Gnu', 'Gnd')),
('day', ('Ku cyumweru', 'Kuwa mbere', 'Kuwa kabiri', 'Kuwa gatatu', 'Kuwa kane', 'Kuwa gatanu', 'Kuwa gatandatu')),
('abmon', ('Mut', 'Gas', 'Wer', 'Mat', 'Gic', 'Kam', 'Nya', 'Kan', 'Nze', 'Ukw', 'Ugu', 'Uku')),
('mon', ('Mutarama', 'Gashyantare', 'Werurwe', 'Mata', 'Gicuransi', 'Kamena', 'Nyakanga', 'Kanama', 'Nzeli', 'Ukwakira', 'Ugushyingo', 'Ukuboza')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[yY]'),
('noexpr', '^[nNoO]')]),
('sa_IN',
[('abday', ('रविः', 'सोम:', 'मंगल:', 'बुध:', 'बृहस्पतिः', 'शुक्र', 'शनि:')),
('day', ('रविवासर:', 'सोमवासर:', 'मंगलवासर:', 'बुधवासर:', 'बृहस्पतिवासरः', 'शुक्रवासर', 'शनिवासर:')),
('abmon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('mon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[aAyY].*'),
('noexpr', '^[nN].*')]),
('sat_IN',
[('abday', ('सिंगे', 'ओते', 'बाले', 'सागुन', 'सारदी', 'जारुम', 'ञुहुम')),
('day', ('सिंगेमाँहाँ', 'ओतेमाँहाँ', 'बालेमाँहाँ', 'सागुनमाँहाँ', 'सारदीमाँहाँ', 'जारुममाँहाँ', 'ञुहुममाँहाँ')),
('abmon', ('जनवरी', 'फरवरी', 'मार्च', 'अप्रेल', 'मई', 'जुन', 'जुलाई', 'अगस्त', 'सितम्बर', 'अखथबर', 'नवम्बर', 'दिसम्बर')),
('mon', ('जनवरी', 'फरवरी', 'मार्च', 'अप्रेल', 'मई', 'जुन', 'जुलाई', 'अगस्त', 'सितम्बर', 'अखथबर', 'नवम्बर', 'दिसम्बर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^(होय|[yY])'),
('noexpr', '^(बाङ|[nN])')]),
('sc_IT',
[('abday', ('Dom', 'Lun', 'Mar', 'Mèr', 'Jòb', 'Cen', 'Sàb')),
('day', ('Domìngu', 'Lunis', 'Martis', 'Mèrcuris', 'Jòbia', 'Cenàbara', 'Sàbadu')),
('abmon', ('Gen', 'Fri', 'Mar', 'Abr', 'May', 'Làm', 'Arj', 'Aus', 'Cab', 'Lad', 'Don', 'Ida')),
('mon', ('Gennarju', 'Friarju', 'Martzu', 'Abrili', 'Mayu', 'Làmpadas', 'Arjolas', 'Austu', 'Cabudanni', 'Ladàmini', 'Donnyasantu', 'Idas')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d. %m. %y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[sSjJoOyY].*'),
('noexpr', '^[nN].*')]),
('sd_IN',
[('abday', ('آرتوارُ', 'سومرُ', 'منگلُ', 'ٻُڌرُ', 'وسپت', 'جُمو', 'ڇنڇر')),
('day', ('آرتوارُ', 'سومرُ', 'منگلُ', 'ٻُڌرُ', 'وسپت', 'جُمو', 'ڇنڇر')),
('abmon', ('جنوري', 'فبروري', 'مارچ', 'اپريل', 'مي', 'جون', 'جولاءِ', 'آگسٽ', 'سيپٽيمبر', 'آڪٽوبر', 'نومبر', 'ڊسمبر')),
('mon', ('جنوري', 'فبروري', 'مارچ', 'اپريل', 'مي', 'جون', 'جولاءِ', 'آگسٽ', 'سيپٽيمبر', 'آڪٽوبر', 'نومبر', 'ڊسمبر')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[نyY].*'),
('noexpr', '^[لnN].*')]),
('se_NO',
[('abday', ('sotn', 'vuos', 'maŋ', 'gask', 'duor', 'bear', 'láv')),
('day', ('sotnabeaivi', 'vuossárga', 'maŋŋebarga', 'gaskavahkku', 'duorasdat', 'bearjadat', 'lávvardat')),
('abmon', ('ođđj', 'guov', 'njuk', 'cuoŋ', 'mies', 'geas', 'suoi', 'borg', 'čakč', 'golg', 'skáb', 'juov')),
('mon',
('ođđajagemánu',
'guovvamánu',
'njukčamánu',
'cuoŋománu',
'miessemánu',
'geassemánu',
'suoidnemánu',
'borgemánu',
'čakčamánu',
'golggotmánu',
'skábmamánu',
'juovlamánu')),
('d_t_fmt', '%a, %b %e. b. %Y %T %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[JjYy].*'),
('noexpr', '^[Ii].*')]),
('shs_CA',
[('abday', ('Sxe', 'Spe', 'Sel', 'Ske', 'Sme', 'Sts', 'Stq')),
('day', ('Sxetspesq̓t', 'Spetkesq̓t', 'Selesq̓t', 'Skellesq̓t', 'Smesesq̓t', 'Stselkstesq̓t', 'Stqmekstesq̓t')),
('abmon', ('Kwe', 'Tsi', 'Sqe', 'Éwt', 'Ell', 'Tsp', 'Tqw', 'Ct̓é', 'Qel', 'Wél', 'U7l', 'Tet')),
('mon',
('Pellkwet̓min',
'Pelctsipwen̓ten',
'Pellsqépts',
'Peslléwten',
'Pell7ell7é7llqten',
'Pelltspéntsk',
'Pelltqwelq̓wél̓t',
'Pellct̓éxel̓cten',
'Pesqelqlélten',
'Pesllwélsten',
'Pellc7ell7é7llcwten̓',
'Pelltetétq̓em')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYoO].*'),
('noexpr', '^[nN].*')]),
('si_LK',
[('abday', ('ඉ', 'ස', 'අ', 'බ', 'බ්\u200dර', 'සි', 'සෙ')),
('day', ('ඉරිදා', 'සඳුදා', 'අඟහරුවාදා', 'බදාදා', 'බ්\u200dරහස්පතින්දා', 'සිකුරාදා', 'සෙනසුරාදා')),
('abmon', ('ජන', 'පෙබ', 'මාර්', 'අප්\u200dරි', 'මැයි', 'ජූනි', 'ජූලි', 'අගෝ', 'සැප්', 'ඔක්', 'නෙවැ', 'දෙසැ')),
('mon', ('ජනවාරි', 'පෙබරවාරි', 'මාර්තු', 'අප්\u200dරියෙල්', 'මැයි', 'ජූනි', 'ජූලි', 'අගෝස්තු', 'සැප්තැම්බර්', 'ඔක්තෝබර්', 'නොවැම්බර්', 'දෙසැම්බර්')),
('d_t_fmt', '%Y-%m-%d %H:%M:%S %z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%p %I:%M:%S'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ඔYy]'),
('noexpr', '^[නNn]')]),
('sid_ET',
[('abday', ('Sam', 'San', 'Mak', 'Row', 'Ham', 'Arb', 'Qid')),
('day', ('Sambata', 'Sanyo', 'Maakisanyo', 'Roowe', 'Hamuse', 'Arbe', 'Qidaame')),
('abmon', ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')),
('mon', ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December')),
('d_t_fmt', '%A, %B %e, %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('sk_SK',
[('abday', ('Ne', 'Po', 'Ut', 'St', 'Št', 'Pi', 'So')),
('day', ('Nedeľa', 'Pondelok', 'Utorok', 'Streda', 'Štvrtok', 'Piatok', 'Sobota')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'máj', 'jún', 'júl', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('január', 'február', 'marec', 'apríl', 'máj', 'jún', 'júl', 'august', 'september', 'október', 'november', 'december')),
('d_t_fmt', '%a\xa0%e.\xa0%B\xa0%Y,\xa0%H:%M:%S\xa0%Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S'),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[aAáÁyY].*'),
('noexpr', '^[nN].*')]),
('sl_SI',
[('abday', ('ned', 'pon', 'tor', 'sre', 'čet', 'pet', 'sob')),
('day', ('nedelja', 'ponedeljek', 'torek', 'sreda', 'četrtek', 'petek', 'sobota')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'maj', 'jun', 'jul', 'avg', 'sep', 'okt', 'nov', 'dec')),
('mon', ('januar', 'februar', 'marec', 'april', 'maj', 'junij', 'julij', 'avgust', 'september', 'oktober', 'november', 'december')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d. %m. %Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[YyJj].*'),
('noexpr', '^[Nn].*')]),
('so_DJ',
[('abday', ('axa', 'isn', 'sal', 'arb', 'kha', 'jim', 'sab')),
('day', ('Axad', 'Isniin', 'Salaaso', 'Arbaco', 'Khamiis', 'Jimco', 'Sabti')),
('abmon', ('kob', 'lab', 'sad', 'afr', 'sha', 'lix', 'tod', 'sid', 'sag', 'tob', 'kit', 'lit')),
('mon',
('Bisha Koobaad',
'Bisha Labaad',
'Bisha Saddexaad',
'Bisha Afraad',
'Bisha Shanaad',
'Bisha Lixaad',
'Bisha Todobaad',
'Bisha Sideedaad',
'Bisha Sagaalaad',
'Bisha Tobnaad',
'Bisha Kow iyo Tobnaad',
'Bisha Laba iyo Tobnaad')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ''),
('yesexpr', '^[oOyY].*'),
('noexpr', '^[nN].*')]),
('so_ET',
[('abday', ('Axa', 'Isn', 'Sal', 'Arb', 'Kha', 'Jim', 'Sab')),
('day', ('Axad', 'Isniin', 'Salaaso', 'Arbaco', 'Khamiis', 'Jimco', 'Sabti')),
('abmon', ('Kob', 'Lab', 'Sad', 'Afr', 'Sha', 'Lix', 'Tod', 'Sid', 'Sag', 'Tob', 'KIT', 'LIT')),
('mon',
('Bisha Koobaad',
'Bisha Labaad',
'Bisha Saddexaad',
'Bisha Afraad',
'Bisha Shanaad',
'Bisha Lixaad',
'Bisha Todobaad',
'Bisha Sideedaad',
'Bisha Sagaalaad',
'Bisha Tobnaad',
'Bisha Kow iyo Tobnaad',
'Bisha Laba iyo Tobnaad')),
('d_t_fmt', '%A, %B %e, %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('so_KE',
[('abday', ('Axa', 'Isn', 'Sal', 'Arb', 'Kha', 'Jim', 'Sab')),
('day', ('Axad', 'Isniin', 'Salaaso', 'Arbaco', 'Khamiis', 'Jimco', 'Sabti')),
('abmon', ('Kob', 'Lab', 'Sad', 'Afr', 'Sha', 'Lix', 'Tod', 'Sid', 'Sag', 'Tob', 'KIT', 'LIT')),
('mon',
('Bisha Koobaad',
'Bisha Labaad',
'Bisha Saddexaad',
'Bisha Afraad',
'Bisha Shanaad',
'Bisha Lixaad',
'Bisha Todobaad',
'Bisha Sideedaad',
'Bisha Sagaalaad',
'Bisha Tobnaad',
'Bisha Kow iyo Tobnaad',
'Bisha Laba iyo Tobnaad')),
('d_t_fmt', '%A, %B %e, %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('so_SO',
[('abday', ('Axa', 'Isn', 'Sal', 'Arb', 'Kha', 'Jim', 'Sab')),
('day', ('Axad', 'Isniin', 'Salaaso', 'Arbaco', 'Khamiis', 'Jimco', 'Sabti')),
('abmon', ('Kob', 'Lab', 'Sad', 'Afr', 'Sha', 'Lix', 'Tod', 'Sid', 'Sag', 'Tob', 'KIT', 'LIT')),
('mon',
('Bisha Koobaad',
'Bisha Labaad',
'Bisha Saddexaad',
'Bisha Afraad',
'Bisha Shanaad',
'Bisha Lixaad',
'Bisha Todobaad',
'Bisha Sideedaad',
'Bisha Sagaalaad',
'Bisha Tobnaad',
'Bisha Kow iyo Tobnaad',
'Bisha Laba iyo Tobnaad')),
('d_t_fmt', '%A, %B %e, %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('sq_AL',
[('abday', ('Die ', 'Hën ', 'Mar ', 'Mër ', 'Enj ', 'Pre ', 'Sht ')),
('day', ('e diel ', 'e hënë ', 'e martë ', 'e mërkurë ', 'e enjte ', 'e premte ', 'e shtunë ')),
('abmon', ('Jan', 'Shk', 'Mar', 'Pri', 'Maj', 'Qer', 'Kor', 'Gsh', 'Sht', 'Tet', 'Nën', 'Dhj')),
('mon', ('janar', 'shkurt', 'mars', 'prill', 'maj', 'qershor', 'korrik', 'gusht', 'shtator', 'tetor', 'nëntor', 'dhjetor')),
('d_t_fmt', '%Y-%b-%d %I.%M.%S.%p %Z'),
('d_fmt', '%Y-%b-%d'),
('t_fmt', '%I.%M.%S. %Z'),
('t_fmt_ampm', '%I.%M.%S.%p %Z'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[yYpP].*'),
('noexpr', '^[nNjJ].*')]),
('sq_MK',
[('abday', ('Die ', 'Hën ', 'Mar ', 'Mër ', 'Enj ', 'Pre ', 'Sht ')),
('day', ('e diel ', 'e hënë ', 'e martë ', 'e mërkurë ', 'e enjte ', 'e premte ', 'e shtunë ')),
('abmon', ('Jan', 'Shk', 'Mar', 'Pri', 'Maj', 'Qer', 'Kor', 'Gsh', 'Sht', 'Tet', 'Nën', 'Dhj')),
('mon', ('janar', 'shkurt', 'mars', 'prill', 'maj', 'qershor', 'korrik', 'gusht', 'shtator', 'tetor', 'nëntor', 'dhjetor')),
('d_t_fmt', '%Y-%b-%d %I.%M.%S.%p %Z'),
('d_fmt', '%Y-%b-%d'),
('t_fmt', '%I.%M.%S. %Z'),
('t_fmt_ampm', '%I.%M.%S.%p %Z'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[yYpP].*'),
('noexpr', '^[nNjJ].*')]),
('sr_ME',
[('abday', ('нед', 'пон', 'уто', 'сри', 'чет', 'пет', 'суб')),
('day', ('недјеља', 'понедељак', 'уторак', 'сриједа', 'четвртак', 'петак', 'субота')),
('abmon', ('јан', 'феб', 'мар', 'апр', 'мај', 'јун', 'јул', 'авг', 'сеп', 'окт', 'нов', 'дец')),
('mon', ('јануар', 'фебруар', 'март', 'април', 'мај', 'јун', 'јул', 'август', 'септембар', 'октобар', 'новембар', 'децембар')),
('d_t_fmt', '%A, %d. %B %Y. %T %Z'),
('d_fmt', '%d.%m.%Y.'),
('t_fmt', '%T'),
('t_fmt_ampm', '%T'),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[ДдDdYy]'),
('noexpr', '^[НнNn]')]),
('sr_RS',
[('abday', ('нед', 'пон', 'уто', 'сре', 'чет', 'пет', 'суб')),
('day', ('недеља', 'понедељак', 'уторак', 'среда', 'четвртак', 'петак', 'субота')),
('abmon', ('јан', 'феб', 'мар', 'апр', 'мај', 'јун', 'јул', 'авг', 'сеп', 'окт', 'нов', 'дец')),
('mon', ('јануар', 'фебруар', 'март', 'април', 'мај', 'јун', 'јул', 'август', 'септембар', 'октобар', 'новембар', 'децембар')),
('d_t_fmt', '%A, %d. %B %Y. %T %Z'),
('d_fmt', '%d.%m.%Y.'),
('t_fmt', '%T'),
('t_fmt_ampm', '%T'),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[ДдDdYy]'),
('noexpr', '^[НнNn]')]),
('ss_ZA',
[('abday', ('Son', 'Mso', 'Bil', 'Tsa', 'Ne', 'Hla', 'Mgc')),
('day', ('Lisontfo', 'uMsombuluko', 'Lesibili', 'Lesitsatfu', 'Lesine', 'Lesihlanu', 'uMgcibelo')),
('abmon', ('Bhi', 'Van', 'Vul', 'Mab', 'Khk', 'Nhl', 'Kho', 'Ngc', 'Nyo', 'Imp', 'Lwe', 'Ngo')),
('mon',
('Bhimbidvwane', 'iNdlovane', 'iNdlovulenkhulu', 'Mabasa', 'Inkhwenkhweti', 'iNhlaba', 'Kholwane', 'iNgci', 'iNyoni', 'Impala', 'Lweti', 'iNgongoni')),
('d_t_fmt', '%a %-e %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nNaA]')]),
('st_ZA',
[('abday', ('Son', 'Mma', 'Bed', 'Rar', 'Ne', 'Hla', 'Moq')),
('day', ('Sontaha', 'Mantaha', 'Labobedi', 'Laboraro', 'Labone', 'Labohlano', 'Moqebelo')),
('abmon', ('Phe', 'Hla', 'TlH', 'Mme', 'Mot', 'Jan', 'Upu', 'Pha', 'Leo', 'Mph', 'Pud', 'Tsh')),
('mon', ('Pherekgong', 'Hlakola', 'Tlhakubele', 'Mmese', 'Motsheanong', 'Phupjane', 'Phupu', 'Phato', 'Leotse', 'Mphalane', 'Pudungwana', 'Tshitwe')),
('d_t_fmt', '%a %-e %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('sv_FI',
[('abday', ('sön', 'mån', 'tis', 'ons', 'tor', 'fre', 'lör')),
('day', ('söndag', 'måndag', 'tisdag', 'onsdag', 'torsdag', 'fredag', 'lördag')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'maj', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('januari', 'februari', 'mars', 'april', 'maj', 'juni', 'juli', 'augusti', 'september', 'oktober', 'november', 'december')),
('d_t_fmt', '%a %e. %B %Y %H.%M.%S'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%H.%M.%S'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('sv_SE',
[('abday', ('sön', 'mån', 'tis', 'ons', 'tor', 'fre', 'lör')),
('day', ('söndag', 'måndag', 'tisdag', 'onsdag', 'torsdag', 'fredag', 'lördag')),
('abmon', ('jan', 'feb', 'mar', 'apr', 'maj', 'jun', 'jul', 'aug', 'sep', 'okt', 'nov', 'dec')),
('mon', ('januari', 'februari', 'mars', 'april', 'maj', 'juni', 'juli', 'augusti', 'september', 'oktober', 'november', 'december')),
('d_t_fmt', '%a %e %b %Y %H:%M:%S'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ' '),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('sw_KE',
[('abday', ('J2', 'J3', 'J4', 'J5', 'Alh', 'Ij', 'J1')),
('day', ('Jumapili', 'Jumatatu', 'Jumanne', 'Jumatano', 'Alhamisi', 'Ijumaa', 'Jumamosi')),
('abmon', ('Jan', 'Feb', 'Mac', 'Apr', 'Mei', 'Jun', 'Jul', 'Ago', 'Sep', 'Okt', 'Nov', 'Des')),
('mon', ('Januari', 'Februari', 'Machi', 'Aprili', 'Mei', 'Juni', 'Julai', 'Agosti', 'Septemba', 'Oktoba', 'Novemba', 'Desemba')),
('d_t_fmt', '%e %B %Y %I:%M:%S %p %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%I:%M:%S %p'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[nNyY].*'),
('noexpr', '^[hHlL].*')]),
('sw_TZ',
[('abday', ('J2', 'J3', 'J4', 'J5', 'Alh', 'Ij', 'J1')),
('day', ('Jumapili', 'Jumatatu', 'Jumanne', 'Jumatano', 'Alhamisi', 'Ijumaa', 'Jumamosi')),
('abmon', ('Jan', 'Feb', 'Mac', 'Apr', 'Mei', 'Jun', 'Jul', 'Ago', 'Sep', 'Okt', 'Nov', 'Des')),
('mon', ('Januari', 'Februari', 'Machi', 'Aprili', 'Mei', 'Juni', 'Julai', 'Agosti', 'Septemba', 'Oktoba', 'Novemba', 'Desemba')),
('d_t_fmt', '%e %B %Y %I:%M:%S %p %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%I:%M:%S %p'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[nNyY].*'),
('noexpr', '^[hHlL].*')]),
('szl_PL',
[('abday', ('niy', 'pyń', 'wtŏ', 'str', 'szt', 'pjō', 'sob')),
('day', ('niydziela', 'pyńdziŏek', 'wtŏrek', 'strzŏda', 'sztwortek', 'pjōntek', 'sobŏta')),
('abmon', ('sty', 'lut', 'mer', 'kwj', 'moj', 'czy', 'lip', 'siy', 'wrz', 'paź', 'lis', 'gru')),
('mon', ('styczyń', 'luty', 'merc', 'kwjeciyń', 'moj', 'czyrwjyń', 'lipjyń', 'siyrpjyń', 'wrzesiyń', 'październik', 'listopad', 'grudziyń')),
('d_t_fmt', '%a, %-d %b %Y, %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[JjTtYy].*'),
('noexpr', '^[nN].*')]),
('ta_IN',
[('abday', ('ஞா', 'தி', 'செ', 'பு', 'வி', 'வெ', 'ச')),
('day', ('ஞாயிறு', 'திங்கள்', 'செவ்வாய்', 'புதன்', 'வியாழன்', 'வெள்ளி', 'சனி')),
('abmon', ('ஜன', 'பிப்', 'மார்', 'ஏப்', 'மே', 'ஜூன்', 'ஜூலை', 'ஆக', 'செப்', 'அக்', 'நவ', 'டிச')),
('mon', ('ஜனவரி', 'பிப்ரவரி', 'மார்ச்', 'ஏப்ரல்', 'மே', 'ஜூன்', 'ஜூலை', 'ஆகஸ்ட்', 'செப்டம்பர்', 'அக்டோபர்', 'நவம்பர்', 'டிசம்பர்')),
('d_t_fmt', '%A %d %B %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %B %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ஆம்yY]'),
('noexpr', '^[இல்லைnN]')]),
('ta_LK',
[('abday', ('ஞா', 'தி', 'செ', 'பு', 'வி', 'வெ', 'ச')),
('day', ('ஞாயிறு', 'திங்கள்', 'செவ்வாய்', 'புதன்', 'வியாழன்', 'வெள்ளி', 'சனி')),
('abmon', ('ஜன', 'பிப்', 'மார்', 'ஏப்', 'மே', 'ஜூன்', 'ஜூலை', 'ஆக', 'செப்', 'அக்', 'நவ', 'டிச')),
('mon', ('ஜனவரி', 'பிப்ரவரி', 'மார்ச்', 'ஏப்ரல்', 'மே', 'ஜூன்', 'ஜூலை', 'ஆகஸ்ட்', 'செப்டம்பர்', 'அக்டோபர்', 'நவம்பர்', 'டிசம்பர்')),
('d_t_fmt', '%A %d %B %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %B %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ஆம்yY]'),
('noexpr', '^[இல்லைnN]')]),
('te_IN',
[('abday', ('ఆది', 'సోమ', 'మంగళ', 'బుధ', 'గురు', 'శుక్ర', 'శని')),
('day', ('ఆదివారం', 'సోమవారం', 'మంగళవారం', 'బుధవారం', 'గురువారం', 'శుక్రవారం', 'శనివారం')),
('abmon', ('జనవరి', 'ఫిబ్రవరి', 'మార్చి', 'ఏప్రిల్', 'మే', 'జూన్', 'జూలై', 'ఆగస్టు', 'సెప్టెంబరు', 'అక్టోబరు', 'నవంబరు', 'డిసెంబరు')),
('mon', ('జనవరి', 'ఫిబ్రవరి', 'మార్చి', 'ఏప్రిల్', 'మే', 'జూన్', 'జూలై', 'ఆగస్టు', 'సెప్టెంబరు', 'అక్టోబరు', 'నవంబరు', 'డిసెంబరు')),
('d_t_fmt', '%B %d %A %Y %p%I.%M.%S %Z'),
('d_fmt', '%B %d %A %Y'),
('t_fmt', '%p%I.%M.%S %Z'),
('t_fmt_ampm', '%p%I.%M.%S %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYఅ].*'),
('noexpr', '^[nNక].*')]),
('tg_TJ',
[('abday', ('Вск', 'Пнд', 'Втр', 'Срд', 'Чтв', 'Птн', 'Сбт')),
('day', ('Воскресенье', 'Понедельник', 'Вторник', 'Среда', 'Четверг', 'Пятница', 'Суббота')),
('abmon', ('Янв', 'Фев', 'Мар', 'Апр', 'Май', 'Июн', 'Июл', 'Авг', 'Сен', 'Окт', 'Ноя', 'Дек')),
('mon', ('Января', 'Февраля', 'Марта', 'Апреля', 'Мая', 'Июня', 'Июля', 'Августа', 'Сентября', 'Октября', 'Ноября', 'Декабря')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[ҲҳХхДдYy].*'),
('noexpr', '^[НнNn].*')]),
('th_TH',
[('abday', ('อา.', 'จ.', 'อ.', 'พ.', 'พฤ.', 'ศ.', 'ส.')),
('day', ('อาทิตย์', 'จันทร์', 'อังคาร', 'พุธ', 'พฤหัสบดี', 'ศุกร์', 'เสาร์')),
('abmon', ('ม.ค.', 'ก.พ.', 'มี.ค.', 'เม.ย.', 'พ.ค.', 'มิ.ย.', 'ก.ค.', 'ส.ค.', 'ก.ย.', 'ต.ค.', 'พ.ย.', 'ธ.ค.')),
('mon', ('มกราคม', 'กุมภาพันธ์', 'มีนาคม', 'เมษายน', 'พฤษภาคม', 'มิถุนายน', 'กรกฎาคม', 'สิงหาคม', 'กันยายน', 'ตุลาคม', 'พฤศจิกายน', 'ธันวาคม')),
('d_t_fmt', '%a %e %b %Ey, %H:%M:%S'),
('d_fmt', '%d/%m/%Ey'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYช]'),
('noexpr', '^[nNม]')]),
('the_NP',
[('abday', ('आइत ', 'सोम ', 'मंगल ', 'बुध ', 'बिहि ', 'शुक्र ', 'शनि ')),
('day', ('आइतबार ', 'सोमबार ', 'मंगलबार ', 'बुधबार ', 'बिहिबार ', 'शुक्रबार ', 'शनिबार ')),
('abmon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('mon', ('जनवरी', 'फ़रवरी', 'मार्च', 'अप्रेल', 'मई', 'जून', 'जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('ti_ER',
[('abday', ('ሰንበ', 'ሰኑይ', 'ሰሉስ', 'ረቡዕ', 'ሓሙስ', 'ዓርቢ', 'ቀዳም')),
('day', ('ሰንበት', 'ሰኑይ', 'ሰሉስ', 'ረቡዕ', 'ሓሙስ', 'ዓርቢ', 'ቀዳም')),
('abmon', ('ጥሪ ', 'ለካቲ', 'መጋቢ', 'ሚያዝ', 'ግንቦ', 'ሰነ ', 'ሓምለ', 'ነሓሰ', 'መስከ', 'ጥቅም', 'ሕዳር', 'ታሕሳ')),
('mon', ('ጥሪ', 'ለካቲት', 'መጋቢት', 'ሚያዝያ', 'ግንቦት', 'ሰነ', 'ሓምለ', 'ነሓሰ', 'መስከረም', 'ጥቅምቲ', 'ሕዳር', 'ታሕሳስ')),
('d_t_fmt', '%A፡ %B %e መዓልቲ %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ''),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('ti_ET',
[('abday', ('ሰንበ', 'ሰኑይ', 'ሰሉስ', 'ረቡዕ', 'ሓሙስ', 'ዓርቢ', 'ቀዳም')),
('day', ('ሰንበት', 'ሰኑይ', 'ሰሉስ', 'ረቡዕ', 'ሓሙስ', 'ዓርቢ', 'ቀዳም')),
('abmon', ('ጃንዩ', 'ፌብሩ', 'ማርች', 'ኤፕረ', 'ሜይ ', 'ጁን ', 'ጁላይ', 'ኦገስ', 'ሴፕቴ', 'ኦክተ', 'ኖቬም', 'ዲሴም')),
('mon', ('ጃንዩወሪ', 'ፌብሩወሪ', 'ማርች', 'ኤፕረል', 'ሜይ', 'ጁን', 'ጁላይ', 'ኦገስት', 'ሴፕቴምበር', 'ኦክተውበር', 'ኖቬምበር', 'ዲሴምበር')),
('d_t_fmt', '%A፣ %B %e መዓልቲ %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('tig_ER',
[('abday', ('ሰ/ዓ', 'ሰኖ ', 'ታላሸ', 'ኣረር', 'ከሚሽ', 'ጅምዓ', 'ሰ/ን')),
('day', ('ሰንበት ዓባይ', 'ሰኖ', 'ታላሸኖ', 'ኣረርባዓ', 'ከሚሽ', 'ጅምዓት', 'ሰንበት ንኢሽ')),
('abmon', ('ጥሪ ', 'ለካቲ', 'መጋቢ', 'ሚያዝ', 'ግንቦ', 'ሰነ ', 'ሓምለ', 'ነሓሰ', 'መስከ', 'ጥቅም', 'ሕዳር', 'ታሕሳ')),
('mon', ('ጥሪ', 'ለካቲት', 'መጋቢት', 'ሚያዝያ', 'ግንቦት', 'ሰነ', 'ሓምለ', 'ነሓሰ', 'መስከረም', 'ጥቅምቲ', 'ሕዳር', 'ታሕሳስ')),
('d_t_fmt', '%A፡ %B %e ዮም %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ''),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('tk_TM',
[('abday', ('Duş', 'Siş', 'Çar', 'Pen', 'Ann', 'Şen', 'Ýek')),
('day', ('Duşenbe', 'Sişenbe', 'Çarşenbe', 'Penşenbe', 'Anna', 'Şenbe', 'Ýekşenbe')),
('abmon', ('Ýan', 'Few', 'Mar', 'Apr', 'Maý', 'Iýn', 'Iýl', 'Awg', 'Sen', 'Okt', 'Noý', 'Dek')),
('mon', ('Ýanwar', 'Fewral', 'Mart', 'Aprel', 'Maý', 'Iýun', 'Iýul', 'Awgust', 'Sentýabr', 'Oktýabr', 'Noýabr', 'Dekabr')),
('d_t_fmt', '%d.%m.%Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[hH].*'),
('noexpr', '^[ýÝnN].*')]),
('tl_PH',
[('abday', ('Lin', 'Lun', 'Mar', 'Miy', 'Huw', 'Biy', 'Sab')),
('day', ('Linggo', 'Lunes', 'Martes', 'Miyerkoles', 'Huwebes', 'Biyernes', 'Sabado')),
('abmon', ('Ene', 'Peb', 'Mar', 'Abr', 'May', 'Hun', 'Hul', 'Ago', 'Sep', 'Okt', 'Nob', 'Dis')),
('mon', ('Enero', 'Pebrero', 'Marso', 'Abril', 'Mayo', 'Hunyo', 'Hulyo', 'Agosto', 'Septiyembre', 'Oktubre', 'Nobiyembre', 'Disyembre')),
('d_t_fmt', '%a %d %b %Y %r %Z'),
('d_fmt', '%m/%d/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('tn_ZA',
[('abday', ('Tsh', 'Mos', 'Bed', 'Rar', 'Ne', 'Tlh', 'Mat')),
('day', ('laTshipi', 'Mosupologo', 'Labobedi', 'Laboraro', 'Labone', 'Labotlhano', 'Lamatlhatso')),
('abmon', ('Fer', 'Tlh', 'Mop', 'Mor', 'Mot', 'See', 'Phu', 'Pha', 'Lwe', 'Dip', 'Ngw', 'Sed')),
('mon',
('Ferikgong',
'Tlhakole',
'Mopitlwe',
'Moranang',
'Motsheganong',
'Seetebosigo',
'Phukwi',
'Phatwe',
'Lwetse',
'Diphalane',
'Ngwanatsele',
'Sedimonthole')),
('d_t_fmt', '%a %-e %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYeE]'),
('noexpr', '^[nN]')]),
('tr_CY',
[('abday', ('Paz', 'Pzt', 'Sal', 'Çrş', 'Prş', 'Cum', 'Cts')),
('day', ('Pazar', 'Pazartesi', 'Salı', 'Çarşamba', 'Perşembe', 'Cuma', 'Cumartesi')),
('abmon', ('Oca', 'Şub', 'Mar', 'Nis', 'May', 'Haz', 'Tem', 'Ağu', 'Eyl', 'Eki', 'Kas', 'Ara')),
('mon', ('Ocak', 'Şubat', 'Mart', 'Nisan', 'Mayıs', 'Haziran', 'Temmuz', 'Ağustos', 'Eylül', 'Ekim', 'Kasım', 'Aralık')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[yYeE]'),
('noexpr', '^[nNhH]')]),
('tr_TR',
[('abday', ('Paz', 'Pzt', 'Sal', 'Çrş', 'Prş', 'Cum', 'Cts')),
('day', ('Pazar', 'Pazartesi', 'Salı', 'Çarşamba', 'Perşembe', 'Cuma', 'Cumartesi')),
('abmon', ('Oca', 'Şub', 'Mar', 'Nis', 'May', 'Haz', 'Tem', 'Ağu', 'Eyl', 'Eki', 'Kas', 'Ara')),
('mon', ('Ocak', 'Şubat', 'Mart', 'Nisan', 'Mayıs', 'Haziran', 'Temmuz', 'Ağustos', 'Eylül', 'Ekim', 'Kasım', 'Aralık')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d-%m-%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[yYeE]'),
('noexpr', '^[nNhH]')]),
('ts_ZA',
[('abday', ('Son', 'Mus', 'Bir', 'Har', 'Ne', 'Tlh', 'Mug')),
('day', ('Sonto', 'Musumbhunuku', 'Ravumbirhi', 'Ravunharhu', 'Ravumune', 'Ravuntlhanu', 'Mugqivela')),
('abmon', ('Sun', 'Yan', 'Kul', 'Dzi', 'Mud', 'Kho', 'Maw', 'Mha', 'Ndz', 'Nhl', 'Huk', "N'w")),
('mon',
('Sunguti',
'Nyenyenyani',
'Nyenyankulu',
'Dzivamisoko',
'Mudyaxihi',
'Khotavuxika',
'Mawuwani',
'Mhawuri',
'Ndzhati',
'Nhlangula',
'Hukuri',
"N'wendzamhala")),
('d_t_fmt', '%a %-e %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('tt_RU',
[('abday', ('Якш', 'Дыш', 'Сиш', 'Чәрш', 'Пәнҗ', 'Җом', 'Шим')),
('day', ('Якшәмбе', 'Дышәмбе', 'Сишәмбе', 'Чәршәәмбе', 'Пәнҗешмбе', 'Җомга', 'Шимбә')),
('abmon', ('Янв', 'Фев', 'Мар', 'Апр', 'Май', 'Июн', 'Июл', 'Авг', 'Сен', 'Окт', 'Ноя', 'Дек')),
('mon', ('Января', 'Февраля', 'Марта', 'Апреля', 'Мая', 'Июня', 'Июля', 'Августа', 'Сентября', 'Октября', 'Ноября', 'Декабря')),
('d_t_fmt', '%a %d %b %Y %T'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^[ДдYy].*'),
('noexpr', '^[НнNn].*')]),
('ug_CN',
[('abday', ('ي', 'د', 'س', 'چ', 'پ', 'ج', 'ش')),
('day', ('يەكشەنبە', 'دۈشەنبە', 'سەيشەنبە', 'چارشەنبە', 'پەيشەنبە', 'جۈمە', 'شەنبە')),
('abmon', ('قەھرىتان', 'ھۇت', 'نورۇز', 'ئۈمىد', 'باھار', 'سەپەر', 'چىللە', 'تومۇز', 'مىزان', 'ئوغۇز', 'ئوغلاق', 'كۆنەك')),
('mon', ('قەھرىتان', 'ھۇت', 'نورۇز', 'ئۈمىد', 'باھار', 'سەپەر', 'چىللە', 'تومۇز', 'مىزان', 'ئوغۇز', 'ئوغلاق', 'كۆنەك')),
('d_t_fmt', '%a، %d-%m-%Y، %T'),
('d_fmt', '%a، %d-%m-%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%T'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('uk_UA',
[('abday', ('нд', 'пн', 'вт', 'ср', 'чт', 'пт', 'сб')),
('day', ('неділя', 'понеділок', 'вівторок', 'середа', 'четвер', "п'ятниця", 'субота')),
('abmon', ('січ', 'лют', 'бер', 'кві', 'тра', 'чер', 'лип', 'сер', 'вер', 'жов', 'лис', 'гру')),
('mon', ('січень', 'лютий', 'березень', 'квітень', 'травень', 'червень', 'липень', 'серпень', 'вересень', 'жовтень', 'листопад', 'грудень')),
('d_t_fmt', '%a, %d-%b-%Y %X %z'),
('d_fmt', '%d.%m.%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', '\xa0'),
('yesexpr', '^([Yy+]|[Тт][Аа][Кк]?)$'),
('noexpr', '^([Nn-]|[Нн][Іі])$')]),
('unm_US',
[('abday', ('ken', 'man', 'tus', 'lel', 'tas', 'pel', 'sat')),
('day', ('kentuwei', 'manteke', 'tusteke', 'lelai', 'tasteke', 'pelaiteke', 'sateteke')),
('abmon', ('eni', 'chk', 'xam', 'kwe', 'tai', 'nip', 'lai', 'win', 'tah', 'puk', 'kun', 'mux')),
('mon',
('enikwsi', 'chkwali', 'xamokhwite', 'kwetayoxe', 'tainipen', 'kichinipen', 'lainipen', 'winaminke', 'kichitahkok', 'puksit', 'wini', 'muxkotae')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ' '),
('yesexpr', '^[yY].*'),
('noexpr', '^[kKmM].*')]),
('ur_IN',
[('abday', ('اتوار', 'پیر', 'منگل', 'بدھ', 'جمعرات', 'جمعہ', 'سنیچر')),
('day', ('اتوار', 'پیر', 'منگل', 'بدھ', 'جمعرات', 'جمعہ', 'سنیچر')),
('abmon', ('جنوری', 'فروری', 'مارچ', 'اپریل', 'مئی', 'جون', 'جولائی', 'اگست', 'ستمبر', 'اكتوبر', 'نومبر', 'دسمبر')),
('mon', ('جنوری', 'فروری', 'مارچ', 'اپریل', 'مئی', 'جون', 'جولائی', 'اگست', 'ستمبر', 'اكتوبر', 'نومبر', 'دسمبر')),
('d_t_fmt', '%A %d %b %Y %I:%M:%S %p %Z'),
('d_fmt', '%A %d %b %Y'),
('t_fmt', '%I:%M:%S %Z'),
('t_fmt_ampm', '%I:%M:%S %p %Z'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[ہyY].*'),
('noexpr', '^[نnN].*')]),
('ur_PK',
[('abday', ('اتوار', 'پير', 'منگل', 'بدھ', 'جمعرات', 'جمعه', 'هفته')),
('day', ('اتوار', 'پير', 'منگل', 'بدھ', 'جمعرات', 'جمعه', 'هفته')),
('abmon', ('جنوري', 'فروري', 'مارچ', 'اپريل', 'مٓی', 'جون', 'جولاي', 'اگست', 'ستمبر', 'اكتوبر', 'نومبر', 'دسمبر')),
('mon', ('جنوري', 'فروري', 'مارچ', 'اپريل', 'مٓی', 'جون', 'جولاي', 'اگست', 'ستمبر', 'اكتوبر', 'نومبر', 'دسمبر')),
('d_t_fmt', 'و %H:%M:%S %Z ت %d %B %Y'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%P %I:%M:%S'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYهبf].*'),
('noexpr', '^[nNنo].*')]),
('ve_ZA',
[('abday', ('Swo', 'Mus', 'Vhi', 'Rar', 'ṋa', 'Ṱan', 'Mug')),
('day', ('Swondaha', 'Musumbuluwo', 'Ḽavhuvhili', 'Ḽavhuraru', 'Ḽavhuṋa', 'Ḽavhuṱanu', 'Mugivhela')),
('abmon', ('Pha', 'Luh', 'Fam', 'Lam', 'Shu', 'Lwi', 'Lwa', 'Ngu', 'Khu', 'Tsh', 'Ḽar', 'Nye')),
('mon',
('Phando', 'Luhuhi', 'Ṱhafamuhwe', 'Lambamai', 'Shundunthule', 'Fulwi', 'Fulwana', 'Ṱhangule', 'Khubvumedzi', 'Tshimedzi', 'Ḽara', 'Nyendavhusiku')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYeE]'),
('noexpr', '^[nNhH]')]),
('vi_VN',
[('abday', ('CN', 'T2', 'T3', 'T4', 'T5', 'T6', 'T7')),
('day', ('Chủ nhật', 'Thứ hai', 'Thứ ba', 'Thứ tư', 'Thứ năm', 'Thứ sáu', 'Thứ bảy')),
('abmon', ('Th01', 'Th02', 'Th03', 'Th04', 'Th05', 'Th06', 'Th07', 'Th08', 'Th09', 'Th10', 'Th11', 'Th12')),
('mon',
('Tháng một',
'Tháng hai',
'Tháng ba',
'Tháng tư',
'Tháng năm',
'Tháng sáu',
'Tháng bảy',
'Tháng tám',
'Tháng chín',
'Tháng mười',
'Tháng mười một',
'Tháng mười hai')),
('d_t_fmt', '%A, %d %B Năm %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', '%I:%M %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[1yYcC].*'),
('noexpr', '^[0nNkK].*')]),
('wa_BE',
[('abday', ('dim', 'lon', 'mår', 'mie', 'dju', 'vén', 'sem')),
('day', ('dimegne', 'londi', 'mårdi', 'mierkidi', 'djudi', 'vénrdi', 'semdi')),
('abmon', ('dja', 'fev', 'mås', 'avr', 'may', 'djn', 'djl', 'awo', 'set', 'oct', 'nôv', 'dec')),
('mon', ('djanvî', 'fevrî', 'måss', 'avri', 'may', 'djun', 'djulete', 'awousse', 'setimbe', 'octôbe', 'nôvimbe', 'decimbe')),
('d_t_fmt', 'Li %A %d di %B %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', ','),
('thousep', '.'),
('yesexpr', '^[oOyYaAwW].*'),
('noexpr', '^[nN].*')]),
('wae_CH',
[('abday', ('Sun', 'Män', 'Zis', 'Mit', 'Fro', 'Fri', 'Sam')),
('day', ('Suntag', 'Mäntag', 'Zischtag', 'Mittwuch', 'Frontag', 'Fritag', 'Samschtag')),
('abmon', ('Jen', 'Hor', 'Mär', 'Abr', 'Mei', 'Brá', 'Hei', 'Öig', 'Her', 'Wím', 'Win', 'Chr')),
('mon', ('Jener', 'Hornig', 'Märze', 'Abrille', 'Meije', 'Bráchet', 'Heiwet', 'Öigschte', 'Herbschtmánet', 'Wímánet', 'Wintermánet', 'Chrischtmánet')),
('d_t_fmt', '%a %d. %b %Y %T %Z'),
('d_fmt', '%Y-%m-%d'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', "'"),
('yesexpr', '^[jJyY].*'),
('noexpr', '^[nN].*')]),
('wal_ET',
[('abday', ('ወጋ ', 'ሳይኖ', 'ማቆሳ', 'አሩዋ', 'ሃሙሳ', 'አርባ', 'ቄራ ')),
('day', ('ወጋ', 'ሳይኖ', 'ማቆሳኛ', 'አሩዋ', 'ሃሙሳ', 'አርባ', 'ቄራ')),
('abmon', ('ጃንዩ', 'ፌብሩ', 'ማርች', 'ኤፕረ', 'ሜይ ', 'ጁን ', 'ጁላይ', 'ኦገስ', 'ሴፕቴ', 'ኦክተ', 'ኖቬም', 'ዲሴም')),
('mon', ('ጃንዩወሪ', 'ፌብሩወሪ', 'ማርች', 'ኤፕረል', 'ሜይ', 'ጁን', 'ጁላይ', 'ኦገስት', 'ሴፕቴምበር', 'ኦክተውበር', 'ኖቬምበር', 'ዲሴምበር')),
('d_t_fmt', '%A፣ %B %e ጋላሳ %Y %r %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%l:%M:%S'),
('t_fmt_ampm', '%X %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY].*'),
('noexpr', '^[nN].*')]),
('wo_SN',
[('abday', ('dib', 'alt', 'tal', 'all', 'alx', 'ajj', 'gaa')),
('day', ("dib'eer", 'altine', 'talaata', 'allarba', 'alxames', 'ajjuma', 'gaawu')),
('abmon', ('san', 'fee', 'mar', 'awr', 'me ', 'suw', 'sul', 'uut', 'sep', 'okt', 'now', 'des')),
('mon', ("sanwiy'e", "feebriy'e", 'mars', 'awril', 'me', 'suwen', 'sulet', 'uut', 'septaambar', 'oktoobar', 'nowaambar', 'desaambar')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d.%m.%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', ','),
('thousep', ''),
('yesexpr', '^[wWyY].*'),
('noexpr', '^[dDnN].*')]),
('xh_ZA',
[('abday', ('Caw', 'Mvu', 'Bin', 'Tha', 'Sin', 'Hla', 'Mgq')),
('day', ('iCawa', 'uMvulo', 'lwesiBini', 'lwesiThathu', 'ulweSine', 'lwesiHlanu', 'uMgqibelo')),
('abmon', ('Mqu', 'Mdu', 'Kwi', 'Tsh', 'Can', 'Sil', 'Kha', 'Thu', 'Msi', 'Dwa', 'Nka', 'Mng')),
('mon',
('eyoMqungu',
'eyoMdumba',
'eyoKwindla',
'uTshazimpuzi',
'uCanzibe',
'eyeSilimela',
'eyeKhala',
'eyeThupa',
'eyoMsintsi',
'eyeDwarha',
'eyeNkanga',
'eyoMnga')),
('d_t_fmt', '%a %-e %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yYeE]'),
('noexpr', '^[nNhH]')]),
('yi_US',
[('abday', ("זונ'", "מאָנ'", "דינ'", "מיט'", "דאָנ'", "פֿרײַ'", 'שבת')),
('day', ('זונטיק', 'מאָנטיק', 'דינסטיק', 'מיטװאָך', 'דאָנערשטיק', 'פֿרײַטיק', 'שבת')),
('abmon', ('יאַנ', 'פֿעב', 'מאַר', 'אַפּר', 'מײַ ', 'יונ', 'יול', 'אױג', 'סעפּ', 'אָקט', 'נאָװ', 'דעצ')),
('mon', ('יאַנואַר', 'פֿעברואַר', 'מאַרץ', 'אַפּריל', 'מײַ', 'יוני', 'יולי', 'אױגוסט', 'סעפּטעמבער', 'אָקטאָבער', 'נאָװעמבער', 'דעצעמבער')),
('d_t_fmt', '%Z %H:%M:%S %Y %b %d %a'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%H:%M:%S'),
('t_fmt_ampm', '%I:%M:%S %P'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[Yyי].*'),
('noexpr', '^[Nnנק].*')]),
('yo_NG',
[('abday', ('SUN', 'MON', 'TUE', 'WED', 'THU', 'FRI', 'SAT')),
('day', ('Àìkú', 'Ajé', 'Ìṣẹ́gun', 'Ọjọ́rú', 'Ọjọ́bọ̀', 'Ẹ̀tì', 'Àbámẹ́ta')),
('abmon', ('JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC')),
('mon', ('Jánúárì', 'Fẹ́búárì', 'Máàṣì', 'Épírì', 'Méè', 'Júùnù', 'Júláì', 'Ọ́ọ́gọsì', 'Sẹ̀tẹ̀ńbà', 'Ọtóbà', 'Nòfẹ̀ńbà', 'Dìsẹ̀ńbà')),
('d_t_fmt', 'ọjọ́ %A, %d oṣù %B ọdún %Y %T %Z'),
('d_fmt', '%d/%m/%y'),
('t_fmt', '%r'),
('t_fmt_ampm', '%I:%M:%S %p'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[EeyY].*'),
('noexpr', '^[rROoKkNn].*')]),
('yue_HK',
[('abday', ('日', '一', '二', '三', '四', '五', '六')),
('day', ('星期日', '星期一', '星期二', '星期三', '星期四', '星期五', '星期六')),
('abmon', ('1月', '2月', '3月', '4月', '5月', '6月', '7月', '8月', '9月', '10月', '11月', '12月')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '%Y年%m月%d日 %A %H點%M分%S秒'),
('d_fmt', '%Y年%m月%d日 %A'),
('t_fmt', '%H點%M分%S秒'),
('t_fmt_ampm', '%p%I點%M分%S秒'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('zh_CN',
[('abday', ('日', '一', '二', '三', '四', '五', '六')),
('day', ('星期日', '星期一', '星期二', '星期三', '星期四', '星期五', '星期六')),
('abmon', ('1月', '2月', '3月', '4月', '5月', '6月', '7月', '8月', '9月', '10月', '11月', '12月')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '%Y年%m月%d日 %A %H时%M分%S秒'),
('d_fmt', '%Y年%m月%d日'),
('t_fmt', '%H时%M分%S秒'),
('t_fmt_ampm', '%p %I时%M分%S秒'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY是]'),
('noexpr', '^[nN不否]')]),
('zh_HK',
[('abday', ('日', '一', '二', '三', '四', '五', '六')),
('day', ('星期日', '星期一', '星期二', '星期三', '星期四', '星期五', '星期六')),
('abmon', ('1月', '2月', '3月', '4月', '5月', '6月', '7月', '8月', '9月', '10月', '11月', '12月')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '%Y年%m月%d日 %A %H:%M:%S'),
('d_fmt', '%Y年%m月%d日 %A'),
('t_fmt', '%I時%M分%S秒 %Z'),
('t_fmt_ampm', '%p %I:%M:%S'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY是]'),
('noexpr', '^[nN不否]')]),
('zh_SG',
[('abday', ('星期日', '星期一', '星期二', '星期三', '星期四', '星期五', '星期六')),
('day', ('星期日', '星期一', '星期二', '星期三', '星期四', '星期五', '星期六')),
('abmon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '%Y年%m月%d日 %H时%M分%S秒 %Z'),
('d_fmt', '%Y年%m月%d日'),
('t_fmt', '%H时%M分%S秒 %Z'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nN]')]),
('zh_TW',
[('abday', ('日', '一', '二', '三', '四', '五', '六')),
('day', ('週日', '週一', '週二', '週三', '週四', '週五', '週六')),
('abmon', (' 1月', ' 2月', ' 3月', ' 4月', ' 5月', ' 6月', ' 7月', ' 8月', ' 9月', '10月', '11月', '12月')),
('mon', ('一月', '二月', '三月', '四月', '五月', '六月', '七月', '八月', '九月', '十月', '十一月', '十二月')),
('d_t_fmt', '西元%Y年%m月%d日 (%A) %H時%M分%S秒'),
('d_fmt', '西元%Y年%m月%d日'),
('t_fmt', '%H時%M分%S秒'),
('t_fmt_ampm', '%p %I時%M分%S秒'),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY是]'),
('noexpr', '^[nN不否]')]),
('zu_ZA',
[('abday', ('Son', 'Mso', 'Bil', 'Tha', 'Sin', 'Hla', 'Mgq')),
('day', ('iSonto', 'uMsombuluko', 'uLwesibili', 'uLwesithathu', 'uLwesine', 'uLwesihlanu', 'uMgqibelo')),
('abmon', ('Mas', 'Ola', 'Nda', 'Mba', 'Aba', 'Ang', 'Ntu', 'Ncw', 'Man', 'Mfu', 'Lwe', 'Zib')),
('mon',
('uMasingana', 'uNhlolanja', 'uNdasa', 'uMbasa', 'uNhlaba', 'uNhlangulana', 'uNtulikazi', 'uNcwaba', 'uMandulo', 'uMfumfu', 'uLwezi', 'uZibandlela')),
('d_t_fmt', '%a %d %b %Y %T %Z'),
('d_fmt', '%d/%m/%Y'),
('t_fmt', '%T'),
('t_fmt_ampm', ''),
('radixchar', '.'),
('thousep', ','),
('yesexpr', '^[yY]'),
('noexpr', '^[nNcC]')])]
# }}}
|
ashang/calibre
|
setup/lc_data.py
|
Python
|
gpl-3.0
| 203,677
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.naming import make_autoname
from frappe import _, msgprint, throw
import frappe.defaults
from frappe.utils import flt
from erpnext.utilities.transaction_base import TransactionBase
from erpnext.utilities.address_and_contact import load_address_and_contact
class Customer(TransactionBase):
def get_feed(self):
return self.customer_name
def onload(self):
"""Load address and contacts in `__onload`"""
load_address_and_contact(self, "customer")
def autoname(self):
cust_master_name = frappe.defaults.get_global_default('cust_master_name')
if cust_master_name == 'Customer Name':
self.name = self.customer_name
else:
self.name = make_autoname(self.naming_series+'.#####')
def validate_values(self):
if frappe.defaults.get_global_default('cust_master_name') == 'Naming Series' and not self.naming_series:
frappe.throw(_("Series is mandatory"), frappe.MandatoryError)
def validate(self):
self.validate_values()
def update_lead_status(self):
if self.lead_name:
frappe.db.sql("update `tabLead` set status='Converted' where name = %s", self.lead_name)
def update_address(self):
frappe.db.sql("""update `tabAddress` set customer_name=%s, modified=NOW()
where customer=%s""", (self.customer_name, self.name))
def update_contact(self):
frappe.db.sql("""update `tabContact` set customer_name=%s, modified=NOW()
where customer=%s""", (self.customer_name, self.name))
def create_lead_address_contact(self):
if self.lead_name:
if not frappe.db.get_value("Address", {"lead": self.lead_name, "customer": self.name}):
frappe.db.sql("""update `tabAddress` set customer=%s, customer_name=%s where lead=%s""",
(self.name, self.customer_name, self.lead_name))
lead = frappe.db.get_value("Lead", self.lead_name, ["lead_name", "email_id", "phone", "mobile_no"], as_dict=True)
c = frappe.new_doc('Contact')
c.first_name = lead.lead_name
c.email_id = lead.email_id
c.phone = lead.phone
c.mobile_no = lead.mobile_no
c.customer = self.name
c.customer_name = self.customer_name
c.is_primary_contact = 1
c.flags.ignore_permissions = self.flags.ignore_permissions
c.autoname()
if not frappe.db.exists("Contact", c.name):
c.insert()
def on_update(self):
self.validate_name_with_customer_group()
self.update_lead_status()
self.update_address()
self.update_contact()
self.create_lead_address_contact()
def validate_name_with_customer_group(self):
if frappe.db.exists("Customer Group", self.name):
frappe.throw(_("A Customer Group exists with same name please change the Customer name or rename the Customer Group"), frappe.NameError)
def delete_customer_address(self):
addresses = frappe.db.sql("""select name, lead from `tabAddress`
where customer=%s""", (self.name,))
for name, lead in addresses:
if lead:
frappe.db.sql("""update `tabAddress` set customer=null, customer_name=null
where name=%s""", name)
else:
frappe.db.sql("""delete from `tabAddress` where name=%s""", name)
def delete_customer_contact(self):
for contact in frappe.db.sql_list("""select name from `tabContact`
where customer=%s""", self.name):
frappe.delete_doc("Contact", contact)
def on_trash(self):
    """Frappe delete hook: clean up linked addresses/contacts and revert
    the originating Lead (if any) back to 'Interested'."""
    self.delete_customer_address()
    self.delete_customer_contact()
    if self.lead_name:
        frappe.db.sql("update `tabLead` set status='Interested' where name=%s",self.lead_name)
def after_rename(self, olddn, newdn, merge=False):
    """Frappe rename hook: when the naming scheme is 'Customer Name',
    follow the renamed document ID with the display name, then refresh
    linked addresses (frappe has already rewritten link fields to newdn)."""
    set_field = ''
    if frappe.defaults.get_global_default('cust_master_name') == 'Customer Name':
        frappe.db.set(self, "customer_name", newdn)
        self.update_contact()
        set_field = ", customer_name=%(newdn)s"
    self.update_customer_address(newdn, set_field)
def update_customer_address(self, newdn, set_field):
    """Retitle (and optionally rename, via *set_field*) addresses linked to
    this customer after a rename.  The WHERE clause matches *newdn* because
    frappe updates link fields before calling after_rename."""
    frappe.db.sql("""update `tabAddress` set address_title=%(newdn)s
        {set_field} where customer=%(newdn)s"""\
        .format(set_field=set_field), ({"newdn": newdn}))
@frappe.whitelist()
def get_dashboard_info(customer):
    """Return dashboard summary data for *customer*: per-doctype document
    counts, this fiscal year's billing, total unpaid amount and the list of
    company currencies.  Raises on missing read permission."""
    if not frappe.has_permission("Customer", "read", customer):
        frappe.msgprint(_("Not permitted"), raise_exception=True)
    out = {}
    # Count non-cancelled (docstatus != 2) documents per linked doctype.
    for doctype in ["Opportunity", "Quotation", "Sales Order", "Delivery Note", "Sales Invoice"]:
        out[doctype] = frappe.db.get_value(doctype,
            {"customer": customer, "docstatus": ["!=", 2] }, "count(*)")
    billing_this_year = frappe.db.sql("""select sum(base_grand_total)
        from `tabSales Invoice`
        where customer=%s and docstatus = 1 and fiscal_year = %s""",
        (customer, frappe.db.get_default("fiscal_year")))
    total_unpaid = frappe.db.sql("""select sum(outstanding_amount)
        from `tabSales Invoice`
        where customer=%s and docstatus = 1""", customer)
    out["billing_this_year"] = billing_this_year[0][0] if billing_this_year else 0
    out["total_unpaid"] = total_unpaid[0][0] if total_unpaid else 0
    out["company_currency"] = frappe.db.sql_list("select distinct default_currency from tabCompany")
    return out
def get_customer_list(doctype, txt, searchfield, start, page_len, filters):
    """Autocomplete query: return customers whose *searchfield* or
    customer_name matches *txt*, exact-prefix matches first.

    Security fix: *searchfield* arrives from the client and is interpolated
    directly into the SQL (it cannot be a bind parameter, being a column
    name), so it is now whitelisted to identifier characters to prevent
    SQL injection.
    """
    if not searchfield or not all(ch.isalnum() or ch == "_" for ch in searchfield):
        frappe.throw(_("Invalid search field"))
    if frappe.db.get_default("cust_master_name") == "Customer Name":
        fields = ["name", "customer_group", "territory"]
    else:
        fields = ["name", "customer_name", "customer_group", "territory"]
    # txt is passed as bind parameters; only vetted identifiers are
    # interpolated into the statement text.
    return frappe.db.sql("""select %s from `tabCustomer` where docstatus < 2
        and (%s like %s or customer_name like %s) order by
        case when name like %s then 0 else 1 end,
        case when customer_name like %s then 0 else 1 end,
        name, customer_name limit %s, %s""" %
        (", ".join(fields), searchfield, "%s", "%s", "%s", "%s", "%s", "%s"),
        ("%%%s%%" % txt, "%%%s%%" % txt, "%%%s%%" % txt, "%%%s%%" % txt, start, page_len))
def check_credit_limit(customer, company):
    """Warn when the customer's outstanding exceeds the credit limit and,
    unless the session user holds the configured credit-controller role,
    abort with an exception."""
    customer_outstanding = get_customer_outstanding(customer, company)
    credit_limit = get_credit_limit(customer, company)
    if credit_limit > 0 and flt(customer_outstanding) > credit_limit:
        msgprint(_("Credit limit has been crossed for customer {0} {1}/{2}")
            .format(customer, customer_outstanding, credit_limit))
        # If not authorized person raise exception
        credit_controller = frappe.db.get_value('Accounts Settings', None, 'credit_controller')
        if not credit_controller or credit_controller not in frappe.get_roles():
            throw(_("Please contact to the user who have Sales Master Manager {0} role")
                .format(" / " + credit_controller if credit_controller else ""))
def get_customer_outstanding(customer, company):
    """Return the customer's total exposure for *company*: booked ledger
    balance plus the unbilled portions of open Sales Orders and of Delivery
    Notes not linked to any order/invoice."""
    # Outstanding based on GL Entries
    outstanding_based_on_gle = frappe.db.sql("""select sum(ifnull(debit, 0)) - sum(ifnull(credit, 0))
        from `tabGL Entry` where party_type = 'Customer' and party = %s and company=%s""", (customer, company))
    outstanding_based_on_gle = flt(outstanding_based_on_gle[0][0]) if outstanding_based_on_gle else 0
    # Outstanding based on Sales Order
    # (unbilled percentage of each submitted, non-stopped order)
    outstanding_based_on_so = frappe.db.sql("""
        select sum(base_grand_total*(100 - ifnull(per_billed, 0))/100)
        from `tabSales Order`
        where customer=%s and docstatus = 1 and company=%s
        and ifnull(per_billed, 0) < 100 and status != 'Stopped'""", (customer, company))
    outstanding_based_on_so = flt(outstanding_based_on_so[0][0]) if outstanding_based_on_so else 0.0
    # Outstanding based on Delivery Note
    # (delivered-but-uninvoiced amount, scaled from net to grand total; only
    # rows not already counted via a sales order or invoice link)
    outstanding_based_on_dn = frappe.db.sql("""
        select
        sum(
            (
                (ifnull(dn_item.amount, 0) - ifnull((select sum(ifnull(amount, 0))
                    from `tabSales Invoice Item`
                    where ifnull(dn_detail, '') = dn_item.name and docstatus = 1), 0)
                )/dn.base_net_total
            )*dn.base_grand_total
        )
        from `tabDelivery Note` dn, `tabDelivery Note Item` dn_item
        where
        dn.name = dn_item.parent and dn.customer=%s and dn.company=%s
        and dn.docstatus = 1 and dn.status != 'Stopped'
        and ifnull(dn_item.against_sales_order, '') = ''
        and ifnull(dn_item.against_sales_invoice, '') = ''
        and ifnull(dn_item.amount, 0) > ifnull((select sum(ifnull(amount, 0))
            from `tabSales Invoice Item`
            where ifnull(dn_detail, '') = dn_item.name and docstatus = 1), 0)""", (customer, company))
    outstanding_based_on_dn = flt(outstanding_based_on_dn[0][0]) if outstanding_based_on_dn else 0.0
    return outstanding_based_on_gle + outstanding_based_on_so + outstanding_based_on_dn
def get_credit_limit(customer, company):
    """Return the effective credit limit: the Customer's own value, falling
    back to its Customer Group's, then to the Company default."""
    credit_limit, customer_group = frappe.db.get_value("Customer", customer, ["credit_limit", "customer_group"])
    if not credit_limit:
        credit_limit = frappe.db.get_value("Customer Group", customer_group, "credit_limit") or \
            frappe.db.get_value("Company", company, "credit_limit")
    return credit_limit
|
treejames/erpnext
|
erpnext/selling/doctype/customer/customer.py
|
Python
|
agpl-3.0
| 8,704
|
from pkg3 import B
A<caret>
|
siosio/intellij-community
|
python/testData/completion/className/alreadyImportedModulesPreference/alreadyImportedModulesPreference.py
|
Python
|
apache-2.0
| 28
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import codecs
import numpy as np
import re
import itertools
from collections import Counter
import os
# from gensim.models import word2vec
def clean_str(string):
    """
    Tokenization/string cleaning for all datasets except for SST.
    Original taken from https://github.com/yoonkim/CNN_sentence/blob/master/process_data.py

    Splits punctuation and clitics ('s, n't, 've, ...) into separate tokens
    and lower-cases the result.  Parentheses and '?' become escaped tokens
    (``\\(``, ``\\)``, ``\\?``), matching the original preprocessing.

    Fixed: literals like " \\( " were written without raw strings, which are
    invalid escape sequences (DeprecationWarning, and a SyntaxWarning on
    newer Pythons).  Raw strings / plain replacements now produce the exact
    same output warning-free.
    """
    string = re.sub(r"[^A-Za-z0-9(),!?'`]", " ", string)
    string = re.sub(r"'s", " 's", string)
    string = re.sub(r"'ve", " 've", string)
    string = re.sub(r"n't", " n't", string)
    string = re.sub(r"'re", " 're", string)
    string = re.sub(r"'d", " 'd", string)
    string = re.sub(r"'ll", " 'll", string)
    string = re.sub(r",", " , ", string)
    string = re.sub(r"!", " ! ", string)
    # The raw replacement r" \( " keeps the backslash in the output token,
    # exactly as the original (non-raw) literal did.
    string = re.sub(r"\(", r" \( ", string)
    string = re.sub(r"\)", r" \) ", string)
    string = re.sub(r"\?", r" \? ", string)
    string = re.sub(r"\s{2,}", " ", string)
    return string.strip().lower()
def get_chinese_text():
    """Download and unpack the Chinese text corpus into ./data if missing.

    Fixed: the presence check tested 'data/neg' while the unpacked file is
    named 'data/neg.txt', so the archive was re-downloaded and re-unzipped
    on every call.  Also uses os.makedirs instead of shelling out to mkdir.
    """
    if not os.path.isdir("data/"):
        os.makedirs("data/")
    if (not os.path.exists('data/pos.txt')) or \
       (not os.path.exists('data/neg.txt')):
        os.system("wget -q https://raw.githubusercontent.com/dmlc/web-data/master/mxnet/example/chinese_text.zip -P data/")
        os.chdir("./data")
        os.system("unzip -u chinese_text.zip")
        os.chdir("..")
def load_data_and_labels():
    """
    Loads MR polarity data from files, splits the data into words and generates labels.
    Returns split sentences and labels.

    Returns [x_text, y] where x_text is a list of character lists (each
    review split into characters) and y is a (n, 2) one-hot label array
    ([0, 1] = positive, [1, 0] = negative).
    """
    # download dataset
    get_chinese_text()
    # Load data from files; reviews of 100+ characters are dropped.
    positive_examples = list(codecs.open("./data/pos.txt", "r", "utf-8").readlines())
    positive_examples = [s.strip() for s in positive_examples]
    positive_examples = [pe for pe in positive_examples if len(pe) < 100]
    negative_examples = list(codecs.open("./data/neg.txt", "r", "utf-8").readlines())
    negative_examples = [s.strip() for s in negative_examples]
    negative_examples = [ne for ne in negative_examples if len(ne) < 100]
    # Split by words
    x_text = positive_examples + negative_examples
    # x_text = [clean_str(sent) for sent in x_text]
    # Chinese text: tokenize per character rather than per word.
    x_text = [list(s) for s in x_text]
    # Generate labels
    positive_labels = [[0, 1] for _ in positive_examples]
    negative_labels = [[1, 0] for _ in negative_examples]
    y = np.concatenate([positive_labels, negative_labels], 0)
    return [x_text, y]
def pad_sentences(sentences, padding_word="</s>"):
    """Right-pad every sentence with *padding_word* up to the length of the
    longest sentence, and return the padded copies (inputs untouched)."""
    target_len = max(len(sentence) for sentence in sentences)
    return [
        sentence + [padding_word] * (target_len - len(sentence))
        for sentence in sentences
    ]
def build_vocab(sentences):
    """Build a vocabulary from *sentences*.

    Returns [word->index dict, index->word list], with words ordered by
    descending frequency (ties in Counter.most_common order).
    """
    frequencies = Counter(itertools.chain.from_iterable(sentences))
    index_to_word = [word for word, _count in frequencies.most_common()]
    word_to_index = dict((word, idx) for idx, word in enumerate(index_to_word))
    return [word_to_index, index_to_word]
def build_input_data(sentences, labels, vocabulary):
    """Convert word sentences and labels to index arrays using *vocabulary*.

    Returns [x, y]: x is an array of per-sentence index sequences, y the
    labels as an array.  Raises KeyError on out-of-vocabulary words.
    """
    indexed = [[vocabulary[word] for word in sentence] for sentence in sentences]
    return [np.array(indexed), np.array(labels)]
def build_input_data_with_word2vec(sentences, labels, word2vec):
    """Map sentences and labels to vectors based on a pretrained word2vec.

    Out-of-vocabulary words fall back to the '</s>' vector; that lookup
    only happens when an unknown word is actually encountered.
    """
    vectors = [
        [word2vec[word] if word in word2vec else word2vec['</s>'] for word in sentence]
        for sentence in sentences
    ]
    return [np.array(vectors), np.array(labels)]
def load_data_with_word2vec(word2vec):
    """
    Loads and preprocessed data for the MR dataset.
    Returns input vectors, labels, vocabulary, and inverse vocabulary.

    Note: despite the docstring above (kept from the sibling load_data),
    this variant returns only [x_vec, y_vec] built from the pretrained
    *word2vec* mapping; no vocabulary is constructed.
    """
    # Load and preprocess data (downloads the corpus on first use)
    sentences, labels = load_data_and_labels()
    sentences_padded = pad_sentences(sentences)
    # vocabulary, vocabulary_inv = build_vocab(sentences_padded)
    return build_input_data_with_word2vec(sentences_padded, labels, word2vec)
def load_data():
    """
    Loads and preprocessed data for the MR dataset.
    Returns input vectors, labels, vocabulary, and inverse vocabulary.

    Returns [x, y, vocabulary, vocabulary_inv] where x holds padded index
    sequences and y the one-hot labels.
    """
    # Load and preprocess data (downloads the corpus on first use)
    sentences, labels = load_data_and_labels()
    sentences_padded = pad_sentences(sentences)
    vocabulary, vocabulary_inv = build_vocab(sentences_padded)
    x, y = build_input_data(sentences_padded, labels, vocabulary)
    return [x, y, vocabulary, vocabulary_inv]
def batch_iter(data, batch_size, num_epochs):
    """
    Generates a batch iterator for a dataset.

    Yields *num_epochs* passes over *data*, reshuffling before each pass and
    yielding slices of at most *batch_size* elements.

    Fixed: the batch count is now ceil(len(data) / batch_size) rather than
    int(len(data) / batch_size) + 1, which yielded a spurious empty batch
    whenever len(data) was an exact multiple of batch_size (and one empty
    batch per epoch for empty data).
    """
    data = np.array(data)
    data_size = len(data)
    num_batches_per_epoch = (data_size + batch_size - 1) // batch_size
    for epoch in range(num_epochs):
        # Shuffle the data at each epoch
        shuffle_indices = np.random.permutation(np.arange(data_size))
        shuffled_data = data[shuffle_indices]
        for batch_num in range(num_batches_per_epoch):
            start_index = batch_num * batch_size
            end_index = min((batch_num + 1) * batch_size, data_size)
            yield shuffled_data[start_index:end_index]
def load_pretrained_word2vec(infile):
    """Parse a text-format word2vec file into {word: [float, ...]}.

    *infile* may be a path or an open file-like object.  The first line is
    the "<vocab_size> <dim>" header and is skipped.

    Fixed: vectors were stored as lazy ``map`` objects (single-use
    iterators on Python 3) — they are now materialized as lists of floats —
    and a file opened here is reliably closed.
    """
    opened_here = isinstance(infile, str)
    if opened_here:
        infile = open(infile)
    try:
        word2vec = {}
        for idx, line in enumerate(infile):
            tokens = line.strip().split()
            if idx == 0:
                # Header line; values are informational and not validated.
                continue
            word2vec[tokens[0]] = [float(t) for t in tokens[1:]]
        return word2vec
    finally:
        if opened_here:
            infile.close()
def load_google_word2vec(path):
    """Load a binary (Google news) word2vec model via gensim.

    NOTE(review): the ``from gensim.models import word2vec`` import at the
    top of this file is commented out, so calling this currently raises
    NameError until that import is restored.
    """
    model = word2vec.Word2Vec.load_word2vec_format(path, binary=True)
    return model
|
kkk669/mxnet
|
example/cnn_chinese_text_classification/data_helpers.py
|
Python
|
apache-2.0
| 7,021
|
import unittest
import textwrap
import antlr3
import testbase
class T(testbase.ANTLRTest):
    """Regression test: parsing 'public fooze' with the generated grammar
    must complete without raising (success is the absence of an exception)."""
    def testbug(self):
        # Compile the grammar that sits next to this test module, wire up
        # lexer -> token stream -> parser, and invoke rule r().
        self.compileGrammar()
        cStream = antlr3.StringStream("public fooze")
        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        parser.r()
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
pballand/congress
|
thirdparty/antlr3-antlr-3.5/runtime/Python3/tests/t045dfabug.py
|
Python
|
apache-2.0
| 409
|
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import copy
import json
from ansible import constants as C
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connection
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils.network.common.utils import load_provider
from ansible.module_utils.network.ironware.ironware import ironware_provider_spec
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
    """Action plugin for ironware modules using the legacy ``provider``
    argument: translates the provider dict into a persistent network_cli
    connection, opens it, and then delegates to the normal action module."""

    def run(self, tmp=None, task_vars=None):
        del tmp  # tmp no longer has any effect
        # provider-style tasks must be declared connection=local; the real
        # device connection is built below from the provider values.
        if self._play_context.connection != 'local':
            return dict(
                failed=True,
                msg='invalid connection specified, expected connection=local, '
                'got %s' % self._play_context.connection
            )
        provider = load_provider(ironware_provider_spec, self._task.args)
        # Clone the play context and rewrite it as a network_cli connection
        # to the device described by the provider (falling back to the
        # play-level values, then to defaults).
        pc = copy.deepcopy(self._play_context)
        pc.connection = 'network_cli'
        pc.network_os = 'ironware'
        pc.remote_addr = provider['host'] or self._play_context.remote_addr
        pc.port = int(provider['port'] or self._play_context.port or 22)
        pc.remote_user = provider['username'] or self._play_context.connection_user
        pc.password = provider['password'] or self._play_context.password
        pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
        pc.timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT)
        pc.become = provider['authorize'] or False
        if pc.become:
            pc.become_method = 'enable'
            pc.become_pass = provider['auth_pass']
        display.vvv('using connection plugin %s (was local)' % pc.connection, pc.remote_addr)
        # Open (or reuse) the persistent connection; it hands back a local
        # socket path the modules talk through.
        connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
        socket_path = connection.run()
        display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
        if not socket_path:
            return {'failed': True,
                    'msg': 'unable to open shell. Please see: ' +
                           'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'}
        # make sure we are in the right cli context which should be
        # enable mode and not config module
        conn = Connection(socket_path)
        out = conn.get_prompt()
        if to_text(out, errors='surrogate_then_replace').strip().endswith(')#'):
            display.vvvv('wrong context, sending end to device', self._play_context.remote_addr)
            conn.send_command('end')
        task_vars['ansible_socket'] = socket_path
        # become was already handled when the connection was created; clear
        # it so the superclass does not try to escalate again.
        if self._play_context.become_method == 'enable':
            self._play_context.become = False
            self._play_context.become_method = None
        result = super(ActionModule, self).run(task_vars=task_vars)
        return result
|
mheap/ansible
|
lib/ansible/plugins/action/ironware.py
|
Python
|
gpl-3.0
| 3,772
|
class SingleAPISpec:
    """
    Convenience class used to help DataverseAPILink when making API functions
    """
    # Positional meaning of each element in the spec list; the 6th
    # (use_param_dict) is optional.
    ATTR_NAMES = ['new_function_name', 'name', 'url_path', 'use_api_key', 'num_id_vals', 'use_param_dict']
    # Placeholder in url_path that gets substituted with caller-supplied ids.
    URL_PLACEHOLDER = '{{ID_VAL}}'

    def __init__(self, spec_list):
        # spec_list: 5 or 6 values matching ATTR_NAMES, in order.
        if not type(spec_list) in (list, tuple):
            raise Exception('Bad spec.  Expected list or tuple.\nReceived: %s' % type(spec_list))
        num_params = len(spec_list)
        if not num_params in (5, 6):
            raise Exception('Expected 5 or 6 values.\nReceived: %s' % spec_list)
        # Lazy way to add attributes
        for idx, attr in enumerate(self.ATTR_NAMES):
            if (idx) == num_params:
                self.__dict__[attr] = None  # only 5 params given, param_dict not needed
            else:
                self.__dict__[attr] = spec_list[idx]
                # e.g., 1st iteration is equivalent of "self.new_function_name = spec_list[0]"

    def get_code_str(self, dv_link_function_to_call='make_api_get_call'):
        """
        Used to create functions within the DataverseAPILink class

        Returns Python 2 source text for a method that substitutes *args
        into url_path (replacing URL_PLACEHOLDER occurrences) and forwards
        to *dv_link_function_to_call*; the param_dict variant passes the
        caller's params dict through as well.
        """
        if self.use_param_dict is True:
            # call_name, url_path, use_api_key=False, id_val=None, params_dict={}
            code_str = """
def %s(self, param_dict, *args):
    url_path = '%s'
    if args:
        for val in args:
            if not type(val) in (str, unicode):
                val = `val`
            url_path = url_path.replace('%s', val, 1)
    #url_path += '/' + str(id_val)
    print 'OK!'
    print 'param_dict', param_dict
    return self.%s('%s', url_path, %s, None, param_dict)""" \
                % (self.new_function_name\
                , self.url_path
                , SingleAPISpec.URL_PLACEHOLDER
                , dv_link_function_to_call
                , self.name
                , self.use_api_key)
        else:
            code_str = """
def %s(self, *args):
    url_path = '%s'
    if args:
        for val in args:
            if not type(val) in (str, unicode):
                val = `val`
            url_path = url_path.replace('%s', val, 1)
    #url_path += '/' + str(id_val)
    return self.%s('%s', url_path, %s)""" \
                % (self.new_function_name\
                , self.url_path
                , SingleAPISpec.URL_PLACEHOLDER
                , dv_link_function_to_call
                , self.name
                , self.use_api_key)
        # Debug aid: echoes the generated source (Python 2 print statement).
        print code_str
        return code_str
|
bmckinney/dataverse-canonical
|
scripts/api/py_api_wrapper/single_api_spec.py
|
Python
|
apache-2.0
| 2,768
|
"""Default tags used by the template system, available to all templates."""
import os
import sys
import re
from datetime import datetime
from itertools import groupby, cycle as itertools_cycle
from django.conf import settings
from django.template.base import (Node, NodeList, Template, Library,
TemplateSyntaxError, VariableDoesNotExist, InvalidTemplateLibrary,
BLOCK_TAG_START, BLOCK_TAG_END, VARIABLE_TAG_START, VARIABLE_TAG_END,
SINGLE_BRACE_START, SINGLE_BRACE_END, COMMENT_TAG_START, COMMENT_TAG_END,
VARIABLE_ATTRIBUTE_SEPARATOR, get_library, token_kwargs, kwarg_re)
from django.template.smartif import IfParser, Literal
from django.template.defaultfilters import date
from django.utils.encoding import smart_str, smart_unicode
from django.utils.safestring import mark_safe
from django.utils import timezone
register = Library()
class AutoEscapeControlNode(Node):
    """Implements the actions of the autoescape tag."""
    def __init__(self, setting, nodelist):
        # setting: True for {% autoescape on %}, False for off.
        self.setting, self.nodelist = setting, nodelist

    def render(self, context):
        # Override the context's autoescape flag while the enclosed nodes
        # render, then restore the previous value.
        old_setting = context.autoescape
        context.autoescape = self.setting
        output = self.nodelist.render(context)
        context.autoescape = old_setting
        if self.setting:
            # Already escaped inside the block; prevent double-escaping.
            return mark_safe(output)
        else:
            return output
class CommentNode(Node):
    """Renders nothing; backs the {% comment %} tag."""
    def render(self, context):
        return ''
class CsrfTokenNode(Node):
    """Renders the hidden CSRF input for {% csrf_token %}."""
    def render(self, context):
        csrf_token = context.get('csrf_token', None)
        if csrf_token:
            if csrf_token == 'NOTPROVIDED':
                # Sentinel: CSRF deliberately disabled for this response.
                return mark_safe(u"")
            else:
                return mark_safe(u"<div style='display:none'><input type='hidden' name='csrfmiddlewaretoken' value='%s' /></div>" % csrf_token)
        else:
            # It's very probable that the token is missing because of
            # misconfiguration, so we raise a warning
            from django.conf import settings
            if settings.DEBUG:
                import warnings
                warnings.warn("A {% csrf_token %} was used in a template, but the context did not provide the value.  This is usually caused by not using RequestContext.")
            return u''
class CycleNode(Node):
    """Yields successive values from a list on each render; backs {% cycle %}."""
    def __init__(self, cyclevars, variable_name=None, silent=False):
        self.cyclevars = cyclevars
        self.variable_name = variable_name
        self.silent = silent

    def render(self, context):
        if self not in context.render_context:
            # First time the node is rendered in template
            # (render_context keeps the iterator per-render, so concurrent
            # renders of the same template don't share position)
            context.render_context[self] = itertools_cycle(self.cyclevars)
        cycle_iter = context.render_context[self]
        value = cycle_iter.next().resolve(context)
        if self.variable_name:
            # {% cycle ... as name %}: also expose the value in the context.
            context[self.variable_name] = value
        if self.silent:
            return ''
        return value
class DebugNode(Node):
    """Dumps the context contents and loaded modules; backs {% debug %}."""
    def render(self, context):
        from pprint import pformat
        output = [pformat(val) for val in context]
        output.append('\n\n')
        output.append(pformat(sys.modules))
        return ''.join(output)
class FilterNode(Node):
    """Pipes the rendered block through a filter chain; backs {% filter %}."""
    def __init__(self, filter_expr, nodelist):
        self.filter_expr, self.nodelist = filter_expr, nodelist

    def render(self, context):
        output = self.nodelist.render(context)
        # Apply filters.
        # The block's output is exposed as the variable 'var', which the
        # pre-built filter expression ("var|...") then resolves against.
        context.update({'var': output})
        filtered = self.filter_expr.resolve(context)
        context.pop()
        return filtered
class FirstOfNode(Node):
    """Renders the first truthy argument, or ''; backs {% firstof %}."""
    def __init__(self, vars):
        self.vars = vars

    def render(self, context):
        for var in self.vars:
            # ignore_failures=True: missing variables count as falsy.
            value = var.resolve(context, True)
            if value:
                return smart_unicode(value)
        return u''
class ForNode(Node):
    """Loops over a sequence, rendering nodelist_loop per item (or
    nodelist_empty when the sequence is empty); backs {% for %}."""
    child_nodelists = ('nodelist_loop', 'nodelist_empty')

    def __init__(self, loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty=None):
        self.loopvars, self.sequence = loopvars, sequence
        self.is_reversed = is_reversed
        self.nodelist_loop = nodelist_loop
        if nodelist_empty is None:
            self.nodelist_empty = NodeList()
        else:
            self.nodelist_empty = nodelist_empty

    def __repr__(self):
        reversed_text = self.is_reversed and ' reversed' or ''
        return "<For Node: for %s in %s, tail_len: %d%s>" % \
            (', '.join(self.loopvars), self.sequence, len(self.nodelist_loop),
             reversed_text)

    def __iter__(self):
        for node in self.nodelist_loop:
            yield node
        for node in self.nodelist_empty:
            yield node

    def render(self, context):
        # Remember the enclosing loop's forloop dict so it stays reachable
        # as forloop.parentloop inside this loop.
        if 'forloop' in context:
            parentloop = context['forloop']
        else:
            parentloop = {}
        context.push()
        try:
            values = self.sequence.resolve(context, True)
        except VariableDoesNotExist:
            values = []
        if values is None:
            values = []
        if not hasattr(values, '__len__'):
            # Generators etc. must be materialized: len() is needed below.
            values = list(values)
        len_values = len(values)
        if len_values < 1:
            context.pop()
            return self.nodelist_empty.render(context)
        nodelist = NodeList()
        if self.is_reversed:
            values = reversed(values)
        unpack = len(self.loopvars) > 1
        # Create a forloop value in the context.  We'll update counters on each
        # iteration just below.
        loop_dict = context['forloop'] = {'parentloop': parentloop}
        for i, item in enumerate(values):
            # Shortcuts for current loop iteration number.
            loop_dict['counter0'] = i
            loop_dict['counter'] = i+1
            # Reverse counter iteration numbers.
            loop_dict['revcounter'] = len_values - i
            loop_dict['revcounter0'] = len_values - i - 1
            # Boolean values designating first and last times through loop.
            loop_dict['first'] = (i == 0)
            loop_dict['last'] = (i == len_values - 1)
            pop_context = False
            if unpack:
                # If there are multiple loop variables, unpack the item into
                # them.
                try:
                    unpacked_vars = dict(zip(self.loopvars, item))
                except TypeError:
                    pass
                else:
                    pop_context = True
                    context.update(unpacked_vars)
            else:
                context[self.loopvars[0]] = item
            # In TEMPLATE_DEBUG mode provide source of the node which
            # actually raised the exception
            if settings.TEMPLATE_DEBUG:
                for node in self.nodelist_loop:
                    try:
                        nodelist.append(node.render(context))
                    except Exception, e:
                        if not hasattr(e, 'django_template_source'):
                            e.django_template_source = node.source
                        raise
            else:
                for node in self.nodelist_loop:
                    nodelist.append(node.render(context))
            if pop_context:
                # The loop variables were pushed on to the context so pop them
                # off again.  This is necessary because the tag lets the length
                # of loopvars differ to the length of each set of items and we
                # don't want to leave any vars from the previous loop on the
                # context.
                context.pop()
        context.pop()
        return nodelist.render(context)
class IfChangedNode(Node):
    """Renders its block only when the watched value(s) changed since the
    previous iteration; backs {% ifchanged %}.

    NOTE(review): the last-seen value is stored on the node instance itself,
    so concurrent renders of the same compiled template share it — confirm
    this is acceptable for the deployment.
    """
    child_nodelists = ('nodelist_true', 'nodelist_false')

    def __init__(self, nodelist_true, nodelist_false, *varlist):
        self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
        self._last_seen = None
        self._varlist = varlist
        self._id = str(id(self))

    def render(self, context):
        # Reset the memory on the first iteration of an enclosing for loop,
        # using a per-node marker stashed in the forloop dict.
        if 'forloop' in context and self._id not in context['forloop']:
            self._last_seen = None
            context['forloop'][self._id] = 1
        try:
            if self._varlist:
                # Consider multiple parameters.  This automatically behaves
                # like an OR evaluation of the multiple variables.
                compare_to = [var.resolve(context, True) for var in self._varlist]
            else:
                # No arguments: compare the rendered block output itself.
                compare_to = self.nodelist_true.render(context)
        except VariableDoesNotExist:
            compare_to = None
        if compare_to != self._last_seen:
            self._last_seen = compare_to
            content = self.nodelist_true.render(context)
            return content
        elif self.nodelist_false:
            return self.nodelist_false.render(context)
        return ''
class IfEqualNode(Node):
    """Backs {% ifequal %} / {% ifnotequal %} (negate flips the test)."""
    child_nodelists = ('nodelist_true', 'nodelist_false')

    def __init__(self, var1, var2, nodelist_true, nodelist_false, negate):
        self.var1, self.var2 = var1, var2
        self.nodelist_true, self.nodelist_false = nodelist_true, nodelist_false
        self.negate = negate

    def __repr__(self):
        return "<IfEqualNode>"

    def render(self, context):
        # ignore_failures=True: unresolvable variables compare as None.
        val1 = self.var1.resolve(context, True)
        val2 = self.var2.resolve(context, True)
        if (self.negate and val1 != val2) or (not self.negate and val1 == val2):
            return self.nodelist_true.render(context)
        return self.nodelist_false.render(context)
class IfNode(Node):
    """Backs {% if %}/{% elif %}/{% else %}: renders the first branch whose
    condition evaluates truthy (condition None marks the else branch)."""
    def __init__(self, conditions_nodelists):
        # conditions_nodelists: list of (condition-or-None, NodeList) pairs.
        self.conditions_nodelists = conditions_nodelists

    def __repr__(self):
        return "<IfNode>"

    def __iter__(self):
        for _, nodelist in self.conditions_nodelists:
            for node in nodelist:
                yield node

    @property
    def nodelist(self):
        return NodeList(node for _, nodelist in self.conditions_nodelists for node in nodelist)

    def render(self, context):
        for condition, nodelist in self.conditions_nodelists:
            if condition is not None:           # if / elif clause
                try:
                    match = condition.eval(context)
                except VariableDoesNotExist:
                    match = None
            else:                               # else clause
                match = True
            if match:
                return nodelist.render(context)
        return ''
class RegroupNode(Node):
    """Backs {% regroup %}: groups consecutive items of target by the given
    expression and stores [{'grouper': key, 'list': items}, ...] in var_name."""
    def __init__(self, target, expression, var_name):
        self.target, self.expression = target, expression
        self.var_name = var_name

    def resolve_expression(self, obj, context):
        # This method is called for each object in self.target.  See regroup()
        # for the reason why we temporarily put the object in the context.
        context[self.var_name] = obj
        return self.expression.resolve(context, True)

    def render(self, context):
        obj_list = self.target.resolve(context, True)
        # NOTE(review): '== None' (rather than 'is None') is kept as-is.
        if obj_list == None:
            # target variable wasn't found in context; fail silently.
            context[self.var_name] = []
            return ''
        # List of dictionaries in the format:
        # {'grouper': 'key', 'list': [list of contents]}.
        # groupby only merges *adjacent* equal keys — callers must pre-sort.
        context[self.var_name] = [
            {'grouper': key, 'list': list(val)}
            for key, val in
            groupby(obj_list, lambda obj: self.resolve_expression(obj, context))
        ]
        return ''
def include_is_allowed(filepath):
    """Return True if *filepath* (made absolute) lies under one of the
    configured ALLOWED_INCLUDE_ROOTS prefixes."""
    absolute = os.path.abspath(filepath)
    return any(absolute.startswith(root) for root in settings.ALLOWED_INCLUDE_ROOTS)
class SsiNode(Node):
    """Backs {% ssi %}: inlines a server-side file, optionally parsing it
    as a template ('parsed' flag)."""
    def __init__(self, filepath, parsed, legacy_filepath=True):
        # legacy_filepath=True means filepath is a literal string; otherwise
        # it is a FilterExpression resolved at render time.
        self.filepath = filepath
        self.parsed = parsed
        self.legacy_filepath = legacy_filepath

    def render(self, context):
        filepath = self.filepath
        if not self.legacy_filepath:
            filepath = filepath.resolve(context)
        # Only files under ALLOWED_INCLUDE_ROOTS may be inlined.
        if not include_is_allowed(filepath):
            if settings.DEBUG:
                return "[Didn't have permission to include file]"
            else:
                return ''  # Fail silently for invalid includes.
        try:
            fp = open(filepath, 'r')
            output = fp.read()
            fp.close()
        except IOError:
            output = ''
        if self.parsed:
            # {% ssi path parsed %}: treat the file contents as a template.
            try:
                t = Template(output, name=filepath)
                return t.render(context)
            except TemplateSyntaxError, e:
                if settings.DEBUG:
                    return "[Included template had syntax error: %s]" % e
                else:
                    return ''  # Fail silently for invalid included templates.
        return output
class LoadNode(Node):
    """Renders nothing; {% load %} does its work at parse time."""
    def render(self, context):
        return ''
class NowNode(Node):
    """Backs {% now %}: renders the current time with the given date format."""
    def __init__(self, format_string):
        self.format_string = format_string

    def render(self, context):
        # Honor USE_TZ by formatting in the active timezone.
        tzinfo = timezone.get_current_timezone() if settings.USE_TZ else None
        return date(datetime.now(tz=tzinfo), self.format_string)
class SpacelessNode(Node):
    """Backs {% spaceless %}: strips whitespace between rendered HTML tags."""
    def __init__(self, nodelist):
        self.nodelist = nodelist

    def render(self, context):
        from django.utils.html import strip_spaces_between_tags
        return strip_spaces_between_tags(self.nodelist.render(context).strip())
class TemplateTagNode(Node):
    """Backs {% templatetag %}: emits one of the template syntax literals
    (e.g. '{%', '}}') so they can appear in output without being parsed."""
    # Keyword -> literal emitted in the output.
    mapping = {'openblock': BLOCK_TAG_START,
               'closeblock': BLOCK_TAG_END,
               'openvariable': VARIABLE_TAG_START,
               'closevariable': VARIABLE_TAG_END,
               'openbrace': SINGLE_BRACE_START,
               'closebrace': SINGLE_BRACE_END,
               'opencomment': COMMENT_TAG_START,
               'closecomment': COMMENT_TAG_END,
               }

    def __init__(self, tagtype):
        self.tagtype = tagtype

    def render(self, context):
        # Unknown keywords render as '' rather than raising.
        return self.mapping.get(self.tagtype, '')
class URLNode(Node):
    """Backs {% url %}: reverses a view name (with args/kwargs) to a URL,
    optionally storing the result in a context variable via 'as var'."""
    def __init__(self, view_name, args, kwargs, asvar, legacy_view_name=True):
        # legacy_view_name=True: view_name is a literal string; otherwise a
        # FilterExpression resolved at render time.
        self.view_name = view_name
        self.legacy_view_name = legacy_view_name
        self.args = args
        self.kwargs = kwargs
        self.asvar = asvar

    def render(self, context):
        from django.core.urlresolvers import reverse, NoReverseMatch
        args = [arg.resolve(context) for arg in self.args]
        kwargs = dict([(smart_str(k, 'ascii'), v.resolve(context))
                       for k, v in self.kwargs.items()])
        view_name = self.view_name
        if not self.legacy_view_name:
            view_name = view_name.resolve(context)
        # Try to look up the URL twice: once given the view name, and again
        # relative to what we guess is the "main" app.  If they both fail,
        # re-raise the NoReverseMatch unless we're using the
        # {% url ... as var %} construct in which cause return nothing.
        url = ''
        try:
            url = reverse(view_name, args=args, kwargs=kwargs, current_app=context.current_app)
        except NoReverseMatch, e:
            if settings.SETTINGS_MODULE:
                project_name = settings.SETTINGS_MODULE.split('.')[0]
                try:
                    url = reverse(project_name + '.' + view_name,
                                  args=args, kwargs=kwargs,
                                  current_app=context.current_app)
                except NoReverseMatch:
                    if self.asvar is None:
                        # Re-raise the original exception, not the one with
                        # the path relative to the project.  This makes a
                        # better error message.
                        raise e
            else:
                if self.asvar is None:
                    raise e
        if self.asvar:
            context[self.asvar] = url
            return ''
        else:
            return url
class WidthRatioNode(Node):
    """Backs {% widthratio %}: renders round(value / max_value * max_width)
    as an integer string (for scaling e.g. bar-chart widths)."""
    def __init__(self, val_expr, max_expr, max_width):
        self.val_expr = val_expr
        self.max_expr = max_expr
        self.max_width = max_width

    def render(self, context):
        try:
            value = self.val_expr.resolve(context)
            max_value = self.max_expr.resolve(context)
            max_width = int(self.max_width.resolve(context))
        except VariableDoesNotExist:
            return ''
        except ValueError:
            raise TemplateSyntaxError("widthratio final argument must be an number")
        try:
            value = float(value)
            max_value = float(max_value)
            ratio = (value / max_value) * max_width
        except ZeroDivisionError:
            # max_value of zero renders as '0' rather than failing.
            return '0'
        except ValueError:
            # Non-numeric value/max_value fails silently.
            return ''
        return str(int(round(ratio)))
class WithNode(Node):
    """Backs {% with %}: renders its block with extra variables pushed onto
    the context, popping them afterwards."""
    def __init__(self, var, name, nodelist, extra_context=None):
        self.nodelist = nodelist
        # var and name are legacy attributes, being left in case they are used
        # by third-party subclasses of this Node.
        self.extra_context = extra_context or {}
        if name:
            self.extra_context[name] = var

    def __repr__(self):
        return "<WithNode>"

    def render(self, context):
        values = dict([(key, val.resolve(context)) for key, val in
                       self.extra_context.iteritems()])
        context.update(values)
        output = self.nodelist.render(context)
        context.pop()
        return output
@register.tag
def autoescape(parser, token):
    """
    Force autoescape behavior for this block.

    Usage: {% autoescape on|off %} ... {% endautoescape %}
    """
    args = token.contents.split()
    if len(args) != 2:
        raise TemplateSyntaxError("'autoescape' tag requires exactly one argument.")
    arg = args[1]
    if arg not in (u'on', u'off'):
        raise TemplateSyntaxError("'autoescape' argument should be 'on' or 'off'")
    nodelist = parser.parse(('endautoescape',))
    parser.delete_first_token()
    return AutoEscapeControlNode((arg == 'on'), nodelist)
@register.tag
def comment(parser, token):
    """
    Ignores everything between ``{% comment %}`` and ``{% endcomment %}``.
    """
    # skip_past discards the enclosed tokens without parsing them.
    parser.skip_past('endcomment')
    return CommentNode()
@register.tag
def cycle(parser, token):
    """
    Cycles among the given strings each time this tag is encountered.

    Within a loop, cycles among the given strings each time through
    the loop::

        {% for o in some_list %}
            <tr class="{% cycle 'row1' 'row2' %}">
                ...
            </tr>
        {% endfor %}

    Outside of a loop, give the values a unique name the first time you call
    it, then use that name each successive time through::

        <tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
        <tr class="{% cycle rowcolors %}">...</tr>
        <tr class="{% cycle rowcolors %}">...</tr>

    You can use any number of values, separated by spaces. Commas can also
    be used to separate values; if a comma is used, the cycle values are
    interpreted as literal strings.

    The optional flag "silent" can be used to prevent the cycle declaration
    from returning any value::

        {% cycle 'row1' 'row2' as rowcolors silent %}{# no value here #}
        {% for o in some_list %}
            <tr class="{% cycle rowcolors %}">{# first value will be "row1" #}
                ...
            </tr>
        {% endfor %}
    """
    # Note: This returns the exact same node on each {% cycle name %} call;
    # that is, the node object returned from {% cycle a b c as name %} and the
    # one returned from {% cycle name %} are the exact same object. This
    # shouldn't cause problems (heh), but if it does, now you know.
    #
    # Ugly hack warning: This stuffs the named template dict into parser so
    # that names are only unique within each template (as opposed to using
    # a global variable, which would make cycle names have to be unique across
    # *all* templates.
    args = token.split_contents()
    if len(args) < 2:
        raise TemplateSyntaxError("'cycle' tag requires at least two arguments")
    if ',' in args[1]:
        # Backwards compatibility: {% cycle a,b %} or {% cycle a,b as foo %}
        # case.
        args[1:2] = ['"%s"' % arg for arg in args[1].split(",")]
    if len(args) == 2:
        # {% cycle foo %} case: re-use a previously declared named cycle.
        name = args[1]
        if not hasattr(parser, '_namedCycleNodes'):
            raise TemplateSyntaxError("No named cycles in template. '%s' is not defined" % name)
        if not name in parser._namedCycleNodes:
            raise TemplateSyntaxError("Named cycle '%s' does not exist" % name)
        return parser._namedCycleNodes[name]
    as_form = False
    if len(args) > 4:
        # {% cycle ... as foo [silent] %} case.
        if args[-3] == "as":
            if args[-1] != "silent":
                raise TemplateSyntaxError("Only 'silent' flag is allowed after cycle's name, not '%s'." % args[-1])
            as_form = True
            silent = True
            args = args[:-1]
        elif args[-2] == "as":
            as_form = True
            silent = False
    if as_form:
        # Named declaration: register the node on the parser for later lookup.
        name = args[-1]
        values = [parser.compile_filter(arg) for arg in args[1:-2]]
        node = CycleNode(values, name, silent=silent)
        if not hasattr(parser, '_namedCycleNodes'):
            parser._namedCycleNodes = {}
        parser._namedCycleNodes[name] = node
    else:
        # Anonymous cycle: all arguments are values.
        values = [parser.compile_filter(arg) for arg in args[1:]]
        node = CycleNode(values)
    return node
@register.tag
def csrf_token(parser, token):
    """Emit the node that renders the CSRF token for POST forms."""
    return CsrfTokenNode()
@register.tag
def debug(parser, token):
    """
    Outputs a whole load of debugging information, including the current
    context and imported modules.

    Wrap it in ``<pre>`` for readability::

        <pre>{% debug %}</pre>
    """
    return DebugNode()
@register.tag('filter')
def do_filter(parser, token):
    """
    Filters the contents of the block through variable filters.

    Filters can be piped through each other and may take arguments, just
    like in variable syntax::

        {% filter force_escape|lower %}
            This text will be HTML-escaped, and will appear in lowercase.
        {% endfilter %}

    The ``escape`` and ``safe`` filters are rejected here; use the
    ``autoescape`` tag to manage autoescaping for blocks of template code.
    """
    _, remainder = token.contents.split(None, 1)
    # Compile against a placeholder variable so only the filter chain matters.
    expression = parser.compile_filter("var|%s" % (remainder))
    for filter_func, _args in expression.filters:
        filter_name = getattr(filter_func, '_decorated_function', filter_func).__name__
        if filter_name in ('escape', 'safe'):
            raise TemplateSyntaxError('"filter %s" is not permitted. Use the "autoescape" tag instead.' % filter_func.__name__)
    body = parser.parse(('endfilter',))
    parser.delete_first_token()
    return FilterNode(expression, body)
@register.tag
def firstof(parser, token):
    """
    Outputs the first variable passed that is not False, without escaping;
    outputs nothing if all the passed variables are False.

    Sample usage::

        {% firstof var1 var2 var3 "fallback value" %}

    A literal string may be given as a fallback in case all variables are
    False. If the output must be escaped, wrap the tag in
    ``{% filter force_escape %}...{% endfilter %}``.
    """
    choices = token.split_contents()[1:]
    if not choices:
        raise TemplateSyntaxError("'firstof' statement requires at least one argument")
    compiled = []
    for choice in choices:
        compiled.append(parser.compile_filter(choice))
    return FirstOfNode(compiled)
@register.tag('for')
def do_for(parser, token):
    """
    Loops over each item in an array.

    For example, to display a list of athletes given ``athlete_list``::

        <ul>
        {% for athlete in athlete_list %}
            <li>{{ athlete.name }}</li>
        {% endfor %}
        </ul>

    You can loop over a list in reverse by using
    ``{% for obj in list reversed %}``.

    You can also unpack multiple values from a two-dimensional array::

        {% for key,value in dict.items %}
            {{ key }}: {{ value }}
        {% endfor %}

    The ``for`` tag can take an optional ``{% empty %}`` clause that will
    be displayed if the given array is empty or could not be found::

        <ul>
        {% for athlete in athlete_list %}
            <li>{{ athlete.name }}</li>
        {% empty %}
            <li>Sorry, no athletes in this list.</li>
        {% endfor %}
        </ul>

    The for loop sets a number of variables available within the loop:

        ==========================  ================================================
        Variable                    Description
        ==========================  ================================================
        ``forloop.counter``         The current iteration of the loop (1-indexed)
        ``forloop.counter0``        The current iteration of the loop (0-indexed)
        ``forloop.revcounter``      The number of iterations from the end of the
                                    loop (1-indexed)
        ``forloop.revcounter0``     The number of iterations from the end of the
                                    loop (0-indexed)
        ``forloop.first``           True if this is the first time through the loop
        ``forloop.last``            True if this is the last time through the loop
        ``forloop.parentloop``      For nested loops, this is the loop "above" the
                                    current one
        ==========================  ================================================
    """
    bits = token.contents.split()
    if len(bits) < 4:
        raise TemplateSyntaxError("'for' statements should have at least four"
                                  " words: %s" % token.contents)
    # Trailing 'reversed' keyword shifts where 'in' must appear.
    is_reversed = bits[-1] == 'reversed'
    in_index = is_reversed and -3 or -2
    if bits[in_index] != 'in':
        raise TemplateSyntaxError("'for' statements should use the format"
                                  " 'for x in y': %s" % token.contents)
    # Everything between 'for' and 'in' is the (possibly comma-separated)
    # list of loop variable names.
    loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
    for var in loopvars:
        if not var or ' ' in var:
            raise TemplateSyntaxError("'for' tag received an invalid argument:"
                                      " %s" % token.contents)
    sequence = parser.compile_filter(bits[in_index+1])
    # Parse up to an optional {% empty %} clause, then the closing {% endfor %}.
    nodelist_loop = parser.parse(('empty', 'endfor',))
    token = parser.next_token()
    if token.contents == 'empty':
        nodelist_empty = parser.parse(('endfor',))
        parser.delete_first_token()
    else:
        nodelist_empty = None
    return ForNode(loopvars, sequence, is_reversed, nodelist_loop, nodelist_empty)
def do_ifequal(parser, token, negate):
    """Shared implementation for the ``ifequal`` / ``ifnotequal`` tags."""
    pieces = list(token.split_contents())
    if len(pieces) != 3:
        raise TemplateSyntaxError("%r takes two arguments" % pieces[0])
    end_tag = 'end' + pieces[0]
    nodelist_true = parser.parse(('else', end_tag))
    token = parser.next_token()
    if token.contents == 'else':
        nodelist_false = parser.parse((end_tag,))
        parser.delete_first_token()
    else:
        nodelist_false = NodeList()
    return IfEqualNode(parser.compile_filter(pieces[1]),
                       parser.compile_filter(pieces[2]),
                       nodelist_true, nodelist_false, negate)
@register.tag
def ifequal(parser, token):
    """
    Outputs the contents of the block if the two arguments equal each other.

    Examples::

        {% ifequal user.id comment.user_id %}
            ...
        {% endifequal %}
    """
    return do_ifequal(parser, token, False)
@register.tag
def ifnotequal(parser, token):
    """
    Outputs the contents of the block if the two arguments are not equal.

    See the ``ifequal`` tag; this is its negated counterpart and supports
    the same ``{% else %}`` clause.
    """
    return do_ifequal(parser, token, True)
class TemplateLiteral(Literal):
    """If-expression operand wrapping a compiled filter expression."""

    def __init__(self, value, text):
        self.value = value
        # Keep the raw source text so error messages can show it verbatim.
        self.text = text

    def display(self):
        return self.text

    def eval(self, context):
        # Resolution failures yield None instead of raising, matching the
        # forgiving semantics of {% if %}.
        return self.value.resolve(context, ignore_failures=True)
class TemplateIfParser(IfParser):
    """IfParser subclass whose operands are compiled by the template parser."""

    error_class = TemplateSyntaxError

    def __init__(self, parser, *args, **kwargs):
        self.template_parser = parser
        super(TemplateIfParser, self).__init__(*args, **kwargs)

    def create_var(self, value):
        return TemplateLiteral(self.template_parser.compile_filter(value), value)
@register.tag('if')
def do_if(parser, token):
    """
    The ``{% if %}`` tag evaluates a variable, and if that variable is "true"
    (i.e., exists, is not empty, and is not a false boolean value), the
    contents of the block are output::

        {% if athlete_list %}
            Number of athletes: {{ athlete_list|count }}
        {% elif athlete_in_locker_room_list %}
            Athletes should be out of the locker room soon!
        {% else %}
            No athletes.
        {% endif %}

    In the above, if ``athlete_list`` is not empty, the number of athletes will
    be displayed by the ``{{ athlete_list|count }}`` variable.

    As you can see, the ``if`` tag may take one or several ``{% elif %}``
    clauses, as well as an ``{% else %}`` clause that will be displayed if all
    previous conditions fail. These clauses are optional.

    ``if`` tags may use ``or``, ``and`` or ``not`` to test a number of
    variables or to negate a given variable::

        {% if not athlete_list %}
            There are no athletes.
        {% endif %}

        {% if athlete_list or coach_list %}
            There are some athletes or some coaches.
        {% endif %}

    Comparison operators are also available, and the use of filters is also
    allowed, for example::

        {% if articles|length >= 5 %}...{% endif %}

    Arguments and operators _must_ have a space between them, so
    ``{% if 1>2 %}`` is not a valid if tag.

    All supported operators are: ``or``, ``and``, ``in``, ``not in``
    ``==`` (or ``=``), ``!=``, ``>``, ``>=``, ``<`` and ``<=``.

    Operator precedence follows Python.
    """
    # {% if ... %}
    bits = token.split_contents()[1:]
    condition = TemplateIfParser(parser, bits).parse()
    nodelist = parser.parse(('elif', 'else', 'endif'))
    # Each entry pairs a parsed condition with the node list to render when
    # it is the first true condition; None means an unconditional else.
    conditions_nodelists = [(condition, nodelist)]
    token = parser.next_token()
    # {% elif ... %} (repeatable)
    while token.contents.startswith('elif'):
        bits = token.split_contents()[1:]
        condition = TemplateIfParser(parser, bits).parse()
        nodelist = parser.parse(('elif', 'else', 'endif'))
        conditions_nodelists.append((condition, nodelist))
        token = parser.next_token()
    # {% else %} (optional)
    if token.contents == 'else':
        nodelist = parser.parse(('endif',))
        conditions_nodelists.append((None, nodelist))
        token = parser.next_token()
    # {% endif %}
    assert token.contents == 'endif'
    return IfNode(conditions_nodelists)
@register.tag
def ifchanged(parser, token):
    """
    Checks if a value has changed from the last iteration of a loop.

    The ``{% ifchanged %}`` block tag is used within a loop. It has two
    possible uses:

    1. With no arguments, it compares its own rendered contents against its
       previous state and only displays the content if it has changed.

    2. Given one or more variables, it checks whether any variable has
       changed::

           {% for date in days %}
               {% ifchanged date.date %} {{ date.date }} {% endifchanged %}
           {% endfor %}

    An optional ``{% else %}`` clause renders when nothing changed.
    """
    bits = token.contents.split()
    true_nodes = parser.parse(('else', 'endifchanged'))
    token = parser.next_token()
    if token.contents == 'else':
        false_nodes = parser.parse(('endifchanged',))
        parser.delete_first_token()
    else:
        false_nodes = NodeList()
    tracked = [parser.compile_filter(bit) for bit in bits[1:]]
    return IfChangedNode(true_nodes, false_nodes, *tracked)
@register.tag
def ssi(parser, token):
    """
    Outputs the contents of a given file into the page.

    Like a simple "include" tag, ``ssi`` includes the contents of another
    file -- which must be specified using an absolute path -- in the
    current page::

        {% ssi /home/html/ljworld.com/includes/right_generic.html %}

    If the optional "parsed" parameter is given, the contents of the included
    file are evaluated as template code, with the current context.
    """
    import warnings
    warnings.warn('The syntax for the ssi template tag is changing. Load the `ssi` tag from the `future` tag library to start using the new behavior.',
                  category=DeprecationWarning)
    pieces = token.contents.split()
    if len(pieces) not in (2, 3):
        raise TemplateSyntaxError("'ssi' tag takes one argument: the path to"
                                  " the file to be included")
    parsed = False
    if len(pieces) == 3:
        if pieces[2] != 'parsed':
            raise TemplateSyntaxError("Second (optional) argument to %s tag"
                                      " must be 'parsed'" % pieces[0])
        parsed = True
    return SsiNode(pieces[1], parsed, legacy_filepath=True)
@register.tag
def load(parser, token):
    """
    Loads a custom template tag set.

    For example, to load the template tags in
    ``django/templatetags/news/photos.py``::

        {% load news.photos %}

    Can also be used to load an individual tag/filter from
    a library::

        {% load byline from news %}
    """
    bits = token.contents.split()
    if len(bits) >= 4 and bits[-2] == "from":
        # {% load name [name ...] from library %}: expose only the named
        # tags/filters via a temporary library.
        try:
            taglib = bits[-1]
            lib = get_library(taglib)
        except InvalidTemplateLibrary, e:
            raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
                                      (taglib, e))
        else:
            temp_lib = Library()
            for name in bits[1:-2]:
                if name in lib.tags:
                    temp_lib.tags[name] = lib.tags[name]
                    # a name could be a tag *and* a filter, so check for both
                    if name in lib.filters:
                        temp_lib.filters[name] = lib.filters[name]
                elif name in lib.filters:
                    temp_lib.filters[name] = lib.filters[name]
                else:
                    raise TemplateSyntaxError("'%s' is not a valid tag or filter in tag library '%s'" %
                                              (name, taglib))
            parser.add_library(temp_lib)
    else:
        # {% load library [library ...] %}: register each whole library.
        for taglib in bits[1:]:
            # add the library to the parser
            try:
                lib = get_library(taglib)
                parser.add_library(lib)
            except InvalidTemplateLibrary, e:
                raise TemplateSyntaxError("'%s' is not a valid tag library: %s" %
                                          (taglib, e))
    return LoadNode()
@register.tag
def now(parser, token):
    """
    Displays the date, formatted according to the given string.

    Uses the same format as PHP's ``date()`` function; see http://php.net/date
    for all the possible values.

    Sample usage::

        It is {% now "jS F Y H:i" %}
    """
    pieces = token.split_contents()
    if len(pieces) != 2:
        raise TemplateSyntaxError("'now' statement takes one argument")
    # Strip the surrounding quote characters from the format literal.
    return NowNode(pieces[1][1:-1])
@register.tag
def regroup(parser, token):
    """
    Regroups a list of alike objects by a common attribute.

    This complex tag is best illustrated by use of an example: say that
    ``people`` is a list of ``Person`` objects that have ``first_name``,
    ``last_name``, and ``gender`` attributes, and you'd like to display a
    list grouped by gender. The following snippet of template code would
    accomplish this dubious task::

        {% regroup people by gender as grouped %}
        <ul>
        {% for group in grouped %}
            <li>{{ group.grouper }}
            <ul>
                {% for item in group.list %}
                <li>{{ item }}</li>
                {% endfor %}
            </ul>
        {% endfor %}
        </ul>

    As you can see, ``{% regroup %}`` populates a variable with a list of
    objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
    item that was grouped by; ``list`` contains the list of objects that share
    that ``grouper``.

    Note that ``{% regroup %}`` does not work when the list to be grouped is
    not sorted by the key you are grouping by! Sort first if needed, e.g.::

        {% regroup people|dictsort:"gender" by gender as grouped %}
    """
    firstbits = token.contents.split(None, 3)
    if len(firstbits) != 4:
        raise TemplateSyntaxError("'regroup' tag takes five arguments")
    target = parser.compile_filter(firstbits[1])
    if firstbits[2] != 'by':
        raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
    # The grouping expression may itself contain spaces, so parse the tail
    # ("<expression> as <var>") from the right by reversing the string.
    lastbits_reversed = firstbits[3][::-1].split(None, 2)
    if lastbits_reversed[1][::-1] != 'as':
        raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
                                  " be 'as'")
    var_name = lastbits_reversed[0][::-1]
    # RegroupNode will take each item in 'target', put it in the context under
    # 'var_name', evaluate 'var_name'.'expression' in the current context, and
    # group by the resulting value. After all items are processed, it will
    # save the final result in the context under 'var_name', thus clearing the
    # temporary values. This hack is necessary because the template engine
    # doesn't provide a context-aware equivalent of Python's getattr.
    expression = parser.compile_filter(var_name +
                                       VARIABLE_ATTRIBUTE_SEPARATOR +
                                       lastbits_reversed[2][::-1])
    return RegroupNode(target, expression, var_name)
@register.tag
def spaceless(parser, token):
    """
    Removes whitespace between HTML tags, including tab and newline characters.

    Example usage::

        {% spaceless %}
            <p>
                <a href="foo/">Foo</a>
            </p>
        {% endspaceless %}

    This example would return ``<p><a href="foo/">Foo</a></p>``. Only space
    between *tags* is normalized -- not space between tags and text.
    """
    body = parser.parse(('endspaceless',))
    parser.delete_first_token()
    return SpacelessNode(body)
@register.tag
def templatetag(parser, token):
    """
    Outputs one of the bits used to compose template tags.

    Since the template system has no concept of "escaping", to display one of
    the bits used in template tags, you must use the ``{% templatetag %}`` tag.
    Valid arguments are ``openblock`` (``{%``), ``closeblock`` (``%}``),
    ``openvariable`` (``{{``), ``closevariable`` (``}}``), ``openbrace``
    (``{``), ``closebrace`` (``}``), ``opencomment`` (``{#``) and
    ``closecomment`` (``#}``).
    """
    pieces = token.contents.split()
    if len(pieces) != 2:
        raise TemplateSyntaxError("'templatetag' statement takes one argument")
    argument = pieces[1]
    if argument not in TemplateTagNode.mapping:
        raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
                                  " Must be one of: %s" %
                                  (argument, TemplateTagNode.mapping.keys()))
    return TemplateTagNode(argument)
@register.tag
def url(parser, token):
    """
    Returns an absolute URL matching given view with its parameters.

    This is a way to define links that aren't tied to a particular URL
    configuration::

        {% url path.to.some_view arg1 arg2 %}

    or

        {% url path.to.some_view name1=value1 name2=value2 %}

    The first argument is a path to a view. It can be an absolute python path
    or just ``app_name.view_name`` without the project name if the view is
    located inside the project. Other arguments are space-separated values
    that will be filled in place of positional and keyword arguments in the
    URL. All arguments for the URL should be present.

    For example if you have a view ``app_name.client`` taking client's id and
    the corresponding line in a URLconf looks like this::

        ('^client/(\d+)/$', 'app_name.client')

    then in a template you can create a link for a certain client like this::

        {% url app_name.client client.id %}

    The URL will look like ``/clients/client/123/``.
    """
    import warnings
    warnings.warn('The syntax for the url template tag is changing. Load the `url` tag from the `future` tag library to start using the new behavior.',
                  category=DeprecationWarning)
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument"
                                  " (path to a view)" % bits[0])
    viewname = bits[1]
    args = []
    kwargs = {}
    asvar = None
    bits = bits[2:]
    # Optional trailing "as var" stores the URL in the context instead of
    # outputting it.
    if len(bits) >= 2 and bits[-2] == 'as':
        asvar = bits[-1]
        bits = bits[:-2]
    # Backwards compatibility: check for the old comma separated format
    # {% url urlname arg1,arg2 %}
    # Initial check - that the first space separated bit has a comma in it
    if bits and ',' in bits[0]:
        check_old_format = True
        # In order to *really* be old format, there must be a comma
        # in *every* space separated bit, except the last.
        for bit in bits[1:-1]:
            if ',' not in bit:
                # No comma in this bit. Either the comma we found
                # in bit 1 was a false positive (e.g., comma in a string),
                # or there is a syntax problem with missing commas
                check_old_format = False
                break
    else:
        # No comma found - must be new format.
        check_old_format = False
    if check_old_format:
        # Confirm that this is old format by trying to parse the first
        # argument. An exception will be raised if the comma is
        # unexpected (i.e. outside of a static string).
        match = kwarg_re.match(bits[0])
        if match:
            value = match.groups()[1]
            try:
                parser.compile_filter(value)
            except TemplateSyntaxError:
                bits = ''.join(bits).split(',')
    # Now all the bits are parsed into new format,
    # process them as template vars
    if len(bits):
        for bit in bits:
            match = kwarg_re.match(bit)
            if not match:
                raise TemplateSyntaxError("Malformed arguments to url tag")
            name, value = match.groups()
            if name:
                kwargs[name] = parser.compile_filter(value)
            else:
                args.append(parser.compile_filter(value))
    return URLNode(viewname, args, kwargs, asvar, legacy_view_name=True)
@register.tag
def widthratio(parser, token):
    """
    For creating bar charts and such, this tag calculates the ratio of a given
    value to a maximum value, and then applies that ratio to a constant::

        <img src='bar.gif' height='10'
             width='{% widthratio this_value max_value 100 %}' />

    Above, if ``this_value`` is 175 and ``max_value`` is 200, the image will
    be 88 pixels wide (175/200 = .875; .875 * 100 = 87.5, rounded to 88).
    """
    bits = token.contents.split()
    if len(bits) != 4:
        raise TemplateSyntaxError("widthratio takes three arguments")
    _tag, value_expr, max_expr, width_expr = bits
    return WidthRatioNode(parser.compile_filter(value_expr),
                          parser.compile_filter(max_expr),
                          parser.compile_filter(width_expr))
@register.tag('with')
def do_with(parser, token):
    """
    Adds one or more values to the context (inside of this block) for caching
    and easy access::

        {% with total=person.some_sql_method %}
            {{ total }} object{{ total|pluralize }}
        {% endwith %}

    Multiple assignments are allowed (``{% with foo=1 bar=2 %}``), and the
    legacy ``{% with person.some_sql_method as total %}`` form is still
    accepted.
    """
    bits = token.split_contents()
    # token_kwargs consumes assignments from the front of this list in place;
    # anything left over is an invalid token.
    remaining_bits = bits[1:]
    extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
    if not extra_context:
        raise TemplateSyntaxError("%r expected at least one variable "
                                  "assignment" % bits[0])
    if remaining_bits:
        raise TemplateSyntaxError("%r received an invalid token: %r" %
                                  (bits[0], remaining_bits[0]))
    body = parser.parse(('endwith',))
    parser.delete_first_token()
    return WithNode(None, None, body, extra_context=extra_context)
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/template/defaulttags.py
|
Python
|
bsd-3-clause
| 48,056
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import add_days, getdate, cint
from frappe import throw, _
from erpnext.utilities.transaction_base import TransactionBase, delete_events
from erpnext.stock.utils import get_valid_serial_nos
class MaintenanceSchedule(TransactionBase):
def get_item_details(self, item_code):
item = frappe.db.sql("""select item_name, description from `tabItem`
where name=%s""", (item_code), as_dict=1)
ret = {
'item_name': item and item[0]['item_name'] or '',
'description' : item and item[0]['description'] or ''
}
return ret
def generate_schedule(self):
self.set('maintenance_schedule_detail', [])
frappe.db.sql("""delete from `tabMaintenance Schedule Detail`
where parent=%s""", (self.name))
count = 1
for d in self.get('item_maintenance_detail'):
self.validate_maintenance_detail()
s_list = []
s_list = self.create_schedule_list(d.start_date, d.end_date, d.no_of_visits, d.sales_person)
for i in range(d.no_of_visits):
child = self.append('maintenance_schedule_detail')
child.item_code = d.item_code
child.item_name = d.item_name
child.scheduled_date = s_list[i].strftime('%Y-%m-%d')
if d.serial_no:
child.serial_no = d.serial_no
child.idx = count
count = count + 1
child.sales_person = d.sales_person
self.save()
def on_submit(self):
if not self.get('maintenance_schedule_detail'):
throw(_("Please click on 'Generate Schedule' to get schedule"))
self.check_serial_no_added()
self.validate_schedule()
email_map = {}
for d in self.get('item_maintenance_detail'):
if d.serial_no:
serial_nos = get_valid_serial_nos(d.serial_no)
self.validate_serial_no(serial_nos, d.start_date)
self.update_amc_date(serial_nos, d.end_date)
if d.sales_person not in email_map:
sp = frappe.get_doc("Sales Person", d.sales_person)
email_map[d.sales_person] = sp.get_email_id()
scheduled_date = frappe.db.sql("""select scheduled_date from
`tabMaintenance Schedule Detail` where sales_person=%s and item_code=%s and
parent=%s""", (d.sales_person, d.item_code, self.name), as_dict=1)
for key in scheduled_date:
if email_map[d.sales_person]:
description = "Reference: %s, Item Code: %s and Customer: %s" % \
(self.name, d.item_code, self.customer)
frappe.get_doc({
"doctype": "Event",
"owner": email_map[d.sales_person] or self.owner,
"subject": description,
"description": description,
"starts_on": key["scheduled_date"] + " 10:00:00",
"event_type": "Private",
"ref_type": self.doctype,
"ref_name": self.name
}).insert(ignore_permissions=1)
frappe.db.set(self, 'status', 'Submitted')
def create_schedule_list(self, start_date, end_date, no_of_visit, sales_person):
schedule_list = []
start_date_copy = start_date
date_diff = (getdate(end_date) - getdate(start_date)).days
add_by = date_diff / no_of_visit
for visit in range(cint(no_of_visit)):
if (getdate(start_date_copy) < getdate(end_date)):
start_date_copy = add_days(start_date_copy, add_by)
if len(schedule_list) < no_of_visit:
schedule_date = self.validate_schedule_date_for_holiday_list(getdate(start_date_copy),
sales_person)
if schedule_date > getdate(end_date):
schedule_date = getdate(end_date)
schedule_list.append(schedule_date)
return schedule_list
def validate_schedule_date_for_holiday_list(self, schedule_date, sales_person):
from erpnext.accounts.utils import get_fiscal_year
validated = False
fy_details = ""
try:
fy_details = get_fiscal_year(date=schedule_date, verbose=0)
except Exception:
pass
if fy_details and fy_details[0]:
# check holiday list in employee master
holiday_list = frappe.db.sql_list("""select h.holiday_date from `tabEmployee` emp,
`tabSales Person` sp, `tabHoliday` h, `tabHoliday List` hl
where sp.name=%s and emp.name=sp.employee
and hl.name=emp.holiday_list and
h.parent=hl.name and
hl.fiscal_year=%s""", (sales_person, fy_details[0]))
if not holiday_list:
# check global holiday list
holiday_list = frappe.db.sql("""select h.holiday_date from
`tabHoliday` h, `tabHoliday List` hl
where h.parent=hl.name and ifnull(hl.is_default, 0) = 1
and hl.fiscal_year=%s""", fy_details[0])
if not validated and holiday_list:
if schedule_date in holiday_list:
schedule_date = add_days(schedule_date, -1)
else:
validated = True
return schedule_date
def validate_dates_with_periodicity(self):
for d in self.get("item_maintenance_detail"):
if d.start_date and d.end_date and d.periodicity and d.periodicity!="Random":
date_diff = (getdate(d.end_date) - getdate(d.start_date)).days + 1
days_in_period = {
"Weekly": 7,
"Monthly": 30,
"Quarterly": 90,
"Half Yearly": 180,
"Yearly": 365
}
if date_diff < days_in_period[d.periodicity]:
throw(_("Row {0}: To set {1} periodicity, difference between from and to date \
must be greater than or equal to {2}")
.format(d.idx, d.periodicity, days_in_period[d.periodicity]))
def validate_maintenance_detail(self):
if not self.get('item_maintenance_detail'):
throw(_("Please enter Maintaince Details first"))
for d in self.get('item_maintenance_detail'):
if not d.item_code:
throw(_("Please select item code"))
elif not d.start_date or not d.end_date:
throw(_("Please select Start Date and End Date for Item {0}".format(d.item_code)))
elif not d.no_of_visits:
throw(_("Please mention no of visits required"))
elif not d.sales_person:
throw(_("Please select Incharge Person's name"))
if getdate(d.start_date) >= getdate(d.end_date):
throw(_("Start date should be less than end date for Item {0}").format(d.item_code))
def validate_sales_order(self):
for d in self.get('item_maintenance_detail'):
if d.prevdoc_docname:
chk = frappe.db.sql("""select ms.name from `tabMaintenance Schedule` ms,
`tabMaintenance Schedule Item` msi where msi.parent=ms.name and
msi.prevdoc_docname=%s and ms.docstatus=1""", d.prevdoc_docname)
if chk:
throw(_("Maintenance Schedule {0} exists against {0}").format(chk[0][0], d.prevdoc_docname))
def validate(self):
self.validate_maintenance_detail()
self.validate_dates_with_periodicity()
self.validate_sales_order()
def on_update(self):
frappe.db.set(self, 'status', 'Draft')
def update_amc_date(self, serial_nos, amc_expiry_date=None):
for serial_no in serial_nos:
serial_no_doc = frappe.get_doc("Serial No", serial_no)
serial_no_doc.amc_expiry_date = amc_expiry_date
serial_no_doc.save()
def validate_serial_no(self, serial_nos, amc_start_date):
for serial_no in serial_nos:
sr_details = frappe.db.get_value("Serial No", serial_no,
["warranty_expiry_date", "amc_expiry_date", "status", "delivery_date"], as_dict=1)
if not sr_details:
frappe.throw(_("Serial No {0} not found").format(serial_no))
if sr_details.warranty_expiry_date and sr_details.warranty_expiry_date>=amc_start_date:
throw(_("Serial No {0} is under warranty upto {1}").format(serial_no, sr_details.warranty_expiry_date))
if sr_details.amc_expiry_date and sr_details.amc_expiry_date >= amc_start_date:
throw(_("Serial No {0} is under maintenance contract upto {1}").format(serial_no, sr_details.amc_start_date))
if sr_details.status=="Delivered" and sr_details.delivery_date and \
sr_details.delivery_date >= amc_start_date:
throw(_("Maintenance start date can not be before delivery date for Serial No {0}").format(serial_no))
def validate_schedule(self):
item_lst1 =[]
item_lst2 =[]
for d in self.get('item_maintenance_detail'):
if d.item_code not in item_lst1:
item_lst1.append(d.item_code)
for m in self.get('maintenance_schedule_detail'):
if m.item_code not in item_lst2:
item_lst2.append(m.item_code)
if len(item_lst1) != len(item_lst2):
throw(_("Maintenance Schedule is not generated for all the items. Please click on 'Generate Schedule'"))
else:
for x in item_lst1:
if x not in item_lst2:
throw(_("Please click on 'Generate Schedule'"))
def check_serial_no_added(self):
serial_present =[]
for d in self.get('item_maintenance_detail'):
if d.serial_no:
serial_present.append(d.item_code)
for m in self.get('maintenance_schedule_detail'):
if serial_present:
if m.item_code in serial_present and not m.serial_no:
throw(_("Please click on 'Generate Schedule' to fetch Serial No added for Item {0}").format(m.item_code))
def on_cancel(self):
for d in self.get('item_maintenance_detail'):
if d.serial_no:
serial_nos = get_valid_serial_nos(d.serial_no)
self.update_amc_date(serial_nos)
frappe.db.set(self, 'status', 'Cancelled')
delete_events(self.doctype, self.name)
def on_trash(self):
    # Remove the calendar events that were generated for this schedule.
    delete_events(self.doctype, self.name)
@frappe.whitelist()
def make_maintenance_visit(source_name, target_doc=None):
    """Map a submitted Maintenance Schedule into a new Maintenance Visit."""
    from frappe.model.mapper import get_mapped_doc

    def update_status(source, target, parent):
        # Visits created from a schedule are always of the scheduled type.
        target.maintenance_type = "Scheduled"

    mapping = {
        "Maintenance Schedule": {
            "doctype": "Maintenance Visit",
            "field_map": {"name": "maintenance_schedule"},
            "validation": {"docstatus": ["=", 1]},
            "postprocess": update_status,
        },
        "Maintenance Schedule Item": {
            "doctype": "Maintenance Visit Purpose",
            "field_map": {
                "parent": "prevdoc_docname",
                "parenttype": "prevdoc_doctype",
                "sales_person": "service_person",
            },
        },
    }
    return get_mapped_doc("Maintenance Schedule", source_name, mapping, target_doc)
|
indictranstech/focal-erpnext
|
support/doctype/maintenance_schedule/maintenance_schedule.py
|
Python
|
agpl-3.0
| 9,816
|
from util import foo
# <ref>
|
siosio/intellij-community
|
python/testData/resolve/multiFile/relativeAndSameDirectoryImports/plainDirectorySameDirectoryImportRegistryOff/plainDirectory/mod.py
|
Python
|
apache-2.0
| 44
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import models # noqa
from . import report # noqa
|
ddico/odoo
|
addons/sale_margin/__init__.py
|
Python
|
agpl-3.0
| 166
|
# Copyright (C) 2013-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from perftest import perftest
class Disassemble(perftest.TestCaseWithBasicMeasurements):
    """Measure how long GDB takes to disassemble a few large functions."""

    def __init__(self):
        super(Disassemble, self).__init__("disassemble")

    def warm_up(self):
        # One throw-away run so symbol/code caches are populated before timing.
        do_test_command = "disassemble ada_evaluate_subexp"
        gdb.execute(do_test_command, False, True)

    def _do_test(self, c):
        funcs = ["evaluate_subexp_standard", "handle_inferior_event", "c_parse_internal"]
        for func in funcs:
            do_test_command = "disassemble %s" % func
            for _ in range(c + 1):
                gdb.execute(do_test_command, False, True)

    def execute_test(self):
        for i in range(3):
            # Flush code cache.
            gdb.execute("set code-cache off")
            gdb.execute("set code-cache on")
            self.measure.measure(lambda: self._do_test(i), i)
|
zxombie/aarch64-freebsd-binutils
|
gdb/testsuite/gdb.perf/disassemble.py
|
Python
|
gpl-2.0
| 1,514
|
import os
import django
# It should be possible to just set DJANGO_SETTINGS_MODULE in setup.cfg
# or pytest.ini, but it doesn't work because pytest tries to do some
# magic by detecting a manage.py (which we don't have for our test suite).
# So we need to go the manual route here.
def pytest_configure():
    # Point Django at the test settings module (unless the caller already
    # set one) and initialise the app registry before tests import models.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')
    django.setup()
|
itbabu/django-oscar
|
tests/conftest.py
|
Python
|
bsd-3-clause
| 397
|
# The MIT License
#
# Copyright 2014, 2015 Piotr Dabkowski
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
# LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE
""" This module allows you to translate and execute Javascript in pure python.
Basically, it is an implementation of ECMAScript 5.1 in pure python.
Use eval_js method to execute javascript code and get resulting python object (builtin if possible).
EXAMPLE:
>>> import js2py
>>> add = js2py.eval_js('function add(a, b) {return a + b}')
>>> add(1, 2) + 3
6
>>> add('1', 2, 3)
u'12'
>>> add.constructor
function Function() { [python code] }
Or use EvalJs to execute many javascript code fragments under same context - you would be able to get any
variable from the context!
>>> js = js2py.EvalJs()
>>> js.execute('var a = 10; function f(x) {return x*x};')
>>> js.f(9)
81
>>> js.a
10
Also you can use its console method to play with interactive javascript console.
Use parse_js to parse (syntax tree is just like in esprima.js) and translate_js to translate JavaScript.
Finally, you can use pyimport statement from inside JS code to import and use python libraries.
>>> js2py.eval_js('pyimport urllib; urllib.urlopen("https://www.google.com")')
NOTE: This module is still not fully finished:
Date and JSON builtin objects are not implemented
Array prototype is not fully finished (will be soon)
Other than that everything should work fine.
"""
__author__ = 'Piotr Dabkowski'
__all__ = ['EvalJs', 'translate_js', 'import_js', 'eval_js', 'parse_js', 'translate_file', 'run_file']
from .evaljs import *
from .translators import parse as parse_js
|
repotvsupertuga/tvsupertuga.repository
|
script.module.universalscrapers/lib/universalscrapers/modules/js2py/__init__.py
|
Python
|
gpl-2.0
| 2,684
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from jinja2.runtime import Context
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types
from ansible.template import Templar, AnsibleContext, AnsibleEnvironment
from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var
from units.mock.loader import DictDataLoader
class BaseTemplar(object):
    """Shared fixture: a Templar wired to canned variables and a fake loader."""

    def setUp(self):
        self.test_vars = dict(
            foo="bar",
            bam="{{foo}}",
            num=1,
            var_true=True,
            var_false=False,
            var_dict=dict(a="b"),
            bad_dict="{a='b'",
            var_list=[1],
            recursive="{{recursive}}",
            some_var="blip",
            some_static_var="static_blip",
            some_keyword="{{ foo }}",
            some_unsafe_var=wrap_var("unsafe_blip"),
            some_static_unsafe_var=wrap_var("static_unsafe_blip"),
            some_unsafe_keyword=wrap_var("{{ foo }}"),
        )
        self.fake_loader = DictDataLoader({
            "/path/to/my_file.txt": "foo\n",
        })
        self.templar = Templar(loader=self.fake_loader, variables=self.test_vars)

    def is_unsafe(self, obj):
        """Recursively report whether *obj*, or anything it contains, is
        marked unsafe (via the __UNSAFE__ attribute or AnsibleUnsafe type)."""
        if obj is None:
            return False
        if hasattr(obj, '__UNSAFE__'):
            return True
        if isinstance(obj, AnsibleUnsafe):
            return True
        if isinstance(obj, dict):
            for key in obj.keys():
                if self.is_unsafe(key) or self.is_unsafe(obj[key]):
                    return True
        if isinstance(obj, list):
            for item in obj:
                if self.is_unsafe(item):
                    return True
        # BUG FIX: a trailing "isinstance(obj, string_types) and
        # hasattr(obj, '__UNSAFE__')" branch was unreachable (the hasattr
        # check above already returned True for such objects) and was removed.
        return False
# class used for testing arbitrary objects passed to template
class SomeClass(object):
    # Plain attribute container; exercises templating of arbitrary objects.
    foo = 'bar'

    def __init__(self):
        self.blip = 'blip'
class SomeUnsafeClass(AnsibleUnsafe):
    # AnsibleUnsafe subclass used to check that unsafe-ness is preserved
    # for non-string objects passed through templating.
    def __init__(self):
        super(SomeUnsafeClass, self).__init__()
        self.blip = 'unsafe blip'
class TestTemplarTemplate(BaseTemplar, unittest.TestCase):
    """Tests for Templar.template()/templatable(): bare-variable conversion,
    data conversion, unsafe propagation and error handling."""

    def test_lookup_jinja_dict_key_in_static_vars(self):
        res = self.templar.template("{'some_static_var': '{{ some_var }}'}",
                                    static_vars=['some_static_var'])
        # self.assertEqual(res['{{ a_keyword }}'], "blip")
        print(res)

    def test_templatable(self):
        res = self.templar.templatable('foo')
        self.assertTrue(res)

    def test_templatable_none(self):
        res = self.templar.templatable(None)
        self.assertTrue(res)

    @patch('ansible.template.Templar.template', side_effect=AnsibleError)
    def test_templatable_exception(self, mock_template):
        # templatable() must swallow template errors and report False.
        res = self.templar.templatable('foo')
        self.assertFalse(res)

    def test_template_convert_bare_string(self):
        # Note: no bare_deprecated=False so we hit the deprecation path
        res = self.templar.template('foo', convert_bare=True)
        self.assertEqual(res, 'bar')

    def test_template_convert_bare_nested(self):
        res = self.templar.template('bam', convert_bare=True, bare_deprecated=False)
        self.assertEqual(res, 'bar')

    def test_template_convert_bare_unsafe(self):
        res = self.templar.template('some_unsafe_var', convert_bare=True, bare_deprecated=False)
        self.assertEqual(res, 'unsafe_blip')
        # self.assertIsInstance(res, AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    def test_template_convert_bare_filter(self):
        res = self.templar.template('bam|capitalize', convert_bare=True, bare_deprecated=False)
        self.assertEqual(res, 'Bar')

    def test_template_convert_bare_filter_unsafe(self):
        res = self.templar.template('some_unsafe_var|capitalize', convert_bare=True, bare_deprecated=False)
        self.assertEqual(res, 'Unsafe_blip')
        # self.assertIsInstance(res, AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    def test_template_convert_data(self):
        res = self.templar.template('{{foo}}', convert_data=True)
        self.assertTrue(res)
        self.assertEqual(res, 'bar')

    @patch('ansible.template.safe_eval', side_effect=AnsibleError)
    def test_template_convert_data_template_in_data(self, mock_safe_eval):
        res = self.templar.template('{{bam}}', convert_data=True)
        self.assertTrue(res)
        self.assertEqual(res, 'bar')

    def test_template_convert_data_bare(self):
        res = self.templar.template('bam', convert_data=True)
        self.assertTrue(res)
        self.assertEqual(res, 'bam')

    def test_template_convert_data_to_json(self):
        res = self.templar.template('{{bam|to_json}}', convert_data=True)
        self.assertTrue(res)
        self.assertEqual(res, '"bar"')

    def test_template_convert_data_convert_bare_data_bare(self):
        res = self.templar.template('bam', convert_data=True, convert_bare=True)
        self.assertTrue(res)
        self.assertEqual(res, 'bar')

    def test_template_unsafe_non_string(self):
        unsafe_obj = AnsibleUnsafe()
        res = self.templar.template(unsafe_obj)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    def test_template_unsafe_non_string_subclass(self):
        unsafe_obj = SomeUnsafeClass()
        res = self.templar.template(unsafe_obj)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    # TODO: not sure what template is supposed to do it, but it currently throws attributeError
    @patch('ansible.template.Templar._clean_data')
    def test_template_unsafe_non_string_clean_data_exception(self, mock_clean_data):
        msg = 'Error raised from _clean_data by test_template_unsafe_non_string_clean_data_exception'
        mock_clean_data.side_effect = AnsibleError(msg)
        unsafe_obj = AnsibleUnsafe()
        res = self.templar.template(unsafe_obj)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    # TODO: not sure what template is supposed to do it, but it currently throws attributeError
    @patch('ansible.template.Templar._clean_data', side_effect=AnsibleError)
    def test_template_unsafe_non_string_subclass_clean_data_exception(self, mock_clean_data):
        unsafe_obj = SomeUnsafeClass()
        self.assertTrue(self.is_unsafe(unsafe_obj))
        res = self.templar.template(unsafe_obj)
        self.assertTrue(self.is_unsafe(res), 'returned value from template.template (%s) is not marked unsafe' % res)

    def test_weird(self):
        # Pathological mix of jinja delimiters must raise, not crash oddly.
        data = u'''1 2 #}huh{# %}ddfg{% }}dfdfg{{ {%what%} {{#foo#}} {%{bar}%} {#%blip%#} {{asdfsd%} 3 4 {{foo}} 5 6 7'''
        self.assertRaisesRegexp(AnsibleError,
                                'template error while templating string',
                                self.templar.template,
                                data)
class TestTemplarCleanData(BaseTemplar, unittest.TestCase):
    """Tests for Templar._clean_data(): jinja markers in untrusted data must
    be defused into comment markers, non-strings passed through."""

    def test_clean_data(self):
        res = self.templar._clean_data(u'some string')
        self.assertEqual(res, u'some string')

    def test_clean_data_not_stringtype(self):
        res = self.templar._clean_data(None)
        # None vs NoneType
        self.assertEqual(res, None)

    def test_clean_data_jinja(self):
        res = self.templar._clean_data(u'1 2 {what} 3 4 {{foo}} 5 6 7')
        self.assertEqual(res, u'1 2 {what} 3 4 {#foo#} 5 6 7')

    def test_clean_data_block(self):
        res = self.templar._clean_data(u'1 2 {%what%} 3 4 {{foo}} 5 6 7')
        self.assertEqual(res, u'1 2 {#what#} 3 4 {#foo#} 5 6 7')
        # BUG FIX: a stray duplicate of the assertion above -- left dangling
        # in this method when test_clean_data_weird was commented out -- has
        # been removed.

    # def test_clean_data_weird(self):
    #     res = self.templar._clean_data(u'1 2 #}huh{# %}ddfg{% }}dfdfg{{ {%what%} {{#foo#}} {%{bar}%} {#%blip%#} {{asdfsd%} 3 4 {{foo}} 5 6 7')
    #     print(res)

    def test_clean_data_object(self):
        obj = {u'foo': [1, 2, 3, u'bdasdf', u'{what}', u'{{foo}}', 5]}
        clean_obj = {u'foo': [1, 2, 3, u'bdasdf', u'{what}', u'{#foo#}', 5]}
        res = self.templar._clean_data(obj)
        self.assertNotEqual(res, obj)
        self.assertEqual(res, clean_obj)

    def test_clean_data_bad_dict(self):
        res = self.templar._clean_data(u'{{bad_dict}}')
        self.assertEqual(res, u'{#bad_dict#}')

    def test_clean_data_unsafe_obj(self):
        some_obj = SomeClass()
        unsafe_obj = wrap_var(some_obj)
        res = self.templar._clean_data(unsafe_obj)
        self.assertIsInstance(res, SomeClass)
class TestTemplarMisc(BaseTemplar, unittest.TestCase):
    """Basic templating behaviour, backslash escaping, and jinja2 extension
    configuration."""

    def test_templar_simple(self):
        templar = self.templar
        # test some basic templating
        self.assertEqual(templar.template("{{foo}}"), "bar")
        self.assertEqual(templar.template("{{foo}}\n"), "bar\n")
        self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=True), "bar\n")
        self.assertEqual(templar.template("{{foo}}\n", preserve_trailing_newlines=False), "bar")
        self.assertEqual(templar.template("{{bam}}"), "bar")
        self.assertEqual(templar.template("{{num}}"), 1)
        self.assertEqual(templar.template("{{var_true}}"), True)
        self.assertEqual(templar.template("{{var_false}}"), False)
        self.assertEqual(templar.template("{{var_dict}}"), dict(a="b"))
        self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
        self.assertEqual(templar.template("{{var_list}}"), [1])
        self.assertEqual(templar.template(1, convert_bare=True), 1)

        # force errors
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{bad_var}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{lookup('file', bad_var)}}")
        self.assertRaises(AnsibleError, templar.template, "{{lookup('bad_lookup')}}")
        self.assertRaises(AnsibleError, templar.template, "{{recursive}}")
        self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{foo-bar}}")

        # test with fail_on_undefined=False
        self.assertEqual(templar.template("{{bad_var}}", fail_on_undefined=False), "{{bad_var}}")

        # test set_available_variables()
        templar.set_available_variables(variables=dict(foo="bam"))
        self.assertEqual(templar.template("{{foo}}"), "bam")
        # variables must be a dict() for set_available_variables()
        self.assertRaises(AssertionError, templar.set_available_variables, "foo=bam")

    def test_templar_escape_backslashes(self):
        # Rule of thumb: If escape backslashes is True you should end up with
        # the same number of backslashes as when you started.
        self.assertEqual(self.templar.template("\t{{foo}}", escape_backslashes=True), "\tbar")
        self.assertEqual(self.templar.template("\t{{foo}}", escape_backslashes=False), "\tbar")
        self.assertEqual(self.templar.template("\\{{foo}}", escape_backslashes=True), "\\bar")
        self.assertEqual(self.templar.template("\\{{foo}}", escape_backslashes=False), "\\bar")
        self.assertEqual(self.templar.template("\\{{foo + '\t' }}", escape_backslashes=True), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\t' }}", escape_backslashes=False), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\t' }}", escape_backslashes=True), "\\bar\\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\t' }}", escape_backslashes=False), "\\bar\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\\\t' }}", escape_backslashes=True), "\\bar\\\\t")
        self.assertEqual(self.templar.template("\\{{foo + '\\\\t' }}", escape_backslashes=False), "\\bar\\t")

    def test_template_jinja2_extensions(self):
        fake_loader = DictDataLoader({})
        templar = Templar(loader=fake_loader)

        old_exts = C.DEFAULT_JINJA2_EXTENSIONS
        try:
            # _get_extensions() must split the configured comma-separated list.
            C.DEFAULT_JINJA2_EXTENSIONS = "foo,bar"
            self.assertEqual(templar._get_extensions(), ['foo', 'bar'])
        finally:
            C.DEFAULT_JINJA2_EXTENSIONS = old_exts
class TestTemplarLookup(BaseTemplar, unittest.TestCase):
    """Tests for Templar._lookup(): plugin resolution, argument templating,
    wantlist handling, and unsafe propagation."""

    def test_lookup_missing_plugin(self):
        self.assertRaisesRegexp(AnsibleError,
                                r'lookup plugin \(not_a_real_lookup_plugin\) not found',
                                self.templar._lookup,
                                'not_a_real_lookup_plugin',
                                'an_arg', a_keyword_arg='a_keyword_arg_value')

    def test_lookup_list(self):
        res = self.templar._lookup('list', 'an_arg', 'another_arg')
        self.assertEqual(res, 'an_arg,another_arg')

    def test_lookup_jinja_undefined(self):
        self.assertRaisesRegexp(AnsibleUndefinedVariable,
                                "'an_undefined_jinja_var' is undefined",
                                self.templar._lookup,
                                'list', '{{ an_undefined_jinja_var }}')

    def test_lookup_jinja_defined(self):
        res = self.templar._lookup('list', '{{ some_var }}')
        self.assertTrue(self.is_unsafe(res))
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_dict_string_passed(self):
        self.assertRaisesRegexp(AnsibleError,
                                "with_dict expects a dict",
                                self.templar._lookup,
                                'dict',
                                '{{ some_var }}')

    def test_lookup_jinja_dict_list_passed(self):
        self.assertRaisesRegexp(AnsibleError,
                                "with_dict expects a dict",
                                self.templar._lookup,
                                'dict',
                                ['foo', 'bar'])

    def test_lookup_jinja_kwargs(self):
        res = self.templar._lookup('list', 'blip', random_keyword='12345')
        self.assertTrue(self.is_unsafe(res))
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_list_wantlist(self):
        res = self.templar._lookup('list', '{{ some_var }}', wantlist=True)
        self.assertEqual(res, ["blip"])

    def test_lookup_jinja_list_wantlist_undefined(self):
        self.assertRaisesRegexp(AnsibleUndefinedVariable,
                                "'some_undefined_var' is undefined",
                                self.templar._lookup,
                                'list',
                                '{{ some_undefined_var }}',
                                wantlist=True)

    def test_lookup_jinja_list_wantlist_unsafe(self):
        res = self.templar._lookup('list', '{{ some_unsafe_var }}', wantlist=True)
        for lookup_result in res:
            self.assertTrue(self.is_unsafe(lookup_result))
            # self.assertIsInstance(lookup_result, AnsibleUnsafe)

        # Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_dict(self):
        res = self.templar._lookup('list', {'{{ a_keyword }}': '{{ some_var }}'})
        self.assertEqual(res['{{ a_keyword }}'], "blip")
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res['{{ a_keyword }}'], AnsibleUnsafe)
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_dict_unsafe(self):
        res = self.templar._lookup('list', {'{{ some_unsafe_key }}': '{{ some_unsafe_var }}'})
        self.assertTrue(self.is_unsafe(res['{{ some_unsafe_key }}']))
        # self.assertIsInstance(res['{{ some_unsafe_key }}'], AnsibleUnsafe)
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_dict_unsafe_value(self):
        res = self.templar._lookup('list', {'{{ a_keyword }}': '{{ some_unsafe_var }}'})
        self.assertTrue(self.is_unsafe(res['{{ a_keyword }}']))
        # self.assertIsInstance(res['{{ a_keyword }}'], AnsibleUnsafe)
        # TODO: Should this be an AnsibleUnsafe
        # self.assertIsInstance(res, AnsibleUnsafe)

    def test_lookup_jinja_none(self):
        res = self.templar._lookup('list', None)
        self.assertIsNone(res)
class TestAnsibleContext(BaseTemplar, unittest.TestCase):
    """Tests for AnsibleContext.resolve(): unsafe marking must survive
    resolution of scalars, lists and dicts."""

    def _context(self, variables=None):
        # Build a minimal AnsibleContext with the given variables pre-loaded.
        variables = variables or {}

        env = AnsibleEnvironment()
        context = AnsibleContext(env, parent={}, name='some_context',
                                 blocks={})

        for key, value in variables.items():
            context.vars[key] = value

        return context

    def test(self):
        context = self._context()
        self.assertIsInstance(context, AnsibleContext)
        self.assertIsInstance(context, Context)

    def test_resolve_unsafe(self):
        context = self._context(variables={'some_unsafe_key': wrap_var('some_unsafe_string')})
        res = context.resolve('some_unsafe_key')
        # self.assertIsInstance(res, AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res)

    def test_resolve_unsafe_list(self):
        context = self._context(variables={'some_unsafe_key': [wrap_var('some unsafe string 1')]})
        res = context.resolve('some_unsafe_key')
        # self.assertIsInstance(res[0], AnsibleUnsafe)
        self.assertTrue(self.is_unsafe(res),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res)

    def test_resolve_unsafe_dict(self):
        context = self._context(variables={'some_unsafe_key':
                                           {'an_unsafe_dict': wrap_var('some unsafe string 1')}
                                           })
        res = context.resolve('some_unsafe_key')
        self.assertTrue(self.is_unsafe(res['an_unsafe_dict']),
                        'return of AnsibleContext.resolve (%s) was expected to be marked unsafe but was not' % res['an_unsafe_dict'])

    def test_resolve(self):
        context = self._context(variables={'some_key': 'some_string'})
        res = context.resolve('some_key')
        self.assertEqual(res, 'some_string')
        # self.assertNotIsInstance(res, AnsibleUnsafe)
        self.assertFalse(self.is_unsafe(res),
                         'return of AnsibleContext.resolve (%s) was not expected to be marked unsafe but was' % res)

    def test_resolve_none(self):
        context = self._context(variables={'some_key': None})
        res = context.resolve('some_key')
        self.assertEqual(res, None)
        # self.assertNotIsInstance(res, AnsibleUnsafe)
        self.assertFalse(self.is_unsafe(res),
                         'return of AnsibleContext.resolve (%s) was not expected to be marked unsafe but was' % res)
|
tsdmgz/ansible
|
test/units/template/test_templar.py
|
Python
|
gpl-3.0
| 20,034
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import django.shortcuts
def dispatcher(request):
    """Serve the page that hosts the Jasmine JavaScript test runner."""
    return django.shortcuts.render(request, "horizon/jasmine/jasmine.html")
|
FNST-OpenStack/horizon
|
horizon/test/jasmine/jasmine.py
|
Python
|
apache-2.0
| 697
|
# cmdline: -O
# test optimisation output
# With -O, __debug__ is False and assert statements are compiled out,
# so this prints "False" and the assert below must not raise.
print(__debug__)
assert 0
|
AriZuu/micropython
|
tests/cmdline/cmd_optimise.py
|
Python
|
mit
| 67
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Adam Števko <adam.stevko@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: beadm
short_description: Manage ZFS boot environments on FreeBSD/Solaris/illumos systems.
description:
- Create, delete or activate ZFS boot environments.
- Mount and unmount ZFS boot environments.
version_added: "2.3"
author: Adam Števko (@xen0l)
options:
name:
description:
- ZFS boot environment name.
aliases: [ "be" ]
required: True
snapshot:
description:
- If specified, the new boot environment will be cloned from the given
snapshot or inactive boot environment.
required: false
default: false
description:
description:
- Associate a description with a new boot environment. This option is
available only on Solarish platforms.
required: false
default: false
options:
description:
- Create the datasets for new BE with specific ZFS properties. Multiple
options can be specified. This option is available only on
Solarish platforms.
required: false
default: false
mountpoint:
description:
- Path where to mount the ZFS boot environment
required: false
default: false
state:
description:
- Create or delete ZFS boot environment.
required: false
default: "present"
choices: [ "present", "absent", "activated", "mounted", "unmounted" ]
force:
description:
- Specifies if the unmount should be forced.
required: false
default: false
choices: [ "true", "false" ]
'''
EXAMPLES = '''
- name: Create ZFS boot environment
beadm:
name: upgrade-be
state: present
- name: Create ZFS boot environment from existing inactive boot environment
beadm:
name: upgrade-be
snapshot: be@old
state: present
- name: Create ZFS boot environment with compression enabled and description "upgrade"
beadm:
name: upgrade-be
options: "compression=on"
description: upgrade
state: present
- name: Delete ZFS boot environment
beadm:
name: old-be
state: absent
- name: Mount ZFS boot environment on /tmp/be
beadm:
name: BE
mountpoint: /tmp/be
state: mounted
- name: Unmount ZFS boot environment
beadm:
name: BE
state: unmounted
- name: Activate ZFS boot environment
beadm:
name: upgrade-be
state: activated
'''
RETURN = '''
name:
description: BE name
returned: always
type: string
sample: pre-upgrade
snapshot:
description: ZFS snapshot to create BE from
returned: always
type: string
sample: rpool/ROOT/oi-hipster@fresh
description:
description: BE description
returned: always
type: string
sample: Upgrade from 9.0 to 10.0
options:
description: BE additional options
returned: always
type: string
sample: compression=on
mountpoint:
description: BE mountpoint
returned: always
type: string
sample: /mnt/be
state:
description: state of the target
returned: always
type: string
sample: present
force:
description: if forced action is wanted
returned: always
type: boolean
sample: False
'''
import os
from ansible.module_utils.basic import AnsibleModule
class BE(object):
    """Thin wrapper around the beadm(1M/8) command-line tool for managing
    ZFS boot environments on FreeBSD and Solarish systems."""

    def __init__(self, module):
        self.module = module
        self.name = module.params['name']
        self.snapshot = module.params['snapshot']
        self.description = module.params['description']
        self.options = module.params['options']
        self.mountpoint = module.params['mountpoint']
        self.state = module.params['state']
        self.force = module.params['force']
        # FreeBSD's beadm has a slightly different CLI and list output format.
        self.is_freebsd = os.uname()[0] == 'FreeBSD'

    def _beadm_list(self):
        """Run 'beadm list -H'; on Solarish the listing is filtered by name."""
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('list')
        cmd.append('-H')
        if not self.is_freebsd:
            cmd.append(self.name)
        return self.module.run_command(cmd)

    def _find_be_by_name(self, out):
        """Return the tab-separated listing line for this BE, or None."""
        for line in out.splitlines():
            if line.split('\t')[0] == self.name:
                return line
        return None

    def exists(self):
        """True when the boot environment appears in the beadm listing."""
        (rc, out, _) = self._beadm_list()

        if rc == 0:
            if self.is_freebsd:
                if self._find_be_by_name(out):
                    return True
            else:
                return True
        else:
            return False

    def is_activated(self):
        """True when the BE carries the 'R' (active on reboot) flag."""
        (rc, out, _) = self._beadm_list()

        if rc == 0:
            if self.is_freebsd:
                line = self._find_be_by_name(out)
                if line is not None and 'R' in line.split('\t')[1]:
                    return True
            else:
                if 'R' in out.split(';')[2]:
                    return True

        return False

    def activate_be(self):
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('activate')
        cmd.append(self.name)
        return self.module.run_command(cmd)

    def create_be(self):
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('create')

        if self.snapshot:
            cmd.append('-e')
            cmd.append(self.snapshot)

        if not self.is_freebsd:
            # Description and dataset options are Solarish-only features.
            if self.description:
                cmd.append('-d')
                cmd.append(self.description)

            if self.options:
                cmd.append('-o')
                cmd.append(self.options)

        cmd.append(self.name)

        return self.module.run_command(cmd)

    def destroy_be(self):
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('destroy')
        cmd.append('-F')
        cmd.append(self.name)
        return self.module.run_command(cmd)

    def is_mounted(self):
        """True when the BE is mounted (excluding the FreeBSD root BE)."""
        (rc, out, _) = self._beadm_list()

        if rc == 0:
            if self.is_freebsd:
                line = self._find_be_by_name(out)
                # On FreeBSD, we exclude currently mounted BE on /, as it is
                # special and can be activated even if it is mounted. That is not
                # possible with non-root BEs.
                # BUG FIX: the mountpoint was compared with "is not", i.e.
                # object identity, which only worked through CPython string
                # interning (and warns on modern Python); compare values.
                mountpoint = line.split('\t')[2]
                if mountpoint != '-' and mountpoint != '/':
                    return True
            else:
                if out.split(';')[3]:
                    return True

        return False

    def mount_be(self):
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('mount')
        cmd.append(self.name)

        if self.mountpoint:
            cmd.append(self.mountpoint)

        return self.module.run_command(cmd)

    def unmount_be(self):
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('unmount')
        if self.force:
            cmd.append('-f')
        cmd.append(self.name)
        return self.module.run_command(cmd)
def main():
    """Ansible entry point: drive beadm to reach the requested BE state
    (present/absent/activated/mounted/unmounted) and report changes."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, aliases=['be'], type='str'),
            snapshot=dict(type='str'),
            description=dict(type='str'),
            options=dict(type='str'),
            mountpoint=dict(default=False, type='path'),
            state=dict(
                default='present',
                choices=['present', 'absent', 'activated',
                         'mounted', 'unmounted']),
            force=dict(default=False, type='bool'),
        ),
        supports_check_mode=True
    )

    be = BE(module)

    # rc stays None when no beadm command was run; used below to set 'changed'.
    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = be.name
    result['state'] = be.state

    if be.snapshot:
        result['snapshot'] = be.snapshot

    if be.description:
        result['description'] = be.description

    if be.options:
        result['options'] = be.options

    if be.mountpoint:
        result['mountpoint'] = be.mountpoint

    if be.state == 'absent':
        # beadm on FreeBSD and Solarish systems differs in delete behaviour in
        # that we are not allowed to delete activated BE on FreeBSD while on
        # Solarish systems we cannot delete BE if it is mounted. We add mount
        # check for both platforms as BE should be explicitly unmounted before
        # being deleted. On FreeBSD, we also check if the BE is activated.
        if be.exists():
            if not be.is_mounted():
                if module.check_mode:
                    module.exit_json(changed=True)

                if be.is_freebsd:
                    if be.is_activated():
                        module.fail_json(msg='Unable to remove active BE!')

                (rc, out, err) = be.destroy_be()

                if rc != 0:
                    module.fail_json(msg='Error while destroying BE: "%s"' % err,
                                     name=be.name,
                                     stderr=err,
                                     rc=rc)
            else:
                module.fail_json(msg='Unable to remove BE as it is mounted!')

    elif be.state == 'present':
        if not be.exists():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = be.create_be()

            if rc != 0:
                module.fail_json(msg='Error while creating BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)

    elif be.state == 'activated':
        if not be.is_activated():
            if module.check_mode:
                module.exit_json(changed=True)

            # On FreeBSD, beadm is unable to activate mounted BEs, so we add
            # an explicit check for that case.
            if be.is_freebsd:
                if be.is_mounted():
                    module.fail_json(msg='Unable to activate mounted BE!')

            (rc, out, err) = be.activate_be()

            if rc != 0:
                module.fail_json(msg='Error while activating BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)
    elif be.state == 'mounted':
        if not be.is_mounted():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = be.mount_be()

            if rc != 0:
                module.fail_json(msg='Error while mounting BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)

    elif be.state == 'unmounted':
        if be.is_mounted():
            if module.check_mode:
                module.exit_json(changed=True)

            (rc, out, err) = be.unmount_be()

            if rc != 0:
                module.fail_json(msg='Error while unmounting BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)

    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True

    if out:
        result['stdout'] = out

    if err:
        result['stderr'] = err

    module.exit_json(**result)
|
e-gob/plataforma-kioscos-autoatencion
|
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/modules/system/beadm.py
|
Python
|
bsd-3-clause
| 11,657
|
"""
Wheel command-line utility.
"""
import argparse
import hashlib
import json
import os
import sys
from glob import iglob
from .. import signatures
from ..install import WheelFile, VerifyingZipFile
from ..paths import get_install_command
from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary, matches_requirement
def require_pkgresources(name):
    """Abort with RuntimeError unless pkg_resources can be imported.

    :param name: human-readable feature name used in the error message.
    """
    try:
        import pkg_resources  # noqa: F401
    except ImportError:
        message = "'{0}' needs pkg_resources (part of setuptools).".format(name)
        raise RuntimeError(message)
class WheelError(Exception):
    """Base error for wheel tool failures (signing, verifying, installing)."""
    pass
# For testability
def get_keyring():
    """Return ``(WheelKeys, keyring)`` for signature operations.

    Wrapped in a function (and passed as a default argument below) so
    tests can inject a fake implementation.

    :raises WheelError: if the optional signature dependencies are not
        installed or no usable keyring backend is configured.
    """
    try:
        from ..signatures import keys
        import keyring
        # A falsy priority means no usable keyring backend is available.
        assert keyring.get_keyring().priority
    except (ImportError, AssertionError):
        raise WheelError(
            "Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.")
    return keys.WheelKeys, keyring
def keygen(get_keyring=get_keyring):
    """Generate a public/private key pair.

    Stores the Ed25519 secret key in the system keyring under service
    "wheel", keyed by the base64 verifying key, and trusts the new key
    for all packages (the '+' wildcard).

    :param get_keyring: injectable for tests; returns (WheelKeys, keyring).
    :raises WheelError: if the keyring cannot round-trip the secret key.
    """
    WheelKeys, keyring = get_keyring()
    ed25519ll = signatures.get_ed25519ll()
    wk = WheelKeys().load()
    keypair = ed25519ll.crypto_sign_keypair()
    vk = native(urlsafe_b64encode(keypair.vk))
    sk = native(urlsafe_b64encode(keypair.sk))
    kr = keyring.get_keyring()
    kr.set_password("wheel", vk, sk)
    sys.stdout.write("Created Ed25519 keypair with vk={0}\n".format(vk))
    sys.stdout.write("in {0!r}\n".format(kr))
    # Read the secret back to verify the keyring backend actually works.
    sk2 = kr.get_password('wheel', vk)
    if sk2 != sk:
        raise WheelError("Keyring is broken. Could not retrieve secret key.")
    sys.stdout.write("Trusting {0} to sign and verify all packages.\n".format(vk))
    wk.add_signer('+', vk)
    wk.trust('+', vk)
    wk.save()
def sign(wheelfile, replace=False, get_keyring=get_keyring):
    """Sign a wheel by appending a RECORD.jws signature to its archive.

    :param wheelfile: path to the wheel to sign (modified in place).
    :param replace: NOTE(review): currently unused — an existing signature
        always raises WheelError; confirm whether replace was intended to
        allow re-signing.
    :param get_keyring: injectable for tests; returns (WheelKeys, keyring).
    :raises WheelError: if the wheel already contains RECORD.jws.
    """
    WheelKeys, keyring = get_keyring()
    ed25519ll = signatures.get_ed25519ll()
    # Open in append mode so the signature is added to the existing archive.
    wf = WheelFile(wheelfile, append=True)
    wk = WheelKeys().load()
    name = wf.parsed_filename.group('name')
    # Use the first configured signer for this package name.
    sign_with = wk.signers(name)[0]
    sys.stdout.write("Signing {0} with {1}\n".format(name, sign_with[1]))
    vk = sign_with[1]
    kr = keyring.get_keyring()
    sk = kr.get_password('wheel', vk)
    keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),
                                urlsafe_b64decode(binary(sk)))
    record_name = wf.distinfo_name + '/RECORD'
    sig_name = wf.distinfo_name + '/RECORD.jws'
    if sig_name in wf.zipfile.namelist():
        raise WheelError("Wheel is already signed.")
    record_data = wf.zipfile.read(record_name)
    # The signed payload is the SHA-256 digest of RECORD, not RECORD itself.
    payload = {"hash": "sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}
    sig = signatures.sign(payload, keypair)
    wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
    wf.zipfile.close()
def unsign(wheelfile):
    """
    Remove RECORD.jws from a wheel by truncating the zip file.

    RECORD.jws must be at the end of the archive. The zip file must be an
    ordinary archive, with the compressed files and the directory in the same
    order, and without any non-zip content after the truncation point.

    :param wheelfile: path to the signed wheel (modified in place).
    :raises WheelError: if the last archive member is not RECORD.jws.
    """
    vzf = VerifyingZipFile(wheelfile, "a")
    info = vzf.infolist()
    # The signature must be the final member; anything else is unsupported.
    if not (len(info) and info[-1].filename.endswith('/RECORD.jws')):
        raise WheelError('The wheel is not signed (RECORD.jws not found at end of the archive).')
    vzf.pop()
    vzf.close()
def verify(wheelfile):
    """Verify a wheel.

    The signature will be verified for internal consistency ONLY and printed.
    Wheel's own unpack/install commands verify the manifest against the
    signature and file contents.

    :param wheelfile: path to the wheel whose RECORD.jws should be checked.
    :raises WheelError: if the wheel contains no RECORD.jws member.
    """
    wf = WheelFile(wheelfile)
    sig_name = wf.distinfo_name + '/RECORD.jws'
    try:
        sig = json.loads(native(wf.zipfile.open(sig_name).read()))
    except KeyError:
        # zipfile raises KeyError for a missing archive member.
        raise WheelError('The wheel is not signed (RECORD.jws not found at end of the archive).')
    verified = signatures.verify(sig)
    # Status goes to stderr so the JSON payload on stdout stays parseable.
    sys.stderr.write("Signatures are internally consistent.\n")
    sys.stdout.write(json.dumps(verified, indent=2))
    sys.stdout.write('\n')
def unpack(wheelfile, dest='.'):
    """Unpack a wheel.

    Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
    is the package name and {ver} its version.

    :param wheelfile: The path to the wheel.
    :param dest: Destination directory (default to current directory).
    """
    wheel = WheelFile(wheelfile)
    name_version = wheel.parsed_filename.group('namever')
    target = os.path.join(dest, name_version)
    sys.stderr.write("Unpacking to: %s\n" % (target))
    wheel.zipfile.extractall(target)
    wheel.zipfile.close()
def install(requirements, requirements_file=None,
            wheel_dirs=None, force=False, list_files=False,
            dry_run=False):
    """Install wheels.

    :param requirements: A list of requirements or wheel files to install.
    :param requirements_file: A file containing requirements to install.
    :param wheel_dirs: A list of directories to search for wheels.
    :param force: Install a wheel file even if it is not compatible.
    :param list_files: Only list the files to install, don't install them.
    :param dry_run: Do everything but the actual install.
    :raises WheelError: for a missing/incompatible wheel file or an
        unsatisfiable requirement.
    """
    # If no wheel directories specified, use the WHEELPATH environment
    # variable, or the current directory if that is not set.
    if not wheel_dirs:
        wheelpath = os.getenv("WHEELPATH")
        if wheelpath:
            wheel_dirs = wheelpath.split(os.pathsep)
        else:
            wheel_dirs = [os.path.curdir]
    # Get a list of all valid wheels in wheel_dirs
    all_wheels = []
    for d in wheel_dirs:
        for w in os.listdir(d):
            if w.endswith('.whl'):
                wf = WheelFile(os.path.join(d, w))
                if wf.compatible:
                    all_wheels.append(wf)
    # If there is a requirements file, add it to the list of requirements
    if requirements_file:
        # If the file doesn't exist, search for it in wheel_dirs
        # This allows standard requirements files to be stored with the
        # wheels.
        if not os.path.exists(requirements_file):
            for d in wheel_dirs:
                name = os.path.join(d, requirements_file)
                if os.path.exists(name):
                    requirements_file = name
                    break
        # Each line of the file is treated as one requirement.
        with open(requirements_file) as fd:
            requirements.extend(fd)
    to_install = []
    for req in requirements:
        if req.endswith('.whl'):
            # Explicitly specified wheel filename
            if os.path.exists(req):
                wf = WheelFile(req)
                if wf.compatible or force:
                    to_install.append(wf)
                else:
                    msg = ("{0} is not compatible with this Python. "
                           "--force to install anyway.".format(req))
                    raise WheelError(msg)
            else:
                # We could search on wheel_dirs, but it's probably OK to
                # assume the user has made an error.
                raise WheelError("No such wheel file: {}".format(req))
            continue
        # We have a requirement spec
        # If we don't have pkg_resources, this will raise an exception
        matches = matches_requirement(req, all_wheels)
        if not matches:
            raise WheelError("No match for requirement {}".format(req))
        # max() picks the best (highest-sorting) matching wheel.
        to_install.append(max(matches))
    # We now have a list of wheels to install
    if list_files:
        sys.stdout.write("Installing:\n")
    if dry_run:
        return
    for wf in to_install:
        if list_files:
            sys.stdout.write("  {0}\n".format(wf.filename))
            continue
        wf.install(force=force)
        wf.zipfile.close()
def install_scripts(distributions):
    """Regenerate the entry_points console_scripts for each named distribution.

    :param distributions: iterable of installed distribution names.
    :raises RuntimeError: if setuptools is not available.
    """
    try:
        import pkg_resources
        from setuptools.command import easy_install
    except ImportError:
        raise RuntimeError("'wheel install_scripts' needs setuptools.")
    for distribution in distributions:
        dist_metadata = pkg_resources.get_distribution(distribution)
        install_cmd = get_install_command(distribution)
        script_writer = easy_install.easy_install(install_cmd.distribution)
        script_writer.args = ['wheel']  # dummy argument
        script_writer.finalize_options()
        script_writer.install_egg_scripts(dist_metadata)
def convert(installers, dest_dir, verbose):
    """Convert eggs or bdist_wininst installers to wheels.

    :param installers: glob patterns naming .egg or wininst files.
    :param dest_dir: directory in which converted wheels are written.
    :param verbose: if true, report progress per installer on stdout.
    """
    require_pkgresources('wheel convert')
    # Only support wheel convert if pkg_resources is present
    from ..wininst2wheel import bdist_wininst2wheel
    from ..egg2wheel import egg2wheel
    for pattern in installers:
        for installer in iglob(pattern):
            is_egg = os.path.splitext(installer)[1] == '.egg'
            converter = egg2wheel if is_egg else bdist_wininst2wheel
            if verbose:
                sys.stdout.write("{0}... ".format(installer))
                sys.stdout.flush()
            converter(installer, dest_dir)
            if verbose:
                sys.stdout.write("OK\n")
def parser():
    """Build the argparse parser: one subparser per wheel subcommand.

    Each subparser stores a small ``func`` closure as a default; main()
    dispatches by calling ``args.func(args)``.
    """
    p = argparse.ArgumentParser()
    s = p.add_subparsers(help="commands")
    def keygen_f(args):
        keygen()
    keygen_parser = s.add_parser('keygen', help='Generate signing key')
    keygen_parser.set_defaults(func=keygen_f)
    def sign_f(args):
        sign(args.wheelfile)
    sign_parser = s.add_parser('sign', help='Sign wheel')
    sign_parser.add_argument('wheelfile', help='Wheel file')
    sign_parser.set_defaults(func=sign_f)
    def unsign_f(args):
        unsign(args.wheelfile)
    unsign_parser = s.add_parser('unsign', help=unsign.__doc__)
    unsign_parser.add_argument('wheelfile', help='Wheel file')
    unsign_parser.set_defaults(func=unsign_f)
    def verify_f(args):
        verify(args.wheelfile)
    verify_parser = s.add_parser('verify', help=verify.__doc__)
    verify_parser.add_argument('wheelfile', help='Wheel file')
    verify_parser.set_defaults(func=verify_f)
    def unpack_f(args):
        unpack(args.wheelfile, args.dest)
    unpack_parser = s.add_parser('unpack', help='Unpack wheel')
    unpack_parser.add_argument('--dest', '-d', help='Destination directory',
                               default='.')
    unpack_parser.add_argument('wheelfile', help='Wheel file')
    unpack_parser.set_defaults(func=unpack_f)
    def install_f(args):
        # Note: dry_run is not exposed on the command line.
        install(args.requirements, args.requirements_file,
                args.wheel_dirs, args.force, args.list_files)
    install_parser = s.add_parser('install', help='Install wheels')
    install_parser.add_argument('requirements', nargs='*',
                                help='Requirements to install.')
    install_parser.add_argument('--force', default=False,
                                action='store_true',
                                help='Install incompatible wheel files.')
    install_parser.add_argument('--wheel-dir', '-d', action='append',
                                dest='wheel_dirs',
                                help='Directories containing wheels.')
    install_parser.add_argument('--requirements-file', '-r',
                                help="A file containing requirements to "
                                "install.")
    install_parser.add_argument('--list', '-l', default=False,
                                dest='list_files',
                                action='store_true',
                                help="List wheels which would be installed, "
                                "but don't actually install anything.")
    install_parser.set_defaults(func=install_f)
    def install_scripts_f(args):
        install_scripts(args.distributions)
    install_scripts_parser = s.add_parser('install-scripts', help='Install console_scripts')
    install_scripts_parser.add_argument('distributions', nargs='*',
                                        help='Regenerate console_scripts for these distributions')
    install_scripts_parser.set_defaults(func=install_scripts_f)
    def convert_f(args):
        convert(args.installers, args.dest_dir, args.verbose)
    convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
    convert_parser.add_argument('installers', nargs='*', help='Installers to convert')
    convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                                help="Directory to store wheels (default %(default)s)")
    convert_parser.add_argument('--verbose', '-v', action='store_true')
    convert_parser.set_defaults(func=convert_f)
    def version_f(args):
        from .. import __version__
        sys.stdout.write("wheel %s\n" % __version__)
    version_parser = s.add_parser('version', help='Print version and exit')
    version_parser.set_defaults(func=version_f)
    def help_f(args):
        p.print_help()
    help_parser = s.add_parser('help', help='Show this help')
    help_parser.set_defaults(func=help_f)
    return p
def main():
    """Command-line entry point.

    Parses sys.argv, dispatches to the selected subcommand, and returns
    0 on success or 1 on a handled WheelError. Prints help (and returns
    None) when no subcommand was given.
    """
    p = parser()
    args = p.parse_args()
    if not hasattr(args, 'func'):
        p.print_help()
    else:
        # XXX on Python 3.3 we get 'args has no func' rather than short help.
        try:
            args.func(args)
            return 0
        except WheelError as e:
            # Bug fix: BaseException.message was removed in Python 3 (and
            # deprecated since 2.6), so e.message raised AttributeError
            # there; str(e) works on both 2 and 3.
            sys.stderr.write(str(e) + "\n")
            return 1
|
pcu4dros/pandora-core
|
workspace/lib/python3.5/site-packages/wheel/tool/__init__.py
|
Python
|
mit
| 13,421
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A time series library in TensorFlow (TFTS)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.timeseries.python.timeseries import saved_model_utils
# pylint: disable=wildcard-import
from tensorflow.contrib.timeseries.python.timeseries.ar_model import *
from tensorflow.contrib.timeseries.python.timeseries.estimators import *
from tensorflow.contrib.timeseries.python.timeseries.feature_keys import *
from tensorflow.contrib.timeseries.python.timeseries.input_pipeline import *
# pylint: enable=wildcard-import
|
npuichigo/ttsflow
|
third_party/tensorflow/tensorflow/contrib/timeseries/python/timeseries/__init__.py
|
Python
|
apache-2.0
| 1,292
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies file copies with --generator-output using an explicit build
target of 'all'.
"""
import TestGyp
# Android doesn't support --generator-output.
test = TestGyp.TestGyp(formats=['!android'])
# Make the source tree read-only to prove generation never writes into it.
test.writable(test.workpath('copies'), False)
test.run_gyp('copies.gyp',
             '--generator-output=' + test.workpath('gypfiles'),
             '-G', 'xcode_ninja_target_pattern=^(?!copies_null)',
             chdir='copies')
test.writable(test.workpath('copies'), True)
test.relocate('copies', 'relocate/copies')
test.relocate('gypfiles', 'relocate/gypfiles')
# Re-protect the sources after relocation, but keep the build output
# directories writable so the build can place results there.
test.writable(test.workpath('relocate/copies'), False)
test.writable(test.workpath('relocate/copies/build'), True)
test.writable(test.workpath('relocate/copies/copies-out'), True)
test.writable(test.workpath('relocate/copies/subdir/build'), True)
test.writable(test.workpath('relocate/copies/subdir/copies-out'), True)
# Build the explicit 'all' target and verify each copied file's contents.
test.build('copies.gyp', test.ALL, chdir='relocate/gypfiles')
test.must_match(['relocate', 'copies', 'copies-out', 'file1'],
                "file1 contents\n")
# Different generators place per-configuration outputs in different dirs.
if test.format == 'xcode':
  chdir = 'relocate/copies/build'
elif test.format in ['make', 'ninja', 'xcode-ninja', 'cmake']:
  chdir = 'relocate/gypfiles/out'
else:
  chdir = 'relocate/gypfiles'
test.must_match([chdir, 'Default', 'copies-out', 'file2'], "file2 contents\n")
test.must_match(['relocate', 'copies', 'subdir', 'copies-out', 'file3'],
                "file3 contents\n")
if test.format == 'xcode':
  chdir = 'relocate/copies/subdir/build'
elif test.format in ['make', 'ninja', 'xcode-ninja', 'cmake']:
  chdir = 'relocate/gypfiles/out'
else:
  chdir = 'relocate/gypfiles'
test.must_match([chdir, 'Default', 'copies-out', 'file4'], "file4 contents\n")
test.pass_test()
|
guorendong/iridium-browser-ubuntu
|
tools/gyp/test/generator-output/gyptest-copies.py
|
Python
|
bsd-3-clause
| 1,920
|
# -*- coding: utf-8 -*-
"""Tests for Beautiful Soup's tree traversal methods.
The tree traversal methods are the main advantage of using Beautiful
Soup over just using a parser.
Different parsers will build different Beautiful Soup trees given the
same markup, but all Beautiful Soup trees can be traversed with the
methods tested here.
"""
import copy
import pickle
import re
import warnings
from bs4 import BeautifulSoup
from bs4.builder import (
builder_registry,
HTMLParserTreeBuilder,
)
from bs4.element import (
CData,
Doctype,
NavigableString,
SoupStrainer,
Tag,
)
from bs4.testing import (
SoupTest,
skipIf,
)
XML_BUILDER_PRESENT = (builder_registry.lookup("xml") is not None)
LXML_PRESENT = (builder_registry.lookup("lxml") is not None)
class TreeTest(SoupTest):
    """Shared assertion helpers for tree-traversal tests."""

    def assertSelects(self, tags, should_match):
        """Assert that *tags* contain exactly the strings *should_match*.

        This is used in tests that define a bunch of tags, each
        containing a single string, and then select certain strings by
        some mechanism.
        """
        strings = [tag.string for tag in tags]
        self.assertEqual(strings, should_match)

    def assertSelectsIDs(self, tags, should_match):
        """Assert that *tags* carry exactly the 'id' values *should_match*.

        This is used in tests that define a bunch of tags, each
        containing a single string, and then select certain strings by
        some mechanism.
        """
        ids = [tag['id'] for tag in tags]
        self.assertEqual(ids, should_match)
class TestFind(TreeTest):
    """Basic tests of the find() method.

    find() just calls find_all() with limit=1, so it's not tested all
    that thoroughly here.
    """
    def test_find_tag(self):
        # find() returns the first matching tag in document order.
        soup = self.soup("<a>1</a><b>2</b><a>3</a><b>4</b>")
        self.assertEqual(soup.find("b").string, "2")
    def test_unicode_text_find(self):
        # Non-ASCII text nodes can be matched exactly.
        soup = self.soup(u'<h1>Räksmörgås</h1>')
        self.assertEqual(soup.find(text=u'Räksmörgås'), u'Räksmörgås')
class TestFindAll(TreeTest):
    """Basic tests of the find_all() method."""
    def test_find_all_text_nodes(self):
        """You can search the tree for text nodes."""
        soup = self.soup("<html>Foo<b>bar</b>\xbb</html>")
        # Exact match.
        self.assertEqual(soup.find_all(text="bar"), [u"bar"])
        # Match any of a number of strings.
        self.assertEqual(
            soup.find_all(text=["Foo", "bar"]), [u"Foo", u"bar"])
        # Match a regular expression.
        self.assertEqual(soup.find_all(text=re.compile('.*')),
                         [u"Foo", u"bar", u'\xbb'])
        # Match anything.
        self.assertEqual(soup.find_all(text=True),
                         [u"Foo", u"bar", u'\xbb'])
    def test_find_all_limit(self):
        """You can limit the number of items returned by find_all."""
        soup = self.soup("<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a>")
        self.assertSelects(soup.find_all('a', limit=3), ["1", "2", "3"])
        self.assertSelects(soup.find_all('a', limit=1), ["1"])
        self.assertSelects(
            soup.find_all('a', limit=10), ["1", "2", "3", "4", "5"])
        # A limit of 0 means no limit.
        self.assertSelects(
            soup.find_all('a', limit=0), ["1", "2", "3", "4", "5"])
    def test_calling_a_tag_is_calling_findall(self):
        # tag(...) is shorthand for tag.find_all(...).
        soup = self.soup("<a>1</a><b>2<a id='foo'>3</a></b>")
        self.assertSelects(soup('a', limit=1), ["1"])
        self.assertSelects(soup.b(id="foo"), ["3"])
    def test_find_all_with_self_referential_data_structure_does_not_cause_infinite_recursion(self):
        soup = self.soup("<a></a>")
        # Create a self-referential list.
        l = []
        l.append(l)
        # Without special code in _normalize_search_value, this would cause infinite
        # recursion.
        self.assertEqual([], soup.find_all(l))
class TestFindAllBasicNamespaces(TreeTest):
    """find()/find_all() can match namespace-prefixed tag and attribute names."""
    def test_find_by_namespaced_name(self):
        soup = self.soup('<mathml:msqrt>4</mathml:msqrt><a svg:fill="red">')
        self.assertEqual("4", soup.find("mathml:msqrt").string)
        self.assertEqual("a", soup.find(attrs= { "svg:fill" : "red" }).name)
class TestFindAllByName(TreeTest):
    """Test ways of finding tags by tag name."""
    def setUp(self):
        # Bug fix: this used super(TreeTest, self), naming the grandparent
        # and thereby skipping this class's own parent in the MRO — fragile
        # if TreeTest ever defines setUp(). Name the current class, matching
        # the convention used by the other test classes in this file.
        super(TestFindAllByName, self).setUp()
        self.tree = self.soup("""<a>First tag.</a>
                                 <b>Second tag.</b>
                                 <c>Third <a>Nested tag.</a> tag.</c>""")
    def test_find_all_by_tag_name(self):
        # Find all the <a> tags.
        self.assertSelects(
            self.tree.find_all('a'), ['First tag.', 'Nested tag.'])
    def test_find_all_by_name_and_text(self):
        self.assertSelects(
            self.tree.find_all('a', text='First tag.'), ['First tag.'])
        self.assertSelects(
            self.tree.find_all('a', text=True), ['First tag.', 'Nested tag.'])
        self.assertSelects(
            self.tree.find_all('a', text=re.compile("tag")),
            ['First tag.', 'Nested tag.'])
    def test_find_all_on_non_root_element(self):
        # You can call find_all on any node, not just the root.
        self.assertSelects(self.tree.c.find_all('a'), ['Nested tag.'])
    def test_calling_element_invokes_find_all(self):
        self.assertSelects(self.tree('a'), ['First tag.', 'Nested tag.'])
    def test_find_all_by_tag_strainer(self):
        self.assertSelects(
            self.tree.find_all(SoupStrainer('a')),
            ['First tag.', 'Nested tag.'])
    def test_find_all_by_tag_names(self):
        self.assertSelects(
            self.tree.find_all(['a', 'b']),
            ['First tag.', 'Second tag.', 'Nested tag.'])
    def test_find_all_by_tag_dict(self):
        self.assertSelects(
            self.tree.find_all({'a' : True, 'b' : True}),
            ['First tag.', 'Second tag.', 'Nested tag.'])
    def test_find_all_by_tag_re(self):
        self.assertSelects(
            self.tree.find_all(re.compile('^[ab]$')),
            ['First tag.', 'Second tag.', 'Nested tag.'])
    def test_find_all_with_tags_matching_method(self):
        # You can define an oracle method that determines whether
        # a tag matches the search.
        def id_matches_name(tag):
            return tag.name == tag.get('id')
        tree = self.soup("""<a id="a">Match 1.</a>
                            <a id="1">Does not match.</a>
                            <b id="b">Match 2.</a>""")
        self.assertSelects(
            tree.find_all(id_matches_name), ["Match 1.", "Match 2."])
class TestFindAllByAttribute(TreeTest):
    """Tests of finding tags by their attribute values (keyword args,
    attrs dicts, class_ searches, regexes, lists, and None/True)."""
    def test_find_all_by_attribute_name(self):
        # You can pass in keyword arguments to find_all to search by
        # attribute.
        tree = self.soup("""
                         <a id="first">Matching a.</a>
                         <a id="second">
                          Non-matching <b id="first">Matching b.</b>a.
                         </a>""")
        self.assertSelects(tree.find_all(id='first'),
                           ["Matching a.", "Matching b."])
    def test_find_all_by_utf8_attribute_value(self):
        # Both bytes and text forms of the same value should match.
        peace = u"םולש".encode("utf8")
        data = u'<a title="םולש"></a>'.encode("utf8")
        soup = self.soup(data)
        self.assertEqual([soup.a], soup.find_all(title=peace))
        self.assertEqual([soup.a], soup.find_all(title=peace.decode("utf8")))
        self.assertEqual([soup.a], soup.find_all(title=[peace, "something else"]))
    def test_find_all_by_attribute_dict(self):
        # You can pass in a dictionary as the argument 'attrs'. This
        # lets you search for attributes like 'name' (a fixed argument
        # to find_all) and 'class' (a reserved word in Python.)
        tree = self.soup("""
                         <a name="name1" class="class1">Name match.</a>
                         <a name="name2" class="class2">Class match.</a>
                         <a name="name3" class="class3">Non-match.</a>
                         <name1>A tag called 'name1'.</name1>
                         """)
        # This doesn't do what you want.
        self.assertSelects(tree.find_all(name='name1'),
                           ["A tag called 'name1'."])
        # This does what you want.
        self.assertSelects(tree.find_all(attrs={'name' : 'name1'}),
                           ["Name match."])
        self.assertSelects(tree.find_all(attrs={'class' : 'class2'}),
                           ["Class match."])
    def test_find_all_by_class(self):
        tree = self.soup("""
                         <a class="1">Class 1.</a>
                         <a class="2">Class 2.</a>
                         <b class="1">Class 1.</b>
                         <c class="3 4">Class 3 and 4.</c>
                         """)
        # Passing in the class_ keyword argument will search against
        # the 'class' attribute.
        self.assertSelects(tree.find_all('a', class_='1'), ['Class 1.'])
        self.assertSelects(tree.find_all('c', class_='3'), ['Class 3 and 4.'])
        self.assertSelects(tree.find_all('c', class_='4'), ['Class 3 and 4.'])
        # Passing in a string to 'attrs' will also search the CSS class.
        self.assertSelects(tree.find_all('a', '1'), ['Class 1.'])
        self.assertSelects(tree.find_all(attrs='1'), ['Class 1.', 'Class 1.'])
        self.assertSelects(tree.find_all('c', '3'), ['Class 3 and 4.'])
        self.assertSelects(tree.find_all('c', '4'), ['Class 3 and 4.'])
    def test_find_by_class_when_multiple_classes_present(self):
        tree = self.soup("<gar class='foo bar'>Found it</gar>")
        f = tree.find_all("gar", class_=re.compile("o"))
        self.assertSelects(f, ["Found it"])
        f = tree.find_all("gar", class_=re.compile("a"))
        self.assertSelects(f, ["Found it"])
        # Since the class is not the string "foo bar", but the two
        # strings "foo" and "bar", this will not find anything.
        f = tree.find_all("gar", class_=re.compile("o b"))
        self.assertSelects(f, [])
    def test_find_all_with_non_dictionary_for_attrs_finds_by_class(self):
        soup = self.soup("<a class='bar'>Found it</a>")
        self.assertSelects(soup.find_all("a", re.compile("ba")), ["Found it"])
        # A callable passed as attrs is applied to the class value.
        def big_attribute_value(value):
            return len(value) > 3
        self.assertSelects(soup.find_all("a", big_attribute_value), [])
        def small_attribute_value(value):
            return len(value) <= 3
        self.assertSelects(
            soup.find_all("a", small_attribute_value), ["Found it"])
    def test_find_all_with_string_for_attrs_finds_multiple_classes(self):
        soup = self.soup('<a class="foo bar"></a><a class="foo"></a>')
        a, a2 = soup.find_all("a")
        self.assertEqual([a, a2], soup.find_all("a", "foo"))
        self.assertEqual([a], soup.find_all("a", "bar"))
        # If you specify the class as a string that contains a
        # space, only that specific value will be found.
        self.assertEqual([a], soup.find_all("a", class_="foo bar"))
        self.assertEqual([a], soup.find_all("a", "foo bar"))
        self.assertEqual([], soup.find_all("a", "bar foo"))
    def test_find_all_by_attribute_soupstrainer(self):
        tree = self.soup("""
                         <a id="first">Match.</a>
                         <a id="second">Non-match.</a>""")
        strainer = SoupStrainer(attrs={'id' : 'first'})
        self.assertSelects(tree.find_all(strainer), ['Match.'])
    def test_find_all_with_missing_atribute(self):
        # You can pass in None as the value of an attribute to find_all.
        # This will match tags that do not have that attribute set.
        tree = self.soup("""<a id="1">ID present.</a>
                            <a>No ID present.</a>
                            <a id="">ID is empty.</a>""")
        self.assertSelects(tree.find_all('a', id=None), ["No ID present."])
    def test_find_all_with_defined_attribute(self):
        # You can pass in None as the value of an attribute to find_all.
        # This will match tags that have that attribute set to any value.
        tree = self.soup("""<a id="1">ID present.</a>
                            <a>No ID present.</a>
                            <a id="">ID is empty.</a>""")
        self.assertSelects(
            tree.find_all(id=True), ["ID present.", "ID is empty."])
    def test_find_all_with_numeric_attribute(self):
        # If you search for a number, it's treated as a string.
        tree = self.soup("""<a id=1>Unquoted attribute.</a>
                            <a id="1">Quoted attribute.</a>""")
        expected = ["Unquoted attribute.", "Quoted attribute."]
        self.assertSelects(tree.find_all(id=1), expected)
        self.assertSelects(tree.find_all(id="1"), expected)
    def test_find_all_with_list_attribute_values(self):
        # You can pass a list of attribute values instead of just one,
        # and you'll get tags that match any of the values.
        tree = self.soup("""<a id="1">1</a>
                            <a id="2">2</a>
                            <a id="3">3</a>
                            <a>No ID.</a>""")
        self.assertSelects(tree.find_all(id=["1", "3", "4"]),
                           ["1", "3"])
    def test_find_all_with_regular_expression_attribute_value(self):
        # You can pass a regular expression as an attribute value, and
        # you'll get tags whose values for that attribute match the
        # regular expression.
        tree = self.soup("""<a id="a">One a.</a>
                            <a id="aa">Two as.</a>
                            <a id="ab">Mixed as and bs.</a>
                            <a id="b">One b.</a>
                            <a>No ID.</a>""")
        self.assertSelects(tree.find_all(id=re.compile("^a+$")),
                           ["One a.", "Two as."])
    def test_find_by_name_and_containing_string(self):
        soup = self.soup("<b>foo</b><b>bar</b><a>foo</a>")
        a = soup.a
        self.assertEqual([a], soup.find_all("a", text="foo"))
        self.assertEqual([], soup.find_all("a", text="bar"))
        self.assertEqual([], soup.find_all("a", text="bar"))
    def test_find_by_name_and_containing_string_when_string_is_buried(self):
        soup = self.soup("<a>foo</a><a><b><c>foo</c></b></a>")
        self.assertEqual(soup.find_all("a"), soup.find_all("a", text="foo"))
    def test_find_by_attribute_and_containing_string(self):
        soup = self.soup('<b id="1">foo</b><a id="2">foo</a>')
        a = soup.a
        self.assertEqual([a], soup.find_all(id=2, text="foo"))
        self.assertEqual([], soup.find_all(id=1, text="bar"))
class TestIndex(TreeTest):
    """Test Tag.index"""
    def test_index(self):
        # index() must locate each child by identity, not equality —
        # the fixture deliberately contains equal-looking siblings.
        tree = self.soup("""<div>
                            <a>Identical</a>
                            <b>Not identical</b>
                            <a>Identical</a>
                            <c><d>Identical with child</d></c>
                            <b>Also not identical</b>
                            <c><d>Identical with child</d></c>
                            </div>""")
        div = tree.div
        for i, element in enumerate(div.contents):
            self.assertEqual(i, div.index(element))
        # A non-child raises ValueError.
        self.assertRaises(ValueError, tree.index, 1)
class TestParentOperations(TreeTest):
    """Test navigation and searching through an element's parents."""
    def setUp(self):
        super(TestParentOperations, self).setUp()
        # Nested <ul> fixture; navigation starts at the innermost <b>.
        self.tree = self.soup('''<ul id="empty"></ul>
                                 <ul id="top">
                                 <ul id="middle">
                                 <ul id="bottom">
                                 <b>Start here</b>
                                 </ul>
                                 </ul>''')
        self.start = self.tree.b
    def test_parent(self):
        self.assertEqual(self.start.parent['id'], 'bottom')
        self.assertEqual(self.start.parent.parent['id'], 'middle')
        self.assertEqual(self.start.parent.parent.parent['id'], 'top')
    def test_parent_of_top_tag_is_soup_object(self):
        top_tag = self.tree.contents[0]
        self.assertEqual(top_tag.parent, self.tree)
    def test_soup_object_has_no_parent(self):
        self.assertEqual(None, self.tree.parent)
    def test_find_parents(self):
        # Parents are returned innermost-first.
        self.assertSelectsIDs(
            self.start.find_parents('ul'), ['bottom', 'middle', 'top'])
        self.assertSelectsIDs(
            self.start.find_parents('ul', id="middle"), ['middle'])
    def test_find_parent(self):
        self.assertEqual(self.start.find_parent('ul')['id'], 'bottom')
    def test_parent_of_text_element(self):
        text = self.tree.find(text="Start here")
        self.assertEqual(text.parent.name, 'b')
    def test_text_element_find_parent(self):
        text = self.tree.find(text="Start here")
        self.assertEqual(text.find_parent('ul')['id'], 'bottom')
    def test_parent_generator(self):
        parents = [parent['id'] for parent in self.start.parents
                   if parent is not None and 'id' in parent.attrs]
        self.assertEqual(parents, ['bottom', 'middle', 'top'])
class ProximityTest(TreeTest):
    """Shared fixture for next/previous-element tests: three flat <b> tags."""
    def setUp(self):
        # Bug fix: this used super(TreeTest, self), naming the grandparent
        # and skipping TreeTest in the MRO — fragile if TreeTest ever
        # defines setUp(). Name the current class instead, matching the
        # other test classes in this file.
        super(ProximityTest, self).setUp()
        self.tree = self.soup(
            '<html id="start"><head></head><body><b id="1">One</b><b id="2">Two</b><b id="3">Three</b></body></html>')
class TestNextOperations(ProximityTest):
    """Tests of .next_element navigation and find_next/find_all_next."""
    def setUp(self):
        super(TestNextOperations, self).setUp()
        self.start = self.tree.b
    def test_next(self):
        self.assertEqual(self.start.next_element, "One")
        self.assertEqual(self.start.next_element.next_element['id'], "2")
    def test_next_of_last_item_is_none(self):
        last = self.tree.find(text="Three")
        self.assertEqual(last.next_element, None)
    def test_next_of_root_is_none(self):
        # The document root is outside the next/previous chain.
        self.assertEqual(self.tree.next_element, None)
    def test_find_all_next(self):
        self.assertSelects(self.start.find_all_next('b'), ["Two", "Three"])
        # (Removed a stray bare self.start.find_all_next(id=3) call whose
        # result was discarded — it duplicated the assertion below.)
        self.assertSelects(self.start.find_all_next(id=3), ["Three"])
    def test_find_next(self):
        self.assertEqual(self.start.find_next('b')['id'], '2')
        self.assertEqual(self.start.find_next(text="Three"), "Three")
    def test_find_next_for_text_element(self):
        text = self.tree.find(text="One")
        self.assertEqual(text.find_next("b").string, "Two")
        self.assertSelects(text.find_all_next("b"), ["Two", "Three"])
    def test_next_generator(self):
        start = self.tree.find(text="Two")
        successors = [node for node in start.next_elements]
        # There are two successors: the final <b> tag and its text contents.
        tag, contents = successors
        self.assertEqual(tag['id'], '3')
        self.assertEqual(contents, "Three")
class TestPreviousOperations(ProximityTest):
    """Tests of .previous_element navigation and find_previous/find_all_previous."""
    def setUp(self):
        super(TestPreviousOperations, self).setUp()
        self.end = self.tree.find(text="Three")
    def test_previous(self):
        self.assertEqual(self.end.previous_element['id'], "3")
        self.assertEqual(self.end.previous_element.previous_element, "Two")
    def test_previous_of_first_item_is_none(self):
        first = self.tree.find('html')
        self.assertEqual(first.previous_element, None)
    def test_previous_of_root_is_none(self):
        # The document root is outside the next/previous chain.
        # XXX This is broken!
        #self.assertEqual(self.tree.previous_element, None)
        pass
    def test_find_all_previous(self):
        # The <b> tag containing the "Three" node is the predecessor
        # of the "Three" node itself, which is why "Three" shows up
        # here.
        self.assertSelects(
            self.end.find_all_previous('b'), ["Three", "Two", "One"])
        self.assertSelects(self.end.find_all_previous(id=1), ["One"])
    def test_find_previous(self):
        self.assertEqual(self.end.find_previous('b')['id'], '3')
        self.assertEqual(self.end.find_previous(text="One"), "One")
    def test_find_previous_for_text_element(self):
        text = self.tree.find(text="Three")
        self.assertEqual(text.find_previous("b").string, "Three")
        self.assertSelects(
            text.find_all_previous("b"), ["Three", "Two", "One"])
    def test_previous_generator(self):
        start = self.tree.find(text="One")
        predecessors = [node for node in start.previous_elements]
        # There are four predecessors: the <b> tag containing "One"
        # the <body> tag, the <head> tag, and the <html> tag.
        b, body, head, html = predecessors
        self.assertEqual(b['id'], '1')
        self.assertEqual(body.name, "body")
        self.assertEqual(head.name, "head")
        self.assertEqual(html.name, "html")
class SiblingTest(TreeTest):
    """Shared fixture for sibling tests: four top-level <span> siblings,
    the first three each containing a nested <span>."""
    def setUp(self):
        super(SiblingTest, self).setUp()
        markup = '''<html>
                    <span id="1">
                    <span id="1.1"></span>
                    </span>
                    <span id="2">
                    <span id="2.1"></span>
                    </span>
                    <span id="3">
                    <span id="3.1"></span>
                    </span>
                    <span id="4"></span>
                    </html>'''
        # All that whitespace looks good but makes the tests more
        # difficult. Get rid of it.
        # Bug fix: use a raw string — "\s" in a plain literal is an invalid
        # escape sequence (SyntaxWarning on modern Python); the pattern
        # itself is unchanged.
        markup = re.compile(r"\n\s*").sub("", markup)
        self.tree = self.soup(markup)
class TestNextSibling(SiblingTest):
    """Tests of .next_sibling navigation and find_next_sibling(s)."""
    def setUp(self):
        super(TestNextSibling, self).setUp()
        self.start = self.tree.find(id="1")
    def test_next_sibling_of_root_is_none(self):
        self.assertEqual(self.tree.next_sibling, None)
    def test_next_sibling(self):
        self.assertEqual(self.start.next_sibling['id'], '2')
        self.assertEqual(self.start.next_sibling.next_sibling['id'], '3')
        # Note the difference between next_sibling and next_element.
        self.assertEqual(self.start.next_element['id'], '1.1')
    def test_next_sibling_may_not_exist(self):
        self.assertEqual(self.tree.html.next_sibling, None)
        nested_span = self.tree.find(id="1.1")
        self.assertEqual(nested_span.next_sibling, None)
        last_span = self.tree.find(id="4")
        self.assertEqual(last_span.next_sibling, None)
    def test_find_next_sibling(self):
        self.assertEqual(self.start.find_next_sibling('span')['id'], '2')
    def test_next_siblings(self):
        self.assertSelectsIDs(self.start.find_next_siblings("span"),
                              ['2', '3', '4'])
        self.assertSelectsIDs(self.start.find_next_siblings(id='3'), ['3'])
    def test_next_sibling_for_text_element(self):
        # Siblings work for NavigableString nodes too.
        soup = self.soup("Foo<b>bar</b>baz")
        start = soup.find(text="Foo")
        self.assertEqual(start.next_sibling.name, 'b')
        self.assertEqual(start.next_sibling.next_sibling, 'baz')
        self.assertSelects(start.find_next_siblings('b'), ['bar'])
        self.assertEqual(start.find_next_sibling(text="baz"), "baz")
        self.assertEqual(start.find_next_sibling(text="nonesuch"), None)
class TestPreviousSibling(SiblingTest):
    """Tests for .previous_sibling navigation and find_previous_sibling(s)."""
    def setUp(self):
        super(TestPreviousSibling, self).setUp()
        self.end = self.tree.find(id="4")
    def test_previous_sibling_of_root_is_none(self):
        # The BeautifulSoup object itself has no siblings.
        self.assertEqual(self.tree.previous_sibling, None)
    def test_previous_sibling(self):
        self.assertEqual(self.end.previous_sibling['id'], '3')
        self.assertEqual(self.end.previous_sibling.previous_sibling['id'], '2')
        # Note the difference between previous_sibling and previous_element:
        # previous_element is the nested <span id="3.1"> inside sibling 3.
        self.assertEqual(self.end.previous_element['id'], '3.1')
    def test_previous_sibling_may_not_exist(self):
        self.assertEqual(self.tree.html.previous_sibling, None)
        nested_span = self.tree.find(id="1.1")
        self.assertEqual(nested_span.previous_sibling, None)
        first_span = self.tree.find(id="1")
        self.assertEqual(first_span.previous_sibling, None)
    def test_find_previous_sibling(self):
        self.assertEqual(self.end.find_previous_sibling('span')['id'], '3')
    def test_previous_siblings(self):
        self.assertSelectsIDs(self.end.find_previous_siblings("span"),
                              ['3', '2', '1'])
        self.assertSelectsIDs(self.end.find_previous_siblings(id='1'), ['1'])
    def test_previous_sibling_for_text_element(self):
        # Sibling navigation also works from a text node.
        soup = self.soup("Foo<b>bar</b>baz")
        start = soup.find(text="baz")
        self.assertEqual(start.previous_sibling.name, 'b')
        self.assertEqual(start.previous_sibling.previous_sibling, 'Foo')
        self.assertSelects(start.find_previous_siblings('b'), ['bar'])
        self.assertEqual(start.find_previous_sibling(text="Foo"), "Foo")
        self.assertEqual(start.find_previous_sibling(text="nonesuch"), None)
class TestTagCreation(SoupTest):
    """Test the ability to create new tags."""
    def test_new_tag(self):
        # new_tag() builds a detached Tag with the given attributes.
        soup = self.soup("")
        new_tag = soup.new_tag("foo", bar="baz")
        self.assertTrue(isinstance(new_tag, Tag))
        self.assertEqual("foo", new_tag.name)
        self.assertEqual(dict(bar="baz"), new_tag.attrs)
        self.assertEqual(None, new_tag.parent)
    def test_tag_inherits_self_closing_rules_from_builder(self):
        if XML_BUILDER_PRESENT:
            xml_soup = BeautifulSoup("", "xml")
            xml_br = xml_soup.new_tag("br")
            xml_p = xml_soup.new_tag("p")
            # Both the <br> and <p> tag are empty-element, just because
            # they have no contents.
            self.assertEqual(b"<br/>", xml_br.encode())
            self.assertEqual(b"<p/>", xml_p.encode())
        html_soup = BeautifulSoup("", "html")
        html_br = html_soup.new_tag("br")
        html_p = html_soup.new_tag("p")
        # The HTML builder users HTML's rules about which tags are
        # empty-element tags, and the new tags reflect these rules.
        self.assertEqual(b"<br/>", html_br.encode())
        self.assertEqual(b"<p></p>", html_p.encode())
    def test_new_string_creates_navigablestring(self):
        soup = self.soup("")
        s = soup.new_string("foo")
        self.assertEqual("foo", s)
        self.assertTrue(isinstance(s, NavigableString))
class TestTreeModification(SoupTest):
    """Tests for modifying a parsed tree: attribute changes, insert/
    append, insert_before/insert_after, replace_with, unwrap, wrap,
    extract, clear, and .string assignment. Many tests also verify
    that the next_element/previous_element and sibling chains are
    correctly re-linked after each modification.
    """
    def test_attribute_modification(self):
        soup = self.soup('<a id="1"></a>')
        soup.a['id'] = 2
        self.assertEqual(soup.decode(), self.document_for('<a id="2"></a>'))
        del(soup.a['id'])
        self.assertEqual(soup.decode(), self.document_for('<a></a>'))
        soup.a['id2'] = 'foo'
        self.assertEqual(soup.decode(), self.document_for('<a id2="foo"></a>'))
    def test_new_tag_creation(self):
        # Tags can be constructed directly and inserted into the tree.
        builder = builder_registry.lookup('html')()
        soup = self.soup("<body></body>", builder=builder)
        a = Tag(soup, builder, 'a')
        ol = Tag(soup, builder, 'ol')
        a['href'] = 'http://foo.com/'
        soup.body.insert(0, a)
        soup.body.insert(1, ol)
        self.assertEqual(
            soup.body.encode(),
            b'<body><a href="http://foo.com/"></a><ol></ol></body>')
    def test_append_to_contents_moves_tag(self):
        doc = """<p id="1">Don't leave me <b>here</b>.</p>
                <p id="2">Don\'t leave!</p>"""
        soup = self.soup(doc)
        second_para = soup.find(id='2')
        bold = soup.b
        # Move the <b> tag to the end of the second paragraph.
        soup.find(id='2').append(soup.b)
        # The <b> tag is now a child of the second paragraph.
        self.assertEqual(bold.parent, second_para)
        self.assertEqual(
            soup.decode(), self.document_for(
                '<p id="1">Don\'t leave me .</p>\n'
                '<p id="2">Don\'t leave!<b>here</b></p>'))
    def test_replace_with_returns_thing_that_was_replaced(self):
        text = "<a></a><b><c></c></b>"
        soup = self.soup(text)
        a = soup.a
        new_a = a.replace_with(soup.c)
        self.assertEqual(a, new_a)
    def test_unwrap_returns_thing_that_was_replaced(self):
        text = "<a><b></b><c></c></a>"
        soup = self.soup(text)
        a = soup.a
        new_a = a.unwrap()
        self.assertEqual(a, new_a)
    def test_replace_tag_with_itself(self):
        # Replacing a tag with itself is a no-op.
        text = "<a><b></b><c>Foo<d></d></c></a><a><e></e></a>"
        soup = self.soup(text)
        c = soup.c
        soup.c.replace_with(c)
        self.assertEqual(soup.decode(), self.document_for(text))
    def test_replace_tag_with_its_parent_raises_exception(self):
        text = "<a><b></b></a>"
        soup = self.soup(text)
        self.assertRaises(ValueError, soup.b.replace_with, soup.a)
    def test_insert_tag_into_itself_raises_exception(self):
        text = "<a><b></b></a>"
        soup = self.soup(text)
        self.assertRaises(ValueError, soup.a.insert, 0, soup.a)
    def test_replace_with_maintains_next_element_throughout(self):
        soup = self.soup('<p><a>one</a><b>three</b></p>')
        a = soup.a
        b = a.contents[0]
        # Make it so the <a> tag has two text children.
        a.insert(1, "two")
        # Now replace each one with the empty string.
        left, right = a.contents
        left.replaceWith('')
        right.replaceWith('')
        # The <b> tag is still connected to the tree.
        self.assertEqual("three", soup.b.string)
    def test_replace_final_node(self):
        # Replacing the last node keeps the next/previous chain intact.
        soup = self.soup("<b>Argh!</b>")
        soup.find(text="Argh!").replace_with("Hooray!")
        new_text = soup.find(text="Hooray!")
        b = soup.b
        self.assertEqual(new_text.previous_element, b)
        self.assertEqual(new_text.parent, b)
        self.assertEqual(new_text.previous_element.next_element, new_text)
        self.assertEqual(new_text.next_element, None)
    def test_consecutive_text_nodes(self):
        # A builder should never create two consecutive text nodes,
        # but if you insert one next to another, Beautiful Soup will
        # handle it correctly.
        soup = self.soup("<a><b>Argh!</b><c></c></a>")
        soup.b.insert(1, "Hooray!")
        self.assertEqual(
            soup.decode(), self.document_for(
                "<a><b>Argh!Hooray!</b><c></c></a>"))
        new_text = soup.find(text="Hooray!")
        self.assertEqual(new_text.previous_element, "Argh!")
        self.assertEqual(new_text.previous_element.next_element, new_text)
        self.assertEqual(new_text.previous_sibling, "Argh!")
        self.assertEqual(new_text.previous_sibling.next_sibling, new_text)
        self.assertEqual(new_text.next_sibling, None)
        self.assertEqual(new_text.next_element, soup.c)
    def test_insert_string(self):
        soup = self.soup("<a></a>")
        soup.a.insert(0, "bar")
        soup.a.insert(0, "foo")
        # The string were added to the tag.
        self.assertEqual(["foo", "bar"], soup.a.contents)
        # And they were converted to NavigableStrings.
        self.assertEqual(soup.a.contents[0].next_element, "bar")
    def test_insert_tag(self):
        builder = self.default_builder
        soup = self.soup(
            "<a><b>Find</b><c>lady!</c><d></d></a>", builder=builder)
        magic_tag = Tag(soup, builder, 'magictag')
        magic_tag.insert(0, "the")
        soup.a.insert(1, magic_tag)
        self.assertEqual(
            soup.decode(), self.document_for(
                "<a><b>Find</b><magictag>the</magictag><c>lady!</c><d></d></a>"))
        # Make sure all the relationships are hooked up correctly.
        b_tag = soup.b
        self.assertEqual(b_tag.next_sibling, magic_tag)
        self.assertEqual(magic_tag.previous_sibling, b_tag)
        find = b_tag.find(text="Find")
        self.assertEqual(find.next_element, magic_tag)
        self.assertEqual(magic_tag.previous_element, find)
        c_tag = soup.c
        self.assertEqual(magic_tag.next_sibling, c_tag)
        self.assertEqual(c_tag.previous_sibling, magic_tag)
        the = magic_tag.find(text="the")
        self.assertEqual(the.parent, magic_tag)
        self.assertEqual(the.next_element, c_tag)
        self.assertEqual(c_tag.previous_element, the)
    def test_append_child_thats_already_at_the_end(self):
        # Appending a tag to its current position is a no-op.
        data = "<a><b></b></a>"
        soup = self.soup(data)
        soup.a.append(soup.b)
        self.assertEqual(data, soup.decode())
    def test_move_tag_to_beginning_of_parent(self):
        data = "<a><b></b><c></c><d></d></a>"
        soup = self.soup(data)
        soup.a.insert(0, soup.d)
        self.assertEqual("<a><d></d><b></b><c></c></a>", soup.decode())
    def test_insert_works_on_empty_element_tag(self):
        # This is a little strange, since most HTML parsers don't allow
        # markup like this to come through. But in general, we don't
        # know what the parser would or wouldn't have allowed, so
        # I'm letting this succeed for now.
        soup = self.soup("<br/>")
        soup.br.insert(1, "Contents")
        self.assertEqual(str(soup.br), "<br>Contents</br>")
    def test_insert_before(self):
        soup = self.soup("<a>foo</a><b>bar</b>")
        soup.b.insert_before("BAZ")
        soup.a.insert_before("QUUX")
        self.assertEqual(
            soup.decode(), self.document_for("QUUX<a>foo</a>BAZ<b>bar</b>"))
        # insert_before also moves an existing tag.
        soup.a.insert_before(soup.b)
        self.assertEqual(
            soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))
    def test_insert_after(self):
        soup = self.soup("<a>foo</a><b>bar</b>")
        soup.b.insert_after("BAZ")
        soup.a.insert_after("QUUX")
        self.assertEqual(
            soup.decode(), self.document_for("<a>foo</a>QUUX<b>bar</b>BAZ"))
        # insert_after also moves an existing tag.
        soup.b.insert_after(soup.a)
        self.assertEqual(
            soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))
    def test_insert_after_raises_exception_if_after_has_no_meaning(self):
        soup = self.soup("")
        tag = soup.new_tag("a")
        string = soup.new_string("")
        self.assertRaises(ValueError, string.insert_after, tag)
        self.assertRaises(NotImplementedError, soup.insert_after, tag)
        self.assertRaises(ValueError, tag.insert_after, tag)
    def test_insert_before_raises_notimplementederror_if_before_has_no_meaning(self):
        soup = self.soup("")
        tag = soup.new_tag("a")
        string = soup.new_string("")
        self.assertRaises(ValueError, string.insert_before, tag)
        self.assertRaises(NotImplementedError, soup.insert_before, tag)
        self.assertRaises(ValueError, tag.insert_before, tag)
    def test_replace_with(self):
        soup = self.soup(
            "<p>There's <b>no</b> business like <b>show</b> business</p>")
        no, show = soup.find_all('b')
        show.replace_with(no)
        self.assertEqual(
            soup.decode(),
            self.document_for(
                "<p>There's  business like <b>no</b> business</p>"))
        self.assertEqual(show.parent, None)
        self.assertEqual(no.parent, soup.p)
        self.assertEqual(no.next_element, "no")
        self.assertEqual(no.next_sibling, " business")
    def test_replace_first_child(self):
        data = "<a><b></b><c></c></a>"
        soup = self.soup(data)
        soup.b.replace_with(soup.c)
        self.assertEqual("<a><c></c></a>", soup.decode())
    def test_replace_last_child(self):
        data = "<a><b></b><c></c></a>"
        soup = self.soup(data)
        soup.c.replace_with(soup.b)
        self.assertEqual("<a><b></b></a>", soup.decode())
    def test_nested_tag_replace_with(self):
        soup = self.soup(
            """<a>We<b>reserve<c>the</c><d>right</d></b></a><e>to<f>refuse</f><g>service</g></e>""")
        # Replace the entire <b> tag and its contents ("reserve the
        # right") with the <f> tag ("refuse").
        remove_tag = soup.b
        move_tag = soup.f
        remove_tag.replace_with(move_tag)
        self.assertEqual(
            soup.decode(), self.document_for(
                "<a>We<f>refuse</f></a><e>to<g>service</g></e>"))
        # The <b> tag is now an orphan.
        self.assertEqual(remove_tag.parent, None)
        self.assertEqual(remove_tag.find(text="right").next_element, None)
        self.assertEqual(remove_tag.previous_element, None)
        self.assertEqual(remove_tag.next_sibling, None)
        self.assertEqual(remove_tag.previous_sibling, None)
        # The <f> tag is now connected to the <a> tag.
        self.assertEqual(move_tag.parent, soup.a)
        self.assertEqual(move_tag.previous_element, "We")
        self.assertEqual(move_tag.next_element.next_element, soup.e)
        self.assertEqual(move_tag.next_sibling, None)
        # The gap where the <f> tag used to be has been mended, and
        # the word "to" is now connected to the <g> tag.
        to_text = soup.find(text="to")
        g_tag = soup.g
        self.assertEqual(to_text.next_element, g_tag)
        self.assertEqual(to_text.next_sibling, g_tag)
        self.assertEqual(g_tag.previous_element, to_text)
        self.assertEqual(g_tag.previous_sibling, to_text)
    def test_unwrap(self):
        tree = self.soup("""
            <p>Unneeded <em>formatting</em> is unneeded</p>
            """)
        tree.em.unwrap()
        self.assertEqual(tree.em, None)
        self.assertEqual(tree.p.text, "Unneeded formatting is unneeded")
    def test_wrap(self):
        soup = self.soup("I wish I was bold.")
        value = soup.string.wrap(soup.new_tag("b"))
        self.assertEqual(value.decode(), "<b>I wish I was bold.</b>")
        self.assertEqual(
            soup.decode(), self.document_for("<b>I wish I was bold.</b>"))
    def test_wrap_extracts_tag_from_elsewhere(self):
        soup = self.soup("<b></b>I wish I was bold.")
        soup.b.next_sibling.wrap(soup.b)
        self.assertEqual(
            soup.decode(), self.document_for("<b>I wish I was bold.</b>"))
    def test_wrap_puts_new_contents_at_the_end(self):
        soup = self.soup("<b>I like being bold.</b>I wish I was bold.")
        soup.b.next_sibling.wrap(soup.b)
        self.assertEqual(2, len(soup.b.contents))
        self.assertEqual(
            soup.decode(), self.document_for(
                "<b>I like being bold.I wish I was bold.</b>"))
    def test_extract(self):
        soup = self.soup(
            '<html><body>Some content. <div id="nav">Nav crap</div> More content.</body></html>')
        self.assertEqual(len(soup.body.contents), 3)
        extracted = soup.find(id="nav").extract()
        self.assertEqual(
            soup.decode(), "<html><body>Some content. More content.</body></html>")
        self.assertEqual(extracted.decode(), '<div id="nav">Nav crap</div>')
        # The extracted tag is now an orphan.
        self.assertEqual(len(soup.body.contents), 2)
        self.assertEqual(extracted.parent, None)
        self.assertEqual(extracted.previous_element, None)
        self.assertEqual(extracted.next_element.next_element, None)
        # The gap where the extracted tag used to be has been mended.
        content_1 = soup.find(text="Some content. ")
        content_2 = soup.find(text=" More content.")
        self.assertEqual(content_1.next_element, content_2)
        self.assertEqual(content_1.next_sibling, content_2)
        self.assertEqual(content_2.previous_element, content_1)
        self.assertEqual(content_2.previous_sibling, content_1)
    def test_extract_distinguishes_between_identical_strings(self):
        soup = self.soup("<a>foo</a><b>bar</b>")
        foo_1 = soup.a.string
        bar_1 = soup.b.string
        foo_2 = soup.new_string("foo")
        bar_2 = soup.new_string("bar")
        soup.a.append(foo_2)
        soup.b.append(bar_2)
        # Now there are two identical strings in the <a> tag, and two
        # in the <b> tag. Let's remove the first "foo" and the second
        # "bar".
        foo_1.extract()
        bar_2.extract()
        self.assertEqual(foo_2, soup.a.string)
        self.assertEqual(bar_2, soup.b.string)
    def test_clear(self):
        """Tag.clear()"""
        soup = self.soup("<p><a>String <em>Italicized</em></a> and another</p>")
        # clear using extract()
        a = soup.a
        soup.p.clear()
        self.assertEqual(len(soup.p.contents), 0)
        self.assertTrue(hasattr(a, "contents"))
        # clear using decompose()
        em = a.em
        a.clear(decompose=True)
        self.assertFalse(hasattr(em, "contents"))
    def test_string_set(self):
        """Tag.string = 'string'"""
        soup = self.soup("<a></a> <b><c></c></b>")
        soup.a.string = "foo"
        self.assertEqual(soup.a.contents, ["foo"])
        soup.b.string = "bar"
        self.assertEqual(soup.b.contents, ["bar"])
    def test_string_set_does_not_affect_original_string(self):
        soup = self.soup("<a><b>foo</b><c>bar</c>")
        soup.b.string = soup.c.string
        self.assertEqual(soup.a.encode(), b"<a><b>bar</b><c>bar</c></a>")
    def test_set_string_preserves_class_of_string(self):
        soup = self.soup("<a></a>")
        cdata = CData("foo")
        soup.a.string = cdata
        self.assertTrue(isinstance(soup.a.string, CData))
class TestElementObjects(SoupTest):
    """Test various features of element objects."""
    def test_len(self):
        """The length of an element is its number of children."""
        soup = self.soup("<top>1<b>2</b>3</top>")
        # The BeautifulSoup object itself contains one element: the
        # <top> tag.
        self.assertEqual(len(soup.contents), 1)
        self.assertEqual(len(soup), 1)
        # The <top> tag contains three elements: the text node "1", the
        # <b> tag, and the text node "3".
        self.assertEqual(len(soup.top), 3)
        self.assertEqual(len(soup.top.contents), 3)
    def test_member_access_invokes_find(self):
        """Accessing a Python member .foo invokes find('foo')"""
        soup = self.soup('<b><i></i></b>')
        self.assertEqual(soup.b, soup.find('b'))
        self.assertEqual(soup.b.i, soup.find('b').find('i'))
        self.assertEqual(soup.a, None)
    def test_deprecated_member_access(self):
        # Old-style .fooTag access still works, but emits a
        # deprecation warning.
        soup = self.soup('<b><i></i></b>')
        with warnings.catch_warnings(record=True) as w:
            tag = soup.bTag
        self.assertEqual(soup.b, tag)
        self.assertEqual(
            '.bTag is deprecated, use .find("b") instead.',
            str(w[0].message))
    def test_has_attr(self):
        """has_attr() checks for the presence of an attribute.

        Please note: has_attr() is different from
        __in__. has_attr() checks the tag's attributes and __in__
        checks the tag's children.
        """
        soup = self.soup("<foo attr='bar'>")
        self.assertTrue(soup.foo.has_attr('attr'))
        self.assertFalse(soup.foo.has_attr('attr2'))
    def test_attributes_come_out_in_alphabetical_order(self):
        markup = '<b a="1" z="5" m="3" f="2" y="4"></b>'
        self.assertSoupEquals(markup, '<b a="1" f="2" m="3" y="4" z="5"></b>')
    def test_string(self):
        # A tag that contains only a text node makes that node
        # available as .string.
        soup = self.soup("<b>foo</b>")
        self.assertEqual(soup.b.string, 'foo')
    def test_empty_tag_has_no_string(self):
        # A tag with no children has no .string.
        soup = self.soup("<b></b>")
        self.assertEqual(soup.b.string, None)
    def test_tag_with_multiple_children_has_no_string(self):
        # A tag with more than one child has no .string.
        soup = self.soup("<a>foo<b></b><b></b></b>")
        self.assertEqual(soup.b.string, None)
        soup = self.soup("<a>foo<b></b>bar</b>")
        self.assertEqual(soup.b.string, None)
        # Even if all the children are strings, due to trickery,
        # it won't work--but this would be a good optimization.
        soup = self.soup("<a>foo</b>")
        soup.a.insert(1, "bar")
        self.assertEqual(soup.a.string, None)
    def test_tag_with_recursive_string_has_string(self):
        # A tag with a single child which has a .string inherits that
        # .string.
        soup = self.soup("<a><b>foo</b></a>")
        self.assertEqual(soup.a.string, "foo")
        self.assertEqual(soup.string, "foo")
    def test_lack_of_string(self):
        """Only a tag containing a single text node has a .string."""
        soup = self.soup("<b>f<i>e</i>o</b>")
        self.assertFalse(soup.b.string)
        soup = self.soup("<b></b>")
        self.assertFalse(soup.b.string)
    def test_all_text(self):
        """Tag.text and Tag.get_text(sep=u"") -> all child text, concatenated"""
        soup = self.soup("<a>a<b>r</b> <r> t </r></a>")
        self.assertEqual(soup.a.text, "ar t ")
        self.assertEqual(soup.a.get_text(strip=True), "art")
        self.assertEqual(soup.a.get_text(","), "a,r, , t ")
        self.assertEqual(soup.a.get_text(",", strip=True), "a,r,t")
class TestCDAtaListAttributes(SoupTest):
    """Testing cdata-list attributes like 'class', which are parsed
    into lists of whitespace-separated values.

    NOTE(review): the class name misspells "CData"; it is kept as-is
    because renaming could break external references to this test.
    """
    def test_single_value_becomes_list(self):
        soup = self.soup("<a class='foo'>")
        self.assertEqual(["foo"],soup.a['class'])
    def test_multiple_values_becomes_list(self):
        soup = self.soup("<a class='foo bar'>")
        self.assertEqual(["foo", "bar"], soup.a['class'])
    def test_multiple_values_separated_by_weird_whitespace(self):
        # Tabs and newlines also act as value separators.
        soup = self.soup("<a class='foo\tbar\nbaz'>")
        self.assertEqual(["foo", "bar", "baz"],soup.a['class'])
    def test_attributes_joined_into_string_on_output(self):
        soup = self.soup("<a class='foo\tbar'>")
        self.assertEqual(b'<a class="foo bar"></a>', soup.a.encode())
    def test_accept_charset(self):
        # accept-charset is a cdata-list attribute on <form>.
        soup = self.soup('<form accept-charset="ISO-8859-1 UTF-8">')
        self.assertEqual(['ISO-8859-1', 'UTF-8'], soup.form['accept-charset'])
    def test_cdata_attribute_applying_only_to_one_tag(self):
        data = '<a accept-charset="ISO-8859-1 UTF-8"></a>'
        soup = self.soup(data)
        # We saw in another test that accept-charset is a cdata-list
        # attribute for the <form> tag. But it's not a cdata-list
        # attribute for any other tag.
        self.assertEqual('ISO-8859-1 UTF-8', soup.a['accept-charset'])
class TestPersistence(SoupTest):
    """Testing features like pickle and deepcopy: a round-tripped tree
    must decode to the same markup as the original."""
    def setUp(self):
        super(TestPersistence, self).setUp()
        # A realistic page fixture with doctype, head metadata, and links.
        self.page = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN"
"http://www.w3.org/TR/REC-html40/transitional.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Beautiful Soup: We called him Tortoise because he taught us.</title>
<link rev="made" href="mailto:leonardr@segfault.org">
<meta name="Description" content="Beautiful Soup: an HTML parser optimized for screen-scraping.">
<meta name="generator" content="Markov Approximation 1.4 (module: leonardr)">
<meta name="author" content="Leonard Richardson">
</head>
<body>
<a href="foo">foo</a>
<a href="foo"><b>bar</b></a>
</body>
</html>"""
        self.tree = self.soup(self.page)
    def test_pickle_and_unpickle_identity(self):
        # Pickling a tree, then unpickling it, yields a tree identical
        # to the original.
        dumped = pickle.dumps(self.tree, 2)
        loaded = pickle.loads(dumped)
        self.assertEqual(loaded.__class__, BeautifulSoup)
        self.assertEqual(loaded.decode(), self.tree.decode())
    def test_deepcopy_identity(self):
        # Making a deepcopy of a tree yields an identical tree.
        copied = copy.deepcopy(self.tree)
        self.assertEqual(copied.decode(), self.tree.decode())
    def test_unicode_pickle(self):
        # A tree containing Unicode characters can be pickled.
        html = u"<b>\N{SNOWMAN}</b>"
        soup = self.soup(html)
        dumped = pickle.dumps(soup, pickle.HIGHEST_PROTOCOL)
        loaded = pickle.loads(dumped)
        self.assertEqual(loaded.decode(), soup.decode())
class TestSubstitutions(SoupTest):
    """Tests for output formatters ("minimal", "html", None, custom
    callables) and for charset substitution in <meta> tags on encode."""
    def test_default_formatter_is_minimal(self):
        markup = u"<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>"
        soup = self.soup(markup)
        decoded = soup.decode(formatter="minimal")
        # The < is converted back into &lt; but the e-with-acute is left alone.
        self.assertEqual(
            decoded,
            self.document_for(
                u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"))
    def test_formatter_html(self):
        # The "html" formatter converts non-ASCII characters to
        # named HTML entities.
        markup = u"<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>"
        soup = self.soup(markup)
        decoded = soup.decode(formatter="html")
        self.assertEqual(
            decoded,
            self.document_for("<b>&lt;&lt;Sacr&eacute; bleu!&gt;&gt;</b>"))
    def test_formatter_minimal(self):
        markup = u"<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>"
        soup = self.soup(markup)
        decoded = soup.decode(formatter="minimal")
        # The < is converted back into &lt; but the e-with-acute is left alone.
        self.assertEqual(
            decoded,
            self.document_for(
                u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"))
    def test_formatter_null(self):
        markup = u"<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>"
        soup = self.soup(markup)
        decoded = soup.decode(formatter=None)
        # Neither the angle brackets nor the e-with-acute are converted.
        # This is not valid HTML, but it's what the user wanted.
        self.assertEqual(decoded,
                          self.document_for(u"<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>"))
    def test_formatter_custom(self):
        markup = u"<b>&lt;foo&gt;</b><b>bar</b>"
        soup = self.soup(markup)
        decoded = soup.decode(formatter = lambda x: x.upper())
        # Instead of normal entity conversion code, the custom
        # callable is called on every string.
        self.assertEqual(
            decoded,
            self.document_for(u"<b><FOO></b><b>BAR</b>"))
    def test_formatter_is_run_on_attribute_values(self):
        markup = u'<a href="http://a.com?a=b&c=é">e</a>'
        soup = self.soup(markup)
        a = soup.a
        expect_minimal = u'<a href="http://a.com?a=b&amp;c=é">e</a>'
        self.assertEqual(expect_minimal, a.decode())
        self.assertEqual(expect_minimal, a.decode(formatter="minimal"))
        expect_html = u'<a href="http://a.com?a=b&amp;c=&eacute;">e</a>'
        self.assertEqual(expect_html, a.decode(formatter="html"))
        self.assertEqual(markup, a.decode(formatter=None))
        expect_upper = u'<a href="HTTP://A.COM?A=B&C=É">E</a>'
        self.assertEqual(expect_upper, a.decode(formatter=lambda x: x.upper()))
    def test_prettify_accepts_formatter(self):
        soup = BeautifulSoup("<html><body>foo</body></html>")
        pretty = soup.prettify(formatter = lambda x: x.upper())
        self.assertTrue("FOO" in pretty)
    def test_prettify_outputs_unicode_by_default(self):
        soup = self.soup("<a></a>")
        self.assertEqual(unicode, type(soup.prettify()))
    def test_prettify_can_encode_data(self):
        soup = self.soup("<a></a>")
        self.assertEqual(bytes, type(soup.prettify("utf-8")))
    def test_html_entity_substitution_off_by_default(self):
        markup = u"<b>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</b>"
        soup = self.soup(markup)
        encoded = soup.b.encode("utf-8")
        self.assertEqual(encoded, markup.encode('utf-8'))
    def test_encoding_substitution(self):
        # Here's the <meta> tag saying that a document is
        # encoded in Shift-JIS.
        meta_tag = ('<meta content="text/html; charset=x-sjis" '
                    'http-equiv="Content-type"/>')
        soup = self.soup(meta_tag)
        # Parse the document, and the charset appears unchanged.
        self.assertEqual(soup.meta['content'], 'text/html; charset=x-sjis')
        # Encode the document into some encoding, and the encoding is
        # substituted into the meta tag.
        utf_8 = soup.encode("utf-8")
        self.assertTrue(b"charset=utf-8" in utf_8)
        euc_jp = soup.encode("euc_jp")
        self.assertTrue(b"charset=euc_jp" in euc_jp)
        shift_jis = soup.encode("shift-jis")
        self.assertTrue(b"charset=shift-jis" in shift_jis)
        utf_16_u = soup.encode("utf-16").decode("utf-16")
        self.assertTrue("charset=utf-16" in utf_16_u)
    def test_encoding_substitution_doesnt_happen_if_tag_is_strained(self):
        markup = ('<head><meta content="text/html; charset=x-sjis" '
                  'http-equiv="Content-type"/></head><pre>foo</pre>')
        # Beautiful Soup used to try to rewrite the meta tag even if the
        # meta tag got filtered out by the strainer. This test makes
        # sure that doesn't happen.
        strainer = SoupStrainer('pre')
        soup = self.soup(markup, parse_only=strainer)
        self.assertEqual(soup.contents[0].name, 'pre')
class TestEncoding(SoupTest):
    """Test the ability to encode objects into strings."""
    def test_unicode_string_can_be_encoded(self):
        html = u"<b>\N{SNOWMAN}</b>"
        soup = self.soup(html)
        self.assertEqual(soup.b.string.encode("utf-8"),
                          u"\N{SNOWMAN}".encode("utf-8"))
    def test_tag_containing_unicode_string_can_be_encoded(self):
        html = u"<b>\N{SNOWMAN}</b>"
        soup = self.soup(html)
        self.assertEqual(
            soup.b.encode("utf-8"), html.encode("utf-8"))
    def test_encoding_substitutes_unrecognized_characters_by_default(self):
        # Characters the target codec can't represent become
        # XML character references.
        html = u"<b>\N{SNOWMAN}</b>"
        soup = self.soup(html)
        self.assertEqual(soup.b.encode("ascii"), b"<b>&#9731;</b>")
    def test_encoding_can_be_made_strict(self):
        html = u"<b>\N{SNOWMAN}</b>"
        soup = self.soup(html)
        self.assertRaises(
            UnicodeEncodeError, soup.encode, "ascii", errors="strict")
    def test_decode_contents(self):
        # decode_contents() returns the children only, as unicode.
        html = u"<b>\N{SNOWMAN}</b>"
        soup = self.soup(html)
        self.assertEqual(u"\N{SNOWMAN}", soup.b.decode_contents())
    def test_encode_contents(self):
        # encode_contents() returns the children only, as bytes.
        html = u"<b>\N{SNOWMAN}</b>"
        soup = self.soup(html)
        self.assertEqual(
            u"\N{SNOWMAN}".encode("utf8"), soup.b.encode_contents(
                encoding="utf8"))
    def test_deprecated_renderContents(self):
        # renderContents() is the old name for encode_contents().
        html = u"<b>\N{SNOWMAN}</b>"
        soup = self.soup(html)
        self.assertEqual(
            u"\N{SNOWMAN}".encode("utf8"), soup.b.renderContents())
class TestNavigableStringSubclasses(SoupTest):
    """Tests for NavigableString subclasses such as CData and Doctype."""
    def test_cdata(self):
        # None of the current builders turn CDATA sections into CData
        # objects, but you can create them manually.
        soup = self.soup("")
        cdata = CData("foo")
        soup.insert(1, cdata)
        self.assertEqual(str(soup), "<![CDATA[foo]]>")
        self.assertEqual(soup.find(text="foo"), "foo")
        self.assertEqual(soup.contents[0], "foo")
    def test_cdata_is_never_formatted(self):
        """Text inside a CData object is passed into the formatter.

        But the return value is ignored.
        """
        self.count = 0
        def increment(*args):
            self.count += 1
            return "BITTER FAILURE"
        soup = self.soup("")
        cdata = CData("<><><>")
        soup.insert(1, cdata)
        self.assertEqual(
            b"<![CDATA[<><><>]]>", soup.encode(formatter=increment))
        # The formatter was called exactly once, but its output
        # ("BITTER FAILURE") does not appear in the encoded result.
        self.assertEqual(1, self.count)
    def test_doctype_ends_in_newline(self):
        # Unlike other NavigableString subclasses, a DOCTYPE always ends
        # in a newline.
        doctype = Doctype("foo")
        soup = self.soup("")
        soup.insert(1, doctype)
        self.assertEqual(soup.encode(), b"<!DOCTYPE foo>\n")
class TestSoupSelector(TreeTest):
HTML = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<title>The title</title>
<link rel="stylesheet" href="blah.css" type="text/css" id="l1">
</head>
<body>
<div id="main">
<div id="inner">
<h1 id="header1">An H1</h1>
<p>Some text</p>
<p class="onep" id="p1">Some more text</p>
<h2 id="header2">An H2</h2>
<p class="class1 class2 class3" id="pmulti">Another</p>
<a href="http://bob.example.org/" rel="friend met" id="bob">Bob</a>
<h2 id="header3">Another H2</h2>
<a id="me" href="http://simonwillison.net/" rel="me">me</a>
<span class="s1">
<a href="#" id="s1a1">span1a1</a>
<a href="#" id="s1a2">span1a2 <span id="s1a2s1">test</span></a>
<span class="span2">
<a href="#" id="s2a1">span2a1</a>
</span>
<span class="span3"></span>
</span>
</div>
<p lang="en" id="lang-en">English</p>
<p lang="en-gb" id="lang-en-gb">English UK</p>
<p lang="en-us" id="lang-en-us">English US</p>
<p lang="fr" id="lang-fr">French</p>
</div>
<div id="footer">
</div>
"""
    def setUp(self):
        self.soup = BeautifulSoup(self.HTML)
    def assertSelects(self, selector, expected_ids):
        # Run the CSS selector and compare the matched elements' ids
        # against expected_ids, ignoring order.
        el_ids = [el['id'] for el in self.soup.select(selector)]
        el_ids.sort()
        expected_ids.sort()
        self.assertEqual(expected_ids, el_ids,
            "Selector %s, expected [%s], got [%s]" % (
                selector, ', '.join(expected_ids), ', '.join(el_ids)
            )
        )
    # Alias so callers can use the singular form.
    assertSelect = assertSelects
    def assertSelectMultiple(self, *tests):
        # Each test is a (selector, expected_ids) pair.
        for selector, expected_ids in tests:
            self.assertSelect(selector, expected_ids)
def test_one_tag_one(self):
els = self.soup.select('title')
self.assertEqual(len(els), 1)
self.assertEqual(els[0].name, 'title')
self.assertEqual(els[0].contents, [u'The title'])
def test_one_tag_many(self):
els = self.soup.select('div')
self.assertEqual(len(els), 3)
for div in els:
self.assertEqual(div.name, 'div')
def test_tag_in_tag_one(self):
els = self.soup.select('div div')
self.assertSelects('div div', ['inner'])
def test_tag_in_tag_many(self):
for selector in ('html div', 'html body div', 'body div'):
self.assertSelects(selector, ['main', 'inner', 'footer'])
def test_tag_no_match(self):
self.assertEqual(len(self.soup.select('del')), 0)
def test_invalid_tag(self):
self.assertEqual(len(self.soup.select('tag%t')), 0)
def test_header_tags(self):
self.assertSelectMultiple(
('h1', ['header1']),
('h2', ['header2', 'header3']),
)
def test_class_one(self):
for selector in ('.onep', 'p.onep', 'html p.onep'):
els = self.soup.select(selector)
self.assertEqual(len(els), 1)
self.assertEqual(els[0].name, 'p')
self.assertEqual(els[0]['class'], ['onep'])
def test_class_mismatched_tag(self):
els = self.soup.select('div.onep')
self.assertEqual(len(els), 0)
def test_one_id(self):
for selector in ('div#inner', '#inner', 'div div#inner'):
self.assertSelects(selector, ['inner'])
def test_bad_id(self):
els = self.soup.select('#doesnotexist')
self.assertEqual(len(els), 0)
def test_items_in_id(self):
els = self.soup.select('div#inner p')
self.assertEqual(len(els), 3)
for el in els:
self.assertEqual(el.name, 'p')
self.assertEqual(els[1]['class'], ['onep'])
self.assertFalse(els[0].has_key('class'))
def test_a_bunch_of_emptys(self):
for selector in ('div#main del', 'div#main div.oops', 'div div#main'):
self.assertEqual(len(self.soup.select(selector)), 0)
def test_multi_class_support(self):
for selector in ('.class1', 'p.class1', '.class2', 'p.class2',
'.class3', 'p.class3', 'html p.class2', 'div#inner .class2'):
self.assertSelects(selector, ['pmulti'])
def test_multi_class_selection(self):
for selector in ('.class1.class3', '.class3.class2',
'.class1.class2.class3'):
self.assertSelects(selector, ['pmulti'])
    def test_child_selector(self):
        # '>' matches direct children only; the nested span is then reached
        # by a descendant combinator from one of those children.
        self.assertSelects('.s1 > a', ['s1a1', 's1a2'])
        self.assertSelects('.s1 > a span', ['s1a2s1'])
    def test_attribute_equals(self):
        # [attr="value"] is an exact attribute match; pairing a valid
        # attribute with the wrong tag must yield nothing.
        self.assertSelectMultiple(
            ('p[class="onep"]', ['p1']),
            ('p[id="p1"]', ['p1']),
            ('[class="onep"]', ['p1']),
            ('[id="p1"]', ['p1']),
            ('link[rel="stylesheet"]', ['l1']),
            ('link[type="text/css"]', ['l1']),
            ('link[href="blah.css"]', ['l1']),
            ('link[href="no-blah.css"]', []),
            ('[rel="stylesheet"]', ['l1']),
            ('[type="text/css"]', ['l1']),
            ('[href="blah.css"]', ['l1']),
            ('[href="no-blah.css"]', []),
            ('p[href="no-blah.css"]', []),
            ('[href="no-blah.css"]', []),
        )
    def test_attribute_tilde(self):
        # [attr~="word"] matches when 'word' appears in the attribute's
        # whitespace-separated token list (classes, rel values, ...).
        self.assertSelectMultiple(
            ('p[class~="class1"]', ['pmulti']),
            ('p[class~="class2"]', ['pmulti']),
            ('p[class~="class3"]', ['pmulti']),
            ('[class~="class1"]', ['pmulti']),
            ('[class~="class2"]', ['pmulti']),
            ('[class~="class3"]', ['pmulti']),
            ('a[rel~="friend"]', ['bob']),
            ('a[rel~="met"]', ['bob']),
            ('[rel~="friend"]', ['bob']),
            ('[rel~="met"]', ['bob']),
        )
    def test_attribute_startswith(self):
        # [attr^="prefix"] matches attribute values beginning with 'prefix'.
        self.assertSelectMultiple(
            ('[rel^="style"]', ['l1']),
            ('link[rel^="style"]', ['l1']),
            ('notlink[rel^="notstyle"]', []),
            ('[rel^="notstyle"]', []),
            ('link[rel^="notstyle"]', []),
            ('link[href^="bla"]', ['l1']),
            ('a[href^="http://"]', ['bob', 'me']),
            ('[href^="http://"]', ['bob', 'me']),
            ('[id^="p"]', ['pmulti', 'p1']),
            ('[id^="m"]', ['me', 'main']),
            ('div[id^="m"]', ['main']),
            ('a[id^="m"]', ['me']),
        )
    def test_attribute_endswith(self):
        # [attr$="suffix"] matches attribute values ending with 'suffix'.
        self.assertSelectMultiple(
            ('[href$=".css"]', ['l1']),
            ('link[href$=".css"]', ['l1']),
            ('link[id$="1"]', ['l1']),
            ('[id$="1"]', ['l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1']),
            ('div[id$="1"]', []),
            ('[id$="noending"]', []),
        )
    def test_attribute_contains(self):
        # [attr*="sub"] matches any substring of the attribute value, so it
        # subsumes both the startswith (^=) and endswith ($=) cases above.
        self.assertSelectMultiple(
            # From test_attribute_startswith
            ('[rel*="style"]', ['l1']),
            ('link[rel*="style"]', ['l1']),
            ('notlink[rel*="notstyle"]', []),
            ('[rel*="notstyle"]', []),
            ('link[rel*="notstyle"]', []),
            ('link[href*="bla"]', ['l1']),
            ('a[href*="http://"]', ['bob', 'me']),
            ('[href*="http://"]', ['bob', 'me']),
            ('[id*="p"]', ['pmulti', 'p1']),
            ('div[id*="m"]', ['main']),
            ('a[id*="m"]', ['me']),
            # From test_attribute_endswith
            ('[href*=".css"]', ['l1']),
            ('link[href*=".css"]', ['l1']),
            ('link[id*="1"]', ['l1']),
            ('[id*="1"]', ['l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1']),
            ('div[id*="1"]', []),
            ('[id*="noending"]', []),
            # New for this test
            ('[href*="."]', ['bob', 'me', 'l1']),
            ('a[href*="."]', ['bob', 'me']),
            ('link[href*="."]', ['l1']),
            ('div[id*="n"]', ['main', 'inner']),
            ('div[id*="nn"]', ['inner']),
        )
    def test_attribute_exact_or_hypen(self):
        # [attr|="val"] matches 'val' exactly or any value starting with
        # 'val-' (the CSS language-subcode selector). A bare subcode like
        # 'gb' must not match 'en-gb'.
        # (Method name keeps the historical 'hypen' spelling — renaming a
        # test method would change the suite's public surface.)
        self.assertSelectMultiple(
            ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
            ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
            ('p[lang|="fr"]', ['lang-fr']),
            ('p[lang|="gb"]', []),
        )
    def test_attribute_exists(self):
        # [attr] matches presence of the attribute, whatever its value;
        # absent attributes match nothing even with a tag qualifier.
        self.assertSelectMultiple(
            ('[rel]', ['l1', 'bob', 'me']),
            ('link[rel]', ['l1']),
            ('a[rel]', ['bob', 'me']),
            ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']),
            ('p[class]', ['p1', 'pmulti']),
            ('[blah]', []),
            ('p[blah]', []),
        )
def test_select_on_element(self):
# Other tests operate on the tree; this operates on an element
# within the tree.
inner = self.soup.find("div", id="main")
selected = inner.select("div")
# The <div id="inner"> tag was selected. The <div id="footer">
# tag was not.
self.assertSelectsIDs(selected, ['inner'])
|
tremby/deluge-yarss-plugin
|
yarss2/include/bs4/tests/test_tree.py
|
Python
|
gpl-3.0
| 65,254
|
"""HTML utilities suitable for global use."""
from __future__ import unicode_literals
import re
import string
try:
from urllib.parse import quote, unquote, urlsplit, urlunsplit
except ImportError: # Python 2
from urllib import quote, unquote
from urlparse import urlsplit, urlunsplit
from django.utils.safestring import SafeData, mark_safe
from django.utils.encoding import force_text, force_str
from django.utils.functional import allow_lazy
from django.utils import six
from django.utils.text import normalize_newlines
# Configuration for urlize() function.
TRAILING_PUNCTUATION = ['.', ',', ':', ';', '.)']
WRAPPING_PUNCTUATION = [('(', ')'), ('<', '>'), ('[', ']'), ('<', '>')]
# List of possible strings used for bullets in bulleted lists.
DOTS = ['·', '*', '\u2022', '•', '•', '•']
unencoded_ampersands_re = re.compile(r'&(?!(\w+|#\d+);)')
word_split_re = re.compile(r'(\s+)')
simple_url_re = re.compile(r'^https?://\w', re.IGNORECASE)
simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)$', re.IGNORECASE)
simple_email_re = re.compile(r'^\S+@\S+\.\S+$')
link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+')
html_gunk_re = re.compile(r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE)
hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join([re.escape(x) for x in DOTS]), re.DOTALL)
trailing_empty_content_re = re.compile(r'(?:<p>(?: |\s|<br \/>)*?</p>\s*)+\Z')
strip_tags_re = re.compile(r'<[^>]*?>', re.IGNORECASE)
def escape(text):
    """
    Returns the given text with ampersands, quotes and angle brackets encoded
    for use in HTML.
    """
    # FIX: the replacement strings in this copy had been entity-decoded
    # (e.g. .replace('&', '&') and .replace("'", "'")), turning every
    # substitution into a no-op and leaving markup unescaped. Restore the
    # proper HTML entities. '&' must be replaced first so the entities
    # introduced by the later replacements are not double-escaped.
    return mark_safe(force_text(text).replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;'))
escape = allow_lazy(escape, six.text_type)
# Translation table mapping characters that are dangerous inside JS string
# literals (quotes, angle brackets, HTML-significant chars, and the JS line
# separators U+2028/U+2029) to their \uXXXX escape sequences.
_js_escapes = {
    ord('\\'): '\\u005C',
    ord('\''): '\\u0027',
    ord('"'): '\\u0022',
    ord('>'): '\\u003E',
    ord('<'): '\\u003C',
    ord('&'): '\\u0026',
    ord('='): '\\u003D',
    ord('-'): '\\u002D',
    ord(';'): '\\u003B',
    ord('\u2028'): '\\u2028',
    ord('\u2029'): '\\u2029'
}
# Escape every ASCII character with a value less than 32.
_js_escapes.update((ord('%c' % z), '\\u%04X' % z) for z in range(32))
def escapejs(value):
    """Hex encodes characters for use in JavaScript strings."""
    return mark_safe(force_text(value).translate(_js_escapes))
escapejs = allow_lazy(escapejs, six.text_type)
def conditional_escape(text):
    """
    Similar to escape(), except that it doesn't operate on pre-escaped
    strings: anything already marked safe passes through unchanged.
    """
    if isinstance(text, SafeData):
        return text
    return escape(text)
def format_html(format_string, *args, **kwargs):
    """
    Similar to str.format, but passes all arguments through conditional_escape,
    and calls 'mark_safe' on the result. This function should be used instead
    of str.format or % interpolation to build up small HTML fragments.
    """
    escaped_args = tuple(conditional_escape(arg) for arg in args)
    escaped_kwargs = dict((key, conditional_escape(value))
                          for key, value in six.iteritems(kwargs))
    return mark_safe(format_string.format(*escaped_args, **escaped_kwargs))
def format_html_join(sep, format_string, args_generator):
    """
    A wrapper of format_html, for the common case of a group of arguments that
    need to be formatted using the same format string, and then joined using
    'sep'. 'sep' is also passed through conditional_escape.

    'args_generator' should be an iterator that returns the sequence of 'args'
    that will be passed to format_html.

    Example:

      format_html_join('\n', "<li>{0} {1}</li>", ((u.first_name, u.last_name)
                                                  for u in users))
    """
    fragments = (format_html(format_string, *tuple(args))
                 for args in args_generator)
    return mark_safe(conditional_escape(sep).join(fragments))
def linebreaks(value, autoescape=False):
    """Converts newlines into <p> and <br />s."""
    value = normalize_newlines(value)
    # Two or more consecutive newlines separate paragraphs; single
    # newlines inside a paragraph become <br />.
    paragraphs = re.split('\n{2,}', value)
    if autoescape:
        rendered = ['<p>%s</p>' % escape(p).replace('\n', '<br />')
                    for p in paragraphs]
    else:
        rendered = ['<p>%s</p>' % p.replace('\n', '<br />')
                    for p in paragraphs]
    return '\n\n'.join(rendered)
linebreaks = allow_lazy(linebreaks, six.text_type)
def strip_tags(value):
    """Returns the given HTML with all tags stripped."""
    # Regex-based stripping; does not validate the markup.
    text = force_text(value)
    return strip_tags_re.sub('', text)
strip_tags = allow_lazy(strip_tags)
def remove_tags(html, tags):
    """Returns the given HTML with given tags removed.

    'tags' is a space-separated string of tag names; the tags' contents
    are kept, only the opening and closing tags themselves are dropped.
    """
    alternatives = '(%s)' % '|'.join(re.escape(tag) for tag in tags.split())
    opening_re = re.compile(r'<%s(/?>|(\s+[^>]*>))' % alternatives, re.U)
    closing_re = re.compile('</%s>' % alternatives)
    return closing_re.sub('', opening_re.sub('', html))
remove_tags = allow_lazy(remove_tags, six.text_type)
def strip_spaces_between_tags(value):
    """Returns the given HTML with spaces between tags removed."""
    text = force_text(value)
    return re.sub(r'>\s+<', '><', text)
strip_spaces_between_tags = allow_lazy(strip_spaces_between_tags, six.text_type)
def strip_entities(value):
    """Returns the given HTML with all entities (&something;) stripped."""
    text = force_text(value)
    return re.sub(r'&(?:\w+|#\d+);', '', text)
strip_entities = allow_lazy(strip_entities, six.text_type)
def fix_ampersands(value):
    """Returns the given HTML with all unencoded ampersands encoded correctly."""
    # FIX: the replacement string in this copy had been entity-decoded to a
    # bare '&', which made the substitution replace '&' with '&' — a no-op.
    # The intended output for an unencoded ampersand is '&amp;'.
    return unencoded_ampersands_re.sub('&amp;', force_text(value))
fix_ampersands = allow_lazy(fix_ampersands, six.text_type)
def smart_urlquote(url):
    "Quotes a URL if it isn't already quoted."
    # Handle IDN before quoting.
    scheme, netloc, path, query, fragment = urlsplit(url)
    try:
        netloc = netloc.encode('idna').decode('ascii')  # IDN -> ACE
    except UnicodeError:  # invalid domain part
        # Leave the URL untouched; quoting below still applies.
        pass
    else:
        url = urlunsplit((scheme, netloc, path, query, fragment))

    # An unquoted URL is unquoted first so already-quoted input is not
    # double-quoted ("isn't already quoted" in the docstring).
    url = unquote(force_str(url))
    # See http://bugs.python.org/issue2637
    url = quote(url, safe=b'!*\'();:@&=+$,/?#[]~')

    return force_text(url)
def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
    """
    Converts any URLs in text into clickable links.

    Works on http://, https://, www. links, and also on links ending in one of
    the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
    Links can have trailing punctuation (periods, commas, close-parens) and
    leading punctuation (opening parens) and it'll still do the right thing.

    If trim_url_limit is not None, the URLs in link text longer than this limit
    will truncated to trim_url_limit-3 characters and appended with an elipsis.

    If nofollow is True, the URLs in link text will get a rel="nofollow"
    attribute.

    If autoescape is True, the link text and URLs will get autoescaped.
    """
    trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x
    safe_input = isinstance(text, SafeData)
    words = word_split_re.split(force_text(text))
    for i, word in enumerate(words):
        # Only words containing '.', '@' or ':' can possibly be URLs/emails.
        if '.' in word or '@' in word or ':' in word:
            # Deal with punctuation.
            lead, middle, trail = '', word, ''
            for punctuation in TRAILING_PUNCTUATION:
                if middle.endswith(punctuation):
                    middle = middle[:-len(punctuation)]
                    trail = punctuation + trail
            for opening, closing in WRAPPING_PUNCTUATION:
                if middle.startswith(opening):
                    middle = middle[len(opening):]
                    lead = lead + opening
                # Keep parentheses at the end only if they're balanced.
                if (middle.endswith(closing)
                        and middle.count(closing) == middle.count(opening) + 1):
                    middle = middle[:-len(closing)]
                    trail = closing + trail

            # Make URL we want to point to.
            url = None
            nofollow_attr = ' rel="nofollow"' if nofollow else ''
            if simple_url_re.match(middle):
                url = smart_urlquote(middle)
            elif simple_url_2_re.match(middle):
                url = smart_urlquote('http://%s' % middle)
            elif not ':' in middle and simple_email_re.match(middle):
                local, domain = middle.rsplit('@', 1)
                try:
                    domain = domain.encode('idna').decode('ascii')
                except UnicodeError:
                    continue
                url = 'mailto:%s@%s' % (local, domain)
                nofollow_attr = ''

            # Make link.
            if url:
                trimmed = trim_url(middle)
                if autoescape and not safe_input:
                    lead, trail = escape(lead), escape(trail)
                    url, trimmed = escape(url), escape(trimmed)
                middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed)
                words[i] = mark_safe('%s%s%s' % (lead, middle, trail))
            else:
                if safe_input:
                    words[i] = mark_safe(word)
                elif autoescape:
                    words[i] = escape(word)
        # FIX: these two branches belong to the outer if (word has no URL
        # characters at all). In this copy they had been flattened into the
        # inner else, where they duplicated the branches above and were
        # unreachable — so ordinary words were never escaped even with
        # autoescape=True. (An unused 'match = None' local was also removed.)
        elif safe_input:
            words[i] = mark_safe(word)
        elif autoescape:
            words[i] = escape(word)
    return ''.join(words)
urlize = allow_lazy(urlize, six.text_type)
def clean_html(text):
    """
    Clean the given HTML.  Specifically, do the following:
        * Convert <b> and <i> to <strong> and <em>.
        * Encode all ampersands correctly.
        * Remove all "target" attributes from <a> tags.
        * Remove extraneous HTML, such as presentational tags that open and
          immediately close and <br clear="all">.
        * Convert hard-coded bullets into HTML unordered lists.
        * Remove stuff like "<p>&nbsp;&nbsp;</p>", but only if it's at the
          bottom of the text.
    """
    from django.utils.text import normalize_newlines
    text = normalize_newlines(force_text(text))
    # Normalize <b>/<i> (and their closing forms) to semantic equivalents.
    text = re.sub(r'<(/?)\s*b\s*>', '<\\1strong>', text)
    text = re.sub(r'<(/?)\s*i\s*>', '<\\1em>', text)
    text = fix_ampersands(text)
    # Remove all target="" attributes from <a> tags.
    text = link_target_attribute_re.sub('\\1', text)
    # Trim stupid HTML such as <br clear="all">.
    text = html_gunk_re.sub('', text)
    # Convert hard-coded bullets into HTML unordered lists.
    def replace_p_tags(match):
        # Each bullet paragraph becomes an <li>; the run is wrapped in <ul>.
        s = match.group().replace('</p>', '</li>')
        for d in DOTS:
            s = s.replace('<p>%s' % d, '<li>')
        return '<ul>\n%s\n</ul>' % s
    text = hard_coded_bullets_re.sub(replace_p_tags, text)
    # Remove stuff like "<p>&nbsp;&nbsp;</p>", but only if it's at the bottom
    # of the text.
    text = trailing_empty_content_re.sub('', text)
    return text
clean_html = allow_lazy(clean_html, six.text_type)
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-1.5/django/utils/html.py
|
Python
|
bsd-3-clause
| 11,180
|
class A:
def foo(self):
self.a = <caret>{"1": 1, "2":2
|
asedunov/intellij-community
|
python/testData/codeInsight/smartEnter/dict.py
|
Python
|
apache-2.0
| 66
|
###
#
# Copyright Alan Kennedy.
#
# You may contact the copyright holder at this uri:
#
# http://www.xhaus.com/contact/modjy
#
# The licence under which this code is released is the Apache License v2.0.
#
# The terms and conditions of this license are listed in a file contained
# in the distribution that also contained this file, under the name
# LICENSE.txt.
#
# You may also read a copy of the license at the following web address.
#
# http://modjy.xhaus.com/LICENSE.txt
#
###
import sys
import synchronize
from java.io import File
from modjy_exceptions import *
class modjy_publisher:
    """Mixin that locates and caches the WSGI application callable.

    Two mechanisms are supported: an importable dotted name
    ('app_import_name') or the older file-based mechanism that execfile()s
    a source file and pulls a named callable out of its namespace.
    Written for Jython/Python 2 (java.io.File, old-style except syntax).
    """

    def init_publisher(self):
        """Resolve the application directory and put it on sys.path."""
        # Cache layout differs by mechanism: a single tuple for importable
        # apps, a dict keyed by (filename, callable) for file-based apps.
        self.cache = None
        if self.params['app_directory']:
            self.app_directory = self.expand_relative_path(self.params['app_directory'])
        else:
            # Fall back to the servlet context root.
            self.app_directory = self.servlet_context.getRealPath('/')
        self.params['app_directory'] = self.app_directory
        if self.app_directory is not None and not self.app_directory in sys.path:
            sys.path.append(self.app_directory)

    def map_uri(self, req, environ):
        """Return (source file path, callable name) for this request.

        The callable name may be overridden per-request through the query
        parameter named by 'callable_query_name'; otherwise the configured
        'app_callable_name' is used.
        """
        source_uri = '%s%s%s' % (self.app_directory, File.separator, self.params['app_filename'])
        callable_name = self.params['app_callable_name']
        if self.params['callable_query_name']:
            # NOTE: when the query override is enabled but absent from the
            # query string, callable_name falls through to '' below.
            query_string = req.getQueryString()
            if query_string:
                for name_val in query_string.split('&'):
                    if name_val.find('=') != -1:
                        name, value = name_val.split('=', 1)
                    else:
                        name, value = name_val, ''
                    if name == self.params['callable_query_name']:
                        callable_name = value
            else:
                callable_name = ''
        return source_uri, callable_name

    def get_app_object(self, req, environ):
        """Populate CGI path variables and return the app callable.

        Synchronized (see below) because the cache is shared across
        request threads.
        """
        environ["SCRIPT_NAME"] = "%s%s" % (req.getContextPath(), req.getServletPath())
        path_info = req.getPathInfo() or ""
        environ["PATH_INFO"] = path_info
        environ["PATH_TRANSLATED"] = File(self.app_directory, path_info).getPath()
        if self.params['app_import_name']:
            return self.get_app_object_importable(self.params['app_import_name'])
        else:
            if self.cache is None:
                self.cache = {}
            return self.get_app_object_old_style(req, environ)

    get_app_object = synchronize.make_synchronized(get_app_object)

    def get_app_object_importable(self, importable_name):
        """Return the callable for an importable name, honouring caching.

        The name may be 'pkg.mod.obj', 'pkg.mod.Class()' (instantiate), or
        'pkg.mod.Class().method' (instantiate, then bind the method).
        """
        self.log.debug("Attempting to import application callable '%s'\n" % (importable_name, ))
        # Under the importable mechanism, the cache contains a single object
        if self.cache is None:
            application, instantiable, method_name = self.load_importable(importable_name.strip())
            if instantiable and self.params['cache_callables']:
                # Cache one shared instance rather than the class.
                application = application()
            self.cache = application, instantiable, method_name
        application, instantiable, method_name = self.cache
        self.log.debug("Application is " + str(application))
        if instantiable and not self.params['cache_callables']:
            # Caching disabled: build a fresh instance per request.
            application = application()
            self.log.debug("Instantiated application is " + str(application))
        if method_name is not None:
            if not hasattr(application, method_name):
                self.log.fatal("Attribute error application callable '%s' as no method '%s'" % (application, method_name))
                self.raise_exc(ApplicationNotFound, "Attribute error application callable '%s' as no method '%s'" % (application, method_name))
            application = getattr(application, method_name)
            self.log.debug("Application method is " + str(application))
        return application

    def load_importable(self, name):
        """Parse and import 'name'; return (object, instantiable, method)."""
        try:
            instantiable = False ; method_name = None
            importable_name = name
            if name.find('()') != -1:
                # 'Class()' or 'Class().method' syntax requests instantiation.
                instantiable = True
                importable_name, method_name = name.split('()')
                if method_name.startswith('.'):
                    method_name = method_name[1:]
                if not method_name:
                    method_name = None
            module_path, from_name = importable_name.rsplit('.', 1)
            imported = __import__(module_path, globals(), locals(), [from_name])
            imported = getattr(imported, from_name)
            return imported, instantiable, method_name
        except (ImportError, AttributeError), aix:
            self.log.fatal("Import error import application callable '%s': %s\n" % (name, str(aix)))
            self.raise_exc(ApplicationNotFound, "Failed to import app callable '%s': %s" % (name, str(aix)))

    def get_app_object_old_style(self, req, environ):
        """File-based lookup: execfile the source and cache the callable.

        When 'reload_on_mod' is set, the source file's mtime is compared on
        every request and the callable reloaded if the file changed.
        """
        source_uri, callable_name = self.map_uri(req, environ)
        source_filename = source_uri
        if not self.params['cache_callables']:
            self.log.debug("Caching of callables disabled")
            return self.load_object(source_filename, callable_name)
        if not self.cache.has_key( (source_filename, callable_name) ):
            self.log.debug("Callable object not in cache: %s#%s" % (source_filename, callable_name) )
            return self.load_object(source_filename, callable_name)
        app_callable, last_mod = self.cache.get( (source_filename, callable_name) )
        self.log.debug("Callable object was in cache: %s#%s" % (source_filename, callable_name) )
        if self.params['reload_on_mod']:
            f = File(source_filename)
            if f.lastModified() > last_mod:
                self.log.info("Source file '%s' has been modified: reloading" % source_filename)
                return self.load_object(source_filename, callable_name)
        return app_callable

    def load_object(self, path, callable_name):
        """Execfile 'path', cache and return the named callable.

        Raises (via raise_exc) ApplicationNotFound when the file is missing
        and NoCallable when the name is absent or loading fails.
        """
        try:
            app_ns = {} ; execfile(path, app_ns)
            app_callable = app_ns[callable_name]
            f = File(path)
            # Record the load-time mtime so reload_on_mod can compare later.
            self.cache[ (path, callable_name) ] = (app_callable, f.lastModified())
            return app_callable
        except IOError, ioe:
            self.raise_exc(ApplicationNotFound, "Application filename not found: %s" % path)
        except KeyError, k:
            self.raise_exc(NoCallable, "No callable named '%s' in %s" % (callable_name, path))
        except Exception, x:
            self.raise_exc(NoCallable, "Error loading jython callable '%s': %s" % (callable_name, str(x)) )
|
zephyrplugins/zephyr
|
zephyr.plugin.jython/jython2.5.2rc3/Lib/modjy/modjy_publish.py
|
Python
|
epl-1.0
| 6,547
|
# Version string of this vendored setuptools distribution.
__version__ = '3.6'
|
cortext/crawtextV2
|
~/venvs/crawler/lib/python2.7/site-packages/setuptools/version.py
|
Python
|
mit
| 20
|
# -*- coding: utf-8 -*-
#
# SelfTest/Signature/test_pkcs1_pss.py: Self-test for PKCS#1 PSS signatures
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
from __future__ import nested_scopes
__revision__ = "$Id$"
import unittest
from Crypto.PublicKey import RSA
from Crypto import Random
from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex
from Crypto.Hash import *
from Crypto.Signature import PKCS1_PSS as PKCS
from Crypto.Util.py3compat import *
def isStr(s):
    """Return 1 if *s* can be concatenated onto a str, else 0.

    Duck-typed on purpose (rather than isinstance) so both str and
    unicode pass under Python 2.
    """
    probe = ''
    try:
        probe += s
    except TypeError:
        return 0
    return 1
def rws(t):
    """Remove white spaces, tabs, and new lines from a string"""
    for whitespace_char in ('\t', '\n', ' '):
        t = t.replace(whitespace_char, '')
    return t
def t2b(t):
    """Convert a text string with bytes in hex form to a byte string"""
    cleaned = b(rws(t))
    if len(cleaned) % 2 != 0:
        raise ValueError("Even number of characters expected")
    return a2b_hex(cleaned)
# Helper class to count how many bytes have been requested
# from the key's private RNG, w/o counting those used for blinding
class MyKey:
    """Wrapper around an RSA key that counts private-RNG byte requests.

    Attribute names are part of this file's test protocol: the tests
    monkey-patch `_randfunc` per instance and read/reset `asked`.
    """
    def __init__(self, key):
        self._key = key
        self.n = key.n
        # Total number of random bytes requested via _randfunc so far.
        self.asked = 0
    def _randfunc(self, N):
        # Record the request size, then delegate to the wrapped key's RNG.
        self.asked += N
        return self._key._randfunc(N)
    def sign(self, m):
        return self._key.sign(m)
    def has_private(self):
        return self._key.has_private()
    def decrypt(self, m):
        return self._key.decrypt(m)
    def verify(self, m, p):
        return self._key.verify(m, p)
    def encrypt(self, m, p):
        return self._key.encrypt(m, p)
class PKCS1_PSS_Tests(unittest.TestCase):
# List of tuples with test data for PKCS#1 PSS
# Each tuple is made up by:
# Item #0: dictionary with RSA key component, or key to import
# Item #1: data to hash and sign
# Item #2: signature of the data #1, done with the key #0,
# and salt #3 after hashing it with #4
# Item #3: salt
# Item #4: hash object generator
_testData = (
#
# From in pss-vect.txt to be found in
# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
#
(
# Private key
{
'n':'''a2 ba 40 ee 07 e3 b2 bd 2f 02 ce 22 7f 36 a1 95
02 44 86 e4 9c 19 cb 41 bb bd fb ba 98 b2 2b 0e
57 7c 2e ea ff a2 0d 88 3a 76 e6 5e 39 4c 69 d4
b3 c0 5a 1e 8f ad da 27 ed b2 a4 2b c0 00 fe 88
8b 9b 32 c2 2d 15 ad d0 cd 76 b3 e7 93 6e 19 95
5b 22 0d d1 7d 4e a9 04 b1 ec 10 2b 2e 4d e7 75
12 22 aa 99 15 10 24 c7 cb 41 cc 5e a2 1d 00 ee
b4 1f 7c 80 08 34 d2 c6 e0 6b ce 3b ce 7e a9 a5''',
'e':'''01 00 01''',
# In the test vector, only p and q were given...
# d is computed offline as e^{-1} mod (p-1)(q-1)
'd':'''50e2c3e38d886110288dfc68a9533e7e12e27d2aa56
d2cdb3fb6efa990bcff29e1d2987fb711962860e7391b1ce01
ebadb9e812d2fbdfaf25df4ae26110a6d7a26f0b810f54875e
17dd5c9fb6d641761245b81e79f8c88f0e55a6dcd5f133abd3
5f8f4ec80adf1bf86277a582894cb6ebcd2162f1c7534f1f49
47b129151b71'''
},
# Data to sign
'''85 9e ef 2f d7 8a ca 00 30 8b dc 47 11 93 bf 55
bf 9d 78 db 8f 8a 67 2b 48 46 34 f3 c9 c2 6e 64
78 ae 10 26 0f e0 dd 8c 08 2e 53 a5 29 3a f2 17
3c d5 0c 6d 5d 35 4f eb f7 8b 26 02 1c 25 c0 27
12 e7 8c d4 69 4c 9f 46 97 77 e4 51 e7 f8 e9 e0
4c d3 73 9c 6b bf ed ae 48 7f b5 56 44 e9 ca 74
ff 77 a5 3c b7 29 80 2f 6e d4 a5 ff a8 ba 15 98
90 fc''',
# Signature
'''8d aa 62 7d 3d e7 59 5d 63 05 6c 7e c6 59 e5 44
06 f1 06 10 12 8b aa e8 21 c8 b2 a0 f3 93 6d 54
dc 3b dc e4 66 89 f6 b7 95 1b b1 8e 84 05 42 76
97 18 d5 71 5d 21 0d 85 ef bb 59 61 92 03 2c 42
be 4c 29 97 2c 85 62 75 eb 6d 5a 45 f0 5f 51 87
6f c6 74 3d ed dd 28 ca ec 9b b3 0e a9 9e 02 c3
48 82 69 60 4f e4 97 f7 4c cd 7c 7f ca 16 71 89
71 23 cb d3 0d ef 5d 54 a2 b5 53 6a d9 0a 74 7e''',
# Salt
'''e3 b5 d5 d0 02 c1 bc e5 0c 2b 65 ef 88 a1 88 d8
3b ce 7e 61''',
# Hash algorithm
SHA
),
#
# Example 1.1 to be found in
# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
#
(
# Private key
{
'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1
56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91
d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3
94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df
d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77
c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1
05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4
ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''',
'e':'''01 00 01''',
'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47
71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8
94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8
c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a
e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27
a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c
31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b
d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25'''
},
# Message
'''cd c8 7d a2 23 d7 86 df 3b 45 e0 bb bc 72 13 26
d1 ee 2a f8 06 cc 31 54 75 cc 6f 0d 9c 66 e1 b6
23 71 d4 5c e2 39 2e 1a c9 28 44 c3 10 10 2f 15
6a 0d 8d 52 c1 f4 c4 0b a3 aa 65 09 57 86 cb 76
97 57 a6 56 3b a9 58 fe d0 bc c9 84 e8 b5 17 a3
d5 f5 15 b2 3b 8a 41 e7 4a a8 67 69 3f 90 df b0
61 a6 e8 6d fa ae e6 44 72 c0 0e 5f 20 94 57 29
cb eb e7 7f 06 ce 78 e0 8f 40 98 fb a4 1f 9d 61
93 c0 31 7e 8b 60 d4 b6 08 4a cb 42 d2 9e 38 08
a3 bc 37 2d 85 e3 31 17 0f cb f7 cc 72 d0 b7 1c
29 66 48 b3 a4 d1 0f 41 62 95 d0 80 7a a6 25 ca
b2 74 4f d9 ea 8f d2 23 c4 25 37 02 98 28 bd 16
be 02 54 6f 13 0f d2 e3 3b 93 6d 26 76 e0 8a ed
1b 73 31 8b 75 0a 01 67 d0''',
# Signature
'''90 74 30 8f b5 98 e9 70 1b 22 94 38 8e 52 f9 71
fa ac 2b 60 a5 14 5a f1 85 df 52 87 b5 ed 28 87
e5 7c e7 fd 44 dc 86 34 e4 07 c8 e0 e4 36 0b c2
26 f3 ec 22 7f 9d 9e 54 63 8e 8d 31 f5 05 12 15
df 6e bb 9c 2f 95 79 aa 77 59 8a 38 f9 14 b5 b9
c1 bd 83 c4 e2 f9 f3 82 a0 d0 aa 35 42 ff ee 65
98 4a 60 1b c6 9e b2 8d eb 27 dc a1 2c 82 c2 d4
c3 f6 6c d5 00 f1 ff 2b 99 4d 8a 4e 30 cb b3 3c''',
# Salt
'''de e9 59 c7 e0 64 11 36 14 20 ff 80 18 5e d5 7f
3e 67 76 af''',
# Hash
SHA
),
#
# Example 1.2 to be found in
# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
#
(
# Private key
{
'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1
56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91
d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3
94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df
d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77
c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1
05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4
ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''',
'e':'''01 00 01''',
'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47
71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8
94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8
c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a
e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27
a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c
31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b
d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25'''
},
# Message
'''85 13 84 cd fe 81 9c 22 ed 6c 4c cb 30 da eb 5c
f0 59 bc 8e 11 66 b7 e3 53 0c 4c 23 3e 2b 5f 8f
71 a1 cc a5 82 d4 3e cc 72 b1 bc a1 6d fc 70 13
22 6b 9e''',
# Signature
'''3e f7 f4 6e 83 1b f9 2b 32 27 41 42 a5 85 ff ce
fb dc a7 b3 2a e9 0d 10 fb 0f 0c 72 99 84 f0 4e
f2 9a 9d f0 78 07 75 ce 43 73 9b 97 83 83 90 db
0a 55 05 e6 3d e9 27 02 8d 9d 29 b2 19 ca 2c 45
17 83 25 58 a5 5d 69 4a 6d 25 b9 da b6 60 03 c4
cc cd 90 78 02 19 3b e5 17 0d 26 14 7d 37 b9 35
90 24 1b e5 1c 25 05 5f 47 ef 62 75 2c fb e2 14
18 fa fe 98 c2 2c 4d 4d 47 72 4f db 56 69 e8 43''',
# Salt
'''ef 28 69 fa 40 c3 46 cb 18 3d ab 3d 7b ff c9 8f
d5 6d f4 2d''',
# Hash
SHA
),
#
# Example 2.1 to be found in
# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
#
(
# Private key
{
'n':'''01 d4 0c 1b cf 97 a6 8a e7 cd bd 8a 7b f3 e3 4f
a1 9d cc a4 ef 75 a4 74 54 37 5f 94 51 4d 88 fe
d0 06 fb 82 9f 84 19 ff 87 d6 31 5d a6 8a 1f f3
a0 93 8e 9a bb 34 64 01 1c 30 3a d9 91 99 cf 0c
7c 7a 8b 47 7d ce 82 9e 88 44 f6 25 b1 15 e5 e9
c4 a5 9c f8 f8 11 3b 68 34 33 6a 2f d2 68 9b 47
2c bb 5e 5c ab e6 74 35 0c 59 b6 c1 7e 17 68 74
fb 42 f8 fc 3d 17 6a 01 7e dc 61 fd 32 6c 4b 33
c9''',
'e':'''01 00 01''',
'd':'''02 7d 14 7e 46 73 05 73 77 fd 1e a2 01 56 57 72
17 6a 7d c3 83 58 d3 76 04 56 85 a2 e7 87 c2 3c
15 57 6b c1 6b 9f 44 44 02 d6 bf c5 d9 8a 3e 88
ea 13 ef 67 c3 53 ec a0 c0 dd ba 92 55 bd 7b 8b
b5 0a 64 4a fd fd 1d d5 16 95 b2 52 d2 2e 73 18
d1 b6 68 7a 1c 10 ff 75 54 5f 3d b0 fe 60 2d 5f
2b 7f 29 4e 36 01 ea b7 b9 d1 ce cd 76 7f 64 69
2e 3e 53 6c a2 84 6c b0 c2 dd 48 6a 39 fa 75 b1'''
},
# Message
'''da ba 03 20 66 26 3f ae db 65 98 48 11 52 78 a5
2c 44 fa a3 a7 6f 37 51 5e d3 36 32 10 72 c4 0a
9d 9b 53 bc 05 01 40 78 ad f5 20 87 51 46 aa e7
0f f0 60 22 6d cb 7b 1f 1f c2 7e 93 60''',
# Signature
'''01 4c 5b a5 33 83 28 cc c6 e7 a9 0b f1 c0 ab 3f
d6 06 ff 47 96 d3 c1 2e 4b 63 9e d9 13 6a 5f ec
6c 16 d8 88 4b dd 99 cf dc 52 14 56 b0 74 2b 73
68 68 cf 90 de 09 9a db 8d 5f fd 1d ef f3 9b a4
00 7a b7 46 ce fd b2 2d 7d f0 e2 25 f5 46 27 dc
65 46 61 31 72 1b 90 af 44 53 63 a8 35 8b 9f 60
76 42 f7 8f ab 0a b0 f4 3b 71 68 d6 4b ae 70 d8
82 78 48 d8 ef 1e 42 1c 57 54 dd f4 2c 25 89 b5
b3''',
# Salt
'''57 bf 16 0b cb 02 bb 1d c7 28 0c f0 45 85 30 b7
d2 83 2f f7''',
SHA
),
#
# Example 8.1 to be found in
# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
#
(
# Private key
{
'n':'''49 53 70 a1 fb 18 54 3c 16 d3 63 1e 31 63 25 5d
f6 2b e6 ee e8 90 d5 f2 55 09 e4 f7 78 a8 ea 6f
bb bc df 85 df f6 4e 0d 97 20 03 ab 36 81 fb ba
6d d4 1f d5 41 82 9b 2e 58 2d e9 f2 a4 a4 e0 a2
d0 90 0b ef 47 53 db 3c ee 0e e0 6c 7d fa e8 b1
d5 3b 59 53 21 8f 9c ce ea 69 5b 08 66 8e de aa
dc ed 94 63 b1 d7 90 d5 eb f2 7e 91 15 b4 6c ad
4d 9a 2b 8e fa b0 56 1b 08 10 34 47 39 ad a0 73
3f''',
'e':'''01 00 01''',
'd':'''6c 66 ff e9 89 80 c3 8f cd ea b5 15 98 98 83 61
65 f4 b4 b8 17 c4 f6 a8 d4 86 ee 4e a9 13 0f e9
b9 09 2b d1 36 d1 84 f9 5f 50 4a 60 7e ac 56 58
46 d2 fd d6 59 7a 89 67 c7 39 6e f9 5a 6e ee bb
45 78 a6 43 96 6d ca 4d 8e e3 de 84 2d e6 32 79
c6 18 15 9c 1a b5 4a 89 43 7b 6a 61 20 e4 93 0a
fb 52 a4 ba 6c ed 8a 49 47 ac 64 b3 0a 34 97 cb
e7 01 c2 d6 26 6d 51 72 19 ad 0e c6 d3 47 db e9'''
},
# Message
'''81 33 2f 4b e6 29 48 41 5e a1 d8 99 79 2e ea cf
6c 6e 1d b1 da 8b e1 3b 5c ea 41 db 2f ed 46 70
92 e1 ff 39 89 14 c7 14 25 97 75 f5 95 f8 54 7f
73 56 92 a5 75 e6 92 3a f7 8f 22 c6 99 7d db 90
fb 6f 72 d7 bb 0d d5 74 4a 31 de cd 3d c3 68 58
49 83 6e d3 4a ec 59 63 04 ad 11 84 3c 4f 88 48
9f 20 97 35 f5 fb 7f da f7 ce c8 ad dc 58 18 16
8f 88 0a cb f4 90 d5 10 05 b7 a8 e8 4e 43 e5 42
87 97 75 71 dd 99 ee a4 b1 61 eb 2d f1 f5 10 8f
12 a4 14 2a 83 32 2e db 05 a7 54 87 a3 43 5c 9a
78 ce 53 ed 93 bc 55 08 57 d7 a9 fb''',
# Signature
'''02 62 ac 25 4b fa 77 f3 c1 ac a2 2c 51 79 f8 f0
40 42 2b 3c 5b af d4 0a 8f 21 cf 0f a5 a6 67 cc
d5 99 3d 42 db af b4 09 c5 20 e2 5f ce 2b 1e e1
e7 16 57 7f 1e fa 17 f3 da 28 05 2f 40 f0 41 9b
23 10 6d 78 45 aa f0 11 25 b6 98 e7 a4 df e9 2d
39 67 bb 00 c4 d0 d3 5b a3 55 2a b9 a8 b3 ee f0
7c 7f ec db c5 42 4a c4 db 1e 20 cb 37 d0 b2 74
47 69 94 0e a9 07 e1 7f bb ca 67 3b 20 52 23 80
c5''',
# Salt
'''1d 65 49 1d 79 c8 64 b3 73 00 9b e6 f6 f2 46 7b
ac 4c 78 fa''',
SHA
)
)
    def testSign1(self):
        """Sign each reference message with its fixed salt and compare the
        result against the known-good signature from the test vectors."""
        for i in range(len(self._testData)):
            # Build the key
            comps = [ long(rws(self._testData[i][0][x]),16) for x in ('n','e','d') ]
            key = MyKey(RSA.construct(comps))
            # Hash function
            h = self._testData[i][4].new()
            # Data to sign
            h.update(t2b(self._testData[i][1]))
            # Salt: force the "RNG" to return the vector's fixed salt so the
            # (normally randomized) PSS signature becomes deterministic.
            test_salt = t2b(self._testData[i][3])
            key._randfunc = lambda N: test_salt
            # The real test
            signer = PKCS.new(key)
            self.failUnless(signer.can_sign())
            s = signer.sign(h)
            self.assertEqual(s, t2b(self._testData[i][2]))
    def testVerify1(self):
        """Verify each reference signature using only the public key
        components (n, e) — a public-only key must not be able to sign."""
        for i in range(len(self._testData)):
            # Build the (public-only) key
            comps = [ long(rws(self._testData[i][0][x]),16) for x in ('n','e') ]
            key = MyKey(RSA.construct(comps))
            # Hash function
            h = self._testData[i][4].new()
            # Data to verify
            h.update(t2b(self._testData[i][1]))
            # Salt (fixed per vector; see testSign1)
            test_salt = t2b(self._testData[i][3])
            # The real test
            key._randfunc = lambda N: test_salt
            verifier = PKCS.new(key)
            self.failIf(verifier.can_sign())
            result = verifier.verify(h, t2b(self._testData[i][2]))
            self.failUnless(result)
    def testSignVerify(self):
        """Round-trip sign/verify with a freshly generated 1024-bit key.

        Exercises: every supported hash module, explicit salt lengths,
        a custom MGF, and the rule that no random bytes are requested
        when the salt length is zero.
        """
        h = SHA.new()
        h.update(b('blah blah blah'))
        rng = Random.new().read
        key = MyKey(RSA.generate(1024,rng))

        # Helper function to monitor how often the custom MGF is invoked.
        global mgfcalls
        def newMGF(seed,maskLen):
            global mgfcalls
            mgfcalls += 1
            return bchr(0x00)*maskLen

        # Verify that PSS is friendly to all supported hash modules.
        for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,RIPEMD):
            h = hashmod.new()
            h.update(b('blah blah blah'))
            # Verify that sign() asks for as many random bytes
            # as the hash output size (MyKey counts them in 'asked').
            key.asked = 0
            signer = PKCS.new(key)
            s = signer.sign(h)
            self.failUnless(signer.verify(h, s))
            self.assertEqual(key.asked, h.digest_size)

        h = SHA.new()
        h.update(b('blah blah blah'))

        # Verify that sign() honors an explicit salt length.
        for sLen in (0,3,21):
            key.asked = 0
            signer = PKCS.new(key, saltLen=sLen)
            s = signer.sign(h)
            self.assertEqual(key.asked, sLen)
            self.failUnless(signer.verify(h, s))

        # Verify that sign() uses the custom MGF exactly once.
        mgfcalls = 0
        signer = PKCS.new(key, newMGF)
        s = signer.sign(h)
        self.assertEqual(mgfcalls, 1)
        self.failUnless(signer.verify(h, s))

        # Verify that sign() does not call the RNG at all
        # when salt length is 0, even when a new MGF is provided.
        key.asked = 0
        mgfcalls = 0
        signer = PKCS.new(key, newMGF, 0)
        s = signer.sign(h)
        self.assertEqual(key.asked,0)
        self.assertEqual(mgfcalls, 1)
        self.failUnless(signer.verify(h, s))
def get_tests(config={}):
    """Return the test cases of this module as a flat list."""
    return list(list_test_cases(PKCS1_PSS_Tests))
if __name__ == '__main__':
    # Allow running this file directly: wrap get_tests() in a suite.
    suite = lambda: unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')

# vim:set ts=4 sw=4 sts=4
|
ktan2020/legacy-automation
|
win/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py
|
Python
|
mit
| 20,598
|
from __future__ import print_function
import unittest
from scrapy.http import Request
from scrapy.utils.request import request_fingerprint, _fingerprint_cache, \
request_authenticate, request_httprepr
class UtilsRequestTest(unittest.TestCase):
    """Tests for the helpers in scrapy.utils.request."""

    def test_request_fingerprint(self):
        # Query-argument order must not affect the fingerprint.
        r1 = Request("http://www.example.com/query?id=111&cat=222")
        r2 = Request("http://www.example.com/query?cat=222&id=111")
        self.assertEqual(request_fingerprint(r1), request_fingerprint(r1))
        self.assertEqual(request_fingerprint(r1), request_fingerprint(r2))

        # Different query values must produce different fingerprints.
        r1 = Request('http://www.example.com/hnnoticiaj1.aspx?78132,199')
        r2 = Request('http://www.example.com/hnnoticiaj1.aspx?78160,199')
        self.assertNotEqual(request_fingerprint(r1), request_fingerprint(r2))

        # make sure caching is working
        self.assertEqual(request_fingerprint(r1), _fingerprint_cache[r1][None])

        # Headers are ignored by default.
        r1 = Request("http://www.example.com/members/offers.html")
        r2 = Request("http://www.example.com/members/offers.html")
        r2.headers['SESSIONID'] = b"somehash"
        self.assertEqual(request_fingerprint(r1), request_fingerprint(r2))

        r1 = Request("http://www.example.com/")
        r2 = Request("http://www.example.com/")
        r2.headers['Accept-Language'] = b'en'
        r3 = Request("http://www.example.com/")
        r3.headers['Accept-Language'] = b'en'
        r3.headers['SESSIONID'] = b"somehash"
        # NOTE(review): the third argument below is unittest's *msg*
        # parameter, so r3's fingerprint is never actually compared —
        # confirm whether a second assertion against r3 was intended.
        self.assertEqual(request_fingerprint(r1), request_fingerprint(r2), request_fingerprint(r3))

        # Opting headers in via include_headers changes the result.
        self.assertEqual(request_fingerprint(r1),
                         request_fingerprint(r1, include_headers=['Accept-Language']))
        self.assertNotEqual(request_fingerprint(r1),
                            request_fingerprint(r2, include_headers=['Accept-Language']))
        # Header names are case-insensitive and order-insensitive.
        self.assertEqual(request_fingerprint(r3, include_headers=['accept-language', 'sessionid']),
                         request_fingerprint(r3, include_headers=['SESSIONID', 'Accept-Language']))

        # HTTP method and body both participate in the fingerprint.
        r1 = Request("http://www.example.com")
        r2 = Request("http://www.example.com", method='POST')
        r3 = Request("http://www.example.com", method='POST', body=b'request body')
        self.assertNotEqual(request_fingerprint(r1), request_fingerprint(r2))
        self.assertNotEqual(request_fingerprint(r2), request_fingerprint(r3))

        # cached fingerprint must be cleared on request copy
        r1 = Request("http://www.example.com")
        fp1 = request_fingerprint(r1)
        r2 = r1.replace(url="http://www.example.com/other")
        fp2 = request_fingerprint(r2)
        self.assertNotEqual(fp1, fp2)

    def test_request_authenticate(self):
        # Adds a Basic auth header built from user:pass, base64-encoded.
        r = Request("http://www.example.com")
        request_authenticate(r, 'someuser', 'somepass')
        self.assertEqual(r.headers['Authorization'], b'Basic c29tZXVzZXI6c29tZXBhc3M=')

    def test_request_httprepr(self):
        # Raw HTTP/1.1 request representation, including Host header.
        r1 = Request("http://www.example.com")
        self.assertEqual(request_httprepr(r1), b'GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n')

        r1 = Request("http://www.example.com/some/page.html?arg=1")
        self.assertEqual(request_httprepr(r1), b'GET /some/page.html?arg=1 HTTP/1.1\r\nHost: www.example.com\r\n\r\n')

        r1 = Request("http://www.example.com", method='POST', headers={"Content-type": b"text/html"}, body=b"Some body")
        self.assertEqual(request_httprepr(r1), b'POST / HTTP/1.1\r\nHost: www.example.com\r\nContent-Type: text/html\r\n\r\nSome body')

    def test_request_httprepr_for_non_http_request(self):
        # the representation is not important but it must not fail.
        request_httprepr(Request("file:///tmp/foo.txt"))
        request_httprepr(Request("ftp://localhost/tmp/foo.txt"))
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
|
agreen/scrapy
|
tests/test_utils_request.py
|
Python
|
bsd-3-clause
| 3,836
|
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from time import time
from datetime import datetime
from google.appengine.ext import db
class WarningLog(db.Model):
    """App Engine datastore record of a warning raised during queue processing."""
    # Creation timestamp; filled in automatically on the first put().
    date = db.DateTimeProperty(auto_now_add=True)
    # Short machine-readable event name.
    event = db.StringProperty()
    # Optional human-readable details.
    message = db.StringProperty()
    # Optional references tying the warning to an attachment, queue, or bot.
    attachment_id = db.IntegerProperty()
    queue_name = db.StringProperty()
    bot_id = db.StringProperty()

    @classmethod
    def record(cls, event, message=None, attachment_id=None, queue_name=None, bot_id=None):
        """Create, persist, and return a WarningLog entity for *event*."""
        entity = cls(event=event, message=message, queue_name=queue_name, bot_id=bot_id, attachment_id=attachment_id)
        entity.put()
        return entity
|
klim-iv/phantomjs-qt5
|
src/webkit/Tools/QueueStatusServer/model/warninglog.py
|
Python
|
bsd-3-clause
| 2,147
|
#!/usr/bin/env python
#
# Copyright 2011 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains logic for sorting goog.provide and goog.require statements.
Closurized JavaScript files use goog.provide and goog.require statements at the
top of the file to manage dependencies. These statements should be sorted
alphabetically, however, it is common for them to be accompanied by inline
comments or suppression annotations. In order to sort these statements without
disrupting their comments and annotations, the association between statements
and comments/annotations must be maintained while sorting.
RequireProvideSorter: Handles checking/fixing of provide/require statements.
"""
from closure_linter import javascripttokens
from closure_linter import tokenutil
# Shorthand
Type = javascripttokens.JavaScriptTokenType
class RequireProvideSorter(object):
    """Checks for and fixes alphabetization of provide and require statements.

    When alphabetizing, comments on the same line or comments directly above a
    goog.provide or goog.require statement are associated with that statement
    and stay with the statement as it gets sorted.
    """

    def CheckProvides(self, token):
        """Checks alphabetization of goog.provide statements.

        Iterates over tokens in given token stream, identifies goog.provide
        tokens, and checks that they occur in alphabetical order by the object
        being provided.

        Args:
          token: A token in the token stream before any goog.provide tokens.

        Returns:
          A tuple containing the first provide token in the token stream and a
          list of provided objects sorted alphabetically. For example:

          (JavaScriptToken, ['object.a', 'object.b', ...])

          None is returned if all goog.provide statements are already sorted.
        """
        provide_tokens = self._GetRequireOrProvideTokens(token, 'goog.provide')
        provide_strings = self._GetRequireOrProvideTokenStrings(provide_tokens)
        sorted_provide_strings = sorted(provide_strings)
        if provide_strings != sorted_provide_strings:
            # BUGFIX: return a tuple, as documented above and as CheckRequires
            # does, instead of a two-element list.
            return (provide_tokens[0], sorted_provide_strings)
        return None

    def CheckRequires(self, token):
        """Checks alphabetization of goog.require statements.

        Iterates over tokens in given token stream, identifies goog.require
        tokens, and checks that they occur in alphabetical order by the
        dependency being required.

        Args:
          token: A token in the token stream before any goog.require tokens.

        Returns:
          A tuple containing the first require token in the token stream and a
          list of required dependencies sorted alphabetically. For example:

          (JavaScriptToken, ['object.a', 'object.b', ...])

          None is returned if all goog.require statements are already sorted.
        """
        require_tokens = self._GetRequireOrProvideTokens(token, 'goog.require')
        require_strings = self._GetRequireOrProvideTokenStrings(require_tokens)
        sorted_require_strings = sorted(require_strings)
        if require_strings != sorted_require_strings:
            return (require_tokens[0], sorted_require_strings)
        return None

    def FixProvides(self, token):
        """Sorts goog.provide statements in the given token stream alphabetically.

        Args:
          token: The first token in the token stream.
        """
        self._FixProvidesOrRequires(
            self._GetRequireOrProvideTokens(token, 'goog.provide'))

    def FixRequires(self, token):
        """Sorts goog.require statements in the given token stream alphabetically.

        Args:
          token: The first token in the token stream.
        """
        self._FixProvidesOrRequires(
            self._GetRequireOrProvideTokens(token, 'goog.require'))

    def _FixProvidesOrRequires(self, tokens):
        """Sorts goog.provide or goog.require statements.

        Args:
          tokens: A list of goog.provide or goog.require tokens in the order
            they appear in the token stream. i.e. the first token in this list
            must be the first goog.provide or goog.require token.
        """
        strings = self._GetRequireOrProvideTokenStrings(tokens)
        sorted_strings = sorted(strings)

        # Make a separate pass to remove any blank lines between goog.require/
        # goog.provide tokens.
        first_token = tokens[0]
        last_token = tokens[-1]
        i = last_token
        while i != first_token:
            if i.type is Type.BLANK_LINE:
                tokenutil.DeleteToken(i)
            i = i.previous

        # A map from required/provided object name to tokens that make up the
        # line it was on, including any comments immediately before it or after
        # it on the same line.
        tokens_map = self._GetTokensMap(tokens)

        # Iterate over the map removing all tokens.
        for name in tokens_map:
            tokens_to_delete = tokens_map[name]
            for i in tokens_to_delete:
                tokenutil.DeleteToken(i)

        # Re-add all tokens in the map in alphabetical order.
        insert_after = tokens[0].previous
        for string in sorted_strings:
            for i in tokens_map[string]:
                tokenutil.InsertTokenAfter(i, insert_after)
                insert_after = i

    def _GetRequireOrProvideTokens(self, token, token_string):
        """Gets all goog.provide or goog.require tokens in the token stream.

        Args:
          token: The first token in the token stream.
          token_string: One of 'goog.provide' or 'goog.require' to indicate
            which tokens to find.

        Returns:
          A list of goog.provide or goog.require tokens in the order they
          appear in the token stream.
        """
        tokens = []
        while token:
            if token.type == Type.IDENTIFIER:
                if token.string == token_string:
                    tokens.append(token)
                elif token.string not in ['goog.require', 'goog.provide']:
                    # The goog.provide and goog.require identifiers are at the
                    # top of the file. So if any other identifier is
                    # encountered, return.
                    break
            token = token.next
        return tokens

    def _GetRequireOrProvideTokenStrings(self, tokens):
        """Gets a list of strings corresponding to the given list of tokens.

        The string will be the next string in the token stream after each token
        in tokens. This is used to find the object being provided/required by a
        given goog.provide or goog.require token.

        Args:
          tokens: A list of goog.provide or goog.require tokens.

        Returns:
          A list of object names that are being provided or required by the
          given list of tokens. For example:

          ['object.a', 'object.c', 'object.b']
        """
        token_strings = []
        for token in tokens:
            name = tokenutil.Search(token, Type.STRING_TEXT).string
            token_strings.append(name)
        return token_strings

    def _GetTokensMap(self, tokens):
        """Gets a map from object name to tokens associated with that object.

        Starting from the goog.provide/goog.require token, searches backwards
        in the token stream for any lines that start with a comment. These
        lines are associated with the goog.provide/goog.require token. Also
        associates any tokens on the same line as the goog.provide/goog.require
        token with that token.

        Args:
          tokens: A list of goog.provide or goog.require tokens.

        Returns:
          A dictionary that maps object names to the tokens associated with the
          goog.provide or goog.require of that object name. For example:

          {
            'object.a': [JavaScriptToken, JavaScriptToken, ...],
            'object.b': [...]
          }

          The list of tokens includes any comment lines above the goog.provide
          or goog.require statement and everything after the statement on the
          same line. For example, all of the following would be associated with
          'object.a':

          /** @suppress {extraRequire} */
          goog.require('object.a'); // Some comment.
        """
        tokens_map = {}
        for token in tokens:
            object_name = tokenutil.Search(token, Type.STRING_TEXT).string
            # If the previous line starts with a comment, presume that the
            # comment relates to the goog.require or goog.provide and keep them
            # together when sorting.
            first_token = token
            previous_first_token = tokenutil.GetFirstTokenInPreviousLine(first_token)
            while previous_first_token.IsAnyType(Type.COMMENT_TYPES):
                first_token = previous_first_token
                previous_first_token = tokenutil.GetFirstTokenInPreviousLine(
                    first_token)

            # Find the last token on the line.
            last_token = tokenutil.GetLastTokenInSameLine(token)

            all_tokens = self._GetTokenList(first_token, last_token)
            tokens_map[object_name] = all_tokens
        return tokens_map

    def _GetTokenList(self, first_token, last_token):
        """Gets a list of all tokens from first_token to last_token, inclusive.

        Args:
          first_token: The first token to get.
          last_token: The last token to get.

        Returns:
          A list of all tokens between first_token and last_token, including
          both first_token and last_token.

        Raises:
          Exception: If the token stream ends before last_token is reached.
        """
        token_list = []
        token = first_token
        while token != last_token:
            if not token:
                raise Exception('ran out of tokens')
            token_list.append(token)
            token = token.next
        token_list.append(last_token)
        return token_list
|
dstockwell/catapult
|
tracing/third_party/closure_linter/closure_linter/requireprovidesorter.py
|
Python
|
bsd-3-clause
| 9,826
|
"""Copyright (c) 2010-2012 David Rio Vierra
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE."""
#-# Modified by D.C.-G. for translation purpose
#!# Tests for file chooser
"""
mcplatform.py
Platform-specific functions, folder paths, and the whole fixed/portable nonsense.
"""
import logging
log = logging.getLogger(__name__)
import directories
import os
from os.path import dirname, exists, join
import sys
import platform
enc = sys.getfilesystemencoding()
hasXlibDisplay = False
if sys.platform == "win32":
if platform.architecture()[0] == "32bit":
plat = "win32"
if platform.architecture()[0] == "64bit":
plat = "win-amd64"
sys.path.append(join(directories.getDataDir(), "pymclevel", "build", "lib." + plat + "-2.6").encode(enc))
elif sys.platform in ['linux2', 'darwin']:
try:
import Xlib.display
import Xlib.X
import Xlib.protocol
hasXlibDisplay = True
except ImportError:
hasXlibDisplay = None
os.environ["YAML_ROOT"] = join(directories.getDataDir(), "pymclevel").encode(enc)
from pygame import display
from albow import request_new_filename, request_old_filename
from albow.translate import _
from pymclevel import minecraftSaveFileDir, getMinecraftProfileDirectory, getSelectedProfile
from datetime import datetime
import re
import subprocess
try:
import pygtk
pygtk.require('2.0')
import gtk
if gtk.pygtk_version < (2,3,90):
raise ImportError
hasGtk = True
except ImportError:
hasGtk = False #Using old method as fallback
# Folder where the selected Minecraft profile stores its texture packs.
texturePacksDir = os.path.join(getMinecraftProfileDirectory(getSelectedProfile()), "texturepacks")

#Compatibility layer for filters:
filtersDir = directories.filtersDir
schematicsDir = directories.schematicsDir

#!# Disabling platform specific file chooser:
#!# Please, don't touch these two lines and the 'platChooser' stuff. -- D.C.-G.
# platChooser = sys.platform in ('linux2', 'darwin')
# True when the OS-native file chooser should be used (currently OS X only).
platChooser = sys.platform == 'darwin'
def getTexturePacks():
    """Return the file names inside the texture packs folder.

    Texture packs are optional, so a missing or unreadable folder yields an
    empty list instead of propagating the error.  The original bare
    ``except:`` also swallowed SystemExit/KeyboardInterrupt; only OS-level
    errors are caught now.
    """
    try:
        return os.listdir(texturePacksDir)
    except (OSError, IOError):
        return []
# for k,v in os.environ.iteritems():
# try:
# os.environ[k] = v.decode(sys.getfilesystemencoding())
# except:
# continue
if sys.platform == "win32":
try:
from win32 import win32gui
from win32 import win32api
from win32.lib import win32con
except ImportError:
import win32gui
import win32api
import win32con
try:
import win32com.client
from win32com.shell import shell, shellcon # @UnresolvedImport
except:
pass
try:
import pywintypes
except:
pass
# UI labels for the platform's primary keyboard modifiers.
_on_mac = sys.platform == 'darwin'
cmd_name = "Cmd" if _on_mac else "Ctrl"
option_name = "Opt" if _on_mac else "Alt"
def OSXVersionChecker(name,compare):
    """Compare the running OS X version against the named release.

    name: release codename ('cheetah' ... 'yosemite').
    compare: one of 'gt', 'lt', 'eq', 'gteq', 'lteq'.
    Returns True if the comparison holds; False for invalid arguments,
    non-darwin platforms, or on any detection error.
    """
    if compare != 'gt' and compare != 'lt' and compare != 'eq' and compare != 'gteq' and compare != 'lteq':
        print("Invalid version check {}".format(compare))
        return False
    if sys.platform == 'darwin':
        try:
            systemVersion = platform.mac_ver()[0].split('.')
            if len(systemVersion) == 2:
                systemVersion.append('0')

            # Map the codename to its 10.minor.patch version triple.
            major, minor, patch = 10, 0, 0

            if name.lower() == 'cheetah':
                minor = 0
                patch = 4
            elif name.lower() == 'puma':
                minor = 1
                patch = 5
            elif name.lower() == 'jaguar':
                minor = 2
                patch = 8
            elif name.lower() == 'panther':
                minor = 3
                patch = 9
            elif name.lower() == 'tiger':
                minor = 4
                patch = 11
            # BUGFIX: 10.5 is Leopard; the original listed 'snow_leopard'
            # twice, making Leopard impossible to match.
            elif name.lower() == 'leopard':
                minor = 5
                patch = 8
            elif name.lower() == 'snow_leopard':
                minor = 6
                patch = 8
            elif name.lower() == 'lion':
                minor = 7
                patch = 5
            elif name.lower() == 'mountain_lion':
                minor = 8
                patch = 5
            elif name.lower() == 'mavericks':
                minor = 9
                patch = 5
            elif name.lower() == 'yosemite':
                minor = 10
                patch = 0
            else:
                # Unknown codename: force the comparison to fail sensibly.
                major = 0

            # Lexicographic-by-component comparison: -1 / 0 / 1.
            if int(systemVersion[0]) > int(major):
                ret_val = 1
            elif int(systemVersion[0]) < int(major):
                ret_val = -1
            else:
                if int(systemVersion[1]) > int(minor):
                    ret_val = 1
                elif int(systemVersion[1]) < int(minor):
                    ret_val = -1
                else:
                    if int(systemVersion[2]) > int(patch):
                        ret_val = 1
                    elif int(systemVersion[2]) < int(patch):
                        ret_val = -1
                    else:
                        ret_val = 0

            if ret_val == 0 and (compare == 'eq' or compare == 'gteq' or compare == 'lteq'):
                return True
            elif ret_val == -1 and (compare == 'lt' or compare == 'lteq'):
                return True
            elif ret_val == 1 and (compare == 'gt' or compare == 'gteq'):
                return True
        except Exception:
            print("An error occured determining the system version")
            return False
    else:
        return False
# Remember the directories the user last browsed, per file kind.
lastSchematicsDir = None
lastSaveDir = None


def askOpenFile(title='Select a Minecraft level....', schematics=False, suffixes=None):
    """Show a platform-appropriate "open file" dialog and return the path.

    Tries the native Win32 dialog, then GTK, then the internal albow
    chooser.  Remembers the chosen file's directory for the next call.
    Returns None if the user cancels.
    """
    global lastSchematicsDir, lastSaveDir

    if not suffixes:
        suffixes = ["mclevel", "dat", "mine", "mine.gz"]
        suffixesChanged = False
    else:
        suffixesChanged = True

    initialDir = lastSaveDir or minecraftSaveFileDir
    if schematics:
        initialDir = lastSchematicsDir or directories.schematicsDir

    def _askOpen(_suffixes):
        # Schematic dialogs accept additional archive/BO formats.
        if schematics:
            _suffixes.append("schematic")
            _suffixes.append("schematic.gz")
            _suffixes.append("zip")
            _suffixes.append("inv")
            # BO support
            _suffixes.append("bo2")
            _suffixes.append("bo3")
        if sys.platform == "win32": #!#
            # Only forward the suffix list if the caller supplied one.
            if suffixesChanged:
                sendSuffixes = _suffixes
            else:
                sendSuffixes = None
            return askOpenFileWin32(title, schematics, initialDir, sendSuffixes)
        elif hasGtk and not platChooser: #!# #Linux (When GTK 2.4 or newer is installed)
            return askOpenFileGtk(title, _suffixes, initialDir)
        else:
            log.debug("Calling internal file chooser.")
            log.debug("'initialDir' is %s (%s)"%(repr(initialDir), type(initialDir)))
            # Best-effort encode of the directory for the internal chooser.
            try:
                iDir = initialDir.encode(enc)
            except Exception, e:
                iDir = initialDir
                log.debug("Could not encode 'initialDir' %s"%repr(initialDir))
                log.debug("Encode function returned: %s"%e)
            return request_old_filename(suffixes=_suffixes, directory=iDir)

    filename = _askOpen(suffixes)
    if filename:
        # Remember the folder for the next dialog of the same kind.
        if schematics:
            lastSchematicsDir = dirname(filename)
        else:
            lastSaveDir = dirname(filename)
    return filename
def askOpenFileWin32(title, schematics, initialDir, suffixes=None):
    """Native Win32 "open file" dialog; returns the path, or None on cancel."""
    try:
        # if schematics:
        if not suffixes:
            f = ('Levels and Schematics\0*.mclevel;*.dat;*.mine;*.mine.gz;*.schematic;*.zip;*.schematic.gz;*.inv\0' +
                 '*.*\0*.*\0\0')
        else:
            # Build a Win32 filter string ("desc\0patterns\0...\0\0").
            f = "All\0"
            for suffix in suffixes:
                f += "*." + suffix + ";"
            f += "\0*.*\0\0"
        # else:
        #    f = ('Levels (*.mclevel, *.dat;*.mine;*.mine.gz;)\0' +
        #         '*.mclevel;*.dat;*.mine;*.mine.gz;*.zip;*.lvl\0' +
        #         '*.*\0*.*\0\0')

        (filename, customfilter, flags) = win32gui.GetOpenFileNameW(
            hwndOwner=display.get_wm_info()['window'],
            InitialDir=initialDir,
            Flags=(win32con.OFN_EXPLORER
                   | win32con.OFN_NOCHANGEDIR
                   | win32con.OFN_FILEMUSTEXIST
                   | win32con.OFN_LONGNAMES
                   # |win32con.OFN_EXTENSIONDIFFERENT
                   ),
            Title=title,
            Filter=f,
        )
    except Exception:
        # NOTE(review): cancelling the dialog raises, so all errors are
        # deliberately swallowed and None is returned, same as a cancel.
        #print "Open File: ", e
        pass
    else:
        return filename
def askOpenFileGtk(title, suffixes, initialDir):
    """GTK "open file" dialog; returns the path, or None on cancel."""
    fls = []
    def run_dlg():
        chooser = gtk.FileChooserDialog(title,
                                        None, gtk.FILE_CHOOSER_ACTION_SAVE,
                                        (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                                         gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        chooser.set_default_response(gtk.RESPONSE_OK)
        chooser.set_current_folder(initialDir)
        chooser.set_current_name("world") #For some reason the window isn't closing if this line is missing or the parameter is ""

        #Add custom Filter
        filter = gtk.FileFilter()
        filter.set_name("Levels and Schematics")
        for suffix in suffixes:
            filter.add_pattern("*."+suffix)
        chooser.add_filter(filter)

        #Add "All files" Filter
        filter = gtk.FileFilter()
        filter.set_name("All files")
        filter.add_pattern("*")
        chooser.add_filter(filter)

        response = chooser.run()
        if response == gtk.RESPONSE_OK:
            fls.append(chooser.get_filename())
        else:
            fls.append(None)
        chooser.destroy()
        gtk.main_quit()

    # The dialog must run inside GTK's own main loop; run_dlg stores the
    # result into 'fls' before quitting the loop.
    gtk.idle_add(run_dlg)
    gtk.main()
    return fls[0]
def askSaveSchematic(initialDir, displayName, fileFormat):
    """Prompt for a location to save a schematic; returns the chosen path."""
    stamp = datetime.now().strftime("%Y-%m-%d--%H-%M-%S")
    suggested = displayName + "_" + stamp + "." + fileFormat
    pattern = _('Minecraft Schematics (*.{0})\0*.{0}\0\0').format(fileFormat)
    return askSaveFile(initialDir,
                       title=_('Save this schematic...'),
                       defaultName=suggested,
                       filetype=pattern,
                       suffix=fileFormat)
def askCreateWorld(initialDir):
    """Ask the user to name a new world, suggesting a non-clashing default."""
    base = _("Untitled World")
    candidate = base
    counter = 0
    # Bump the suffix until the suggested folder name is free.
    while exists(join(initialDir, candidate)):
        counter += 1
        candidate = base + " " + str(counter)
    return askSaveFile(initialDir,
                       title=_('Name this new world.'),
                       defaultName=candidate,
                       filetype=_('Minecraft World\0*.*\0\0'),
                       suffix="")
def askSaveFile(initialDir, title, defaultName, filetype, suffix):
if sys.platform == "win32": #!#
try:
(filename, customfilter, flags) = win32gui.GetSaveFileNameW(
hwndOwner=display.get_wm_info()['window'],
InitialDir=initialDir,
Flags=win32con.OFN_EXPLORER | win32con.OFN_NOCHANGEDIR | win32con.OFN_OVERWRITEPROMPT,
File=defaultName,
DefExt=suffix,
Title=title,
Filter=filetype,
)
except Exception, e:
print "Error getting file name: ", e
return
try:
filename = filename[:filename.index('\0')]
filename = filename.decode(sys.getfilesystemencoding())
except:
pass
elif hasGtk and not platChooser: #!# #Linux (When GTK 2.4 or newer is installed)
fls = []
def run_dlg():
chooser = gtk.FileChooserDialog(title,
None, gtk.FILE_CHOOSER_ACTION_SAVE,
(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
gtk.STOCK_SAVE, gtk.RESPONSE_OK))
chooser.set_default_response(gtk.RESPONSE_OK)
chooser.set_current_folder(initialDir)
chooser.set_current_name(defaultName)
#Add custom Filter
filter = gtk.FileFilter()
filter.set_name(filetype[:filetype.index("\0")])
filter.add_pattern("*." + suffix)
chooser.add_filter(filter)
#Add "All files" Filter
filter = gtk.FileFilter()
filter.set_name("All files")
filter.add_pattern("*")
chooser.add_filter(filter)
response = chooser.run()
if response == gtk.RESPONSE_OK:
fls.append(chooser.get_filename())
else:
fls.append(None)
chooser.destroy()
gtk.main_quit()
gtk.idle_add(run_dlg)
gtk.main()
filename = fls[0]
else: #Fallback
log.debug("Calling internal file chooser.")
log.debug("'initialDir' is %s (%s)"%(repr(initialDir), type(initialDir)))
log.debug("'defaultName' is %s (%s)"%(repr(defaultName), type(defaultName)))
try:
iDir = initialDir.encode(enc)
except:
iDir = initialDir
log.debug("Could not encode 'initialDir' %s"%repr(initialDir))
log.debug("Encode function returned: %s"%e)
try:
dName = defaultName.encode(enc)
except:
dName = defaultName
log.debug("Could not encode 'defaultName' %s"%repr(defaultName))
log.debug("Encode function returned: %s"%e)
filename = request_new_filename(prompt=title,
suffix=("." + suffix) if suffix else "",
directory=iDir,
filename=dName,
pathname=None)
return filename
# Start Open Folder Dialogs
# TODO: Possibly get an OS X dialog
def askOpenFolderWin32(title, initialDir):
    """Native Win32 folder browser; returns the folder path, or None on cancel."""
    try:
        desktop_pidl = shell.SHGetFolderLocation(0, shellcon.CSIDL_DESKTOP, 0, 0)
        pidl, display_name, image_list = shell.SHBrowseForFolder (
            win32gui.GetDesktopWindow(),
            desktop_pidl,
            "Choose a folder",
            0,
            None,
            None
        )
        return shell.SHGetPathFromIDList(pidl)
    except pywintypes.com_error as e:
        # This COM error code is raised when the user cancels / selects
        # nothing; it is swallowed and None is returned implicitly.
        if e.args[0] == -2147467259:
            print "Invalid folder selected"
        pass
def askOpenFolderGtk(title, initialDir):
    """GTK folder browser; returns the folder path, or None on cancel."""
    if hasGtk:
        fls = []
        def run_dlg():
            chooser = gtk.FileChooserDialog(title,
                                            None, gtk.FILE_CHOOSER_ACTION_SAVE,
                                            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                                             gtk.STOCK_OPEN, gtk.RESPONSE_OK))
            chooser.set_default_response(gtk.RESPONSE_OK)
            chooser.set_current_folder(initialDir)
            chooser.set_current_name("world")
            # Switch the dialog into folder-selection mode.
            chooser.set_action(gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
            response = chooser.run()
            if response == gtk.RESPONSE_OK:
                fls.append(chooser.get_filename()) # Returns the folder path if gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER is the action
            else:
                fls.append(None)
            chooser.destroy()
            gtk.main_quit()

        gtk.idle_add(run_dlg)
        gtk.main()
        return fls[0]
    else:
        # No fallback exists for folder selection without GTK.
        print "You currently need gtk to use an Open Folder Dialog!"
# End Open Folder Dialogs
# if sys.platform == "win32":
# try:
#
# (filename, customfilter, flags) = win32gui.GetSaveFileNameW(
# hwndOwner = display.get_wm_info()['window'],
# # InitialDir=minecraftSaveFileDir,
# Flags=win32con.OFN_EXPLORER | win32con.OFN_NOCHANGEDIR | win32con.OFN_OVERWRITEPROMPT,
# File=initialDir + os.sep + displayName,
# DefExt=fileFormat,
# Title=,
# Filter=,
# )
# except Exception, e:
# print "Error getting filename: {0!r}".format(e)
# return
#
# elif sys.platform == "darwin" and AppKit is not None:
# sp = AppKit.NSSavePanel.savePanel()
# sp.setDirectory_(initialDir)
# sp.setAllowedFileTypes_([fileFormat])
# # sp.setFilename_(self.editor.level.displayName)
#
# if sp.runModal() == 0:
# return; # pressed cancel
#
# filename = sp.filename()
# AppKit.NSApp.mainWindow().makeKeyWindow()
#
# else:
#
# filename = request_new_filename(prompt = "Save this schematic...",
# suffix = ".{0}".format(fileFormat),
# directory = initialDir,
# filename = displayName,
# pathname = None)
#
# return filename
def platform_open(path):
    """Open *path* with the OS's default application (Explorer/Finder/xdg).

    Errors are reported to stdout and otherwise ignored.
    """
    try:
        if sys.platform == "win32":
            os.startfile(path)
            # os.system('start ' + path + '\'')
        elif sys.platform == "darwin":
            # os.startfile(path)
            os.system('open "' + path + '"')
        else:
            os.system('xdg-open "' + path + '"')
    except Exception, e:
        print "platform_open failed on {0}: {1}".format(sys.platform, e)
win32_window_size = True

#=============================================================================
#=============================================================================
# DESKTOP ENVIRONMENTS AND OS SIDE WINDOW MANAGEMENT.
#
# The idea is to have a single object to deal with the underlying OS specific
# window management interface.
# This will help to save/restore MCEdit window state, position and size.
#
# TODO:
# * Test on the actually unsupported Linux DEs.
# * Review WWindowHandler class for Windows.
# * Create a DWindowHandler class for Darwin (OSX).

# Window states used by the window handlers below.
MINIMIZED = 0
NORMAL = 1
MAXIMIZED = 2
FULLSCREEN = 3
#=============================================================================
# Linux desktop environment detection.
#
# source: http://stackoverflow.com/questions/2035657/what-is-my-current-desktop-environment (http://stackoverflow.com/a/21213358)
#
# Tweaked, of course ;)
#
def get_desktop_environment():
    """Best-effort detection of the current desktop environment.

    Returns 'windows', 'mac', a lowercase DE name ('kde', 'gnome2',
    'xfce4', 'unity', 'lxde', ...) or 'unknown'.
    """
    #From http://stackoverflow.com/questions/2035657/what-is-my-current-desktop-environment
    # and http://ubuntuforums.org/showthread.php?t=652320
    # and http://ubuntuforums.org/showthread.php?t=1139057
    if sys.platform in ["win32", "cygwin"]:
        return "windows"
    elif sys.platform == "darwin":
        return "mac"
    else: #Most likely either a POSIX system or something not much common
        ds = os.environ.get("DESKTOP_SESSION", None)
        if ds in ('default', None):
            ds = os.environ.get("XDG_CURRENT_DESKTOP", None)
        if ds is not None: #easier to match if we don't have to deal with character cases
            desktop_session = ds.lower()
            found = re.findall(r"gnome|unity|cinnamon|mate|xfce4|lxde|fluxbox|blackbox|openbox|icewm|jwm|afterstep|trinity|kde", desktop_session, re.I)
            if len(found) == 1:
                return found[0]
            elif len(found) > 1:
                # Ambiguous session string; report and give up.
                print "Houston? We have a problem...\n\nThe desktop environment can't be found: '%s' has been detected to be %s alltogeteher."%(ds, " and ".join((", ".join(found[:-1]), found[-1])))
                return 'unknown'
            ## Special cases ##
            # Canonical sets $DESKTOP_SESSION to Lubuntu rather than LXDE if using LXDE.
            # There is no guarantee that they will not do the same with the other desktop environments.
            elif "xfce" in desktop_session or desktop_session.startswith("xubuntu"):
                return "xfce4"
            elif desktop_session.startswith("ubuntu"):
                return "unity"
            elif desktop_session.startswith("lubuntu"):
                return "lxde"
            elif desktop_session.startswith("kubuntu"):
                return "kde"
            elif desktop_session.startswith("razor"): # e.g. razorkwin
                return "razor-qt"
            elif desktop_session.startswith("wmaker"): # e.g. wmaker-common
                return "windowmaker"
        # Environment-variable and running-process fallbacks.
        if os.environ.get('KDE_FULL_SESSION', None) == 'true':
            return "kde"
        elif os.environ.get('GNOME_DESKTOP_SESSION_ID', None):
            if not "deprecated" in os.environ.get('GNOME_DESKTOP_SESSION_ID', None):
                return "gnome2"
        #From http://ubuntuforums.org/showthread.php?t=652320
        elif is_running("xfce-mcs-manage"):
            return "xfce4"
        elif is_running("ksmserver"):
            return "kde"
    return "unknown"
def is_running(process):
    """Return True if a line of the system process listing matches *process*.

    :process: a regular expression tested against each line of ``ps axw``
        (POSIX) or ``tasklist /v`` (Windows) output.

    Fixes over the original:
    - the bare ``except:`` (which swallowed even KeyboardInterrupt) is now a
      targeted ``OSError`` — the error actually raised when ``ps`` is missing;
    - the pipe output is decoded when it arrives as bytes, so the regex
      search also works on Python 3, where ``Popen`` pipes yield bytes.
    """
    #From http://www.bloggerpolis.com/2011/05/how-to-check-if-a-process-is-running-using-python/
    # and http://richarddingwall.name/2009/06/18/windows-equivalents-of-ps-and-kill-commands/
    try: #Linux/Unix
        output = subprocess.Popen(["ps", "axw"], stdout=subprocess.PIPE).communicate()[0]
    except OSError: #Windows: no "ps" executable available
        output = subprocess.Popen(["tasklist", "/v"], stdout=subprocess.PIPE).communicate()[0]
    if not isinstance(output, str):
        # Python 3 pipes return bytes; decode leniently before regex matching.
        output = output.decode("utf-8", "replace")
    for line in output.splitlines():
        if re.search(process, line):
            return True
    return False
#=============================================================================
# Window handling.
# Desktop environment detected once at import time; drives the handler tables below.
desktop_environment = get_desktop_environment()
# Verbose tracing of the window-manager interactions (set via '--debug-wm' style switches).
DEBUG_WM = False
# Master switch for the whole window-management layer.
USE_WM = True
# Desktops settings
# Each entry in the platform sub-dictionaries represent which object is used to get/set the window metrics.
#
# For Linux:
# Valid entries are: position_gap, position_getter, position_setter, size_getter, size_setter and state.
# Entries can be omitted; default values will be used.
# For position_gap the default is (0, 0, False, False)
# For the other ones, the default is the Pygame window object.
#
# position_gap is used on some environment to restore the windows at the coords it was formerly.
# The two first values of the tuple are the amount of pixels to add to the window x and y coords.
# The two last ones tell whether these pixels shall be added only once (at program startup) or always.
#
# The dotted string values (e.g. 'parent.parent') are attribute paths walked up
# the X window tree to find the decorated frame window for the given operation.
desktops = {'linux2': {
    'cinnamon': { # Actually, there's a bug when resizing on XCinnamon.
        'position_setter': 'parent',
        'position_getter': 'parent.parent',
        'position_gap': (9, 8, True, True),
        'state': 'parent'
    },
    'gnome': {
        'position_setter': 'parent',
        'position_getter': 'parent.parent',
        'size_setter': 'parent',
        'size_getter': 'parent',
        'state': 'parent'
    },
    'kde': {
        'position_setter': 'parent',
        'position_getter': 'parent.parent.parent',
        'state': 'parent'
    },
    'unity': {
        'position_setter': 'parent',
        'position_getter': 'parent.parent.parent',
        'position_gap': (10, 10, False, False),
        'size_setter': 'parent',
        'state': 'parent'
    }
},
# 'win32': {},
# 'darwin': {}
}
# The environments in the next definition need to be tested.
# (Name kept as-is — "unsuported" [sic] — because setupWindowHandler refers to it.)
linux_unsuported = ('afterstep',
                    'blackbox',
                    'fluxbox',
                    'gnome2',
                    'icewm',
                    'jwm',
                    'lxde',
                    'mate',
                    'openbox',
                    'razor-qt',
                    'trinity',
                    'windowmaker',
                    'xfce4')
# Window handlers classes
# Window handlers classes
class BaseWindowHandler:
    """Abstract class for the platform specific window handlers.
    If initialized, this class casts a NotImplementedError.

    Subclasses (XWindowHandler, WWindowHandler) must implement every
    get_*/set_* method; flush()/sync() are no-ops by default.
    """
    # Desktop environment snapshot shared by all handlers.
    desk_env = desktop_environment

    def __init__(self, *args, **kwargs):
        """Store the pygame display mode; reject positional-only construction."""
        if not len(kwargs):
            raise NotImplementedError, "Abstract class."
        self.mode = kwargs['mode']

    def set_mode(self, size, mode):
        """Wrapper for pygame.display.set_mode()."""
        display.set_mode(size, mode)

    def get_root_rect(self):
        """Return the (x, y, w, h) rect of the root/desktop window. Abstract."""
        raise NotImplementedError, "Abstract method."

    def get_size(self):
        """Return the window size as (width, height). Abstract."""
        raise NotImplementedError, "Abstract method."

    def set_size(self, size, update=True):
        """Resize the window. Abstract."""
        raise NotImplementedError, "Abstract method."

    def get_position(self):
        """Return the window position as (x, y). Abstract."""
        raise NotImplementedError, "Abstract method."

    def set_position(self, pos, update=True):
        """Move the window. Abstract."""
        raise NotImplementedError, "Abstract method."

    def get_state(self):
        """Return one of NORMAL/MINIMIZED/MAXIMIZED/FULLSCREEN. Abstract."""
        raise NotImplementedError, "Abstract method."

    def set_state(self, state=NORMAL, size=(-1, -1), pos=(-1, -1), update=True):
        """Change the window state. Abstract."""
        raise NotImplementedError, "Abstract method."

    def flush(self):
        """Just does nothing..."""
        return

    def sync(self):
        """Just does nothing..."""
        return
class XWindowHandler(BaseWindowHandler):
"""Object to deal with XWindow managers (Linux)."""
desk_env = desktop_environment
def __init__(self, pos=(0, 0), size=(0, 0), mode=None):
"""Set up the internal handlers."""
BaseWindowHandler.__init__(self, pos=pos, size=size, mode=mode)
self.mode = mode
# setup the internal data, especially the Xlib object we need.
# Tests
if DEBUG_WM:
print "#" * 72
print "XWindowHandler.__init__"
print "Desktop environment:", desktop_environment
dis = self.display = Xlib.display.Display()
pygame_win = dis.create_resource_object('window', display.get_wm_info()['window'])
pygame_win_id = pygame_win.id
if DEBUG_WM:
root = dis.screen().root
active_wid_id = root.get_full_property(dis.intern_atom('_NET_ACTIVE_WINDOW'), Xlib.X.AnyPropertyType).value[0]
active_win = dis.create_resource_object('window', active_wid_id)
# Print pygame_win and active_win styff
for (win, name) in ((pygame_win, 'pygame_win'), (active_win, 'active_win')):
print "=" * 72
print "%s guts"%name, "(ID %s)"%win.id
print "-" * 72
print "* State"
prop = win.get_full_property(dis.intern_atom("_NET_WM_STATE"), 4)
print " ", prop
if prop:
print dir(prop)
print "* Geometry"
print " ", win.get_geometry()
parent = win.query_tree().parent
p = '%s.parent'%name
while parent.id != root.id:
print "-" * 72
print p, "ID", parent.id
print "* State"
prop = parent.get_full_property(dis.intern_atom("_NET_WM_STATE"), 4)
print " ", prop
if prop:
print dir(prop)
print "* Geometry"
print " ", parent.get_geometry()
parent = parent.query_tree().parent
p += ".parent"
# Size handlers
self.base_handler = pygame_win
self.base_handler_id = pygame_win.id
size = desktops['linux2'][self.desk_env].get('size_getter', None)
if size:
if DEBUG_WM:
print "size_getter.split('.')", size.split('.')
handler = pygame_win
for item in size.split('.'):
handler = getattr(handler.query_tree(), item)
self.sizeGetter = handler
else:
self.sizeGetter = pygame_win
size = desktops['linux2'][self.desk_env].get('size_setter', None)
if size:
if DEBUG_WM:
print "size_setter.split('.')", size.split('.')
handler = pygame_win
for item in size.split('.'):
handler = getattr(handler.query_tree(), item)
self.sizeSetter = handler
else:
self.sizeSetter = pygame_win
# Position handlers
pos = desktops['linux2'][self.desk_env].get('position_getter', None)
if pos:
if DEBUG_WM:
print "pos_getter.split('.')", pos.split('.')
handler = pygame_win
for item in pos.split('.'):
handler = getattr(handler.query_tree(), item)
self.positionGetter = handler
else:
self.positionGetter = pygame_win
pos = desktops['linux2'][self.desk_env].get('position_setter', None)
if pos:
if DEBUG_WM:
print "pos_setter.split('.')", pos.split('.')
handler = pygame_win
for item in pos.split('.'):
handler = getattr(handler.query_tree(), item)
self.positionSetter = handler
else:
self.positionSetter = pygame_win
# Position gap. Used to correct wrong positions on some environments.
self.position_gap = desktops['linux2'][self.desk_env].get('position_gap', (0, 0, False, False))
self.starting = True
self.gx, self.gy = 0, 0
# State handler
state = desktops['linux2'][self.desk_env].get('state', None)
if state:
if DEBUG_WM:
print "state.split('.')", state.split('.')
handler = pygame_win
for item in state.split('.'):
handler = getattr(handler.query_tree(), item)
self.stateHandler = handler
else:
self.stateHandler = pygame_win
if DEBUG_WM:
print "self.positionGetter:", self.positionGetter, 'ID:', self.positionGetter.id
print "self.positionSetter:", self.positionSetter, 'ID:', self.positionSetter.id
print "self.sizeGetter:", self.sizeGetter, 'ID:', self.sizeGetter.id
print "self.sizeSetter:", self.sizeSetter, 'ID:', self.sizeSetter.id
print "self.stateHandler:", self.stateHandler, 'ID:', self.stateHandler.id
print self.stateHandler.get_wm_state()
def get_root_rect(self):
"""Return a four values tuple containing the position and size of the very first OS window object."""
geom = self.display.screen().root.get_geometry()
return (geom.x, geom.y, geom.width, geom.height)
def get_size(self):
"""Return the window actual size as a tuple (width, height)."""
geom = self.sizeGetter.get_geometry()
if DEBUG_WM:
print "Actual size is", geom.width, geom.height
return (geom.width, geom.height)
def set_size(self, size, update=True):
"""Set the window size.
:size: list or tuple: the new size.
Raises a TypeError if something else than a list or a tuple is sent."""
if type(size) in (list, tuple):
# Call the Xlib object handling the size to update it.
if DEBUG_WM:
print "Setting size to", size
print "actual size", self.get_size()
self.sizeSetter.configure(width=size[0], height=size[1])
if update:
self.sync()
else:
# Raise a Type error.
raise TypeError, "%s is not a list or a tuple."%size
def get_position(self):
"""Return the window actual position as a tuple."""
geom = self.positionGetter.get_geometry()
x, y = geom.x, geom.y
# if DEBUG_WM:
# print "Actual position is", x, y
return (x, y)
def set_position(self, pos, update=True):
"""Set the window position.
:pos: list or tuple: the new position (x, y).
:update: bool: wheteher to call the internal sync method."""
if DEBUG_WM:
print "Setting position to", pos
if type(pos) in (list, tuple):
gx, gy = 0 or self.gx, 0 or self.gy
if self.starting:
gx, gy = self.position_gap[:2]
if self.position_gap[2]:
self.gx = gx
if self.position_gap[3]:
self.gy = gy
self.starting = False
# Call the Xlib object handling the position to update it.
self.positionSetter.configure(x=pos[0] + gx, y=pos[1] + gy)
if update:
self.sync()
else:
# Raise a Type error.
raise TypeError, "%s is not a list or a tuple."%pos
def get_state(self):
"""Return wheter the window is maximized or not, or minimized or full screen."""
state = self.stateHandler.get_full_property(self.display.intern_atom("_NET_WM_STATE"), 4)
# if DEBUG_WM:
# print "state_1.value", state.value
# print "max vert", self.display.intern_atom("_NET_WM_STATE_MAXIMIZED_VERT") ,self.display.intern_atom("_NET_WM_STATE_MAXIMIZED_VERT") in state.value
# print "max horz", self.display.intern_atom("_NET_WM_STATE_MAXIMIZED_HORZ"), self.display.intern_atom("_NET_WM_STATE_MAXIMIZED_HORZ") in state.value
if self.display.intern_atom("_NET_WM_STATE_MAXIMIZED_HORZ") in state.value and self.display.intern_atom("_NET_WM_STATE_MAXIMIZED_VERT") in state.value:
# if DEBUG_WM:
# print MAXIMIZED
return MAXIMIZED
elif self.display.intern_atom("_NET_WM_STATE_HIDEN") in state.value:
# if DEBUG_WM:
# print MINIMIZED
return MINIMIZED
elif self.display.intern_atom("_NET_WM_STATE_FULLSCREEN") in state.value:
# if DEBUG_WM:
# print FULLSCREEN
return FULLSCREEN
# if DEBUG_WM:
# print NORMAL
return NORMAL
def set_state(self, state=NORMAL, size=(-1, -1), pos=(-1, -1), update=True):
"""Set wheter the window is maximized or not, or minimized or full screen.
If no argument is given, assume the state will be windowed and not maximized.
If arguments are given, only the first is relevant. The other ones are ignored.
** Only maximized and normal states are implemented for now. **
:state: valid arguments:
'minimized', MINIMIZED, 0.
'normal', NORMAL, 1: windowed, not maximized.
'maximized', MAXIMIZED, 2.
'fullscreen, FULLSCREEN, 3.
:size: list, tuple: the new size; if (-1, -1) self.get_size() is used.
If one element is -1 it is replaced by the corresponding valur from self.get_size().
:pos: list, tuple: the new position; if (-1, -1), self.get_position is used.
If one element is -1 it is replaced by the corresponding valur from self.get_position().
:update: bool: whether to call the internal flush method."""
if state not in (0, MINIMIZED, 'minimized',1, NORMAL, 'normal', 2, MAXIMIZED, 'maximized', 3, FULLSCREEN, 'fullscreen'):
# Raise a value error.
raise ValueError, "Invalid state argument: %s is not a correct value"%state
if type(size) not in (list, tuple):
raise TypeError, "Invalid size argument: %s is not a list or a tuple."
if type(pos) not in (list, tuple):
raise TypeError, "Invalid pos argument: %s is not a list or a tuple."
if state in (1, NORMAL, 'normal'):
size = list(size)
sz = self.get_size()
if size[0] == -1:
size[0] = sz[0]
if size[1] == -1:
size[1] = sz[1]
pos = list(pos)
ps = self.get_position()
if pos[0] == -1:
pos[0] = ps[0]
if pos[1] == -1:
pos[1] = ps[1]
self.set_mode(size, self.mode)
self.set_position(pos)
elif state in (0, MINIMIZED, 'minimized'):
pass
elif state in (2, MAXIMIZED, 'maximized'):
data = [1, self.display.intern_atom("_NET_WM_STATE_MAXIMIZED_VERT", False), self.display.intern_atom("_NET_WM_STATE_MAXIMIZED_HORZ", False)]
data = (data + ([0] * (5 - len(data))))[:5]
if DEBUG_WM:
print self.stateHandler.get_wm_state()
print "creating event", Xlib.protocol.event.ClientMessage
print dir(self.stateHandler)
x_event = Xlib.protocol.event.ClientMessage(window=self.stateHandler, client_type=self.display.intern_atom("_NET_WM_STATE", False), data=(32, (data)))
if DEBUG_WM:
print "sending event"
self.display.screen().root.send_event(x_event, event_mask=Xlib.X.SubstructureRedirectMask)
if DEBUG_WM:
print self.stateHandler.get_wm_state()
elif state in (3, FULLSCREEN, 'fullscreen'):
pass
if update:
self.flush()
def flush(self):
"""Wrapper around Xlib.Display.flush()"""
if DEBUG_WM:
print "* flushing display"
self.display.flush()
def sync(self):
"""Wrapper around Xlib.Display.sync()"""
if DEBUG_WM:
print "* syncing display"
self.display.sync()
#=======================================================================
# WARNING: This class has been built on Linux using wine.
# Please review this code and change it consequently before using it without '--debug-wm' switch!
class WWindowHandler(BaseWindowHandler):
    """Object to deal with Microsoft Window managers.

    Uses win32gui.GetWindowPlacement/SetWindowPlacement on the SDL window
    handle exposed by pygame.  Built and tested under wine — review before
    trusting it on native Windows.
    """
    desk_env = desktop_environment

    def __init__(self, pos=(0, 0), size=(0, 0), mode=None):
        """Set up the internal handlers."""
        BaseWindowHandler.__init__(self, pos=pos, size=size, mode=mode)
        if DEBUG_WM:
            print "#" * 72
            print "WWindowHandler.__init__"
            print "Desktop environment:", desktop_environment
            # Dump the win32con constants related to window states.
            for item in dir(win32con):
                if 'maxim' in item.lower() or 'minim' in item.lower() or 'full' in item.lower():
                    print item, getattr(win32con, item)
        self.base_handler = display
        self.base_handler_id = display.get_wm_info()['window']
        # platform.dist() returns ('', '', '') on non-Linux, i.e. native Windows.
        # NOTE(review): these nested set_mode functions are defined but never
        # bound to the instance (no assignment, no types.MethodType), so the
        # inherited BaseWindowHandler.set_mode is what actually runs — confirm
        # whether binding them was the original intent.
        if platform.dist() == ('', '', ''):
            # We're running on a native Windows.
            def set_mode(self, size, mode):
                """Wrapper for pygame.display.set_mode()."""
                # Windows pygame implementation seem to work on the display mode and size on it's own...
                return
        else:
            # We're running on wine.
            def set_mode(self, size, mode):
                """Wrapper for pygame.display.set_mode()."""
                if getattr(self, 'wine_state_fix', False):
                    self.set_size(size)
                    self.wine_state_fix = True
                else:
                    self.wine_state_fix = False

    def get_root_rect(self):
        """Return a four values tuple containing the position and size of the very first OS window object."""
        flags, showCmd, ptMin, ptMax, rect = win32gui.GetWindowPlacement(win32gui.GetDesktopWindow())
        return rect

    def get_size(self):
        """Return the window actual size as a tuple (width, height)."""
        flags, showCmd, ptMin, ptMax, rect = win32gui.GetWindowPlacement(self.base_handler_id)
        # rect is (left, top, right, bottom).
        w = rect[2] - rect[0]
        h = rect[3] - rect[1]
        return (w, h)

    def set_size(self, size, update=True):
        """Set the window size.
        :size: list or tuple: the new size.
        :update: bool: unused here; kept for interface symmetry.
        Raises a TypeError if something else than a list or a tuple is sent."""
        if type(size) in (list, tuple):
            w, h = size
            cx, cy = win32gui.GetCursorPos()
            if DEBUG_WM:
                print "Settin size to", size
                print "actual size", self.get_size()
                print "actual position", self.get_position()
                print 'cursor pos', cx, cy
            flags, showCmd, ptMin, ptMax, rect = win32gui.GetWindowPlacement(self.base_handler_id)
            if DEBUG_WM:
                print "set_size rect", rect, "ptMin", ptMin, "ptMax", ptMax, "flags", flags
            # Keep the current top-left corner, stretch to the new size.
            x = rect[0]
            y = rect[1]
            rect = (x, y, x + w, y + h)
            win32gui.SetWindowPlacement(self.base_handler_id, (0, showCmd, ptMin, ptMax, rect))
        else:
            raise TypeError, "%s is not a list or a tuple."%repr(size)

    def get_position(self):
        """Return the window actual position as a tuple."""
        (flags, showCmd, ptMin, ptMax, rect) = win32gui.GetWindowPlacement(self.base_handler_id)
        x, y, r, b = rect
        return (x, y)

    def set_position(self, pos, update=True):
        """Set the window position.
        :pos: list or tuple: the new position (x, y).
        :update: bool: when False the placement call is skipped entirely."""
        if DEBUG_WM:
            print "Setting position to", pos
        if type(pos) in (list, tuple):
            self.first_pos = False
            x, y = pos
            if update:
                flags, showCmd, ptMin, ptMax, rect = win32gui.GetWindowPlacement(self.base_handler_id)
                if DEBUG_WM:
                    print "set_position rect", rect, "ptMin", ptMin, "ptMax", ptMax
                # Preserve the current size while moving the top-left corner.
                realW = rect[2] - rect[0]
                realH = rect[3] - rect[1]
                if DEBUG_WM:
                    print 'rect[0]', rect[0], 'rect[1]', rect[1]
                    print 'realW', realW, 'realH', realH
                    print 'cursor pos', win32gui.GetCursorPos()
                rect = (x, y, x + realW, y + realH)
                win32gui.SetWindowPlacement(self.base_handler_id, (0, showCmd, ptMin, ptMax, rect))
        else:
            raise TypeError, "%s is not a list or a tuple."%repr(pos)

    def get_state(self):
        """Return whether the window is maximized or not, or minimized or full screen."""
        flags, state, ptMin, ptMax, rect = win32gui.GetWindowPlacement(self.base_handler_id)
        if DEBUG_WM:
            print "state", state
        if state == win32con.SW_MAXIMIZE:
            return MAXIMIZED
        elif state == win32con.SW_MINIMIZE:
            return MINIMIZED
        return NORMAL

    def set_state(self, state=NORMAL, size=(-1, -1), pos=(-1, -1), update=True):
        """Set whether the window is maximized or not, or minimized or full screen.
        If no argument is given, assume the state will be windowed and not maximized.
        If arguments are given, only the first is relevant. The other ones are ignored.
        ** Only maximized and normal states are implemented for now. **
        :state: valid arguments:
            'minimized', MINIMIZED, 0.
            'normal', NORMAL, 1: windowed, not maximized.
            'maximized', MAXIMIZED, 2.
            'fullscreen, FULLSCREEN, 3.
        :size: list, tuple: the new size; if (-1, -1) self.get_size() is used.
            If one element is -1 it is replaced by the corresponding valur from self.get_size().
        :pos: list, tuple: the new position; if (-1, -1), self.get_position is used.
            If one element is -1 it is replaced by the corresponding valur from self.get_position().
        :update: bool: whether to call the internal flush method."""
        if state not in (0, MINIMIZED, 'minimized',1, NORMAL, 'normal', 2, MAXIMIZED, 'maximized', 3, FULLSCREEN, 'fullscreen'):
            # Raise a value error.
            raise ValueError, "Invalid state argument: %s is not a correct value"%state
        if type(size) not in (list, tuple):
            raise TypeError, "Invalid size argument: %s is not a list or a tuple."
        if type(pos) not in (list, tuple):
            raise TypeError, "Invalid pos argument: %s is not a list or a tuple."
        if state in (1, NORMAL, 'normal'):
            # Fill in -1 placeholders from the current geometry, then restore.
            size = list(size)
            sz = self.get_size()
            if size[0] == -1:
                size[0] = sz[0]
            if size[1] == -1:
                size[1] = sz[1]
            pos = list(pos)
            ps = self.get_position()
            if pos[0] == -1:
                pos[0] = ps[0]
            if pos[1] == -1:
                pos[1] = ps[1]
            self.set_mode(size, self.mode)
            self.set_position(pos)
        elif state in (0, MINIMIZED, 'minimized'):
            pass
        elif state in (2, MAXIMIZED, 'maximized'):
            win32gui.ShowWindow(self.base_handler_id, win32con.SW_MAXIMIZE)
        elif state in (3, FULLSCREEN, 'fullscreen'):
            pass
# The handler class selected for this platform; None until setupWindowHandler runs
# (and stays None on unsupported environments).
WindowHandler = None

def setupWindowHandler():
    """'Link' the corresponding window handler class to WindowHandler.

    Returns the selected class (or None).  Only chooses the class; the
    instance is created later, once the MCEdit display objects exist.
    """
    # Don't initialize the window handler here.
    # We need MCEdit display objects to get the right object.
    global WindowHandler
    if USE_WM:
        log.warn("Initializing window management...")
        if sys.platform == 'linux2':
            if XWindowHandler.desk_env == 'unknown':
                log.warning("Your desktop environment could not be determined. The support for window sizing/moving is not availble.")
            elif XWindowHandler.desk_env in linux_unsuported:
                log.warning("Your desktop environment is not yet supported for window sizing/moving.")
            else:
                WindowHandler = XWindowHandler
                log.info("XWindowHandler initialized.")
        elif sys.platform == 'win32':
            WindowHandler = WWindowHandler
            log.info("WWindowHandler initialized.")
    return WindowHandler
# setupWindowHandler()
|
skinny121/MCEdit-TallWorlds
|
mcplatform.py
|
Python
|
isc
| 47,038
|
import random
from animation import *
from loader import *
import player
import pygame
from pygame.locals import *
from pygame.sprite import *
# List of explosion animations still to be drawn ("lista de explosoes a desenhar").
explosoes = []
# Active pickup animations (weapon and health) still to be drawn.
weapon_pickups = []
health_pickups = []
# Set to True when the quad-damage power-up is grabbed.
toasty = False
def asteroids(player1, player2, playerSprite1, playerSprite2, asteroidField):
    """Resolve ship-vs-asteroid collisions for both players.

    Colliding asteroids are removed from the field, an explosion animation
    and sound are queued, and the hit player loses a life.
    Returns 1 when a killed player had no lives left (game over), else 0.
    """
    explosion_images = load_sliced_sprites(100, 90, 'kaboom.png')
    # groupcollide(..., 0, 1): keep the ship sprite, destroy the asteroid.
    for hit in pygame.sprite.groupcollide(playerSprite1, asteroidField.asteroidSprites, 0, 1):
        __explosion()
        explosoes.append(AnimatedSprite(explosion_images, player1.coordinates[0], player1.coordinates[1]))
        if player1.kill_player():
            return 1
    # shiptype 0 means player 2 is not in the game.
    if player2.shiptype != 0:
        for hit in pygame.sprite.groupcollide(playerSprite2, asteroidField.asteroidSprites, 0, 1):
            __explosion()
            explosoes.append(AnimatedSprite(explosion_images, player2.coordinates[0], player2.coordinates[1]))
            if player2.kill_player():
                return 1
    return 0
def lasers(player1, player2, playerSprite1, playerSprite2, asteroidField):
    """Resolve all laser collisions: lasers vs ships, vs asteroids, vs lasers.

    Updates hit/score statistics (statistics[2] = shots landed,
    statistics[3] = score), applies weapon damage, and queues explosion
    animations/sounds.  Returns 1 when a killed player ends the game, else 0.
    """
    explosion_images = load_sliced_sprites(20, 20, 'explosion-sprite.png')
    # Player 1 lasers vs player 2's ship — only if that player exists.
    if player2.shiptype != 0:
        for hit in pygame.sprite.groupcollide(player1.laserSprite, playerSprite2, 1, 0):
            __explosion()
            explosoes.append(AnimatedSprite(explosion_images, hit.impactpoint[0], hit.impactpoint[1]))
            hit.kill()
            # One more landed shot, +3 score.
            player1.statistics[2] += 1
            player1.statistics[3] += 3
            player2.set_hitpoints(player1.weapon_damage[player1.curweapon] * player1.damage_multiplier)
            if(player2.get_hitpoints() <= 0):
                if player2.kill_player():
                    return 1
    # Player 2 lasers vs player 1's ship (empty laser group when absent).
    for hit in pygame.sprite.groupcollide(player2.laserSprite, playerSprite1, 1, 0):
        __explosion()
        explosoes.append(AnimatedSprite(explosion_images, hit.impactpoint[0], hit.impactpoint[1]))
        hit.kill()
        player2.statistics[2] += 1
        player2.statistics[3] += 3
        player1.set_hitpoints(player2.weapon_damage[player2.curweapon] * player2.damage_multiplier)
        if(player1.get_hitpoints() <= 0):
            if player1.kill_player() == 1:
                return 1
    # Player 1 lasers vs asteroids: +5 score per hit, asteroid may shatter.
    for hit in pygame.sprite.groupcollide(asteroidField.asteroidSprites, player1.laserSprite, 0, 1):
        __explosion()
        explosoes.append(AnimatedSprite(explosion_images, hit.rect.centerx, hit.rect.centery))
        player1.statistics[2] += 1
        player1.statistics[3] += 5
        hit.hitpoints += (player1.weapon_damage[player1.curweapon] * player1.damage_multiplier)
        ret = hit.kill_asteroid()
        if ret == 1:
            hit.kill()
    # Player 2 lasers vs asteroids.
    for hit in pygame.sprite.groupcollide(asteroidField.asteroidSprites, player2.laserSprite, 0, 1):
        __explosion()
        explosoes.append(AnimatedSprite(explosion_images, hit.rect.centerx, hit.rect.centery))
        player2.statistics[2] += 1
        player2.statistics[3] += 5
        hit.hitpoints += (player2.weapon_damage[player2.curweapon] * player2.damage_multiplier)
        ret = hit.kill_asteroid()
        if ret == 1:
            hit.kill()
    # Laser vs laser: player 2's laser is destroyed, player 1 gets the hit.
    for hit in pygame.sprite.groupcollide(player1.laserSprite, player2.laserSprite, 0, 1):
        __explosion()
        explosoes.append(AnimatedSprite(explosion_images, hit.rect.centerx, hit.rect.centery))
        player1.statistics[2] += 1
        hit.kill()
    return 0
def pickup_powerup(powerup, powerupSprite, player, playerSprite, tipo):
    """Handle a player's ship colliding with a power-up sprite.

    :powerup: the power-up object (rect, healfactor/ammo/damagefactor fields).
    :powerupSprite: sprite group holding the power-up; it is consumed on pickup.
    :player: the player object to credit.
    :playerSprite: sprite group holding the player's ship.
    :tipo: power-up type: 1 = health, 2 = weapon/ammo, 3 = mines.
    Returns 1 when a power-up was picked up, 0 otherwise.
    """
    # Bugfix: 'toasty' was assigned as a function local, so the module-level
    # flag was never actually set when quad damage was picked up.
    global toasty
    for hit in pygame.sprite.groupcollide(powerupSprite, playerSprite, 1, 0):
        if tipo == 1:
            pickup_images_health = load_sliced_sprites(61, 57, 'health_pickup2.png')
            health_pickups.append(AnimatedSprite(pickup_images_health, powerup.rect.centerx - 30, powerup.rect.centery - 28))
            __pickup_shield()
            # Heal a fraction of the maximum hitpoints, capped at the maximum.
            player.hitpoints += (player.maxhitpoints) * powerup.healfactor
            if player.hitpoints > player.maxhitpoints:
                player.hitpoints = player.maxhitpoints
            return 1
        if tipo == 2:
            pickup_images_weapons = load_sliced_sprites(61, 57, 'weapon_pickup.png')
            weapon_pickups.append(AnimatedSprite(pickup_images_weapons, powerup.rect.centerx - 30, powerup.rect.centery - 28))
            player.ammo += powerup.ammo
            player.damage_multiplier = powerup.damagefactor
            if powerup.damagefactor == 4:
                # Quad damage: play the easter-egg jingle and start the timer.
                quad_damage = load_sound('toasty.wav')
                quad_damage.set_volume(1)
                quad_damage.play()
                toasty = True
                player.powerup_time = 1
            __pickup_weapon()
            return 1
        if tipo == 3:
            pickup_images_weapons = load_sliced_sprites(61, 57, 'health_pickup.png')
            weapon_pickups.append(AnimatedSprite(pickup_images_weapons, powerup.rect.centerx - 30, powerup.rect.centery - 28))
            player.mines_ammo += 5
            __pickup_weapon()
            return 1
        # Unknown type: the sprite was already consumed, but report no pickup.
        return 0
    # Bugfix: no collision used to fall through and implicitly return None;
    # callers compare against 1/0, so return 0 explicitly.
    return 0
def __explosion():
    """Play one of the four explosion sound effects, chosen at random."""
    index = random.randrange(1, 5)
    sound = load_sound("explosion" + str(index) + ".wav")
    # Volume tweak kept disabled, as in the original tuning:
    # sound.set_volume(0.2)
    sound.play()
def __pickup_shield():
    """Play the shield/health power-up pickup sound at 60% volume."""
    effect = load_sound('powerupshield.wav')
    effect.set_volume(0.6)
    effect.play()
def __pickup_weapon():
    """Play the weapon power-up pickup sound at 60% volume."""
    effect = load_sound('powerupweapon.wav')
    effect.set_volume(0.6)
    effect.play()
|
borgaster/SpaceWarsEvolved
|
physics.py
|
Python
|
mit
| 6,670
|
#!/usr/bin/env python3
# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the bumpfee RPC.
Verifies that the bumpfee RPC creates replacement transactions successfully when
its preconditions are met, and returns appropriate errors in other cases.
This module consists of around a dozen individual test cases implemented in the
top-level functions named as test_<test_case_description>. The test functions
can be disabled or reordered if needed for debugging. If new test cases are
added in the future, they should try to follow the same convention and not
make assumptions about execution order.
"""
from decimal import Decimal
import io
from test_framework.blocktools import add_witness_commitment, create_block, create_coinbase, send_to_witness
from test_framework.messages import BIP125_SEQUENCE_NUMBER, CTransaction
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
hex_str_to_bytes,
)
from test_framework import util
# Passphrase used to encrypt node 1's wallet (exercised by test_locked_wallet_fails).
WALLET_PASSPHRASE = "test"
WALLET_PASSPHRASE_TIMEOUT = 3600

# Fee rates (sat/vB)
INSUFFICIENT = 1      # below the required oldFee + incrementalFee threshold
ECONOMICAL = 50
NORMAL = 100
HIGH = 500
TOO_HIGH = 100000     # exceeds the default -maxtxfee
class BumpFeeTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        # Node 0 gets -walletrbf=0, node 1 gets -walletrbf=1.
        self.extra_args = [[
            "-walletrbf={}".format(i),
            "-mintxfee=0.00002",
            "-addresstype=bech32",
        ] for i in range(self.num_nodes)]

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def clear_mempool(self):
        # Clear mempool between subtests. The subtests may only depend on chainstate (utxos)
        self.nodes[1].generate(1)
        self.sync_all()

    def run_test(self):
        # Elements-specific: node used for fast Merkle root computation helpers.
        util.node_fastmerkle = self.nodes[0]
        # Encrypt wallet for test_locked_wallet_fails test
        self.nodes[1].encryptwallet(WALLET_PASSPHRASE)
        self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
        peer_node, rbf_node = self.nodes
        rbf_node_address = rbf_node.getnewaddress()
        # fund rbf node with 10 coins of 0.001 btc (100,000 satoshis)
        # NOTE(review): the loop below actually sends 25 outputs of 0.001
        # (balance check expects 0.025) — the comment above is stale.
        self.log.info("Mining blocks...")
        peer_node.generate(110)
        self.sync_all()
        for _ in range(25):
            peer_node.sendtoaddress(rbf_node_address, 0.001)
        self.sync_all()
        peer_node.generate(1)
        self.sync_all()
        assert_equal(rbf_node.getbalance()['bitcoin'], Decimal("0.025"))
        self.log.info("Running tests")
        dest_address = peer_node.getnewaddress()
        for mode in ["default", "fee_rate"]:
            test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address)
        self.test_invalid_parameters(rbf_node, peer_node, dest_address)
        test_segwit_bumpfee_succeeds(self, rbf_node, dest_address)
        test_nonrbf_bumpfee_fails(self, peer_node, dest_address)
        test_notmine_bumpfee_fails(self, rbf_node, peer_node, dest_address)
        test_bumpfee_with_descendant_fails(self, rbf_node, rbf_node_address, dest_address)
        test_dust_to_fee(self, rbf_node, dest_address)
        test_watchonly_psbt(self, peer_node, rbf_node, dest_address)
        test_rebumping(self, rbf_node, dest_address)
        test_rebumping_not_replaceable(self, rbf_node, dest_address)
        test_unconfirmed_not_spendable(self, rbf_node, rbf_node_address)
        test_bumpfee_metadata(self, rbf_node, dest_address)
        test_locked_wallet_fails(self, rbf_node, dest_address)
        test_change_script_match(self, rbf_node, dest_address)
        test_settxfee(self, rbf_node, dest_address)
        test_maxtxfee_fails(self, rbf_node, dest_address)
        # These tests wipe out a number of utxos that are expected in other tests
        test_small_output_with_feerate_succeeds(self, rbf_node, dest_address)
        test_no_more_inputs_fails(self, rbf_node, dest_address)

    def test_invalid_parameters(self, rbf_node, peer_node, dest_address):
        self.log.info('Test invalid parameters')
        rbfid = spend_one_input(rbf_node, dest_address)
        self.sync_mempools((rbf_node, peer_node))
        assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool()
        # The legacy camelCase option names must be rejected.
        for key in ["totalFee", "feeRate"]:
            assert_raises_rpc_error(-3, "Unexpected key {}".format(key), rbf_node.bumpfee, rbfid, {key: NORMAL})
        # Bumping to just above minrelay should fail to increase the total fee enough.
        assert_raises_rpc_error(-8, "Insufficient total fee 0.00000257, must be at least 0.00002284 (oldFee 0.00000999 + incrementalFee 0.00001285)",
                                rbf_node.bumpfee, rbfid, {"fee_rate": INSUFFICIENT})
        self.log.info("Test invalid fee rate settings")
        assert_raises_rpc_error(-8, "Insufficient total fee 0.00, must be at least 0.00002284 (oldFee 0.00000999 + incrementalFee 0.00001285)",
                                rbf_node.bumpfee, rbfid, {"fee_rate": 0})
        assert_raises_rpc_error(-4, "Specified or calculated fee 0.257 is too high (cannot be higher than -maxtxfee 0.10",
                                rbf_node.bumpfee, rbfid, {"fee_rate": TOO_HIGH})
        assert_raises_rpc_error(-3, "Amount out of range", rbf_node.bumpfee, rbfid, {"fee_rate": -1})
        for value in [{"foo": "bar"}, True]:
            assert_raises_rpc_error(-3, "Amount is not a number or string", rbf_node.bumpfee, rbfid, {"fee_rate": value})
        assert_raises_rpc_error(-3, "Invalid amount", rbf_node.bumpfee, rbfid, {"fee_rate": ""})
        self.log.info("Test explicit fee rate raises RPC error if both fee_rate and conf_target are passed")
        assert_raises_rpc_error(-8, "Cannot specify both conf_target and fee_rate. Please provide either a confirmation "
                                    "target in blocks for automatic fee estimation, or an explicit fee rate.",
                                rbf_node.bumpfee, rbfid, {"conf_target": NORMAL, "fee_rate": NORMAL})
        self.log.info("Test explicit fee rate raises RPC error if both fee_rate and estimate_mode are passed")
        assert_raises_rpc_error(-8, "Cannot specify both estimate_mode and fee_rate",
                                rbf_node.bumpfee, rbfid, {"estimate_mode": "economical", "fee_rate": NORMAL})
        self.log.info("Test invalid conf_target settings")
        assert_raises_rpc_error(-8, "confTarget and conf_target options should not both be set",
                                rbf_node.bumpfee, rbfid, {"confTarget": 123, "conf_target": 456})
        self.log.info("Test invalid estimate_mode settings")
        for k, v in {"number": 42, "object": {"foo": "bar"}}.items():
            assert_raises_rpc_error(-3, "Expected type string for estimate_mode, got {}".format(k),
                                    rbf_node.bumpfee, rbfid, {"estimate_mode": v})
        for mode in ["foo", Decimal("3.1415"), "sat/B", "BTC/kB"]:
            assert_raises_rpc_error(-8, 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"',
                                    rbf_node.bumpfee, rbfid, {"estimate_mode": mode})
        self.clear_mempool()
def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
    """Bump a simple RBF spend via bumpfee and psbtbumpfee; check the fee rose,
    the replacement propagated, and the wallet recorded the replacement links.

    :mode: "default" uses automatic fee estimation; "fee_rate" passes an
        explicit rate.
    """
    self.log.info('Test simple bumpfee: {}'.format(mode))
    rbfid = spend_one_input(rbf_node, dest_address)
    rbftx = rbf_node.gettransaction(rbfid)
    self.sync_mempools((rbf_node, peer_node))
    assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool()
    if mode == "fee_rate":
        bumped_psbt = rbf_node.psbtbumpfee(rbfid, {"fee_rate": NORMAL})
        bumped_tx = rbf_node.bumpfee(rbfid, {"fee_rate": NORMAL})
    else:
        bumped_psbt = rbf_node.psbtbumpfee(rbfid)
        bumped_tx = rbf_node.bumpfee(rbfid)
    assert_equal(bumped_tx["errors"], [])
    # gettransaction reports fees as negative, per-asset ('bitcoin' key: Elements).
    assert bumped_tx["fee"] > -rbftx["fee"]['bitcoin']
    assert_equal(bumped_tx["origfee"], -rbftx["fee"]['bitcoin'])
    assert "psbt" not in bumped_tx
    assert_equal(bumped_psbt["errors"], [])
    assert bumped_psbt["fee"] > -rbftx["fee"]['bitcoin']
    assert_equal(bumped_psbt["origfee"], -rbftx["fee"]['bitcoin'])
    assert "psbt" in bumped_psbt
    # check that bumped_tx propagates, original tx was evicted and has a wallet conflict
    self.sync_mempools((rbf_node, peer_node))
    assert bumped_tx["txid"] in rbf_node.getrawmempool()
    assert bumped_tx["txid"] in peer_node.getrawmempool()
    assert rbfid not in rbf_node.getrawmempool()
    assert rbfid not in peer_node.getrawmempool()
    oldwtx = rbf_node.gettransaction(rbfid)
    assert len(oldwtx["walletconflicts"]) > 0
    # check wallet transaction replaces and replaced_by values
    bumpedwtx = rbf_node.gettransaction(bumped_tx["txid"])
    assert_equal(oldwtx["replaced_by_txid"], bumped_tx["txid"])
    assert_equal(bumpedwtx["replaces_txid"], rbfid)
    self.clear_mempool()
def test_segwit_bumpfee_succeeds(self, rbf_node, dest_address):
    """Verify bumpfee works on an RBF transaction that spends a segwit output."""
    self.log.info('Test that segwit-sourcing bumpfee works')
    # Create a transaction with segwit output, then create an RBF transaction
    # which spends it, and make sure bumpfee can be called on it.
    segwit_in = next(u for u in rbf_node.listunspent() if u["amount"] == Decimal("0.001"))
    segwit_out = rbf_node.getaddressinfo(rbf_node.getnewaddress(address_type='bech32'))
    segwitid = send_to_witness(
        use_p2wsh=False,
        node=rbf_node,
        utxo=segwit_in,
        pubkey=segwit_out["pubkey"],
        encode_p2sh=False,
        amount=Decimal("0.0009"),
        sign=True)
    # Hand-build the RBF spend (opt-in via sequence number) with an explicit fee output.
    rbfraw = rbf_node.createrawtransaction([{
        'txid': segwitid,
        'vout': 0,
        "sequence": BIP125_SEQUENCE_NUMBER
    }], [{dest_address: Decimal("0.0005")},
         {rbf_node.getrawchangeaddress(): Decimal("0.0003")},
         {"fee": "0.0001"}])
    rbfsigned = rbf_node.signrawtransactionwithwallet(rbfraw)
    rbfid = rbf_node.sendrawtransaction(rbfsigned["hex"])
    assert rbfid in rbf_node.getrawmempool()
    bumped_tx = rbf_node.bumpfee(rbfid)
    # Replacement should evict the original from the mempool.
    assert bumped_tx["txid"] in rbf_node.getrawmempool()
    assert rbfid not in rbf_node.getrawmempool()
    self.clear_mempool()
def test_nonrbf_bumpfee_fails(self, peer_node, dest_address):
    """bumpfee must reject a transaction that did not opt in to BIP 125."""
    self.log.info('Test that we cannot replace a non RBF transaction')
    not_rbfid = peer_node.sendtoaddress(dest_address, Decimal("0.00090000"))
    assert_raises_rpc_error(-4, "not BIP 125 replaceable", peer_node.bumpfee, not_rbfid)
    self.clear_mempool()
def test_notmine_bumpfee_fails(self, rbf_node, peer_node, dest_address):
    """bumpfee must reject a transaction containing inputs the wallet does not own."""
    self.log.info('Test that it cannot bump fee if non-owned inputs are included')
    # here, the rbftx has a peer_node coin and then adds a rbf_node input
    # Note that this test depends upon the RPC code checking input ownership prior to change outputs
    # (since it can't use fundrawtransaction, it lacks a proper change output)
    fee = Decimal("0.001")
    utxos = [node.listunspent(query_options={'minimumAmount': fee})[-1] for node in (rbf_node, peer_node)]
    inputs = [{
        "txid": utxo["txid"],
        "vout": utxo["vout"],
        "address": utxo["address"],
        "sequence": BIP125_SEQUENCE_NUMBER
    } for utxo in utxos]
    output_val = sum(utxo["amount"] for utxo in utxos) - fee
    rawtx = rbf_node.createrawtransaction(inputs, [{dest_address: output_val}, {"fee": fee}])
    # Both wallets must sign, since the inputs come from both nodes.
    signedtx = rbf_node.signrawtransactionwithwallet(rawtx)
    signedtx = peer_node.signrawtransactionwithwallet(signedtx["hex"])
    rbfid = rbf_node.sendrawtransaction(signedtx["hex"])
    assert_raises_rpc_error(-4, "Transaction contains inputs that don't belong to this wallet",
                            rbf_node.bumpfee, rbfid)
    self.clear_mempool()
def test_bumpfee_with_descendant_fails(self, rbf_node, rbf_node_address, dest_address):
    """bumpfee must reject a transaction whose output is already spent by a wallet descendant."""
    self.log.info('Test that fee cannot be bumped when it has descendant')
    # parent is send-to-self, so we don't have to check which output is change when creating the child tx
    parent_id = spend_one_input(rbf_node, rbf_node_address)
    input_val = rbf_node.getrawtransaction(parent_id, 1)["vout"][0]["value"]
    # Child spends the parent's output 0 entirely (payment + explicit fee output).
    tx = rbf_node.createrawtransaction([{"txid": parent_id, "vout": 0}], [{dest_address: "0.0002"}, {"fee": input_val-Decimal("0.0002")}])
    tx = rbf_node.signrawtransactionwithwallet(tx)
    rbf_node.sendrawtransaction(tx["hex"])
    assert_raises_rpc_error(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
    self.clear_mempool()
def test_small_output_with_feerate_succeeds(self, rbf_node, dest_address):
    """Repeatedly bump until the change output is exhausted, forcing the wallet
    to add new inputs to the replacement; the original input must be kept."""
    self.log.info('Testing small output with feerate bump succeeds')
    # Make sure additional inputs exist
    rbf_node.generatetoaddress(101, rbf_node.getnewaddress())
    rbfid = spend_one_input(rbf_node, dest_address)
    input_list = rbf_node.getrawtransaction(rbfid, 1)["vin"]
    assert_equal(len(input_list), 1)
    original_txin = input_list[0]
    self.log.info('Keep bumping until transaction fee out-spends non-destination value')
    tx_fee = 0
    while True:
        # While bumping, the replacement must still spend only the original input.
        input_list = rbf_node.getrawtransaction(rbfid, 1)["vin"]
        new_item = list(input_list)[0]
        assert_equal(len(input_list), 1)
        assert_equal(original_txin["txid"], new_item["txid"])
        assert_equal(original_txin["vout"], new_item["vout"])
        rbfid_new_details = rbf_node.bumpfee(rbfid)
        rbfid_new = rbfid_new_details["txid"]
        raw_pool = rbf_node.getrawmempool()
        assert rbfid not in raw_pool
        assert rbfid_new in raw_pool
        rbfid = rbfid_new
        tx_fee = rbfid_new_details["fee"]
        # Total value from input not going to destination
        if tx_fee > Decimal('0.00050000'):
            break
    # input(s) have been added
    final_input_list = rbf_node.getrawtransaction(rbfid, 1)["vin"]
    assert_greater_than(len(final_input_list), 1)
    # Original input is in final set
    assert [txin for txin in final_input_list
            if txin["txid"] == original_txin["txid"]
            and txin["vout"] == original_txin["vout"]]
    rbf_node.generatetoaddress(1, rbf_node.getnewaddress())
    assert_equal(rbf_node.gettransaction(rbfid)["confirmations"], 1)
    self.clear_mempool()
def test_dust_to_fee(self, rbf_node, dest_address):
    """When bumping would leave a dust-sized change output, the output is
    dropped entirely and its value is absorbed into the fee."""
    self.log.info('Test that bumped output that is dust is dropped to fee')
    rbfid = spend_one_input(rbf_node, dest_address)
    fulltx = rbf_node.getrawtransaction(rbfid, 1)
    # The DER formatting used by Bitcoin to serialize ECDSA signatures means that signatures can have a
    # variable size of 70-72 bytes (or possibly even less), with most being 71 or 72 bytes. The signature
    # in the witness is divided by 4 for the vsize, so this variance can take the weight across a 4-byte
    # boundary
    # ELEMENTS: 116 vbytes added (9 for fee spk+value, 99 for assets, 3 for value tags, 3 for null nonces, 2 for elements tx encoding)
    # size of transaction (p2wpkh, 1 input, 3 outputs): 257 vbytes
    if not 140 + 116 <= fulltx["vsize"] <= 141 + 116:
        raise AssertionError("Invalid tx vsize of {} (256-257 expected), full tx: {}".format(fulltx["vsize"], fulltx))
    # Bump with fee_rate of 350.25 sat/vB vbytes to create dust.
    # ELEMENTS: Expected bump fee of 257 vbytes * fee_rate 0.00190000 BTC / 1000 vbytes = 0.00048830 BTC
    # Expected fee is 141 vbytes * fee_rate 0.00350250 BTC / 1000 vbytes = 0.00049385 BTC.
    # or occasionally 140 vbytes * fee_rate 0.00350250 BTC / 1000 vbytes = 0.00049035 BTC.
    # Dust should be dropped to the fee, so actual bump fee is 0.00050000 BTC.
    bumped_tx = rbf_node.bumpfee(rbfid, {"fee_rate": 190.00})
    full_bumped_tx = rbf_node.getrawtransaction(bumped_tx["txid"], 1)
    assert_equal(bumped_tx["fee"], Decimal("0.00050000"))
    assert_equal(len(fulltx["vout"]), 3)
    assert_equal(len(full_bumped_tx["vout"]), 2)  # change output is eliminated
    assert_equal(full_bumped_tx["vout"][0]['value'], Decimal("0.00050000"))
    self.clear_mempool()
def test_settxfee(self, rbf_node, dest_address):
    """bumpfee must honour the wallet's paytxfee (settxfee), and settxfee must
    respect the -maxtxfee limit after a restart."""
    self.log.info('Test settxfee')
    assert_raises_rpc_error(-8, "txfee cannot be less than min relay tx fee", rbf_node.settxfee, Decimal('0.000005'))
    assert_raises_rpc_error(-8, "txfee cannot be less than wallet min fee", rbf_node.settxfee, Decimal('0.000015'))
    # check that bumpfee reacts correctly to the use of settxfee (paytxfee)
    rbfid = spend_one_input(rbf_node, dest_address)
    requested_feerate = Decimal("0.00025000")
    rbf_node.settxfee(requested_feerate)
    bumped_tx = rbf_node.bumpfee(rbfid)
    actual_feerate = bumped_tx["fee"] * 1000 / rbf_node.getrawtransaction(bumped_tx["txid"], True)["vsize"]
    # Assert that the difference between the requested feerate and the actual
    # feerate of the bumped transaction is small.
    assert_greater_than(Decimal("0.00001000"), abs(requested_feerate - actual_feerate))
    rbf_node.settxfee(Decimal("0.00000000"))  # unset paytxfee
    # check that settxfee respects -maxtxfee
    self.restart_node(1, ['-maxtxfee=0.000025'] + self.extra_args[1])
    assert_raises_rpc_error(-8, "txfee cannot be more than wallet max tx fee", rbf_node.settxfee, Decimal('0.00003'))
    self.restart_node(1, self.extra_args[1])
    # Node was restarted: re-enter the wallet passphrase and reconnect to node 0.
    rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
    self.connect_nodes(1, 0)
    self.clear_mempool()
def test_maxtxfee_fails(self, rbf_node, dest_address):
    """bumpfee must fail when the required bump fee would exceed -maxtxfee."""
    self.log.info('Test that bumpfee fails when it hits -maxtxfee')
    # size of bumped transaction (p2wpkh, 1 input, 2 outputs): 141 vbytes
    # expected bump fee of 141 vbytes * 0.00200000 BTC / 1000 vbytes = 0.00002820 BTC
    # which exceeds maxtxfee and is expected to raise
    self.restart_node(1, ['-maxtxfee=0.000025'] + self.extra_args[1])
    rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
    rbfid = spend_one_input(rbf_node, dest_address)
    assert_raises_rpc_error(-4, "Unable to create transaction. Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)", rbf_node.bumpfee, rbfid)
    # Restore default settings, unlock the wallet again and reconnect.
    self.restart_node(1, self.extra_args[1])
    rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
    self.connect_nodes(1, 0)
    self.clear_mempool()
def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
    """A watch-only wallet cannot sign, so psbtbumpfee must return a PSBT that a
    separate wallet holding the matching private keys can sign and broadcast."""
    self.log.info('Test that PSBT is returned for bumpfee in watchonly wallets')
    # Receive and change descriptors for the same seed; /0/* is external, /1/* internal.
    priv_rec_desc = "wpkh([00000001/84'/1'/0']tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0/*)#rweraev0"
    pub_rec_desc = rbf_node.getdescriptorinfo(priv_rec_desc)["descriptor"]
    priv_change_desc = "wpkh([00000001/84'/1'/0']tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/*)#j6uzqvuh"
    pub_change_desc = rbf_node.getdescriptorinfo(priv_change_desc)["descriptor"]
    # Create a wallet with private keys that can sign PSBTs
    rbf_node.createwallet(wallet_name="signer", disable_private_keys=False, blank=True)
    signer = rbf_node.get_wallet_rpc("signer")
    assert signer.getwalletinfo()['private_keys_enabled']
    reqs = [{
        "desc": priv_rec_desc,
        "timestamp": 0,
        "range": [0,1],
        "internal": False,
        "keypool": False # Keys can only be imported to the keypool when private keys are disabled
    },
    {
        "desc": priv_change_desc,
        "timestamp": 0,
        "range": [0, 0],
        "internal": True,
        "keypool": False
    }]
    # Descriptor wallets and legacy wallets use different import RPCs.
    if self.options.descriptors:
        result = signer.importdescriptors(reqs)
    else:
        result = signer.importmulti(reqs)
    assert_equal(result, [{'success': True}, {'success': True}])
    # Create another wallet with just the public keys, which creates PSBTs
    rbf_node.createwallet(wallet_name="watcher", disable_private_keys=True, blank=True)
    watcher = rbf_node.get_wallet_rpc("watcher")
    assert not watcher.getwalletinfo()['private_keys_enabled']
    reqs = [{
        "desc": pub_rec_desc,
        "timestamp": 0,
        "range": [0, 10],
        "internal": False,
        "keypool": True,
        "watchonly": True,
        "active": True,
    }, {
        "desc": pub_change_desc,
        "timestamp": 0,
        "range": [0, 10],
        "internal": True,
        "keypool": True,
        "watchonly": True,
        "active": True,
    }]
    if self.options.descriptors:
        result = watcher.importdescriptors(reqs)
    else:
        result = watcher.importmulti(reqs)
    assert_equal(result, [{'success': True}, {'success': True}])
    funding_address1 = watcher.getnewaddress(address_type='bech32')
    funding_address2 = watcher.getnewaddress(address_type='bech32')
    # ELEMENTS: start with 50% more funds since our transaction will be 688 bytes vs 444 in Bitcoin
    peer_node.sendmany("", {funding_address1: 0.0015, funding_address2: 0.0015})
    peer_node.generate(1)
    self.sync_all()
    # Create single-input PSBT for transaction to be bumped
    psbt = watcher.walletcreatefundedpsbt([], [{dest_address: 0.0005}], 0, {"feeRate": 0.00001}, True)['psbt']
    psbt_signed = signer.walletprocesspsbt(psbt=psbt, sign=True, sighashtype="ALL", bip32derivs=True)
    psbt_final = watcher.finalizepsbt(psbt_signed["psbt"])
    original_txid = watcher.sendrawtransaction(psbt_final["hex"])
    assert_equal(len(watcher.decodepsbt(psbt)["inputs"]), 1)
    # Bump fee, obnoxiously high to add additional watchonly input
    bumped_psbt = watcher.psbtbumpfee(original_txid, {"fee_rate": HIGH})
    assert_greater_than(len(watcher.decodepsbt(bumped_psbt['psbt'])["inputs"]), 1)
    # psbtbumpfee does not broadcast, so no txid is returned.
    assert "txid" not in bumped_psbt
    assert_equal(bumped_psbt["origfee"], -watcher.gettransaction(original_txid)["fee"]['bitcoin'])
    assert not watcher.finalizepsbt(bumped_psbt["psbt"])["complete"]
    # Sign bumped transaction
    bumped_psbt_signed = signer.walletprocesspsbt(psbt=bumped_psbt["psbt"], sign=True, sighashtype="ALL", bip32derivs=True)
    bumped_psbt_final = watcher.finalizepsbt(bumped_psbt_signed["psbt"])
    assert bumped_psbt_final["complete"]
    # Broadcast bumped transaction
    bumped_txid = watcher.sendrawtransaction(bumped_psbt_final["hex"])
    assert bumped_txid in rbf_node.getrawmempool()
    assert original_txid not in rbf_node.getrawmempool()
    rbf_node.unloadwallet("watcher")
    rbf_node.unloadwallet("signer")
    self.clear_mempool()
def test_rebumping(self, rbf_node, dest_address):
    """Bumping the already-replaced original must fail; bumping its successor must work."""
    self.log.info('Test that re-bumping the original tx fails, but bumping successor works')
    rbfid = spend_one_input(rbf_node, dest_address)
    bumped = rbf_node.bumpfee(rbfid, {"fee_rate": ECONOMICAL})
    assert_raises_rpc_error(-4, "already bumped", rbf_node.bumpfee, rbfid, {"fee_rate": NORMAL})
    rbf_node.bumpfee(bumped["txid"], {"fee_rate": NORMAL})
    self.clear_mempool()
def test_rebumping_not_replaceable(self, rbf_node, dest_address):
    """A bump created with replaceable=False cannot itself be bumped again."""
    self.log.info('Test that re-bumping non-replaceable fails')
    rbfid = spend_one_input(rbf_node, dest_address)
    bumped = rbf_node.bumpfee(rbfid, {"fee_rate": ECONOMICAL, "replaceable": False})
    assert_raises_rpc_error(-4, "Transaction is not BIP 125 replaceable", rbf_node.bumpfee, bumped["txid"],
                            {"fee_rate": NORMAL})
    self.clear_mempool()
def test_unconfirmed_not_spendable(self, rbf_node, rbf_node_address):
    """Outputs of both the replaced and the replacing transaction must stay
    unspendable while unconfirmed; the original becomes spendable once mined."""
    self.log.info('Test that unconfirmed outputs from bumped txns are not spendable')
    rbfid = spend_one_input(rbf_node, rbf_node_address)
    rbftx = rbf_node.gettransaction(rbfid)["hex"]
    assert rbfid in rbf_node.getrawmempool()
    bumpid = rbf_node.bumpfee(rbfid)["txid"]
    assert bumpid in rbf_node.getrawmempool()
    assert rbfid not in rbf_node.getrawmempool()
    # check that outputs from the bump transaction are not spendable
    # due to the replaces_txid check in CWallet::AvailableCoins
    assert_equal([t for t in rbf_node.listunspent(minconf=0, include_unsafe=False) if t["txid"] == bumpid], [])
    # submit a block with the rbf tx to clear the bump tx out of the mempool,
    # then invalidate the block so the rbf tx will be put back in the mempool.
    # This makes it possible to check whether the rbf tx outputs are
    # spendable before the rbf tx is confirmed.
    block = submit_block_with_tx(rbf_node, rbftx)
    # Can not abandon conflicted tx
    assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: rbf_node.abandontransaction(txid=bumpid))
    rbf_node.invalidateblock(block.hash)
    # Call abandon to make sure the wallet doesn't attempt to resubmit
    # the bump tx and hope the wallet does not rebroadcast before we call.
    rbf_node.abandontransaction(bumpid)
    assert bumpid not in rbf_node.getrawmempool()
    assert rbfid in rbf_node.getrawmempool()
    # check that outputs from the rbf tx are not spendable before the
    # transaction is confirmed, due to the replaced_by_txid check in
    # CWallet::AvailableCoins
    assert_equal([t for t in rbf_node.listunspent(minconf=0, include_unsafe=False) if t["txid"] == rbfid], [])
    # check that the main output from the rbf tx is spendable after confirmed
    rbf_node.generate(1)
    # listunspent reports the unconfidential form of the address (Elements).
    rbf_node_address_unconfidential = rbf_node.getaddressinfo(rbf_node_address)["unconfidential"]
    assert_equal(
        sum(1 for t in rbf_node.listunspent(minconf=0, include_unsafe=False)
            if t["txid"] == rbfid and t["address"] == rbf_node_address_unconfidential and t["spendable"]), 1)
    self.clear_mempool()
def test_bumpfee_metadata(self, rbf_node, dest_address):
    """The comment/to metadata of the original transaction must carry over to the bump."""
    self.log.info('Test that bumped txn metadata persists to new txn record')
    assert(rbf_node.getbalance()["bitcoin"] < 49)
    # Mine enough mature coins to afford the 49-coin send below.
    rbf_node.generatetoaddress(101, rbf_node.getnewaddress())
    rbfid = rbf_node.sendtoaddress(dest_address, 49, "comment value", "to value")
    bumped_tx = rbf_node.bumpfee(rbfid)
    bumped_wtx = rbf_node.gettransaction(bumped_tx["txid"])
    assert_equal(bumped_wtx["comment"], "comment value")
    assert_equal(bumped_wtx["to"], "to value")
    self.clear_mempool()
def test_locked_wallet_fails(self, rbf_node, dest_address):
    """bumpfee must refuse to sign while the wallet is locked."""
    self.log.info('Test that locked wallet cannot bump txn')
    rbfid = spend_one_input(rbf_node, dest_address)
    rbf_node.walletlock()
    assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first.",
                            rbf_node.bumpfee, rbfid)
    # Unlock again so subsequent tests can sign.
    rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
    self.clear_mempool()
def test_change_script_match(self, rbf_node, dest_address):
    """Every bump of a transaction must reuse the original transaction's change address."""
    self.log.info('Test that the same change addresses is used for the replacement transaction when possible')

    def get_change_address(tx):
        """Return the change addresses of *tx*, skipping explicit 'fee'-type outputs."""
        tx_details = rbf_node.getrawtransaction(tx, 1)
        txout_addresses = [txout['scriptPubKey']['addresses'][0] for txout in tx_details["vout"] if txout['scriptPubKey']['type'] != 'fee']
        return [address for address in txout_addresses if rbf_node.getaddressinfo(address)["ischange"]]

    # Check that there is only one change output
    rbfid = spend_one_input(rbf_node, dest_address)
    change_addresses = get_change_address(rbfid)
    assert_equal(len(change_addresses), 1)
    # Now find that address in each subsequent tx, and no other change
    bumped_total_tx = rbf_node.bumpfee(rbfid, {"fee_rate": ECONOMICAL})
    assert_equal(change_addresses, get_change_address(bumped_total_tx['txid']))
    bumped_rate_tx = rbf_node.bumpfee(bumped_total_tx["txid"])
    assert_equal(change_addresses, get_change_address(bumped_rate_tx['txid']))
    self.clear_mempool()
def spend_one_input(node, dest_address, change_size=Decimal("0.00049000")):
    """Create, sign and broadcast an opt-in-RBF spend of the wallet's
    0.001-coin UTXO: 0.0005 to *dest_address*, optional change of
    *change_size*, plus an explicit 0.00001 fee output.

    Returns the txid of the broadcast transaction.
    """
    funding_utxo = next(u for u in node.listunspent() if u["amount"] == Decimal("0.00100000"))
    # Opt in to BIP 125 replacement via the input's sequence number.
    tx_input = dict(sequence=BIP125_SEQUENCE_NUMBER, **funding_utxo)
    outputs = [{dest_address: Decimal("0.00050000")}]
    if change_size > 0:
        outputs.append({node.getrawchangeaddress(): change_size})
    outputs.append({"fee": Decimal("0.00001")})
    raw = node.createrawtransaction([tx_input], outputs)
    signed = node.signrawtransactionwithwallet(raw)
    return node.sendrawtransaction(signed["hex"])
def submit_block_with_tx(node, tx):
    """Mine a valid block on top of the current tip containing only *tx*
    (plus the coinbase) and submit it to *node*. Returns the block object."""
    ctx = CTransaction()
    ctx.deserialize(io.BytesIO(hex_str_to_bytes(tx)))
    tip = node.getbestblockhash()
    height = node.getblockcount() + 1
    # Block time must exceed the median time past of the tip.
    block_time = node.getblockheader(tip)["mediantime"] + 1
    block = create_block(int(tip, 16), create_coinbase(height), block_time)
    block.vtx.append(ctx)
    block.rehash()
    # Merkle root must be recomputed after appending the extra transaction.
    block.hashMerkleRoot = block.calc_merkle_root()
    add_witness_commitment(block)
    block.solve()
    node.submitblock(block.serialize().hex())
    return block
def test_no_more_inputs_fails(self, rbf_node, dest_address):
    """bumpfee must fail when the wallet has no confirmed outputs left to draw on."""
    self.log.info('Test that bumpfee fails when there are no available confirmed outputs')
    # feerate rbf requires confirmed outputs when change output doesn't exist or is insufficient
    rbf_node.generatetoaddress(1, dest_address)
    # spend all funds, no change output
    rbfid = rbf_node.sendtoaddress(rbf_node.getnewaddress(), rbf_node.getbalance()['bitcoin'], "", "", True)
    assert_raises_rpc_error(-4, "Unable to create transaction. Insufficient funds", rbf_node.bumpfee, rbfid)
    self.clear_mempool()
# Standard test-framework entry point.
if __name__ == "__main__":
    BumpFeeTest().main()
|
ElementsProject/elements
|
test/functional/wallet_bumpfee.py
|
Python
|
mit
| 29,549
|
#!/usr/bin/env python3
from ..action import Action
class ConsoleDeployer:
    """Deployer that only reports planned actions to the log instead of applying them."""

    def __init__(self, logger):
        self.logger = logger

    def read(self, relative):
        # The console target has no backing store to read from.
        raise Exception('can\'t read from console deployer')

    def send(self, work, actions):
        """Log one colored line per non-NOP action; always report success."""
        for item in actions:
            if item.type == item.NOP:
                continue
            if item.type == item.ADD:
                marker = '((lime))+'
            elif item.type == item.DEL:
                marker = '((blue))-'
            else:
                # Anything else (e.g. a modification) gets the warning marker.
                marker = '((red))!'
            self.logger.info(marker + '((reset)) ' + item.path)
        return True
|
r3c/Creep
|
creep/src/deployers/console.py
|
Python
|
mit
| 668
|
from datetime import datetime
from twitter_update import api # , get_server_availability
from log import log_twitter
if __name__ == "__main__":
    # Pull home-timeline statuses newer than a fixed anchor tweet id.
    server_statuses = api.GetHomeTimeline(exclude_replies=True, since_id=753756834154938368)
    for s in server_statuses:
        log_twitter.info(s)
    # Re-log the statuses sorted oldest-first by tweet id.
    server_statuses.sort(key=lambda status: status.id)
    log_twitter.info('Sorted')
    for s in server_statuses:
        log_twitter.info(s)
    print('%s _do_refresh with %s new status' % (datetime.now().strftime("[%Y/%m/%d] [%I:%M%p]"), len(server_statuses)))
# Kept for reference: bulk-fetch variant that also parses server availability.
"""
statuses = api.GetHomeTimeline(exclude_replies=True, count=200)
print('Received %s statuses' % len(statuses))
for s in statuses:
avail = get_server_availability(s.text)
print('{0:7} - {1}'.format(avail.name, s.text))
"""
|
fp12/sfv-bot
|
src/twitter_servers_availability.py
|
Python
|
mit
| 828
|
import sys

# AGC010 A: the pairing operation can empty the list iff the count of odd
# elements is even — only the total parity matters.
N = int(input())
A = list(map(int, sys.stdin.readline().split(' ')))
odd_total = sum(value & 1 for value in A)
print('YES' if odd_total % 2 == 0 else 'NO')
|
nel215/atcoder-grand-contest
|
agc010/a.py
|
Python
|
mit
| 175
|
from hatch.template import File
from hatch.utils.fs import Path
from hatchling.__about__ import __version__
from hatchling.metadata.utils import DEFAULT_METADATA_VERSION
from ..new.licenses_multiple import get_files as get_template_files
from .utils import update_record_file_contents
def get_files(**kwargs):
    """Expected wheel contents for a project carrying multiple license files.

    Starts from the `licenses_multiple` project template files and appends the
    .dist-info metadata files (entry_points.txt, WHEEL, METADATA, RECORD) a
    standard build should produce.
    """
    metadata_directory = kwargs.get('metadata_directory', '')
    files = []
    for f in get_template_files(**kwargs):
        first_part = f.path.parts[0]
        if first_part == 'LICENSES':
            # License texts are mirrored under <dist-info>/license_files/LICENSES/.
            files.append(File(Path(metadata_directory, 'license_files', 'LICENSES', f.path.parts[1]), f.contents))
        if first_part != kwargs['package_name']:
            # Only files inside the package directory ship in the wheel payload.
            continue
        files.append(f)
    files.append(File(Path(metadata_directory, 'entry_points.txt'), ''))
    files.append(
        File(
            Path(metadata_directory, 'WHEEL'),
            f"""\
Wheel-Version: 1.0
Generator: hatch {__version__}
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any
""",
        )
    )
    files.append(
        File(
            Path(metadata_directory, 'METADATA'),
            f"""\
Metadata-Version: {DEFAULT_METADATA_VERSION}
Name: {kwargs['project_name_normalized']}
Version: 0.0.1
""",
        )
    )
    # RECORD is generated last so it can list every other file in the wheel.
    record_file = File(Path(metadata_directory, 'RECORD'), '')
    update_record_file_contents(record_file, files)
    files.append(record_file)
    return files
|
ofek/hatch
|
tests/helpers/templates/wheel/standard_default_license_multiple.py
|
Python
|
mit
| 1,407
|
class Book(object):
    """A book identified by ISBN, carrying an aggregate score and a reader list."""

    def __init__(self, isbn, title, description=None, category=None):
        self.isbn = isbn
        self.title = title
        self.description = description
        self.category = category
        # Every book starts unrated and unread.
        self.score = 0
        self.readers = []

    def assign_rating(self, score):
        """Overwrite the book's score with *score*."""
        self.score = score

    def add_reader(self, user):
        """Record that *user* has read this book."""
        self.readers.append(user)
class User(object):
    """A reader with a running score, a reading history and a follow list."""

    def __init__(self, name):
        self.username = name
        self.score = 0
        self.books_read = []
        self.users_followed = []

    def get_book_by_isbn(self, isbn):
        """Return the already-read book matching *isbn*, or None if absent."""
        return next((candidate for candidate in self.books_read if candidate.isbn == isbn), None)

    def follows(self, user):
        """Start following another user."""
        self.users_followed.append(user)

    def reads(self, book):
        """Record a read book and register this user as one of its readers."""
        assert isinstance(book, Book)
        self.books_read.append(book)
        book.add_reader(self)

    def increment_score(self, score):
        """Add *score* to the user's running total."""
        self.score += score
|
gabalese/suggest-me-a-book
|
models/models.py
|
Python
|
mit
| 1,001
|
"""
Django settings for aniauth project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# Directory layout: settings package -> project app -> repository root -> public/.
SETTINGS_ROOT = os.path.abspath(os.path.dirname(__file__))
PROJECT_APP_ROOT = os.path.abspath(os.path.dirname(SETTINGS_ROOT))
PROJECT_ROOT = os.path.abspath(os.path.dirname(PROJECT_APP_ROOT))
PUBLIC_ROOT = os.path.abspath(os.path.join(PROJECT_ROOT, 'public'))
# Production Settings
# Core
ADMINS = (
    ('root', 'root@localhost'),
)
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(PROJECT_ROOT, 'templates')
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                # Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
                # list if you haven't customized them:
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
# Auth
AUTH_USER_MODEL = 'accounts.User'
LOGIN_REDIRECT_URL = 'profile'
LOGIN_URL = 'login'
ACTIVATE_URL = 'activate'
# Account-activation links expire after this many days.
ACTIVATION_KEY_TIMEOUT_DAYS = 2
# Crispy Forms
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# SECURITY WARNING: keep the secret key used in production secret!
# The key is read at import time from a file next to this settings module.
with open(os.path.join(SETTINGS_ROOT, 'secret_key')) as f:
    SECRET_KEY = f.read().strip()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# Application definition
PREREQ_APPS = [
    'crispy_forms',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]
PROJECT_APPS = [
    'accounts',
]
INSTALLED_APPS = PREREQ_APPS + PROJECT_APPS
MIDDLEWARE_CLASSES = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'aniauth.urls'
WSGI_APPLICATION = 'aniauth.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(PROJECT_ROOT, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(PUBLIC_ROOT, 'static')
MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media')
STATICFILES_DIRS = (
    os.path.join(PROJECT_ROOT, 'static'),
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
|
randomic/antinub-auth
|
aniauth/settings/base.py
|
Python
|
mit
| 3,664
|
#!/usr/bin/env python
# compiler.py
# Dynamo
#
# Created by John Holdsworth on 18/06/2015.
# Copyright (c) 2015 John Holdsworth. All rights reserved.
import os
import re
# PRODUCT_NAME is supplied by the Xcode build environment; it names both the
# .shtml input template and the generated .swift output.
productName = os.getenv( "PRODUCT_NAME" )
imports = ""
props = ""
code = {}  # placeholder token ("__N__") -> generated Swift source snippet
match = 0
file = open( productName+".shtml", "r" )
stml = file.read()
def replacer(m):
    """re.sub callback for <%...%> template tags.

    Tag kinds: <%@ ...%> accumulates import lines, <%! ...%> accumulates
    class properties, <%= expr %> appends the expression's value to the
    response, and a bare <% ... %> embeds raw statements. Code-producing
    tags are replaced in the template by a "__N__" placeholder whose
    generated Swift is stored in the module-level `code` dict.
    """
    global imports, props, match, code
    out = "";
    content = m.group(2).rstrip()
    if m.group(1) == '@':
        imports += content
    elif m.group(1) == '!':
        props += content
    elif m.group(1) == '=':
        out = "__%d__" % match
        # Close the surrounding Swift string literal, append the expression,
        # then reopen the literal for the following template text.
        code[out] = "\"\nresponse += %s\nresponse += \"" % content
        match = match + 1
    else:
        out = "__%d__" % match
        code[out] = "\"\n%s\nresponse += \"" % content
        match = match + 1
    return out;
# Replace template tags with placeholders, then escape quotes/backslashes and
# newlines so the remaining literal text can live inside one Swift string.
stml = re.sub( r"<%(@|!|=|)(.*?)%>\n?", replacer, stml, 0, re.DOTALL )
stml = re.sub( r"(\"|\\(?!\())", r"\\\1", stml )
stml = re.sub( r"\r", r"\\r", stml )
stml = re.sub( r"\n", r"\\n", stml )
# Splice the generated Swift back in place of each "__N__" placeholder
# (placeholder keys contain only word characters, so they are regex-safe).
for key in code:
    stml = re.sub( key, code[key], stml )
file = open( productName+".swift", "w" )
file.write( '''//
// compiled from %s.shtml
//
import Foundation
#if os(OSX)
import Dynamo
#endif
%s
@objc (%sSwiftlet)
public class %sSwiftlet: SessionApplication {
%s
override public func processRequest( out: DynamoHTTPConnection, pathInfo: String, parameters: [String:String], cookies: [String:String] ) {
var response = ""
response += "%s"
out.response( response )
}
}
''' % (productName, imports, productName, productName, props, stml) )
|
TribeMedia/Dynamo
|
Utilities/sspcompiler.py
|
Python
|
mit
| 1,607
|
# pylint: disable=E1101
"""
BaseSink.py
"""
from cps2zmq.gather import BaseSink
# Filters/Handlers/Loggers would do bulk of work here
class LogSink(BaseSink):
    """Sink that consumes published log messages and splits topic from payload."""

    def handle_pub(self, msg):
        """Consume one published multipart message.

        The first frame is the dotted topic string, the last frame is the log
        payload; both are UTF-8 decoded, logged, and passed to the handlers.
        """
        self.msgs_recv += 1
        self._logger.info('Received message %s', msg)
        topic = msg.pop(0).decode('utf-8')
        log = msg.pop().decode('utf-8')
        self._logger.info('topic %s', topic)
        self._logger.info('message %s', log)
        topic_split = self.handle_topic(topic)
        print('topic_split', topic_split)
        self.handle_log(log)

    def handle_topic(self, topic):
        # Split a dotted topic string into its components.
        return topic.split('.')

    def handle_log(self, log):
        # Placeholder: real filtering/persistence would happen here.
        print('handle_log', log)
# Manual smoke run: wire a sink to local pub/sub endpoints and let it report.
if __name__ == '__main__':
    sink = LogSink("1", "tcp://127.0.0.1:5557", "tcp://127.0.0.1:5558", ['MameWorker'], log_to_file=True)
    sink.start()
    sink.report()
    sink.close()
|
goosechooser/cps2-zmq
|
cps2zmq/gather/LogSink.py
|
Python
|
mit
| 969
|
# Backwards-compatibility re-export (kept so old import paths keep working).
from .upload_inline import UploadInlineMixin # noqa
|
rouxcode/django-filer-addons
|
filer_addons/filer_gui/admin/inlines.py
|
Python
|
mit
| 67
|
import pytari2600.cpu.instructions as instructions
import pytari2600.cpu.addressing as addressing
import pytari2600.cpu.pc_state as pc_state
import unittest
class DummyClocks(object):
    """Clock stub handed to Instruction objects in these tests."""

    def __init__(self):
        # Single counter, starting at zero.
        self.system_clock = 0
class DummyMemory(object):
    """Memory stub: a distinct sentinel per read channel; writes are recorded."""

    def __init__(self):
        # Distinguishable sentinels make it obvious which access path was used.
        self.dummy_read, self.dummy_read16 = 8, 16
        self.dummy_sp_write, self.dummy_read_sp = 19, 23

    def read(self, address):
        """Plain read: always returns the dummy_read sentinel."""
        return self.dummy_read

    def read16(self, address):
        """16-bit read sentinel."""
        return self.dummy_read16

    def readSp(self, address):
        """Stack-relative read sentinel."""
        return self.dummy_read_sp

    def write(self, address, data):
        """Capture the last written value for assertions."""
        self.dummy_write = data

    def writeSp(self, address, data):
        """Capture stack writes separately from plain writes."""
        self.dummy_write_sp = data
class TestInstructions(unittest.TestCase):
    """Unit tests for the instruction Reading/Writing helper classes."""

    def test_reading(self):
        """Each Reading flavour reports its cycle cost and its read value."""
        current_pc_state = pc_state.PC_State()
        memory = DummyMemory()
        memory.dummy_read = 2
        reading = instructions.Reading(current_pc_state, memory)
        self.assertEqual(reading.get_reading_time(), 2*current_pc_state.CYCLES_TO_CLOCK)
        self.assertEqual(reading.read(1), 2)
        memory.dummy_read = 4
        reading = instructions.NullReading(current_pc_state, memory)
        self.assertEqual(reading.get_reading_time(), 1*current_pc_state.CYCLES_TO_CLOCK)
        self.assertEqual(reading.read(1), 0) # Null read does nothing
        current_pc_state.A = 8
        reading = instructions.AccumulatorReading(current_pc_state, memory)
        self.assertEqual(reading.get_reading_time(), 1*current_pc_state.CYCLES_TO_CLOCK)
        self.assertEqual(reading.read(1), 8)

    def test_writing(self):
        """Each Writing flavour reports its cycle cost and writes to the right place."""
        current_pc_state = pc_state.PC_State()
        memory = DummyMemory()
        writing = instructions.Writing(current_pc_state, memory)
        writing.write(0,20)
        self.assertEqual(writing.get_writing_time(), 2*current_pc_state.CYCLES_TO_CLOCK)
        self.assertEqual(memory.dummy_write, 20)
        writing = instructions.NullWriting(current_pc_state, memory)
        writing.write(0,21)
        self.assertEqual(writing.get_writing_time(), 0*current_pc_state.CYCLES_TO_CLOCK)
        self.assertEqual(memory.dummy_write, 20) # Null write doesn't do anything
        writing = instructions.AccumulatorWriting(current_pc_state, memory)
        writing.write(0,22)
        self.assertEqual(writing.get_writing_time(), 1*current_pc_state.CYCLES_TO_CLOCK)
        self.assertEqual(current_pc_state.A, 22)
        writing = instructions.RegWriting(current_pc_state, memory)
        writing.write(0,23)
        self.assertEqual(writing.get_writing_time(), 1*current_pc_state.CYCLES_TO_CLOCK)
        self.assertEqual(memory.dummy_write, 23)
class TestInstructionExec(unittest.TestCase):
    """Smoke tests for InstructionExec handlers."""

    def test_simple_exec(self):
        """Invoke every *_exec handler once with a fixed data byte.

        No results are asserted — this only verifies none of the handlers raise.
        """
        current_pc_state = pc_state.PC_State()
        instruction_exec = instructions.InstructionExec(current_pc_state)
        data = 7
        instruction_exec.NOP_exec(data)
        instruction_exec.OR_exec(data)
        instruction_exec.ASL_exec(data)
        instruction_exec.AND_exec(data)
        instruction_exec.CLC_exec(data)
        instruction_exec.CLD_exec(data)
        instruction_exec.CLI_exec(data)
        instruction_exec.CLV_exec(data)
        instruction_exec.SEC_exec(data)
        instruction_exec.SED_exec(data)
        instruction_exec.SEI_exec(data)
        instruction_exec.BIT_exec(data)
        instruction_exec.ROL_exec(data)
        instruction_exec.EOR_exec(data)
        instruction_exec.LSR_exec(data)
        instruction_exec.ROR_exec(data)
        instruction_exec.LDY_exec(data)
        instruction_exec.LDA_exec(data)
        instruction_exec.LDX_exec(data)
        instruction_exec.CMP_exec(data)
        instruction_exec.CPX_exec(data)
        instruction_exec.CPY_exec(data)
        instruction_exec.ADC_exec(data)
        instruction_exec.SBC_exec(data)
        instruction_exec.INC_exec(data)
        instruction_exec.TNoStatus_exec(data)
        instruction_exec.TStatus_exec(data)
        instruction_exec.STA_exec(data)
        instruction_exec.STY_exec(data)
        instruction_exec.STX_exec(data)
        instruction_exec.SAX_exec(data)
        instruction_exec.DEC_exec(data)
        instruction_exec.DCP_exec(data)
        instruction_exec.SLO_exec(data)
        instruction_exec.set_status_NZ(data)
        # Helpers taking multiple operands.
        (a, b, c) = (1, 2, 3)
        instruction_exec.subc(a, b, c)
        instruction_exec.cmp(a, b)
class TestInstructions(unittest.TestCase):
    """Construct one of each Instruction variant and drive execute()."""

    def test_execute(self):
        clocks = DummyClocks()
        clocks.system_clocks = 10000
        state = pc_state.PC_State()
        memory = DummyMemory()
        exec_fn = instructions.InstructionExec(state).OR_exec
        reader = instructions.Reading(state, memory)
        writer = instructions.Writing(state, memory)
        address = addressing.AddressIZX(state, memory)
        # Every instruction flavour shares the same OR_exec callback.
        under_test = [
            instructions.Instruction(clocks, state, exec_fn),
            instructions.ReadWriteInstruction(clocks, state, address, reader, writer, exec_fn),
            instructions.BreakInstruction(clocks, state, memory, exec_fn),
            instructions.JumpSubRoutineInstruction(clocks, state, memory, exec_fn),
            instructions.ReturnFromSubRoutineInstruction(clocks, state, memory, exec_fn),
            instructions.ReturnFromInterrupt(clocks, state, memory, exec_fn),
            instructions.BranchInstruction(clocks, state, memory, 0x80, 0x80, exec_fn),
            instructions.SingleByteInstruction(clocks, state, state.A, state.A, exec_fn),
            instructions.JumpInstruction(clocks, state, address, exec_fn),
            instructions.PHPInstruction(clocks, state, memory, exec_fn),
            instructions.PLPInstruction(clocks, state, memory, exec_fn),
            instructions.PHAInstruction(clocks, state, memory, exec_fn),
            instructions.PLAInstruction(clocks, state, memory, exec_fn),
        ]
        for op in under_test:
            op.execute()
# Allow the test module to be run directly: python test_instructions.py
if __name__ == '__main__':
    unittest.main()
|
ajgrah2000/pytari2600
|
pytari2600/test/test_instructions.py
|
Python
|
mit
| 6,597
|
"""
=================
Confidence Levels
=================
When setting the sigma levels for ChainConsumer, we need to be careful
if we are talking about 1D or 2D Gaussians. For 1D Gaussians, 1 and 2 :math:`\sigma` correspond
to 68% and 95% confidence levels. However, for a a 2D Gaussian, integrating over 1 and 2 :math:`\sigma`
levels gives 39% and 86% confidence levels.
By default ChainConsumer uses the 2D levels, such that the contours will line up and agree with the
marginalised distributions shown above them, however you can also choose to switch to using the 1D
Gaussian method, such that the contour encloses 68% and 95% confidence regions, by switching `sigma2d` to `False`
"""
import numpy as np
from numpy.random import multivariate_normal
from chainconsumer import ChainConsumer
np.random.seed(0)
data = multivariate_normal([0, 0], [[1, 0], [0, 1]], size=1000000)
c = ChainConsumer().add_chain(data, parameters=["$x$", "$y$"])
c.configure(flip=False, sigma2d=False, sigmas=[1, 2]) # The default case, so you don't need to specify sigma2d
fig = c.plotter.plot()
fig.set_size_inches(3 + fig.get_size_inches()) # Resize fig for doco. You don't need this.
###############################################################################
# Demonstrating the 1D Gaussian confidence levels. Notice the change in contour size
# The contours shown below now show the 68% and 95% confidence regions.
c = ChainConsumer().add_chain(data, parameters=["$x$", "$y$"])
c.configure(flip=False, sigma2d=True, sigmas=[1, 2])
fig = c.plotter.plot()# -*- coding: utf-8 -*-
fig.set_size_inches(3 + fig.get_size_inches()) # Resize fig for doco. You don't need this.
|
Samreay/ChainConsumer
|
examples/customisations/plot_confidence_levels.py
|
Python
|
mit
| 1,673
|
import pytest
import mock
from team.server import app
def is_html(r):
    """Assert that *r* is a successful response carrying UTF-8 HTML."""
    expected_content_type = 'text/html; charset=utf-8'
    assert r.status_code == 200
    assert r.headers['Content-type'] == expected_content_type
def test_get_home():
    """The root URL renders an HTML page."""
    response = app.test_client().get('/')
    is_html(response)
def test_get_cards():
    """The cards page renders HTML containing the page container div."""
    response = app.test_client().get('/cards')
    is_html(response)
    assert '<div class="page">' in response.data
def test_get_whereabouts_date():
    """Requesting an explicit date shows that date's whereabouts heading."""
    response = app.test_client().get('/whereabouts/2014-07-17')
    is_html(response)
    assert '<h1>Team whereabouts for 2014-07-17' in response.data
def test_get_whereabouts_today():
    """Requesting no date falls back to today's whereabouts heading."""
    response = app.test_client().get('/whereabouts')
    is_html(response)
    assert '<h1>Team whereabouts for 20' in response.data
@mock.patch('team.server.photos.get', return_value=("IMAGE", "image/jpeg"))
def test_get_photos(mock_get):
    """The photo route fetches the named photo and serves its bytes/MIME."""
    response = app.test_client().get('/photo/nickname')
    mock_get.assert_called_with('nickname')
    assert response.data == "IMAGE"
    assert response.content_type == "image/jpeg"
|
LandRegistry/team-dashboard
|
tests/test_server.py
|
Python
|
mit
| 1,048
|
# -*- coding: utf-8 -*-
"""
Django settings for tempo project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import environ
ROOT_DIR = environ.Path(__file__) - 3  # (tempo/config/settings/base.py - 3 = tempo/)
APPS_DIR = ROOT_DIR.path('tempo')

# Load operating system environment variables and then prepare to use them
env = environ.Env()

# .env file, should load only in development environment
READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
    # Operating System Environment variables have precedence over variables defined in the .env file,
    # that is to say variables from the .env files will only be used if not defined
    # as environment variables.
    env_file = str(ROOT_DIR.path('.env'))
    print('Loading : {}'.format(env_file))
    env.read_env(env_file)
    print('The .env file has been loaded. See base.py for more information')

# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = [
    # Default Django apps:
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Useful template tags:
    # 'django.contrib.humanize',
    # Admin
    'django.contrib.admin',
]
THIRD_PARTY_APPS = [
    'crispy_forms',  # Form layouts
    'crispy_forms_semantic_ui',
    'allauth',  # registration
    'allauth.account',  # registration
    'allauth.socialaccount',  # registration
    'taggit',
    'rest_framework',
    'sekizai',
]
# Apps specific for this project go here.
LOCAL_APPS = [
    # custom users app
    'tempo.users.apps.UsersConfig',
    # Your stuff: custom apps go here
    'tempo.common',
    'tempo.events',
]
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS

# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'tempo.users.middleware.UserTimezoneMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
MIGRATION_MODULES = {
    'sites': 'tempo.contrib.sites.migrations'
}

# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)

# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
    str(APPS_DIR.path('fixtures')),
)

# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')

# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = [
    ("""Eliot Berriot""", 'contact@eliotberriot.com'),
]
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS

# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
    'default': env.db('DATABASE_URL', default='postgres:///tempo'),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True

# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = env('TIME_ZONE', default='UTC')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True

# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
    {
        # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
        'DIRS': [
            str(APPS_DIR.path('templates')),
        ],
        'OPTIONS': {
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
            'debug': DEBUG,
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
            # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
                'sekizai.context_processors.sekizai',
                # Your stuff: custom template context processors go here
                'tempo.events.context_processors.models',
                'tempo.events.context_processors.timezone',
                'tempo.events.context_processors.raven',
            ],
            'builtins': [
                'django.contrib.staticfiles.templatetags.staticfiles',
                'django.templatetags.i18n',
            ],
        },
    },
]

# See: http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
# NOTE: CRISPY_TEMPLATE_PACK is set once, in the project-specific section at
# the bottom of this file (the dead 'bootstrap4' assignment that used to sit
# here was always overwritten by 'semantic-ui' below).

# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [
    str(APPS_DIR.path('static')),
]
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = [
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]

# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'

# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'

# PASSWORD VALIDATION
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
# ------------------------------------------------------------------------------
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = [
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend',
]
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
ACCOUNT_ADAPTER = 'tempo.users.adapters.AccountAdapter'
SOCIALACCOUNT_ADAPTER = 'tempo.users.adapters.SocialAccountAdapter'

# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
# NOTE: LOGIN_REDIRECT_URL is set once, in the project-specific section at
# the bottom of this file (the dead 'users:redirect' assignment that used to
# sit here was always overwritten by 'events:timeline' below).
LOGIN_URL = 'account_login'

# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'

# django-compressor
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['compressor']
STATICFILES_FINDERS += ['compressor.finders.CompressorFinder']

# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'

# Your common stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
CRISPY_TEMPLATE_PACK = 'semantic-ui'
CRISPY_ALLOWED_TEMPLATE_PACKS = ['semantic-ui']
TAGGIT_CASE_INSENSITIVE = True
LOGIN_REDIRECT_URL = 'events:timeline'
RAVEN_JS_DSN = env('RAVEN_JS_DSN', default=None)
|
EliotBerriot/tempo
|
config/settings/base.py
|
Python
|
mit
| 10,605
|
from sikuli import *
import Constants
reload(Constants)
class MainMenu(object):
    """Sikuli state machine that collects mail and challenge rewards from
    the game's main menu after a level-up, then hands control back to the
    quest state machine."""

    # All states run() can visit.  (Fix: 'ChallengesPage2' was missing from
    # the original list, which listed 'ChallengesCollect' twice instead.)
    states = [
        'Start',
        'CollectStart',
        'MailCollect',
        'ChallengesCollect',
        'ChallengesPage2',
        'End'
    ]

    def __init__(self, botInfo):
        # botInfo: shared bot state (leveledUp flag, active state machine).
        self.botInfo = botInfo
        self.state = 'Start'

    def run(self):
        """Advance the state machine by one step."""
        print(self.state)
        if self.state == 'Start':
            # Only bother collecting when a level-up granted rewards.
            if self.botInfo.leveledUp:
                self.state = 'CollectStart'
            else:
                self.state = 'End'
        elif self.state == 'CollectStart':
            # Prefer mail rewards; fall back to challenge rewards.
            if exists("mail_found.png", .25):
                click("mail_found.png")
                self.state = 'MailCollect'
            elif exists("challenge_found.png", .25):
                self.state = 'ChallengesCollect'
                click("challenge_found.png")
                sleep(.5)
                click(Constants.DETAILS)
            else:
                self.state = 'End'
        elif self.state == 'MailCollect':
            if exists("receive.png", .1):
                click(Constants.RECEIVE_ALL)
                sleep(.25)
                click(Constants.CONFIRM)
                sleep(.25)
                click(Constants.CLOSE)
            elif exists("no_more_mail.png", .1):
                # Mailbox drained: return home and look for more rewards.
                self.state = 'CollectStart'
                Constants.MENU_BAR.click(Constants.HOME_CHRISTMAS)
                sleep(.25)
        elif self.state == 'ChallengesCollect':
            click(Constants.RECEIVE_ALL)
            sleep(.25)
            click(Constants.CLOSE)
            self.state = 'ChallengesPage2'
        elif self.state == 'ChallengesPage2':
            if exists("page_2.png", .1):
                click("page_2.png")
                sleep(.25)
                self.state = 'ChallengesCollect'
            else:
                self.state = 'End'
                Constants.MENU_BAR.click(Constants.HOME_CHRISTMAS)
        elif self.state == 'End':
            # Reset and hand control back to the quest state machine.
            self.state = 'Start'
            self.botInfo.leveledUp = False
            self.botInfo.state = 'Quest'
            self.botInfo.questMenu.state = 'Start'

    def review_gold_card(self):
        # Placeholder -- not implemented yet.
        pass

    def copy_deck(self):
        """Copy the currently shown deck via the edit-deck dialog."""
        click(Constants.MENU)
        click("edit_deck.png")
        wait("copy_deck.png")
        click("copy_deck.png")
        click(Constants.CONFIRM)
        wait("copy_deck.png")
        click(Constants.HOME_CHRISTMAS)
|
Stephen-Zhang/Ishtaria-Sikuli
|
MainMenu.sikuli/MainMenu.py
|
Python
|
mit
| 2,447
|
#!/usr/bin/env python3
# Weather Summary
import argparse
import datetime
import fnmatch
import os
import piexif
from libxmp.utils import file_to_dict
from lib import srtm
# Command-line interface: one positional argument naming the directory of
# geotagged images to summarise.
parser = argparse.ArgumentParser(description="Lookup a weather report for the location/time an image was captured.")
parser.add_argument("project", help="geotagged image")
args = parser.parse_args()
def dms_to_decimal(degrees, minutes, seconds, sign=' '):
    """Convert degrees, minutes, seconds into signed decimal degrees.

    Each component may be either a plain number or an EXIF-style rational
    ``(numerator, denominator)`` tuple (as produced by piexif).  *sign* is
    the hemisphere reference: 'S'/'W' negate the result, anything else
    (including the default) leaves it positive.

    The original implementation only accepted rational tuples, so its own
    doctest examples (plain ints) raised TypeError; both forms now work.

    >>> dms_to_decimal(10, 10, 10)
    10.169444444444444
    >>> dms_to_decimal(8, 9, 10, 'S')
    -8.152777777777779
    """
    def _to_float(value):
        # EXIF GPS fields arrive as (numerator, denominator) rationals;
        # plain numbers fail the unpack and fall through to float().
        try:
            num, den = value
            return num / den
        except TypeError:
            return float(value)

    factor = -1 if sign[0] in 'SWsw' else 1
    return factor * (
        _to_float(degrees) +
        _to_float(minutes) / 60 +
        _to_float(seconds) / 3600
    )
from os.path import expanduser

# The forecast.io (Dark Sky) API key lives in ~/.forecastio as a single line.
home = expanduser("~")
keyfile = os.path.join(home, '.forecastio')
if not os.path.isfile(keyfile):
    print("you must sign up for a free apikey at forecast.io and insert it as a single line inside a file called ~/.forecastio (with no other text in the file)")
    quit()
fio = open(home + '/.forecastio')
apikey = fio.read().rstrip()
fio.close()
if not len(apikey):
    print("Cannot lookup weather because no forecastio apikey found.")
    quit()

# Collect the project's jpg images, sorted by name.
files = []
for file in sorted(os.listdir(args.project)):
    if fnmatch.fnmatch(file, '*.jpg') or fnmatch.fnmatch(file, '*.JPG'):
        files.append(file)
def get_image_info(file):
    """Extract (lat, lon, alt, unix_sec) from an image's EXIF GPS tags.

    lat/lon are signed decimal degrees, alt is metres (EXIF rational),
    unix_sec is the capture time as a float unix timestamp.
    NOTE(review): if the EXIF DateTime tag is absent, unix_sec is never
    bound and the final return raises NameError -- confirm all project
    images carry a DateTime tag.
    """
    full_path = os.path.join(args.project, file)
    exif_dict = piexif.load(full_path)
    # GPS coordinates are stored as three (num, den) rational values
    # (deg, min, sec) plus a hemisphere reference letter.
    elat = exif_dict['GPS'][piexif.GPSIFD.GPSLatitude]
    lat = dms_to_decimal(elat[0], elat[1], elat[2],
                         exif_dict['GPS'][piexif.GPSIFD.GPSLatitudeRef].decode('utf-8'))
    elon = exif_dict['GPS'][piexif.GPSIFD.GPSLongitude]
    lon = dms_to_decimal(elon[0], elon[1], elon[2],
                         exif_dict['GPS'][piexif.GPSIFD.GPSLongitudeRef].decode('utf-8'))
    #print(lon)
    # Altitude is a single rational value in metres.
    ealt = exif_dict['GPS'][piexif.GPSIFD.GPSAltitude]
    alt = ealt[0] / ealt[1]
    #exif_dict[GPS + 'MapDatum'])
    #print('lon ref', exif_dict['GPS'][piexif.GPSIFD.GPSLongitudeRef])
    # print exif.exif_keys
    if piexif.ImageIFD.DateTime in exif_dict['0th']:
        # EXIF DateTime format: "YYYY:MM:DD HH:MM:SS".
        strdate, strtime = exif_dict['0th'][piexif.ImageIFD.DateTime].decode('utf-8').split()
        print(strdate, strtime)
        year, month, day = strdate.split(':')
        hour, minute, second = strtime.split(':')
        #d = datetime.date(int(year), int(month), int(day))
        #t = datetime.time(int(hour), int(minute), int(second))
        #dt = datetime.datetime.combine(d, t)
        dt = datetime.datetime(int(year), int(month), int(day),
                               int(hour), int(minute), int(second))
        # strftime('%s') interprets dt in local time -- platform dependent.
        unix_sec = float(dt.strftime('%s'))
    return lat, lon, alt, unix_sec
if len(files) == 0:
    print("No image files found at:", args.project)
    quit()

lat1, lon1, alt1, unix_sec1 = get_image_info(files[0])  # first
if abs(lat1) < 0.01 and abs(lon1) < 0.01:
    # A (0, 0) position means the geotag step failed for this image.
    print("first image in list geotag fail")
    print("sorry, probably you should just remove the image manually ...")
    print("and then start everything over from scratch.")
    quit()
lat2, lon2, alt2, unix_sec2 = get_image_info(files[-1])  # last
# Use the midpoint (in both space and time) of the flight for the lookup.
lat = (lat1 + lat2) * 0.5
lon = (lon1 + lon2) * 0.5
unix_sec = int((unix_sec1 + unix_sec2) * 0.5)
ref = [ lat1, lon1, 0.0 ]
print("NED reference location:", ref)
# local surface approximation
srtm.initialize( ref, 6000, 6000, 30)
surface = srtm.ned_interp([0, 0])
print("SRTM surface elevation below first image: %.1fm %.1fft (egm96)" %
      (surface, surface / 0.3048) )
print("start pos, time:", lat1, lon1, alt1, unix_sec1)
print("midpoint: ", lat, lon, unix_sec)
print("end pos, time:", lat2, lon2, alt2, unix_sec2)
print("flight duration (not including landing maneuver): %.1f min" %
      ((unix_sec2 - unix_sec1) / 60.0) )
# lookup the data for the midpoint of the flight (just because ... ?)
if unix_sec < 1:
    print("Cannot lookup weather because gps didn't report unix time.")
else:
    print("## Weather")
    d = datetime.datetime.utcfromtimestamp(unix_sec)
    print(d.strftime("%Y-%m-%d-%H:%M:%S"))
    # Dark Sky "time machine" request for the flight midpoint.
    url = 'https://api.darksky.net/forecast/' + apikey + '/%.8f,%.8f,%.d' % (lat, lon, unix_sec)
    import urllib.request, json
    response = urllib.request.urlopen(url)
    wx = json.loads(response.read())
    # Unit conversion factors: mph->knots and millibar->inches of mercury.
    mph2kt = 0.868976
    mb2inhg = 0.0295299830714
    if 'currently' in wx:
        currently = wx['currently']
        #for key in currently:
        #    print key, ':', currently[key]
        if 'icon' in currently:
            icon = currently['icon']
            print("- Conditions: " + icon)
        if 'temperature' in currently:
            tempF = currently['temperature']
            tempC = (tempF - 32.0) * 5 / 9
            print("- Temperature: %.1f F" % tempF + " (%.1f C)" % tempC)
        if 'dewPoint' in currently:
            dewF = currently['dewPoint']
            dewC = (dewF - 32.0) * 5 / 9
            print("- Dewpoint: %.1f F" % dewF + " (%.1f C)" % dewC)
        if 'humidity' in currently:
            hum = currently['humidity']
            print("- Humidity: %.0f%%" % (hum * 100.0))
        if 'pressure' in currently:
            mbar = currently['pressure']
            inhg = mbar * mb2inhg
        else:
            mbar = 0
            inhg = 11.11
        print("- Pressure: %.2f inhg" % inhg + " (%.1f mbar)" % mbar)
        if 'windSpeed' in currently:
            wind_mph = currently['windSpeed']
            wind_kts = wind_mph * mph2kt
        else:
            wind_mph = 0
            wind_kts = 0
        if 'windBearing' in currently:
            wind_deg = currently['windBearing']
        else:
            wind_deg = 0
        print("- Wind %d deg @ %.1f kt (%.1f mph)" % (wind_deg, wind_kts, wind_mph) + "\n")
        if 'visibility' in currently:
            vis = currently['visibility']
            print("- Visibility: %.1f miles" % vis)
        if 'cloudCover' in currently:
            cov = currently['cloudCover']
            print("- Cloud Cover: %.0f%%" % (cov * 100.0))
        # NOTE(review): vis/tempC/dewC are only bound when their keys were
        # present above -- this METAR line can raise NameError otherwise.
        print("- METAR: KXYZ " + d.strftime("%d%H%M") + "Z" +
              " %03d%02dKT" % (round(wind_deg/10)*10, round(wind_kts)) +
              " " + ("%.1f" % vis).rstrip('0').rstrip(".") + "SM" +
              " " + ("%.0f" % tempC).replace('-', 'M') + "/" +
              ("%.0f" % dewC).replace('-', 'M') +
              " A%.0f=\n" % (inhg*100)
              )
|
UASLab/ImageAnalysis
|
scripts/99-wx-report.py
|
Python
|
mit
| 6,553
|
from flask import request, session
from flask import Blueprint
import api
import json
import mimetypes
import os.path
import api.auth
import asyncio
import threading
from api.annotations import api_wrapper
from api.common import flat_multi
from api.exceptions import *
# Flask blueprint for scoreboard endpoints; registered by the app factory.
# No routes are defined here yet.
blueprint = Blueprint("scoreboard_api", __name__)
# guess there's nothing here :P
|
EasyCTF/easyctf-2015
|
api/api/routes/scoreboard.py
|
Python
|
mit
| 355
|
import matplotlib.pyplot as plt
import time
import random
from collections import deque
import numpy as np
import datetime
from xbee import XBee,ZigBee
import serial
import time
# Rolling window of the last 100 samples; new readings enter on the left.
a1 = deque([0]*100)
ax = plt.axes(xlim=(0, 20), ylim=(0, 10))
line, = plt.plot(a1,linewidth=4)
# Interactive mode so the plot can be redrawn while the script keeps running.
plt.ion()
plt.ylim([0,100])
plt.show()
# Serial device carrying the sensor readings (placeholder -- set per machine).
PORT = 'your-serial-port'
BAUD_RATE = 9600
# Open serial port
print("about to open serial")
ser = serial.Serial(PORT, BAUD_RATE)
# This creates the graph and will redraw it based on new data that comes
# through the serial port.
def runGraph(i):
    """Read one sample from the serial port and redraw the rolling plot.

    i -- caller's iteration counter; accepted for interface compatibility
         but not used by the function itself.
    """
    # Read a data point from the serial port; each sample is 5 characters.
    try:
        raw = ser.read(5)
        value = float(raw)
    except ValueError:
        print("not a float")
        return
    # Keep the deque at a fixed length of 100: push the new sample on the
    # left and drop the oldest from the right.  Storing the parsed float
    # (the original appended the raw serial string and threw the float
    # away) is what lets matplotlib scale the y data correctly.
    a1.appendleft(value)
    a1.pop()
    # Redraw the graph
    line.set_ydata(a1)
    plt.draw()
    plt.xlabel("Time Passed (Seconds)")
    plt.ylabel("Water Level (Meters)")
    plt.title("Dynamic Real Time Water Detection Disaster Preventer")
    time.sleep(0.01)
    plt.pause(0.000001)
# Poll the serial port forever, redrawing the graph on each sample.
i = 0
while True:
    runGraph(i)
    i+=1
# NOTE(review): unreachable -- the loop above never exits, so the port is
# never closed cleanly.
ser.close()
# file.close()
|
GeorgePlukov/FloodWatch
|
graphingplotter2.py
|
Python
|
mit
| 1,308
|
import base64
import hashlib
import hmac
import simplejson
import time
import os
from mod_drops.settings import DISQUS_SECRET_KEY, DISQUS_PUBLIC_KEY
from mod_drops.settings import STATIC_URL, BASE_DIR
def get_disqus_sso(user):
    """Build the Disqus SSO <script> tag for *user*.

    Packs the user's id/username/email/avatar into a base64 JSON payload,
    signs it with HMAC-SHA1 using the Disqus secret key, and returns the
    "message sig timestamp" script block Disqus SSO expects.
    """
    # TODO(review): hard-coded placeholder avatar left over from testing.
    # The original also computed an avatar from user.profile.picture and a
    # localhost URL, then immediately overwrote it -- that dead code is
    # removed; restore a real avatar URL here when ready.
    avatar = "http://www.smallcats.org/sitebuilder/images/Andean_cat_face-161x134.jpg"
    # create a JSON packet of our data attributes
    data = simplejson.dumps({
        'id': user.pk,
        'username': user.username,
        'email': user.email,
        'avatar': avatar,
    })
    # encode the data to base64
    message = base64.b64encode(data)
    # generate a timestamp for signing the message
    timestamp = int(time.time())
    # generate our hmac signature over "message timestamp"
    sig = hmac.HMAC(DISQUS_SECRET_KEY, '%s %s' % (message, timestamp), hashlib.sha1).hexdigest()
    # generate icon
    icon = os.path.join(STATIC_URL, 'mod_drops/images/favicon.ico')
    # return a script tag to insert the sso message
    return """<script type="text/javascript">
var disqus_config = function() {
this.page.remote_auth_s3 = "%(message)s %(sig)s %(timestamp)s";
this.page.api_key = "%(pub_key)s";
this.sso = {
name: "Mod Drops Title",
width: "800",
height: "400",
icon: "%(icon)s",
};
}
</script>""" % dict(
        message=message,
        timestamp=timestamp,
        sig=sig,
        pub_key=DISQUS_PUBLIC_KEY,
        icon=icon,
    )
# use to customize button:
# button: "/test.gif",
# url: "http://moddrops.com/login/",
# logout: "http://moddrops.com/logout/",
|
henry808/mod_drops
|
image/disqus.py
|
Python
|
mit
| 1,752
|
#!/usr/bin/env python
import sys
import os
import atexit
import signal
import time
import inspect
import logging
import pyaas
# CLI: positional start/stop/restart control argument, plus an optional
# instance name so several copies of the same service can run side by side
# (each gets its own pidfile).
pyaas.argparser.add_argument('daemon',
    metavar='(start|stop|restart)',
    help='Control the state of the service')
pyaas.argparser.add_argument('--instance',
    help='Name an instance to allow multiple copies to run')
class Daemonize(object):
    """Classic double-fork UNIX daemoniser around a pyaas entry point.

    Dispatches on the ``daemon`` CLI argument (start/stop/restart) and
    tracks the running process via a pidfile under /tmp keyed by script
    name, entry-point name, and instance name.
    """

    # Streams the daemon's stdio is redirected to after detaching.
    STDIN = os.path.devnull
    STDOUT = os.path.devnull
    STDERR = os.path.devnull

    def __init__(self, entry, *args, **kwds):
        # entry: callable invoked once the process is daemonised;
        # args/kwds are forwarded to it unchanged.
        self.entry = entry
        self.args = args
        self.kwds = kwds
        script = pyaas.util.getParent()
        # get the filename of the caller
        script = os.path.basename(script)
        instance = pyaas.args.instance or 'server'
        # NOTE(review): entry.func_name is Python 2 only (func.__name__
        # in Python 3) -- confirm target interpreter.
        self.pidfile = '/tmp/pyaas-{}-{}-{}.pid'.format(script, entry.func_name, instance)
        if 'start' == pyaas.args.daemon:
            self.start()
        elif 'stop' == pyaas.args.daemon:
            self.stop()
        elif 'restart' == pyaas.args.daemon:
            self.restart()
        else:
            raise pyaas.error('Unknown daemon option')

    def daemonize(self):
        """Detach from the controlling terminal via the double-fork dance,
        redirect stdio, and record our PID in the pidfile."""
        if pyaas.args.debug:
            logging.debug('Staying in the foreground')
            return
        try:
            pid = os.fork()
        except OSError as e:
            logging.critical('EXCEPTION: os.fork: %d (%s)', e.errno, e.strerror)
            sys.exit(-1)
        if pid > 0:
            # First parent exits; the child carries on.
            sys.exit(0)
        # clear environment: detach from cwd/tty and reset the umask
        os.chdir("/")
        os.setsid()
        os.umask(0)
        # fork again so the daemon can never reacquire a controlling tty
        try:
            pid = os.fork()
        except OSError as e:
            logging.critical('EXCEPTION: os.fork: %d (%s)', e.errno, e.strerror)
            sys.exit(-1)
        if pid > 0:
            sys.exit(0)
        # redirect file handles
        sys.stdout.flush()
        sys.stderr.flush()
        stdin = open(self.STDIN, 'r')
        stdout = open(self.STDOUT, 'a+')
        # NOTE(review): buffering=0 on a text-mode file is Python 2 only.
        stderr = open(self.STDERR, 'a+', 0)
        os.dup2(stdin.fileno(), sys.stdin.fileno())
        os.dup2(stdout.fileno(), sys.stdout.fileno())
        os.dup2(stderr.fileno(), sys.stderr.fileno())
        # write pidfile
        #atexit.register(self.delpid)
        def atexit_callback():
            # Best-effort pidfile cleanup on normal interpreter exit.
            try:
                os.remove(self.pidfile)
            except Exception as e:
                logging.warn('Unable to remove PID file: %s', self.pidfile)
        def signal_callback(signum, frame):
            # Turn SIGTERM into a clean exit so atexit handlers run.
            if signum == signal.SIGTERM:
                sys.exit(0)
        atexit.register(atexit_callback)
        signal.signal(signal.SIGTERM, signal_callback)
        with open(self.pidfile, 'w') as fp:
            fp.write('{}'.format(os.getpid()))

    def _getpid(self):
        """Return the PID recorded in the pidfile, or None if it is
        missing, unreadable, or does not contain an integer."""
        pid = None
        if os.path.isfile(self.pidfile):
            try:
                pid = open(self.pidfile, 'r')
                pid = int(pid.read())
            except IOError:
                # TODO: complain not being able to read file
                pid = None
            except ValueError:
                # TODO: complain about invalid value
                pid = None
        return pid

    def start(self):
        """Refuse to double-start, daemonise, then run the entry point."""
        pid = self._getpid()
        if pid:
            logging.error('Daemon appears to be running')
            sys.exit(-1)
        # Start the daemon
        self.daemonize()
        pyaas.module.load()
        self.entry(*self.args, **self.kwds)

    def stop(self):
        """Send SIGTERM to the recorded PID and wait for it to exit."""
        pid = self._getpid()
        if not pid:
            logging.info('Daemon appears to not be running')
            return
        try:
            os.kill(pid, signal.SIGTERM)
        except OSError as e:
            # errno 3 == ESRCH: no such process, i.e. already gone.
            if e.errno != 3:
                logging.warn('Error stopping process: %s', e)
            return
        count = 0
        # Poll with signal 0 (existence check) for up to ~10 seconds.
        while count < 100:
            try:
                os.kill(pid, 0)
                time.sleep(0.1)
                count += 1
            except OSError as e:
                if e.errno != 3:
                    logging.warn('Error stopping process: %s', e)
                    return
                else:
                    # ESRCH: the process has exited.
                    break
        if count == 100:
            logging.warn('Timeout stopping process')
            return
        if os.path.exists(self.pidfile):
            os.remove(self.pidfile)

    def restart(self):
        """Stop any running instance, then start a fresh one."""
        self.stop()
        self.start()
|
moertle/pyaas
|
pyaas/daemon.py
|
Python
|
mit
| 4,433
|
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/SWIG/live.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Test SWIG behavior with a live, installed SWIG.
"""
import os.path
import sys
import TestSCons
# swig-python expects specific filenames.
# the platform specific suffix won't necessarily work.
if sys.platform == 'win32':
    _dll = '.pyd'
else:
    _dll = '.so'

test = TestSCons.TestSCons()

# Skip gracefully when the prerequisites (swig binary, Python headers)
# are not installed on this machine.
swig = test.where_is('swig')
if not swig:
    test.skip_test('Can not find installed "swig", skipping test.\n')

python, python_include, python_libpath, python_lib = \
    test.get_platform_python_info()
Python_h = os.path.join(python_include, 'Python.h')
if not os.path.exists(Python_h):
    test.skip_test('Can not find %s, skipping test.\n' % Python_h)

# handle testing on other platforms:
ldmodule_prefix = '_'

# On Windows, build a 32-bit exe if on 32-bit python.
if sys.platform == 'win32' and sys.maxsize <= 2**32:
    swig_arch_var="TARGET_ARCH='x86',"
else:
    swig_arch_var=""

# Wrapper script that records it ran (wrapper.out) and then forwards its
# arguments to the real swig command.
test.write("wrapper.py",
"""import os
import sys
open('%s', 'wb').write("wrapper.py\\n")
os.system(" ".join(sys.argv[1:]))
""" % test.workpath('wrapper.out').replace('\\', '\\\\'))

# Two build environments: 'foo' uses swig directly, 'bar' goes through
# the wrapper so we can verify $SWIG substitution is honoured.
test.write('SConstruct', """\
foo = Environment(SWIGFLAGS='-python',
LIBPATH=[r'%(python_libpath)s'],
CPPPATH=[r'%(python_include)s'],
LDMODULEPREFIX='%(ldmodule_prefix)s',
LDMODULESUFFIX='%(_dll)s',
SWIG=[r'%(swig)s'],
%(swig_arch_var)s
LIBS='%(python_lib)s',
)
swig = foo.Dictionary('SWIG')
bar = foo.Clone(SWIG = [r'%(python)s', 'wrapper.py', swig])
foo.LoadableModule(target = 'foo', source = ['foo.c', 'foo.i'])
bar.LoadableModule(target = 'bar', source = ['bar.c', 'bar.i'])
""" % locals())

test.write("foo.c", """\
char *
foo_string()
{
return "This is foo.c!";
}
""")

test.write("foo.i", """\
%module foo
%{
/* Put header files here (optional) */
/*
* This duplication shouldn't be necessary, I guess, but it seems
* to suppress "cast to pointer from integer of different size"
* warning messages on some systems.
*/
extern char *foo_string();
%}
extern char *foo_string();
""")

test.write("bar.c", """\
char *
bar_string()
{
return "This is bar.c!";
}
""")

# Note the embedded whitespace after %module -- exercises SWIG's
# module-name parsing.
test.write("bar.i", """\
%module \t bar
%{
/* Put header files here (optional) */
/*
* This duplication shouldn't be necessary, I guess, but it seems
* to suppress "cast to pointer from integer of different size"
* warning messages on some systems.
*/
extern char *bar_string();
%}
extern char *bar_string();
""")

# Build foo with the plain swig: the wrapper must NOT have run.
test.run(arguments = ldmodule_prefix+'foo' + _dll)

test.must_not_exist(test.workpath('wrapper.out'))

test.run(program = python, stdin = """\
import foo
print foo.foo_string()
""", stdout="""\
This is foo.c!
""")

test.up_to_date(arguments = ldmodule_prefix+'foo' + _dll)

# Build bar through the wrapped SWIG: wrapper.out proves the wrapper ran.
test.run(arguments = ldmodule_prefix+'bar' + _dll)

test.must_match('wrapper.out', "wrapper.py\n")

test.run(program = python, stdin = """\
import foo
import bar
print foo.foo_string()
print bar.bar_string()
""", stdout="""\
This is foo.c!
This is bar.c!
""")

test.up_to_date(arguments = '.')

test.pass_test()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
EmanueleCannizzaro/scons
|
test/SWIG/live.py
|
Python
|
mit
| 4,455
|
"""
WSGI config for mmpl project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
from __future__ import absolute_import, unicode_literals
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mmpl.settings.production")
application = get_wsgi_application()
|
benjiboi214/mmpl-wagtail
|
site/mmpl/wsgi.py
|
Python
|
mit
| 455
|
#!/usr/bin/env python
'''
Based on:
Zenke, Friedemann, Everton J. Agnes, and Wulfram Gerstner.
"Diverse Synaptic Plasticity Mechanisms Orchestrated to Form and Retrieve Memories in Spiking Neural Networks."
Nature Communications 6 (April 21, 2015).
Part of Zenke's rule embedded in modified Brunel 2000 / Ostojic 2014 network
author: Aditya Gilra, Jun 2016.
in Brian2rc3 for CAMP 2016.
'''
# NOTE(review): this is a Python 2 script (print statements, list-returning
# range used with .remove and +) -- run with a Python 2 interpreter.

#import modules and functions to be used
from brian2 import *    # importing brian also does:
                        # 'from pylab import *' which imports:
                        # matplot like commands into the namespace, further
                        # also can use np. for numpy and mpl. for matplotlib
from data_utils import *

stand_alone = True
if stand_alone: set_device('cpp_standalone', build_on_run=False)
else:
    #prefs.codegen.target = 'numpy'
    #prefs.codegen.target = 'weave'
    prefs.codegen.target = 'cython'
import random
import time

np.random.seed(100) # set seed for reproducibility of simulations
random.seed(100) # set seed for reproducibility of simulations

# ###########################################
# Simulation parameters
# ###########################################

simdt = 0.1*ms
simtime = 10*second
defaultclock.dt = simdt # set Brian's sim time step
dt = simdt/second # convert to value in seconds

# ###########################################
# Neuron model
# ###########################################

# equation: dv/dt=(1/taum)*(-(v-el))
# with spike when v>vt, reset to vr
vt = 20.*mV # Spiking threshold
taum = 20.*ms # Membrane time constant
vr = 10.*mV # Reset potential
muext0 = 24*mV # external input to each neuron
taur = 0.5*ms # Refractory period
taudelay = 0.75*ms # synaptic delay

eqs_neurons='''
muext : volt
dv/dt=-v/taum + muext/taum : volt
'''

# ###########################################
# Network parameters: numbers
# ###########################################

N = 4096+1024 # Total number of neurons
fexc = 0.8 # Fraction of exc neurons
NE = int(fexc*N) # Number of excitatory cells
NI = N-NE # Number of inhibitory cells

# ###########################################
# Network parameters: synapses
# ###########################################

rescale = 2 # rescale C and J to maintain total input
C = 1000/rescale # Number of incoming connections on each neuron (exc or inh)
J = 0.01*mV*rescale # exc strength is J (in mV as we add to voltage)
# Critical J is ~ 0.45 mV in paper for N = 10000, C = 1000
g = 5.0 # -gJ is the inh strength. For exc-inh balance g>~f(1-f)=4

# ###########################################
# Network parameters: synaptic plasticity
# ###########################################

wmax = 10. # hard bound on synaptic weight
Apre_tau = 20*ms # STDP Apre LTP time constant; tauplus
Apost_tau = 20*ms # STDP Apost LTD time constant; tauminus
Apre0 = 1.0 # incr in Apre, on pre-spikes; Aplus for LTP
# at spike coincidence, delta w = -Apre0*eta
Apost0 = 1.0 # incr in Apost on post-spikes; Aminus for LTD
eta = 5e-2 # learning rate
Apostslow0 = 1.0 # incr in Apostslow on post spike
Apostslow_tau = 100*ms
stdp_eqns = ''' wsyn : 1
dApre/dt=-Apre/Apre_tau : 1 (event-driven)
dApost/dt=-Apost/Apost_tau : 1 (event-driven)
'''
w0 = 1.0 # reference weight
beta = 50 # LTP decay factor
alpha = 5 # LTD curvature factor
# weight-dependent LTD on pre-spike, weight-dependent LTP on post-spike
pre_eqns = 'Apre+=Apre0; wsyn = clip(wsyn - Apost*log(1+wsyn/w0*alpha)/log(1+alpha), 0,inf)'
post_eqns = 'Apost+=Apost0; wsyn = clip(wsyn + Apre*exp(-wsyn/w0/beta), 0,inf)'

# ###########################################
# Initialize neuron (sub)groups
# ###########################################

P=NeuronGroup(N,model=eqs_neurons,\
    threshold='v>=vt',reset='v=vr',refractory=taur,method='euler')
P.v = uniform(0.,vt/mV,N)*mV
PE = P[:NE]
PI = P[NE:]

# ###########################################
# Connecting the network
# ###########################################

sparseness = C/float(N)
# E to E connections
#conEE = Synapses(PE,PE,'wsyn:1',on_pre='v_post+=wsyn*J',method='euler')
conEE = Synapses(PE,PE,stdp_eqns,\
    on_pre=pre_eqns,on_post=post_eqns,\
    method='euler')
#conEE.connect(condition='i!=j',p=sparseness)
# need exact connection indices for weight monitor in standalone mode
conEE_idxs_pre = []
conEE_idxs_post = []
Ce = int(fexc*C)
for k in range(NE):
    conEE_idxs_pre.extend(Ce*[k])
    idxs = range(NE)
    idxs.remove(k) # no autapses i.e. no self-connections
    l = np.random.permutation(idxs)[:Ce]
    conEE_idxs_post.extend(l)
# indices (within the flat synapse list) of assembly-internal, cross and
# background E->E connections; neurons 0..399 form the putative assembly
conEE_idxs_assembly = where(array(conEE_idxs_post)[:Ce*400]<400)[0]
conEE_idxs_cross = where(array(conEE_idxs_post)[:Ce*400]>400)[0]
conEE_idxs_bgnd = where(array(conEE_idxs_post)[Ce*400:]>400)[0]
conEE.connect(i=conEE_idxs_pre,j=conEE_idxs_post)
conEE.delay = taudelay
conEE.wsyn = 1.
# E to I connections
conIE = Synapses(PE,PI,'wsyn:1',on_pre='v_post+=wsyn*J',method='euler')
conIE.connect(p=sparseness)
conIE.delay = taudelay
conIE.wsyn = 1
# I to E connections
conEI = Synapses(PI,PE,'wsyn:1',on_pre='v_post+=wsyn*J',method='euler')
conEI.connect(p=sparseness)
conEI.delay = taudelay
conEI.wsyn = -g
# I to I connections
conII = Synapses(PI,PI,'wsyn:1',on_pre='v_post+=wsyn*J',method='euler')
conII.connect(condition='i!=j',p=sparseness)
conII.delay = taudelay
conII.wsyn = -g

# ###########################################
# Stimuli
# ###########################################

P.muext = muext0
## 400 neurons (~10%) receive stimulus current to increase firing
#Pstim = P[:400]
#Pstim.muext = muext0 + 7*mV

# ###########################################
# Setting up monitors
# ###########################################

Nmon = N
sm = SpikeMonitor(P)

# Population monitor
popm = PopulationRateMonitor(P)

# voltage monitor (first 10 exc and first 10 inh neurons)
sm_vm = StateMonitor(P,'v',record=range(10)+range(NE,NE+10))

# weights monitor (sampled at 20 points over the simulation)
wm = StateMonitor(conEE,'wsyn', record=range(Ce*NE), dt=simtime/20.)

# ###########################################
# Simulate
# ###########################################

print "Setup complete, running for",simtime,"at dt =",dt,"s."
t1 = time.time()
run(simtime,report='text')
device.build(directory='output', compile=True, run=True, debug=False)
print 'inittime + runtime, t = ', time.time() - t1

#print "For g,J =",g,J,"mean exc rate =",\
#    sm_e.num_spikes/float(NE)/(simtime/second),'Hz.'
#print "For g,J =",g,J,"mean inh rate =",\
#    sm_i.num_spikes/float(NI)/(simtime/second),'Hz.'

# ###########################################
# Make plots
# ###########################################

# always convert spikemon.t and spikemon.i to array-s before indexing
# spikemon.i[] indexing is extremely slow!
spiket = array(sm.t/second) # take spiketimes of all neurons
spikei = array(sm.i)

fig = figure()

# raster plot
subplot(231)
plot(sm.t,sm.i,',')
title(str(N)+" exc & inh neurons")
xlim([simtime/second-1,simtime/second])
xlabel("")

print "plotting firing rates"
subplot(232)
tau=50e-3
sigma = tau/2.
# firing rates (smoothed; averaged over assembly then background neurons)
timeseries = arange(0,simtime/second+dt,dt)
rate = np.zeros(int(simtime/simdt))
for nrni in range(400):
    rate += rate_from_spiketrain(spiket,spikei,simtime/second,sigma,dt,nrni)
plot(timeseries[:len(rate)],rate/400.,'r')
rate = np.zeros(int(simtime/simdt))
for nrni in range(400,800):
    rate += rate_from_spiketrain(spiket,spikei,simtime/second,sigma,dt,nrni)
plot(timeseries[:len(rate)],rate/400.,'b')
title("exc rates: assembly (r), bgnd (b)")
ylabel("Hz")
ylim(0,300)

subplot(233)
hist(wm.wsyn[:,-1],bins=500,edgecolor='none')
xlabel('weight')
ylabel('count')

subplot(235)
num_to_plot = 10
for nrni in range(NE,NE+num_to_plot):
    rate = rate_from_spiketrain(spiket,spikei,simtime/second,sigma,dt,nrni)
    plot(timeseries[:len(rate)],rate)
    #print mean(rate),len(sm_i[nrni])
    #rates.append(rate)
title(str(num_to_plot)+" inh rates")
ylim(0,300)
#print "Mean rate = ",mean(rates)
xlabel("time (s)")
ylabel("Hz")

print "plotting weights"
subplot(236)
plot(wm.t/second,mean(wm.wsyn[conEE_idxs_assembly,:],axis=0),color='r')
plot(wm.t/second,mean(wm.wsyn[conEE_idxs_cross,:],axis=0),color='m')
plot(wm.t/second,mean(wm.wsyn[conEE_idxs_bgnd,:],axis=0),color='b')
title("assembly weights (cross=m)")
ylabel("arb")
xlabel("time (s)")

print conEE.wsyn
fig.tight_layout()
show()
|
h-mayorquin/camp_india_2016
|
tutorials/LTPinnetworks2/Step3c_Zenke_etal_2014.py
|
Python
|
mit
| 8,540
|
def user():
    """Render the web2py Auth form (login/register/profile/... depending on
    request.args) with the page title supplied by auth_title()."""
    response.title = auth_title()
    return dict(form=auth())
|
tessercat/ddj
|
controllers/auth.py
|
Python
|
mit
| 75
|
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
# Build script for the UE4PyServer plugin: generates Private/PyConfig.h with
# the locations of the current Python's library/headers, invokes
# UnrealBuildTool, and writes a run.sh helper to launch the editor.
import argparse,sys,os

parser = argparse.ArgumentParser()
parser.add_argument("--ue4path", help="path to unreal engine base directory")
args = parser.parse_args()
# CLI flag wins; fall back to the UE4PATH environment variable.
ue4path=args.ue4path
if ue4path is None:
    ue4path=os.environ.get('UE4PATH',None)
if ue4path is None:
    print('Error no unreal engine path defined use env. variable UE4PATH or --ue4path',file=sys.stderr)
    sys.exit(-1)

# Locations of the running interpreter's shared library and headers.
import sysconfig,os
libdir=sysconfig.get_config_var('LIBPL')
libfile=sysconfig.get_config_var('LDLIBRARY')
include_path=sysconfig.get_path('include')
file_dir=os.path.abspath(os.path.dirname(__file__))

# Emit the generated header consumed by the C++ side of the plugin.
fd=open('Private/PyConfig.h','w')
print('//this is autogenerated file by build.py Dont Edit!!!',file=fd)
print('#pragma once',file=fd)
#print('#include "PyServerPrivatePCH.h"',file=fd)
#print('extern "C" {',file=fd)
assert(os.path.isfile(libdir+'/'+libfile))
print('#define PYTHON_LIB "'+libdir+'/'+libfile+'"',file=fd)
print('#define SYSPATH "'+file_dir+'/Private"',file=fd)
assert(os.path.isfile(include_path+'/Python.h'))
print('#include "'+include_path+'/Python.h"',file=fd)
fd.close()
#print('}',file=fd)

assert(os.system("python3 -m compileall .")==0) #saves time incase of syntax errors in python files
#trying to guess project name and project file
# assumes the plugin lives at <project>/Plugins/<name>/Source/PyServer -- TODO confirm
project_dir=os.path.abspath(file_dir+'/../../../../')
import glob
ret=glob.glob(project_dir+'/*.uproject')
if len(ret)==0:
    print("Error: canot find project file in ",project_dir,file=sys.stderr)
    sys.exit(-1)
project_file=ret[0]
project_name=os.path.splitext(os.path.basename(project_file))[0]
print('found project file',project_file)
os.system("mono "+ue4path+'/Engine/Binaries/DotNET/UnrealBuildTool.exe '+project_name+' Development Linux -project="'+project_file+\
    '" -editorrecompile -progress -noubtmakefiles -NoHotReloadFromIDE')

# Convenience launcher for the editor with this project.
fd=open('run.sh','w')
print('#!/bin/bash',file=fd)
print('#Tish is auto generated script Don\'t Edit!!!',file=fd)
print('cd '+ue4path,file=fd)
print('Engine/Binaries/Linux/UE4Editor "'+project_file+'" -nocore -project='+project_file,file=fd)
fd.close()
assert(os.system("chmod +x ./run.sh")==0)
# mono /local/ori/GameEngines/UnrealEngine/Engine/Binaries/DotNET/UnrealBuildTool.exe testplugin Development Linux -project="/local/learn/ur4/testplugin/testplugin.uproject" -editorrecompile -progress -noubtmakefiles -NoHotReloadFromIDE
|
orig74/UE4PyServer
|
Source/PyServer/build.py
|
Python
|
mit
| 2,388
|
'''
This code is based on github.com/MagicStack/vmbench/blob/master/servers/asyncio_http_server.py
'''
import asyncio

import uvloop

from protocol import *


def httptools_server(loop, addr):
    # Each accepted connection gets its own HttpProtocol instance bound to
    # the event loop; returns the (awaitable) server-creation coroutine.
    return loop.create_server(lambda: HttpProtocol(loop=loop), *addr)


if __name__ == '__main__':
    # Use uvloop instead of the default asyncio loop for throughput.
    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)
    loop.set_debug(False)
    server = loop.run_until_complete(httptools_server(loop, ('127.0.0.1', 15000)))
    try:
        loop.run_forever()
    finally:
        # Always release the listening socket and the loop on shutdown.
        server.close()
        loop.close()
|
rickiepark/openbidder
|
openbidder/server.py
|
Python
|
mit
| 562
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._deleted_web_apps_operations import build_get_deleted_web_app_by_location_request, build_list_by_location_request, build_list_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DeletedWebAppsOperations:
    """DeletedWebAppsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.web.v2019_08_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace
    def list(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.DeletedWebAppCollection"]:
        """Get all deleted apps for a subscription.

        Description for Get all deleted apps for a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeletedWebAppCollection or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2019_08_01.models.DeletedWebAppCollection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedWebAppCollection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            # First page uses the templated operation URL; subsequent pages
            # follow the service-provided next_link verbatim.
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # Paging continuation links are always followed with GET.
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("DeletedWebAppCollection", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/deletedSites'}  # type: ignore

    @distributed_trace
    def list_by_location(
        self,
        location: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.DeletedWebAppCollection"]:
        """Get all deleted apps for a subscription at location.

        Description for Get all deleted apps for a subscription at location.

        :param location:
        :type location: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DeletedWebAppCollection or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2019_08_01.models.DeletedWebAppCollection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedWebAppCollection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_by_location_request(
                    location=location,
                    subscription_id=self._config.subscription_id,
                    template_url=self.list_by_location.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_by_location_request(
                    location=location,
                    subscription_id=self._config.subscription_id,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # Paging continuation links are always followed with GET.
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("DeletedWebAppCollection", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_location.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/locations/{location}/deletedSites'}  # type: ignore

    @distributed_trace_async
    async def get_deleted_web_app_by_location(
        self,
        location: str,
        deleted_site_id: str,
        **kwargs: Any
    ) -> "_models.DeletedSite":
        """Get deleted app for a subscription at location.

        Description for Get deleted app for a subscription at location.

        :param location:
        :type location: str
        :param deleted_site_id: The numeric ID of the deleted app, e.g. 12345.
        :type deleted_site_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: DeletedSite, or the result of cls(response)
        :rtype: ~azure.mgmt.web.v2019_08_01.models.DeletedSite
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DeletedSite"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_deleted_web_app_by_location_request(
            location=location,
            deleted_site_id=deleted_site_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get_deleted_web_app_by_location.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('DeletedSite', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_deleted_web_app_by_location.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/locations/{location}/deletedSites/{deletedSiteId}'}  # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2019_08_01/aio/operations/_deleted_web_apps_operations.py
|
Python
|
mit
| 10,665
|
class PaginatorFactory(object):
    """Builds Paginator instances that share one HTTP client."""

    def __init__(self, http_client):
        # Client exposing request(uri, data); reused by every paginator made.
        self.http_client = http_client

    def make(self, uri, data=None, union_key=None):
        """Return a Paginator for *uri*; see Paginator for argument meaning."""
        return Paginator(self.http_client, uri, data, union_key)
class Paginator(object):
    """Fetches pages from a paginated HTTP endpoint.

    The endpoint takes a zero-based ``page_index`` request parameter and
    reports the total page count as ``page_num`` in each response.
    """

    def __init__(self, http_client, uri, data=None, union_key=None):
        """
        :param http_client: object exposing ``request(uri, data)``.
        :param uri: endpoint path/URL to query.
        :param data: base query parameters sent with every request.
        :param union_key: response key whose per-page lists ``all()`` joins.
        """
        self.http_client = http_client
        self.union_key = union_key
        self.uri = uri
        if data is None:
            data = {}
        self.data = data

    def page(self, page=1):
        """Fetch a single page (1-based) and return the raw response dict."""
        # Always work on a copy.  The original code reused self.data directly
        # when it was empty, so the injected 'page_index' leaked into the
        # shared base parameters and stuck around for later calls.
        data = dict(self.data)
        data['page_index'] = page - 1
        return self.http_client.request(self.uri, data)

    def all(self):
        """Fetch every page and return the concatenated ``union_key`` lists.

        If the response carries no ``page_num``, the endpoint is treated as
        unpaginated and its single ``union_key`` list is returned as-is.

        :raises ValueError: if no ``union_key`` was configured.
        """
        if not self.union_key:
            raise ValueError("Union key parameter is missing")
        union_result = []
        data = dict(self.data)  # copy: never mutate the shared base params
        page_num = 1
        page_index = 0
        while page_index < page_num:
            data['page_index'] = page_index
            page_result = self.http_client.request(self.uri, data)
            page_num = page_result.get('page_num')
            if page_num is None:
                return page_result.get(self.union_key, [])
            # Trust the server's echoed index to pick the next page.
            page_index = page_result.get('page_index') + 1
            union_result.extend(page_result.get(self.union_key, []))
        return union_result
|
webhue/appannie
|
appannie/paginator.py
|
Python
|
mit
| 1,489
|
#!/usr/bin/env python
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from distutils.command.install import INSTALL_SCHEMES
from os.path import dirname, join, abspath
from setuptools import setup
from setuptools.command.install import install

# Install data_files alongside pure-python modules instead of the
# platform data directory, so the JS/XPI assets land inside the package.
for scheme in INSTALL_SCHEMES.values():
    scheme['data'] = scheme['purelib']

setup_args = {
    'cmdclass': {'install': install},
    'name': 'selenium',
    'version': "3.7.0",
    'license': 'Apache 2.0',
    'description': 'Python bindings for Selenium',
    'long_description': open(join(abspath(dirname(__file__)), "README.rst")).read(),
    'url': 'https://github.com/SeleniumHQ/selenium/',
    'classifiers': ['Development Status :: 5 - Production/Stable',
                    'Intended Audience :: Developers',
                    'License :: OSI Approved :: Apache Software License',
                    'Operating System :: POSIX',
                    'Operating System :: Microsoft :: Windows',
                    'Operating System :: MacOS :: MacOS X',
                    'Topic :: Software Development :: Testing',
                    'Topic :: Software Development :: Libraries',
                    'Programming Language :: Python',
                    'Programming Language :: Python :: 2.6',
                    'Programming Language :: Python :: 2.7',
                    'Programming Language :: Python :: 3.3',
                    'Programming Language :: Python :: 3.4',
                    'Programming Language :: Python :: 3.5',
                    'Programming Language :: Python :: 3.6'],
    'package_dir': {
        'selenium': 'selenium',
        'selenium.common': 'selenium/common',
        'selenium.webdriver': 'selenium/webdriver',
    },
    'packages': ['selenium',
                 'selenium.common',
                 'selenium.webdriver',
                 'selenium.webdriver.android',
                 'selenium.webdriver.chrome',
                 'selenium.webdriver.common',
                 'selenium.webdriver.common.html5',
                 'selenium.webdriver.support',
                 'selenium.webdriver.firefox',
                 'selenium.webdriver.ie',
                 'selenium.webdriver.edge',
                 'selenium.webdriver.opera',
                 'selenium.webdriver.phantomjs',
                 'selenium.webdriver.remote',
                 'selenium.webdriver.support', ],
    'package_data': {
        'selenium.webdriver.firefox': ['*.xpi', 'webdriver_prefs.json'],
        'selenium.webdriver.remote': ['getAttribute.js', 'isDisplayed.js'],
    },
    'data_files': [('selenium/webdriver/firefox/x86', ['selenium/webdriver/firefox/x86/x_ignore_nofocus.so']),
                   ('selenium/webdriver/firefox/amd64', ['selenium/webdriver/firefox/amd64/x_ignore_nofocus.so']),
                   ('selenium/webdriver/remote', ['selenium/webdriver/remote/getAttribute.js']),
                   ('selenium/webdriver/remote', ['selenium/webdriver/remote/isDisplayed.js'])],
    'include_package_data': True,
    'zip_safe': False
}

setup(**setup_args)
|
AndrewCMartin/idb
|
selenium/selenium-3.7.0/setup.py
|
Python
|
mit
| 3,806
|
"""
homeassistant.remote
~~~~~~~~~~~~~~~~~~~~
A module containing drop in replacements for core parts that will interface
with a remote instance of home assistant.
If a connection error occurs while communicating with the API a
HomeAssistantError will be raised.
"""
import threading
import logging
import json
import enum
import urllib.parse
import requests
import homeassistant as ha
SERVER_PORT = 8123
AUTH_HEADER = "HA-access"
URL_API = "/api/"
URL_API_STATES = "/api/states"
URL_API_STATES_ENTITY = "/api/states/{}"
URL_API_EVENTS = "/api/events"
URL_API_EVENTS_EVENT = "/api/events/{}"
URL_API_SERVICES = "/api/services"
URL_API_SERVICES_SERVICE = "/api/services/{}/{}"
URL_API_EVENT_FORWARD = "/api/event_forwarding"
METHOD_GET = "get"
METHOD_POST = "post"
# pylint: disable=no-init, invalid-name
class APIStatus(enum.Enum):
    """ Represents API status. """

    OK = "ok"
    INVALID_PASSWORD = "invalid_password"
    CANNOT_CONNECT = "cannot_connect"
    UNKNOWN = "unknown"

    def __str__(self):
        # Render as the plain value ("ok", "cannot_connect", ...) in messages.
        return self.value
class API(object):
    """ Object to pass around Home Assistant API location and credentials. """
    # pylint: disable=too-few-public-methods

    def __init__(self, host, api_password, port=None):
        self.host = host
        self.port = port or SERVER_PORT
        self.api_password = api_password
        self.base_url = "http://{}:{}".format(host, self.port)
        # Cached APIStatus from the last validation; None until validated.
        self.status = None
        self._headers = {AUTH_HEADER: api_password}

    def validate_api(self, force_validate=False):
        """ Tests if we can communicate with the API. """
        if self.status is None or force_validate:
            self.status = validate_api(self)

        return self.status == APIStatus.OK

    def __call__(self, method, path, data=None):
        """ Makes a call to the Home Assistant api. """
        if data is not None:
            data = json.dumps(data, cls=JSONEncoder)

        url = urllib.parse.urljoin(self.base_url, path)

        try:
            if method == METHOD_GET:
                # GET sends the payload as query parameters.
                return requests.get(
                    url, params=data, timeout=5, headers=self._headers)
            else:
                return requests.request(
                    method, url, data=data, timeout=5, headers=self._headers)

        except requests.exceptions.ConnectionError:
            logging.getLogger(__name__).exception("Error connecting to server")
            raise ha.HomeAssistantError("Error connecting to server")

        except requests.exceptions.Timeout:
            error = "Timeout when talking to {}".format(self.host)
            logging.getLogger(__name__).exception(error)
            raise ha.HomeAssistantError(error)
class HomeAssistant(ha.HomeAssistant):
    """ Home Assistant that forwards work to a remote instance. """

    # pylint: disable=super-init-not-called
    def __init__(self, remote_api, local_api=None):
        """
        :param remote_api: API pointing at the remote instance; must validate.
        :param local_api: optional API for this instance; auto-created in
                          start() when omitted.
        :raises ha.HomeAssistantError: if the remote API cannot be reached.
        """
        if not remote_api.validate_api():
            raise ha.HomeAssistantError(
                "Remote API at {}:{} not valid: {}".format(
                    remote_api.host, remote_api.port, remote_api.status))

        self.remote_api = remote_api
        self.local_api = local_api

        self._pool = pool = ha.create_worker_pool()

        self.bus = EventBus(remote_api, pool)
        self.services = ha.ServiceRegistry(self.bus, pool)
        self.states = StateMachine(self.bus, self.remote_api)

    def start(self):
        """ Starts the local instance and hooks it up to the remote one. """
        # If there is no local API setup but we do want to connect with remote
        # We create a random password and set up a local api
        if self.local_api is None:
            import homeassistant.components.http as http
            import random

            # Bug fix: the old code called str.format on a %-style template
            # ('%030x'.format(...)), which returns the literal string
            # '%030x' unchanged -- every instance got the same "password".
            # %-formatting actually produces a random 30-hex-digit string.
            random_password = '%030x' % random.randrange(16**30)

            http.setup(self, random_password)

        ha.Timer(self)

        # Setup that events from remote_api get forwarded to local_api
        connect_remote_events(self.remote_api, self.local_api)

        self.bus.fire(ha.EVENT_HOMEASSISTANT_START,
                      origin=ha.EventOrigin.remote)
class EventBus(ha.EventBus):
    """ EventBus implementation that forwards fire_event to remote API. """

    def __init__(self, api, pool=None):
        super().__init__(pool)
        self._api = api

    def fire(self, event_type, event_data=None, origin=ha.EventOrigin.local):
        """ Forward local events to remote target,
            handles remote event as usual. """
        # All local events that are not TIME_CHANGED are forwarded to API
        if origin == ha.EventOrigin.local and \
           event_type != ha.EVENT_TIME_CHANGED:
            fire_event(self._api, event_type, event_data)
        else:
            # Remote-originated (and time) events are dispatched locally.
            super().fire(event_type, event_data, origin)
class EventForwarder(object):
    """ Listens for events and forwards to specified APIs. """

    def __init__(self, hass, restrict_origin=None):
        self.hass = hass
        # When set, only events with this origin are forwarded.
        self.restrict_origin = restrict_origin
        self.logger = logging.getLogger(__name__)

        # We use a tuple (host, port) as key to ensure
        # that we do not forward to the same host twice
        self._targets = {}

        self._lock = threading.Lock()

    def connect(self, api):
        """
        Attach to a HA instance and forward events.

        Will overwrite old target if one exists with same host/port.
        """
        with self._lock:
            if len(self._targets) == 0:
                # First target we get, setup listener for events
                self.hass.bus.listen(ha.MATCH_ALL, self._event_listener)

            key = (api.host, api.port)

            self._targets[key] = api

    def disconnect(self, api):
        """ Removes target from being forwarded to.

        Returns True if the target existed and was removed. """
        with self._lock:
            key = (api.host, api.port)
            # Bug fix: this used to be `... is None`, which inverted the
            # result (False for a successful removal).  pop() returns the
            # stored API object exactly when something was removed.
            did_remove = self._targets.pop(key, None) is not None

            if len(self._targets) == 0:
                # Remove event listener if no forwarding targets present
                self.hass.bus.remove_listener(ha.MATCH_ALL,
                                              self._event_listener)

            return did_remove

    def _event_listener(self, event):
        """ Listen and forwards all events. """
        with self._lock:
            # We don't forward time events or, if enabled, non-local events
            if event.event_type == ha.EVENT_TIME_CHANGED or \
               (self.restrict_origin and event.origin != self.restrict_origin):
                return

            for api in self._targets.values():
                fire_event(api, event.event_type, event.data, self.logger)
class StateMachine(ha.StateMachine):
    """
    Fires set events to an API.
    Uses state_change events to track states.
    """

    def __init__(self, bus, api):
        super().__init__(None)

        self.logger = logging.getLogger(__name__)
        self._api = api

        # Seed the local cache from the remote instance.
        self.mirror()

        # Keep the cache current as state_changed events arrive.
        bus.listen(ha.EVENT_STATE_CHANGED, self._state_changed_listener)

    def set(self, entity_id, new_state, attributes=None):
        """ Calls set_state on remote API . """
        set_state(self._api, entity_id, new_state, attributes)

    def mirror(self):
        """ Discards current data and mirrors the remote state machine. """
        self._states = {state.entity_id: state for state
                        in get_states(self._api, self.logger)}

    def _state_changed_listener(self, event):
        """ Listens for state changed events and applies them. """
        self._states[event.data['entity_id']] = event.data['new_state']
class JSONEncoder(json.JSONEncoder):
    """JSON encoder that knows how to serialize Home Assistant objects."""

    def default(self, obj):  # pylint: disable=method-hidden
        """Serialize ha.State instances; defer everything else to the base
        class, which raises TypeError for unsupported types."""
        if isinstance(obj, ha.State):
            return obj.as_dict()

        return json.JSONEncoder.default(self, obj)
def validate_api(api):
    """Probe the remote API and report its reachability/auth status."""
    try:
        req = api(METHOD_GET, URL_API)
    except ha.HomeAssistantError:
        return APIStatus.CANNOT_CONNECT

    if req.status_code == 200:
        return APIStatus.OK

    if req.status_code == 401:
        return APIStatus.INVALID_PASSWORD

    return APIStatus.UNKNOWN
def connect_remote_events(from_api, to_api):
    """Instruct from_api to forward all its events to to_api (best effort)."""
    payload = {
        'host': to_api.host,
        'api_password': to_api.api_password,
    }
    if to_api.port is not None:
        payload['port'] = to_api.port

    try:
        from_api(METHOD_POST, URL_API_EVENT_FORWARD, payload)
    except ha.HomeAssistantError:
        # Best-effort: a connection failure here is deliberately ignored.
        pass
def disconnect_remote_events(from_api, to_api):
    """Stop from_api from forwarding events to to_api (best effort)."""
    payload = {
        'host': to_api.host,
        '_METHOD': 'DELETE',
    }
    if to_api.port is not None:
        payload['port'] = to_api.port

    try:
        from_api(METHOD_POST, URL_API_EVENT_FORWARD, payload)
    except ha.HomeAssistantError:
        # Best-effort: a connection failure here is deliberately ignored.
        pass
def get_event_listeners(api, logger=None):
    """Return the event types the remote API is listening for, or {}."""
    try:
        req = api(METHOD_GET, URL_API_EVENTS)
        if req.status_code == 200:
            return req.json()
        return {}
    except (ha.HomeAssistantError, ValueError):
        # ValueError: the response body was not valid JSON
        if logger:
            logger.exception("Bus:Got unexpected result")
        return {}
def fire_event(api, event_type, data=None, logger=None):
    """Fire *event_type* with optional *data* on the remote API."""
    try:
        req = api(METHOD_POST, URL_API_EVENTS_EVENT.format(event_type), data)

        if req.status_code != 200 and logger:
            message = "Error firing event: {} - {}".format(
                req.status_code, req.text)
            logger.error(message)
    except ha.HomeAssistantError:
        # Best-effort: connection failures are deliberately ignored.
        pass
def get_state(api, entity_id, logger=None):
    """Fetch the current state of *entity_id* from the remote API.

    Returns an ha.State, or None when the entity does not exist or an
    error occurs.
    """
    try:
        req = api(METHOD_GET, URL_API_STATES_ENTITY.format(entity_id))

        # The API answers 422 when the entity does not exist.
        if req.status_code != 200:
            return None

        return ha.State.from_dict(req.json())
    except (ha.HomeAssistantError, ValueError):
        # ValueError: the response body was not valid JSON
        if logger:
            logger.exception("Error getting state")
        return None
def get_states(api, logger=None):
    """ Queries given API for all states.

    Returns a list of ha.State objects.  On any error an empty list is
    returned, matching the type of the success path.
    """
    try:
        req = api(METHOD_GET,
                  URL_API_STATES)

        return [ha.State.from_dict(item) for
                item in req.json()]
    except (ha.HomeAssistantError, ValueError, AttributeError):
        # ValueError if req.json() can't parse the json
        if logger:
            logger.exception("Error getting state")

        # Bug fix: the original returned {} here while the success path
        # returns a list -- callers that iterate get away with it, but
        # anything indexing or extending the result would break.
        return []
def set_state(api, entity_id, new_state, attributes=None, logger=None):
    """Push *new_state* (with optional attributes) for *entity_id* to the
    remote API, logging failures when a logger is supplied."""
    payload = {
        'state': new_state,
        'attributes': attributes or {},
    }

    try:
        req = api(METHOD_POST,
                  URL_API_STATES_ENTITY.format(entity_id),
                  payload)

        # The API answers 201 Created on success.
        if req.status_code != 201 and logger:
            logger.error(
                "Error changing state: {} - {}".format(
                    req.status_code, req.text))
    except ha.HomeAssistantError:
        if logger:
            logger.exception("Error setting state to server")
def is_state(api, entity_id, state, logger=None):
    """Return whether *entity_id* currently has the given *state* on the
    remote API (falsy when the entity could not be fetched)."""
    remote = get_state(api, entity_id, logger)
    if not remote:
        return remote
    return remote.state == state
def get_services(api, logger=None):
    """
    Return the remote service registry: a list of dicts, each carrying a
    string "domain" and a list of strings "services".  {} on failure.
    """
    try:
        response = api(METHOD_GET, URL_API_SERVICES)
        if response.status_code != 200:
            return {}
        return response.json()
    except (ha.HomeAssistantError, ValueError):
        # ValueError: the response body was not valid JSON
        if logger:
            logger.exception("ServiceRegistry:Got unexpected result")
        return {}
def call_service(api, domain, service, service_data=None, logger=None):
    """ Calls a service at the remote API. """
    try:
        req = api(METHOD_POST,
                  URL_API_SERVICES_SERVICE.format(domain, service),
                  service_data)

        if req.status_code != 200 and logger:
            logger.error(
                "Error calling service: {} - {}".format(
                    req.status_code, req.text))
    except ha.HomeAssistantError:
        if logger:
            # Bug fix: the message was copy-pasted from set_state() and
            # misleadingly read "Error setting state to server".
            logger.exception("Error calling service")
|
EnTeQuAk/home-assistant
|
homeassistant/remote.py
|
Python
|
mit
| 12,864
|
import pytest
from authentise_services import errors
from authentise_services.session import Session
@pytest.mark.parametrize('status_code', [
    201,
    # Applying pytest.mark.xfail directly inside a parametrize value list was
    # deprecated in pytest 3.x and removed in pytest 4.0; pytest.param with
    # marks= is the supported, behaviorally identical spelling.
    pytest.param(403, marks=pytest.mark.xfail(raises=errors.ResourceError)),
])
def test_create_user(config, httpretty, status_code):
    """201 creates the user; 403 raises ResourceError (expected failure)."""
    httpretty.register_uri(httpretty.POST,
                           "https://users.{}/users/".format(config.host),
                           status=status_code)

    Session.create_user("herp", "derp", "herp", "herp@derp.com")
@pytest.mark.parametrize('status_code', [
    201,
    # Applying pytest.mark.xfail directly inside a parametrize value list was
    # deprecated in pytest 3.x and removed in pytest 4.0; pytest.param with
    # marks= is the supported, behaviorally identical spelling.
    pytest.param(403, marks=pytest.mark.xfail(raises=errors.ResourceError)),
])
def test_create_session(config, httpretty, status_code):
    """201 logs in and stores the session cookie; 403 raises ResourceError."""
    httpretty.register_uri(httpretty.POST,
                           "https://users.{}/sessions/".format(config.host),
                           forcing_headers={"Set-Cookie": "session=1234"},
                           status=status_code)

    Session("herp", "derp")
|
DoWhileGeek/authentise-services
|
tests/test_session.py
|
Python
|
mit
| 1,008
|
from setuptools import setup
# Packaging metadata for the more_ascii_art distribution.
setup(name='more_ascii_art',
      version='1.4',
      description='Cool ASCII art with python',
      url='http://github.com/PenetratingShot/Python-ASCII-Art',
      author='Shreyas Lad and Talon Bragg',
      author_email='slad0716@gmail.com',
      license='MIT',
      packages=['more_ascii_art'],  # the single top-level package to ship
      zip_safe=False)  # install unzipped so package files sit on disk
|
PenetratingShot/Python-ASCII-Art
|
setup.py
|
Python
|
mit
| 355
|
# Lesson 8
# Demonstrates Python's built-in complex-number type.

import math

# We can use j on a numeric literal to turn it into an imaginary number
x = 1 + 2j
y = 3 - 4j
z = 3  # a plain int; it mixes freely with complex values in arithmetic

print(x * y)  # complex multiplication
print(x / y)  # complex division
print(y - z)  # the int is promoted to complex for the subtraction

# Unfortunately, the math library is not built with imaginary numbers in mind.
# (The cmath module provides complex-aware equivalents.)
# print(math.sqrt(-4))
|
JonTheBurger/python_class
|
chapter 3/lessons/complex.py
|
Python
|
mit
| 267
|
from RanobeHonyaku.database import db
class Volume(db.Model):
    """A single volume (book) of a light-novel series."""

    __tablename__ = "volumes"

    id = db.Column(db.Integer(), primary_key=True)

    # The title of the volume
    title = db.Column(db.String(120))

    # The series id that the volume is associated with
    series_id = db.Column(db.Integer(), db.ForeignKey("series.id"))

    # The volumes associated with the series
    # (lazy="dynamic" makes `chapters` a query object rather than a list)
    chapters = db.relationship("Chapter", backref="volumes", lazy="dynamic")

    # The place the volume comes in
    position = db.Column(db.Integer())

    def __repr__(self):
        # e.g. <Volume (My Title) (42)>
        return "<Volume ({0.title}) ({0.id})>".format(self)
|
Ranobe-Honyaku/Website
|
RanobeHonyaku/models/volume.py
|
Python
|
mit
| 624
|
#!/usr/bin/env python
"""
This application presents a 'console' prompt to the user asking for
subscribe commands which create SubscribeCOVRequests. The other commands are
for changing the type of reply to the confirmed COV notification that gets
sent.
"""
from bacpypes.debugging import bacpypes_debugging, ModuleLogger
from bacpypes.consolelogging import ConfigArgumentParser
from bacpypes.consolecmd import ConsoleCmd
from bacpypes.core import run, deferred, enable_sleeping
from bacpypes.iocb import IOCB
from bacpypes.pdu import Address
from bacpypes.apdu import SubscribeCOVRequest, SimpleAckPDU, RejectPDU, AbortPDU, SubscribeCOVPropertyRequest
from bacpypes.primitivedata import ObjectIdentifier
from bacpypes.app import BIPSimpleApplication
from bacpypes.local.device import LocalDeviceObject
from bacpypes.basetypes import PropertyReference
# some debugging
_debug = 0
_log = ModuleLogger(globals())
# globals
this_application = None
# how the application should respond
rsvp = (True, None, None)
#
# SubscribeCOVApplication
#
@bacpypes_debugging
class SubscribeCOVApplication(BIPSimpleApplication):
    """BACnet/IP application that prints incoming COV notifications and
    answers confirmed ones according to the module-level `rsvp` setting."""

    def __init__(self, *args):
        if _debug:
            SubscribeCOVApplication._debug("__init__ %r", args)
        BIPSimpleApplication.__init__(self, *args)

    def do_ConfirmedCOVNotificationRequest(self, apdu):
        """Print the changed values, then reply with an ack/reject/abort
        chosen by the global `rsvp` tuple."""
        if _debug:
            SubscribeCOVApplication._debug(
                "do_ConfirmedCOVNotificationRequest %r", apdu
            )
        global rsvp

        print("{} changed\n".format(apdu.monitoredObjectIdentifier))
        for element in apdu.listOfValues:
            element_value = element.value.tagList
            if _debug:
                SubscribeCOVApplication._debug("    - propertyIdentifier: %r", element.propertyIdentifier)
                SubscribeCOVApplication._debug("    - value tag list: %r", element_value)

            # A single-tag value can be decoded back into a plain Python value.
            if len(element_value) == 1:
                element_value = element_value[0].app_to_object().value

            print("    {} is {}".format(element.propertyIdentifier, str(element_value)))

        if rsvp[0]:
            # success
            response = SimpleAckPDU(context=apdu)
            if _debug:
                SubscribeCOVApplication._debug("    - simple_ack: %r", response)

        elif rsvp[1]:
            # reject
            response = RejectPDU(reason=rsvp[1], context=apdu)
            if _debug:
                SubscribeCOVApplication._debug("    - reject: %r", response)

        elif rsvp[2]:
            # abort
            response = AbortPDU(reason=rsvp[2], context=apdu)
            if _debug:
                SubscribeCOVApplication._debug("    - abort: %r", response)

        # NOTE(review): if rsvp were ever (False, None, None), `response`
        # would be unbound here; the console commands never set that
        # combination, but worth confirming.
        # return the result
        self.response(response)

    def do_UnconfirmedCOVNotificationRequest(self, apdu):
        """Print the changed values; unconfirmed notifications need no reply."""
        if _debug:
            SubscribeCOVApplication._debug(
                "do_UnconfirmedCOVNotificationRequest %r", apdu
            )

        print("{} changed\n".format(apdu.monitoredObjectIdentifier))
        for element in apdu.listOfValues:
            element_value = element.value.tagList

            # A single-tag value can be decoded back into a plain Python value.
            if len(element_value) == 1:
                element_value = element_value[0].app_to_object().value

            print("    {} is {}".format(element.propertyIdentifier, str(element_value)))
#
# SubscribeCOVConsoleCmd
#
@bacpypes_debugging
class SubscribeCOVConsoleCmd(ConsoleCmd):
    # Interactive console: `subscribe` issues a COV subscription, while
    # `ack`/`reject`/`abort` select how confirmed notifications are answered.
    # The per-command docstrings below double as the console help text, so
    # they are left exactly as written.

    def do_subscribe(self, args):
        """subscribe addr proc_id obj_id [ confirmed ] [ lifetime ]

        Generate a SubscribeCOVRequest and wait for the response.
        """
        args = args.split()
        if _debug:
            SubscribeCOVConsoleCmd._debug("do_subscribe %r", args)

        try:
            addr, proc_id, obj_id = args[:3]
            obj_id = ObjectIdentifier(obj_id).value
            proc_id = int(proc_id)

            # Optional 4th argument: issue confirmed notifications
            # ("-" means unspecified).
            if len(args) >= 4:
                issue_confirmed = args[3]
                if issue_confirmed == "-":
                    issue_confirmed = None
                else:
                    issue_confirmed = issue_confirmed.lower() == "true"
                if _debug:
                    SubscribeCOVConsoleCmd._debug(
                        "    - issue_confirmed: %r", issue_confirmed
                    )
            else:
                issue_confirmed = None

            # Optional 5th argument: subscription lifetime in seconds
            # ("-" means unspecified).
            if len(args) >= 5:
                lifetime = args[4]
                if lifetime == "-":
                    lifetime = None
                else:
                    lifetime = int(lifetime)
                if _debug:
                    SubscribeCOVConsoleCmd._debug("    - lifetime: %r", lifetime)
            else:
                lifetime = None

            # build a request
            # request = SubscribeCOVRequest(
            #     subscriberProcessIdentifier=proc_id, monitoredObjectIdentifier=obj_id
            # )
            # NOTE(review): a property-specific subscription (present-value,
            # propertyIdentifier 85, with a COV increment) is hard-coded here
            # in place of the plain object subscription above.
            request = SubscribeCOVPropertyRequest(
                subscriberProcessIdentifier=proc_id,
                monitoredObjectIdentifier=obj_id,
                monitoredPropertyIdentifier=PropertyReference(propertyIdentifier=85),
                covIncrement=2
            )
            request.pduDestination = Address(addr)

            # optional parameters
            if issue_confirmed is not None:
                request.issueConfirmedNotifications = issue_confirmed
            if lifetime is not None:
                request.lifetime = lifetime
            if _debug:
                SubscribeCOVConsoleCmd._debug("    - request: %r", request)

            # make an IOCB
            iocb = IOCB(request)
            if _debug:
                SubscribeCOVConsoleCmd._debug("    - iocb: %r", iocb)

            # give it to the application
            deferred(this_application.request_io, iocb)

            # wait for it to complete
            iocb.wait()

            # do something for success
            if iocb.ioResponse:
                if _debug:
                    SubscribeCOVConsoleCmd._debug("    - response: %r", iocb.ioResponse)

            # do something for error/reject/abort
            if iocb.ioError:
                if _debug:
                    SubscribeCOVConsoleCmd._debug("    - error: %r", iocb.ioError)

        except Exception as e:
            SubscribeCOVConsoleCmd._exception("exception: %r", e)

    def do_ack(self, args):
        """ack

        When confirmed COV notification requests arrive, respond with a
        simple acknowledgement.
        """
        args = args.split()
        if _debug:
            SubscribeCOVConsoleCmd._debug("do_ack %r", args)

        global rsvp
        rsvp = (True, None, None)

    def do_reject(self, args):
        """reject reason

        When confirmed COV notification requests arrive, respond with a
        reject PDU with the provided reason.
        """
        args = args.split()
        if _debug:
            SubscribeCOVConsoleCmd._debug("do_reject %r", args)

        global rsvp
        rsvp = (False, args[0], None)

    def do_abort(self, args):
        """abort reason

        When confirmed COV notification requests arrive, respond with an
        abort PDU with the provided reason.
        """
        args = args.split()
        if _debug:
            SubscribeCOVConsoleCmd._debug("do_abort %r", args)

        global rsvp
        rsvp = (False, None, args[0])
#
# __main__
#
def main():
    """Parse arguments, build the device/application/console, run the loop."""
    global this_application

    # parse the command line arguments
    args = ConfigArgumentParser(description=__doc__).parse_args()

    if _debug:
        _log.debug("initialization")
    if _debug:
        _log.debug("    - args: %r", args)

    # make a device object (configured from the INI file)
    this_device = LocalDeviceObject(ini=args.ini)
    if _debug:
        _log.debug("    - this_device: %r", this_device)

    # make a simple application
    this_application = SubscribeCOVApplication(this_device, args.ini.address)

    # make a console
    this_console = SubscribeCOVConsoleCmd()
    if _debug:
        _log.debug("    - this_console: %r", this_console)

    # enable sleeping will help with threads
    enable_sleeping()

    _log.debug("running")

    # blocks until the core is stopped
    run()

    _log.debug("fini")


if __name__ == "__main__":
    main()
|
JoelBender/bacpypes
|
samples/COVClient.py
|
Python
|
mit
| 8,225
|
import random
from copy import deepcopy
import pygame
from SystemPanic.Core import config
# Template describing one fully-randomized game round.  Every slot starts as
# None and is filled in by get_randomized_config().
GameConfiguration = {
    "players": None,
    "enemies": None,
    "player_missiles": None,
    "enemy_missiles": None,
    "background": None,
    "level_generator": None,
    "level_tiles": None,
    "music": None,
}


def new_game_configuration():
    """Return a fresh, independent copy of the configuration template."""
    # Deep copy so callers may mutate their configuration without touching
    # the shared template.
    return deepcopy(GameConfiguration)
def get_randomized_config(
    backgrounds,
    enemies,
    missiles,
    level_generators,
    level_tiles,
    music,
    players
):
    """Build a game configuration with one randomly chosen asset per slot.

    Each argument is a non-empty sequence of candidates; the background is
    scaled to the game surface size.
    """
    new_config = new_game_configuration()

    new_config["background"] = pygame.transform.scale(
        random.choice(backgrounds),
        (
            config.GAME_SURFACE_WIDTH,
            config.GAME_SURFACE_HEIGHT
        )
    )
    new_config["level_generator"] = random.choice(level_generators)()
    new_config["music"] = random.choice(music)

    # Bug fix: the template declares the key "level_tiles" (plural), but the
    # original wrote only "level_tile", leaving the template's key None.
    # Populate the canonical key and keep the old one for any existing reader.
    chosen_tiles = random.choice(level_tiles)
    new_config["level_tiles"] = chosen_tiles
    new_config["level_tile"] = chosen_tiles

    new_config["players"] = random.choice(players)
    new_config["player_missiles"] = random.choice(missiles)
    new_config["enemies"] = random.choice(enemies)
    new_config["enemy_missiles"] = random.choice(missiles)

    return new_config
|
xaroth8088/SystemPanic
|
SystemPanic/Core/game_configuration.py
|
Python
|
mit
| 1,210
|
#! /usr/local/bin/Python3.5
import boto3

# Build a Polly client from the "gekko1" AWS CLI profile.
polly = boto3.Session(profile_name="gekko1").client('polly')

# Synthesize a short French phrase as 8 kHz MP3 audio.
response = polly.synthesize_speech(
    OutputFormat='mp3',
    SampleRate='8000',
    Text='Salut, je m\'appelle Benjamin',
    TextType='text',
    VoiceId='Mathieu'  # French male voice
)

# print(type(response['AudioStream'].read()))

# AudioStream is a streaming body; dump the MP3 bytes to disk.
# NOTE(review): the output file has no extension -- presumably "test.mp3"
# was intended; confirm before renaming.
with open("test", 'wb') as file:
    file.write(response['AudioStream'].read())
|
Mythridor/aws-scripting
|
Polly/first_ouput.py
|
Python
|
mit
| 403
|
from django.conf import settings
from django.db import models
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from icalendar import Event
from .file import File
from .helper import DefaultFields, ShortableNameFields
from .location import Location
from .organization import Organization
from .person import Person
# Choices for Meeting.public: how much of a meeting is open to the public.
# (The name "PUBLICALITY" is kept as-is; renaming would touch migrations.)
PUBLICALITY = (
    (0, "unknown"),
    (1, "public"),
    (2, "not public"),
    # Most meeting consist of a public and a subsequent private part
    (3, "splitted"),
)
class Meeting(DefaultFields, ShortableNameFields):
    """An official meeting of one or more organizations, exportable as iCal."""

    # Whether the meeting was cancelled after being scheduled.
    cancelled = models.BooleanField(default=False)

    start = models.DateTimeField()
    # End time may be unknown in advance.
    end = models.DateTimeField(null=True, blank=True)

    location = models.ForeignKey(
        Location, null=True, blank=True, on_delete=models.CASCADE
    )

    # There are cases where mutliple organizations have a joined official meeting
    organizations = models.ManyToManyField(Organization, blank=True)

    # Only applicable when there are participants without an organization
    persons = models.ManyToManyField(Person, blank=True)

    invitation = models.ForeignKey(
        File,
        null=True,
        blank=True,
        related_name="meeting_invitation",
        on_delete=models.CASCADE,
    )

    results_protocol = models.ForeignKey(
        File,
        null=True,
        blank=True,
        related_name="meeting_results_protocol",
        on_delete=models.CASCADE,
    )

    verbatim_protocol = models.ForeignKey(
        File,
        null=True,
        blank=True,
        related_name="meeting_verbatim_protocol",
        on_delete=models.CASCADE,
    )

    # Sometimes there are additional files atttached to a meeting
    auxiliary_files = models.ManyToManyField(
        File, blank=True, related_name="meeting_auxiliary_files"
    )

    public = models.IntegerField(choices=PUBLICALITY, default=0, blank=True)

    def as_ical_event(self) -> Event:
        """Render this meeting as an icalendar Event (with HTML alt-desc)."""
        url = settings.ABSOLUTE_URI_BASE + reverse("meeting", args=[self.id])
        event = Event()
        # Stable UID so re-exports update rather than duplicate the event.
        event.add("uid", "meeting-{}@{}".format(self.id, settings.REAL_HOST))
        event.add("summary", self.short_name)
        event.add("description", self.name + "\n" + url)
        # https://stackoverflow.com/questions/854036/html-in-ical-attachment
        # https://docs.microsoft.com/en-us/openspecs/exchange_server_protocols/ms-oxcical/d7f285da-9c7a-4597-803b-b74193c898a8
        html_desc = render_to_string(
            "mainapp/meeting_ical_alt_desc.html", {"description": self.name, "url": url}
        ).strip()
        event.add("X-ALT-DESC;FMTTYPE=text/html", html_desc)
        event.add("dtstart", timezone.localtime(self.start))
        event.add("url", url)
        if self.end:
            event.add("dtend", timezone.localtime(self.end))
        if self.location and self.location.description:
            event.add("location", self.location.description)
        if self.cancelled:
            event.add("method", "CANCEL")
            event.add("status", "CANCELLED")
        return event

    def __str__(self):
        return self.short_name

    def get_default_link(self):
        """URL of this meeting's detail page."""
        return reverse("meeting", args=[self.id])

    def sort_date(self):
        """Key used for chronological ordering of meetings."""
        return self.start
|
meine-stadt-transparent/meine-stadt-transparent
|
mainapp/models/meeting.py
|
Python
|
mit
| 3,288
|
# -*- coding: utf-8 -*-
# This spider is used for HTML redirect crawling
import scrapy
from scrapy.selector import Selector
from scrapy.http import Request
from browser_simulator.items import BrowserSimulatorItem
class TestSpiderSpider(scrapy.Spider):
    # Spider that follows <meta http-equiv="refresh"> (HTML) redirects
    # manually, with Scrapy's own HTTP redirect handling disabled.
    name = "html_redirect_spider"
    # allowed_domains = ["yuyang.bid/"]
    start_urls = ['https://yuyang.bid/CS6262_test/html_redirects/r1.html']
    headers = {
        'Connection': 'keep - alive',
        'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36'
    }
    meta = {
        'dont_redirect': True,  # no redirect
        'handle_httpstatus_list': [301, 302]  # handle exceptions
    }

    def start_requests(self):
        # Issue the first request with the custom headers/meta above.
        yield Request(self.start_urls[0], callback=self.parse, headers=self.headers, meta=self.meta)

    def parse(self, response):
        # Extract page content, follow the meta-refresh redirect (if any),
        # and emit one item per page visited.
        item = BrowserSimulatorItem()
        # used for search redirect
        selector = Selector(response)
        item['raw_data'] = selector.xpath('//html/*').extract()
        item['url'] = response.url
        # now just get some header
        item['header'] = response.headers.getlist('Set-Cookie')
        item['body'] = selector.xpath('//body/*').extract()
        # only do the html redirect now
        # NOTE(review): indexing [0] raises IndexError when the page has no
        # meta-refresh tag -- confirm every crawled page carries one.
        tmp_redirect = selector.xpath('//meta[@http-equiv="refresh" and @content]/@content').extract()[0]
        redirect_result = ''
        if tmp_redirect:
            redirect_result = self.redirect_handler(tmp_redirect, response.url)
            yield Request(redirect_result, callback=self.parse, headers=self.headers, meta=self.meta)
        item['redirect'] = redirect_result
        # NOTE(review): '//a/@content' looks like it was meant to be
        # '//a/@href' -- anchors rarely have a content attribute.
        all_links = selector.xpath('//a/@content').extract()
        item['links'] = self.links_handler(all_links)
        yield item

    def redirect_handler(self, redirect, cur_url):
        # Resolve the meta-refresh target against the current URL.
        # e.g. '0.5;url=http://helloworld.com'
        # e.g. '0.5;url=empty.exe'
        # after split, only need 'empty.exe'
        redirect_part = redirect.split('url=')[1]
        redirect_head = ''
        # it is already a link
        if redirect_part.startswith('http'):
            final_url = redirect_part
        # if just a part of link
        else:
            # Rebuild the base URL (scheme + host + path directory) piecewise.
            url_pieces = cur_url.split('/')
            for i in range(0, len(url_pieces) - 1):
                if 0 == i:
                    if url_pieces[i].startswith('http'):
                        redirect_head += url_pieces[i]
                        redirect_head += '/'
                        redirect_head += '/'
                    else:
                        redirect_head += 'http://'
                        redirect_head += url_pieces[i]
                        redirect_head += '/'
                elif 0 < len(url_pieces[i]):
                    redirect_head += url_pieces[i]
                    redirect_head += '/'
            final_url = redirect_head + redirect_part
        # NOTE(review): .encode() returns bytes on Python 3; if this project
        # runs under Python 3, downstream consumers receive bytes, not str.
        return final_url.encode('ascii', 'ignore')

    def links_handler(self, links):
        # Join all links into a single comma-terminated string.
        result = ''
        for link in links:
            result += link + ','
        return result
|
skihyy/GT-Spring-2017-CS6262
|
browser_simulator/browser_simulator/spiders/html_redirect_spider.py
|
Python
|
mit
| 3,117
|
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render
from studyroom.forms import CreateSessionForm
from studyroom.models import Session
def create_session(request):
    """Create a new study session, enforcing unique names among live sessions.

    GET renders an empty form; a valid POST saves the session and redirects
    to its page, while conflicts/invalid input re-render the form.
    """
    if request.method == "POST":
        form = CreateSessionForm(request.POST)
        # .exists() avoids counting every matching row just to test emptiness.
        if not Session.objects.filter(name=request.POST['name'], live=True).exists():
            if form.is_valid():
                session = form.save()
                return redirect(reverse('studyroom:gotosession', args=(session.id,)))
            # Bug fix: the template context key must be the string 'form';
            # the original used the form *object* as the dict key, so the
            # template could never reference {{ form }}.
            return render(request, 'studyroom/create.html', {'form': form})
        messages.error(request, "Already a session with same name is running.")
        return render(request, 'studyroom/create.html', {'form': form})
    form = CreateSessionForm()
    return render(request, 'studyroom/create.html', {'form': form})
def go_to_session(request, session_id):
    # TODO: not implemented yet.  create_session() redirects here
    # ('studyroom:gotosession'), but returning None will make Django raise
    # (views must return an HttpResponse).
    return None
|
TejasM/wisely
|
wisely_project/studyroom/views.py
|
Python
|
mit
| 999
|
from evostream.default import api
from evostream.management.base import BaseEvoStreamCommand
class Command(BaseEvoStreamCommand):
    """Management command listing all EvoStream group name aliases."""

    help = 'Returns a complete list of group name aliases.'

    # This command does not touch the database/app registry.
    requires_system_checks = False

    def get_results(self, *args, **options):
        # Delegate to the shared EvoStream API client.
        return api.list_group_name_aliases()
|
tomi77/django-evostream
|
evostream/management/commands/listgroupnamealiases.py
|
Python
|
mit
| 319
|